diff --git a/.codespellexcludelines b/.codespellexcludelines index 3539a6074..cd204a1a2 100644 --- a/.codespellexcludelines +++ b/.codespellexcludelines @@ -13,6 +13,7 @@ rsource "Kconfig.tls-generic" const uint8_t* hashIn, int hashSz) XMEMCPY(hash + (curveSz - hashSz), hashIn, hashSz); 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, /* creen would be i */ + 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, /* creen would be i */ \pagenumbering{alph} DES3_KEY_SIZE = 24, /* 3 des ede */ /* functions added to support above needed, removed TOOM and KARATSUBA */ diff --git a/.github/renode-test/stm32h753/CMakeLists.txt b/.github/renode-test/stm32h753/CMakeLists.txt new file mode 100644 index 000000000..5bd077e49 --- /dev/null +++ b/.github/renode-test/stm32h753/CMakeLists.txt @@ -0,0 +1,108 @@ +cmake_minimum_required(VERSION 3.18) +project(wolfcrypt_stm32h753 LANGUAGES C ASM) + +set(WOLFSSL_ROOT "/opt/wolfssl" CACHE PATH "wolfSSL source") + +set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) +enable_language(ASM) + +# Include paths for CMSIS device headers and STM32 HAL +# Order matters: CMSIS must come before HAL +include_directories(BEFORE + ${CMAKE_SOURCE_DIR} + /opt/CMSIS_5/CMSIS/Core/Include # Core CMSIS (core_cm7.h, etc.) 
- must be first + /opt/cmsis-device-h7/Include # Device-specific CMSIS (stm32h7xx.h) + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc/Legacy + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc +) + +# STM32 HAL source files (minimal set for CRYP and HASH) +# Note: These files are cloned in the Dockerfile before CMake runs +set(HAL_SRC_DIR /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Src) + +# Check if HAL directory exists, then add source files +if(EXISTS ${HAL_SRC_DIR}) + set(HAL_SOURCES + ${HAL_SRC_DIR}/stm32h7xx_hal.c + ${HAL_SRC_DIR}/stm32h7xx_hal_rcc.c + ${HAL_SRC_DIR}/stm32h7xx_hal_rcc_ex.c + ${HAL_SRC_DIR}/stm32h7xx_hal_cortex.c + ${HAL_SRC_DIR}/stm32h7xx_hal_dma.c + ${HAL_SRC_DIR}/stm32h7xx_hal_dma_ex.c + ${HAL_SRC_DIR}/stm32h7xx_hal_rng.c + # CRYP HAL files enabled for AES_GCM only + ${HAL_SRC_DIR}/stm32h7xx_hal_cryp.c + ${HAL_SRC_DIR}/stm32h7xx_hal_cryp_ex.c + # HASH HAL files disabled - Renode doesn't implement HASH peripheral + # ${HAL_SRC_DIR}/stm32h7xx_hal_hash.c + # ${HAL_SRC_DIR}/stm32h7xx_hal_hash_ex.c + ) +else() + message(WARNING "HAL source directory not found: ${HAL_SRC_DIR}") + set(HAL_SOURCES "") +endif() + +# wolfSSL build options +set(WOLFSSL_USER_SETTINGS ON CACHE BOOL "Use user_settings.h") +set(WOLFSSL_CRYPT_TESTS OFF CACHE BOOL "") +set(WOLFSSL_EXAMPLES OFF CACHE BOOL "") +set(BUILD_SHARED_LIBS OFF CACHE BOOL "") + +add_subdirectory(${WOLFSSL_ROOT} ${CMAKE_BINARY_DIR}/wolfssl-build EXCLUDE_FROM_ALL) +target_include_directories(wolfssl PRIVATE + /opt/CMSIS_5/CMSIS/Core/Include # Core CMSIS first + /opt/cmsis-device-h7/Include # Device CMSIS + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc/Legacy + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc + ${CMAKE_SOURCE_DIR} # For stm32h7xx_hal_conf.h +) +# Suppress the GENSEED_FORTEST warning (expected for emulation/test builds) +target_compile_options(wolfssl PRIVATE -Wno-cpp) + +# wolfSSL STM32 port source file (needed for HASH and CRYPTO hardware acceleration) 
+set(WOLFSSL_STM32_PORT_SRC ${WOLFSSL_ROOT}/wolfcrypt/src/port/st/stm32.c) + +add_executable(wolfcrypt_test.elf + startup_stm32h753.c + main.c + ${WOLFSSL_ROOT}/wolfcrypt/test/test.c + ${HAL_SOURCES} + ${WOLFSSL_STM32_PORT_SRC} +) + +target_include_directories(wolfcrypt_test.elf PRIVATE + ${CMAKE_SOURCE_DIR} + ${WOLFSSL_ROOT} + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc + /opt/STM32CubeH7/Drivers/STM32H7xx_HAL_Driver/Inc/Legacy +) + +target_compile_definitions(wolfcrypt_test.elf PRIVATE + WOLFSSL_USER_SETTINGS + STM32H753xx + USE_HAL_DRIVER + USE_HAL_CONF # Enable HAL configuration + # NO_AES_CBC is defined in user_settings.h, no need to define it here +) + +# HAL source files need the same compile options and must include stdint.h +# Disable all warnings for HAL files (third-party code we don't control) +set_source_files_properties(${HAL_SOURCES} PROPERTIES + COMPILE_FLAGS "-mcpu=cortex-m7 -mthumb -mfpu=fpv5-d16 -mfloat-abi=hard -ffunction-sections -fdata-sections -Os -include stdint.h -w" +) + +target_compile_options(wolfcrypt_test.elf PRIVATE + -mcpu=cortex-m7 -mthumb -mfpu=fpv5-d16 -mfloat-abi=hard + -ffunction-sections -fdata-sections -Os +) + +target_link_options(wolfcrypt_test.elf PRIVATE + -T${CMAKE_SOURCE_DIR}/stm32h753.ld + -Wl,--gc-sections + -nostartfiles + -specs=nano.specs + -specs=nosys.specs +) + +target_link_libraries(wolfcrypt_test.elf PRIVATE wolfssl m c gcc nosys) + diff --git a/.github/renode-test/stm32h753/entrypoint.sh b/.github/renode-test/stm32h753/entrypoint.sh new file mode 100755 index 000000000..ba12bc33b --- /dev/null +++ b/.github/renode-test/stm32h753/entrypoint.sh @@ -0,0 +1,194 @@ +#!/bin/bash +set -euo pipefail + +LOG=/tmp/wolfcrypt-renode.log +TIMEOUT=300 # Maximum 5 minutes + +echo "Running wolfCrypt test in Renode..." 
+ +# Try to find Renode binary in common installation locations +# When installed via .deb package, Renode is typically in /usr/bin/renode +RENODE_BIN="${RENODE_BIN:-$(command -v renode 2>/dev/null || true)}" +if [ -z "$RENODE_BIN" ]; then + # Check common installation paths (order matters - check standard locations first) + for path in /usr/bin/renode /usr/local/bin/renode /opt/renode/renode; do + if [ -x "$path" ]; then + RENODE_BIN="$path" + break + fi + done +fi + +if [ -z "$RENODE_BIN" ] || [ ! -x "$RENODE_BIN" ]; then + echo "Renode binary not found in image." + echo "Checked paths: /usr/bin/renode, /usr/local/bin/renode, /opt/renode/renode" + echo "PATH: $PATH" + which renode || echo "renode not in PATH" + exit 2 +fi + +echo "Using Renode binary: $RENODE_BIN" + +# Determine Renode root directory (where platforms/ directory is located) +if [ -d "/opt/renode/platforms" ]; then + RENODE_ROOT="/opt/renode" +elif [ -d "/usr/lib/renode/platforms" ]; then + RENODE_ROOT="/usr/lib/renode" +elif [ -d "/usr/share/renode/platforms" ]; then + RENODE_ROOT="/usr/share/renode" +else + # Try to find Renode root by checking where the binary is + RENODE_DIR=$(dirname "$(readlink -f "${RENODE_BIN}" 2>/dev/null || echo "${RENODE_BIN}")") + if [ -d "${RENODE_DIR}/../platforms" ]; then + RENODE_ROOT=$(readlink -f "${RENODE_DIR}/.." 2>/dev/null || echo "${RENODE_DIR}/..") + else + echo "Warning: Could not determine Renode root directory" + RENODE_ROOT="" + fi +fi + +# Set RENODE_ROOT environment variable (Renode uses this to find platform files) +if [ -n "$RENODE_ROOT" ]; then + export RENODE_ROOT + echo "Using Renode root: ${RENODE_ROOT}" + # Also create .renode-root file in firmware directory as backup + echo "${RENODE_ROOT}" > /opt/firmware/.renode-root + chmod 644 /opt/firmware/.renode-root +else + echo "ERROR: Could not determine Renode root directory" + exit 1 +fi + +# Verify platform file exists +PLATFORM_FILE="${RENODE_ROOT}/platforms/cpus/stm32h753.repl" +if [ ! 
-f "${PLATFORM_FILE}" ]; then + echo "ERROR: Platform file not found at ${PLATFORM_FILE}" + echo "Searching for platform files..." + find "${RENODE_ROOT}" -name "stm32h753.repl" 2>/dev/null | head -5 || true + exit 1 +fi + +echo "Platform file found at: ${PLATFORM_FILE}" + +# Change to firmware directory +cd /opt/firmware + +# Create a modified Renode script with absolute path to platform file +# This avoids the .renode-root file lookup issue +cat > /opt/firmware/run-renode-absolute.resc < "${LOG}" 2>&1 & +RENODE_PID=$! +echo "Renode PID: $RENODE_PID" + +# Monitor the log for completion, errors, and flush output frequently +START_TIME=$(date +%s) +RESULT="" +LAST_LOG_SIZE=0 + +while true; do + # Check if Renode is still running + if ! kill -0 "$RENODE_PID" 2>/dev/null; then + break + fi + + # Flush new log content to stdout (unbuffered) + if [ -f "${LOG}" ]; then + CURRENT_LOG_SIZE=$(stat -f%z "${LOG}" 2>/dev/null || stat -c%s "${LOG}" 2>/dev/null || echo 0) + if [ "$CURRENT_LOG_SIZE" -gt "$LAST_LOG_SIZE" ]; then + # Output new lines + tail -c +$((LAST_LOG_SIZE + 1)) "${LOG}" 2>/dev/null | head -c $((CURRENT_LOG_SIZE - LAST_LOG_SIZE)) + LAST_LOG_SIZE=$CURRENT_LOG_SIZE + fi + fi + + # Check for Renode errors (must check before completion to catch errors early) + if grep -q "\[ERROR\]" "${LOG}" 2>/dev/null; then + echo "" + echo "ERROR: Renode reported an error!" + RESULT="renode_error" + break + fi + + # Check for completion messages + if grep -q "=== wolfCrypt test passed! 
===" "${LOG}" 2>/dev/null; then + RESULT="passed" + break + fi + + if grep -q "=== wolfCrypt test FAILED ===" "${LOG}" 2>/dev/null; then + RESULT="failed" + break + fi + + # Check timeout + CURRENT_TIME=$(date +%s) + ELAPSED=$((CURRENT_TIME - START_TIME)) + if [ "$ELAPSED" -ge "$TIMEOUT" ]; then + echo "" + echo "Timeout after ${TIMEOUT} seconds" + RESULT="timeout" + break + fi + + sleep 0.5 +done + +# Kill Renode if still running +if kill -0 "$RENODE_PID" 2>/dev/null; then + kill "$RENODE_PID" 2>/dev/null || true + wait "$RENODE_PID" 2>/dev/null || true +fi + +# Show the log output +cat "${LOG}" + +# Report result +case "$RESULT" in + passed) + echo "" + echo "wolfCrypt tests completed successfully." + exit 0 + ;; + failed) + echo "" + echo "wolfCrypt tests FAILED." + exit 1 + ;; + renode_error) + echo "" + echo "Renode reported an error - test aborted." + exit 1 + ;; + timeout) + echo "" + echo "wolfCrypt tests timed out after ${TIMEOUT} seconds." + exit 1 + ;; + *) + echo "" + echo "wolfCrypt tests did not report a result." + exit 1 + ;; +esac + diff --git a/.github/renode-test/stm32h753/main.c b/.github/renode-test/stm32h753/main.c new file mode 100644 index 000000000..98b6842b8 --- /dev/null +++ b/.github/renode-test/stm32h753/main.c @@ -0,0 +1,137 @@ +/* main.c - Entry point for wolfCrypt test on STM32H753 under Renode + * + * Runs the wolfCrypt test suite with output via USART3. 
+ */ + +#include +#include +#include + +/* wolfCrypt test entry point */ +extern int wolfcrypt_test(void *args); + +/* USART3 registers (STM32H7) */ +#define USART3_BASE 0x40004800UL +#define USART3_CR1 (*(volatile uint32_t *)(USART3_BASE + 0x00)) +#define USART3_BRR (*(volatile uint32_t *)(USART3_BASE + 0x0C)) +#define USART3_ISR (*(volatile uint32_t *)(USART3_BASE + 0x1C)) +#define USART3_TDR (*(volatile uint32_t *)(USART3_BASE + 0x28)) + +#define USART_CR1_UE (1 << 0) +#define USART_CR1_TE (1 << 3) +#define USART_ISR_TXE (1 << 7) + +/* RCC registers for enabling USART3 clock */ +#define RCC_BASE 0x58024400UL +#define RCC_APB1LENR (*(volatile uint32_t *)(RCC_BASE + 0xE8)) +#define RCC_APB1LENR_USART3EN (1 << 18) + +static void uart_init(void) +{ + /* Enable USART3 clock */ + RCC_APB1LENR |= RCC_APB1LENR_USART3EN; + + /* Configure USART3: 115200 baud at 64MHz HSI */ + USART3_BRR = 64000000 / 115200; + USART3_CR1 = USART_CR1_UE | USART_CR1_TE; +} + +static void uart_putc(char c) +{ + while (!(USART3_ISR & USART_ISR_TXE)) + ; + USART3_TDR = c; +} + +static void uart_puts(const char *s) +{ + while (*s) { + if (*s == '\n') + uart_putc('\r'); + uart_putc(*s++); + } +} + +/* newlib _write syscall - redirects printf to UART */ +int _write(int fd, const char *buf, int len) +{ + (void)fd; + for (int i = 0; i < len; i++) { + if (buf[i] == '\n') + uart_putc('\r'); + uart_putc(buf[i]); + } + return len; +} + +/* Heap management for malloc - required by printf with format strings */ +extern char __heap_start__; +extern char __heap_end__; + +void *_sbrk(ptrdiff_t incr) +{ + static char *heap_ptr = NULL; + char *prev_heap_ptr; + + if (heap_ptr == NULL) { + heap_ptr = &__heap_start__; + } + + prev_heap_ptr = heap_ptr; + + if (heap_ptr + incr > &__heap_end__) { + /* Out of heap memory */ + return (void *)-1; + } + + heap_ptr += incr; + return prev_heap_ptr; +} + +/* Simple counter for time - used by GENSEED_FORTEST */ +static volatile uint32_t tick_counter = 0; + +/* time() stub 
for wolfSSL GENSEED_FORTEST */ +#include +time_t time(time_t *t) +{ + tick_counter += 12345; /* Simple pseudo-random increment */ + time_t val = (time_t)tick_counter; + if (t) + *t = val; + return val; +} + +/* Result variable - can be monitored by Renode at fixed address */ +volatile int test_result __attribute__((section(".data"))) = -1; +volatile int test_complete __attribute__((section(".data"))) = 0; + + +int main(int argc, char **argv) +{ + (void)argc; + (void)argv; + + setvbuf(stdin, NULL, _IONBF, 0); + setvbuf(stdout, NULL, _IONBF, 0); + setvbuf(stderr, NULL, _IONBF, 0); + uart_init(); + uart_puts("\n\n=== Starting wolfCrypt test ===\n\n"); + + test_result = wolfcrypt_test(NULL); + test_complete = 1; + + if (test_result == 0) { + uart_puts("\n\n=== wolfCrypt test passed! ===\n"); + } else { + uart_puts("\n\n=== wolfCrypt test FAILED ===\n"); + } + + /* Spin forever after the test completes */ + while (1) { + __asm__ volatile ("wfi"); + } + + return test_result; +} + diff --git a/.github/renode-test/stm32h753/run-renode.resc b/.github/renode-test/stm32h753/run-renode.resc new file mode 100644 index 000000000..662895456 --- /dev/null +++ b/.github/renode-test/stm32h753/run-renode.resc @@ -0,0 +1,20 @@ +# Renode test script for STM32H753 +# Note: @platforms/cpus/stm32h753.repl is relative to Renode root +# If RENODE_ROOT is set, Renode will use it; otherwise it looks for .renode-root file +using sysbus + +mach create "stm32h753" + +# Try relative path first (works if RENODE_ROOT or .renode-root is set correctly) +# If this fails, the absolute path will be tried in entrypoint.sh +machine LoadPlatformDescription @platforms/cpus/stm32h753.repl + +sysbus LoadELF @/opt/firmware/wolfcrypt_test.elf + +# Connect USART3 to the console for wolfCrypt output +showAnalyzer usart3 + +# Start emulation and run for a long time +# The entrypoint script will kill Renode when test completes +emulation RunFor "600s" + diff --git a/.github/renode-test/stm32h753/startup_stm32h753.c 
b/.github/renode-test/stm32h753/startup_stm32h753.c new file mode 100644 index 000000000..5c3a78802 --- /dev/null +++ b/.github/renode-test/stm32h753/startup_stm32h753.c @@ -0,0 +1,101 @@ +/* Minimal startup code for STM32H753 running under Renode */ + +#include +#include + +extern int main(int argc, char** argv); + +void Default_Handler(void); +void Reset_Handler(void); + +/* Symbols provided by the linker script */ +extern unsigned long _estack; +extern unsigned long __data_start__; +extern unsigned long __data_end__; +extern unsigned long __bss_start__; +extern unsigned long __bss_end__; +extern unsigned long _sidata; /* start of .data in flash */ + +/* Minimal init_array support */ +extern void (*__preinit_array_start[])(void); +extern void (*__preinit_array_end[])(void); +extern void (*__init_array_start[])(void); +extern void (*__init_array_end[])(void); + +static void call_init_array(void) +{ + size_t count, i; + + count = __preinit_array_end - __preinit_array_start; + for (i = 0; i < count; i++) + __preinit_array_start[i](); + + count = __init_array_end - __init_array_start; + for (i = 0; i < count; i++) + __init_array_start[i](); +} + +void Reset_Handler(void) +{ + unsigned long *src, *dst; + + /* Copy .data from flash to RAM */ + src = &_sidata; + for (dst = &__data_start__; dst < &__data_end__;) + *dst++ = *src++; + + /* Zero .bss */ + for (dst = &__bss_start__; dst < &__bss_end__;) + *dst++ = 0; + + /* Call static constructors */ + call_init_array(); + + /* Call main */ + (void)main(0, (char**)0); + + /* Infinite loop after main returns */ + while (1) { + __asm__ volatile ("wfi"); + } +} + +void Default_Handler(void) +{ + while (1) { + __asm__ volatile ("wfi"); + } +} + +/* Exception handlers - all weak aliases to Default_Handler */ +void NMI_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void HardFault_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void MemManage_Handler(void) __attribute__((weak, 
alias("Default_Handler"))); +void BusFault_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void UsageFault_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void SVC_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void DebugMon_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void PendSV_Handler(void) __attribute__((weak, alias("Default_Handler"))); +void SysTick_Handler(void) __attribute__((weak, alias("Default_Handler"))); + +/* Vector table */ +__attribute__ ((section(".isr_vector"), used)) +void (* const g_pfnVectors[])(void) = { + (void (*)(void))(&_estack), /* Initial stack pointer */ + Reset_Handler, /* Reset Handler */ + NMI_Handler, /* NMI Handler */ + HardFault_Handler, /* Hard Fault Handler */ + MemManage_Handler, /* MPU Fault Handler */ + BusFault_Handler, /* Bus Fault Handler */ + UsageFault_Handler, /* Usage Fault Handler */ + 0, /* Reserved */ + 0, /* Reserved */ + 0, /* Reserved */ + 0, /* Reserved */ + SVC_Handler, /* SVCall Handler */ + DebugMon_Handler, /* Debug Monitor Handler */ + 0, /* Reserved */ + PendSV_Handler, /* PendSV Handler */ + SysTick_Handler /* SysTick Handler */ + /* IRQ vectors would continue here */ +}; diff --git a/.github/renode-test/stm32h753/stm32h753.ld b/.github/renode-test/stm32h753/stm32h753.ld new file mode 100644 index 000000000..5900f3eed --- /dev/null +++ b/.github/renode-test/stm32h753/stm32h753.ld @@ -0,0 +1,109 @@ +/* Minimal STM32H753 memory map for Renode run */ +MEMORY +{ + FLASH (rx) : ORIGIN = 0x08000000, LENGTH = 2048K + DTCM (xrw) : ORIGIN = 0x20000000, LENGTH = 128K + RAM (xrw) : ORIGIN = 0x24000000, LENGTH = 512K +} + +_estack = ORIGIN(RAM) + LENGTH(RAM); +_Min_Heap_Size = 128K; +_Min_Stack_Size = 128K; + +ENTRY(Reset_Handler) + +SECTIONS +{ + .isr_vector : + { + . = ALIGN(4); + KEEP(*(.isr_vector)) + . = ALIGN(4); + } > FLASH + + .text : + { + . = ALIGN(4); + *(.text*) + *(.rodata*) + *(.glue_7) + *(.glue_7t) + *(.eh_frame) + . 
= ALIGN(4); + _etext = .; + } > FLASH + + .ARM.extab : + { + *(.ARM.extab* .gnu.linkonce.armextab.*) + } > FLASH + + .ARM.exidx : + { + __exidx_start = .; + *(.ARM.exidx*) + __exidx_end = .; + } > FLASH + + .preinit_array : + { + PROVIDE_HIDDEN(__preinit_array_start = .); + KEEP(*(.preinit_array*)) + PROVIDE_HIDDEN(__preinit_array_end = .); + } > FLASH + + .init_array : + { + PROVIDE_HIDDEN(__init_array_start = .); + KEEP(*(SORT(.init_array.*))) + KEEP(*(.init_array*)) + PROVIDE_HIDDEN(__init_array_end = .); + } > FLASH + + .fini_array : + { + PROVIDE_HIDDEN(__fini_array_start = .); + KEEP(*(SORT(.fini_array.*))) + KEEP(*(.fini_array*)) + PROVIDE_HIDDEN(__fini_array_end = .); + } > FLASH + + /* Location in flash where .data will be stored */ + _sidata = LOADADDR(.data); + + .data : + { + . = ALIGN(4); + __data_start__ = .; + *(.data*) + . = ALIGN(4); + __data_end__ = .; + } > RAM AT> FLASH + + .bss : + { + . = ALIGN(4); + __bss_start__ = .; + *(.bss*) + *(COMMON) + . = ALIGN(4); + __bss_end__ = .; + } > RAM + + .heap_stack (NOLOAD): + { + . = ALIGN(8); + PROVIDE(__heap_start__ = .); + . = . + _Min_Heap_Size; + PROVIDE(__heap_end__ = .); + PROVIDE(end = __heap_end__); + . = ALIGN(8); + PROVIDE(__stack_start__ = .); + . = . + _Min_Stack_Size; + PROVIDE(__stack_end__ = .); + } > RAM +} + +PROVIDE(_init = 0); +PROVIDE(_fini = 0); + diff --git a/.github/renode-test/stm32h753/stm32h7xx_hal_conf.h b/.github/renode-test/stm32h753/stm32h7xx_hal_conf.h new file mode 100644 index 000000000..eb6430b16 --- /dev/null +++ b/.github/renode-test/stm32h753/stm32h7xx_hal_conf.h @@ -0,0 +1,208 @@ +/* Minimal HAL configuration for STM32H753 wolfCrypt build under Renode. + * RNG and CRYP HAL are enabled. CRYP is used for AES_GCM only (other AES modes disabled). + * HASH is disabled as Renode doesn't implement it. 
+ */ + +#ifndef STM32H7xx_HAL_CONF_H +#define STM32H7xx_HAL_CONF_H + +#ifdef __cplusplus +extern "C" { +#endif + +/* ------------------------- Module Selection ----------------------------- */ +#define HAL_MODULE_ENABLED +#define HAL_CORTEX_MODULE_ENABLED +#define HAL_RCC_MODULE_ENABLED +#define HAL_GPIO_MODULE_ENABLED +#define HAL_RNG_MODULE_ENABLED +#define HAL_CRYP_MODULE_ENABLED /* Enabled for AES_GCM only */ +/* #define HAL_HASH_MODULE_ENABLED */ /* Disabled - Renode doesn't implement HASH */ +#define HAL_DMA_MODULE_ENABLED +#define HAL_FLASH_MODULE_ENABLED +#define HAL_PWR_MODULE_ENABLED +#define HAL_EXTI_MODULE_ENABLED + +/* Disabled modules (explicit for clarity) */ +/* #define HAL_SDRAM_MODULE_ENABLED */ + +/* ------------------------- Oscillator Values ---------------------------- */ +#if !defined(HSE_VALUE) +#define HSE_VALUE 25000000UL /* External oscillator frequency in Hz */ +#endif + +#if !defined(HSE_STARTUP_TIMEOUT) +#define HSE_STARTUP_TIMEOUT 100UL /* Time out for HSE start up in ms */ +#endif + +#if !defined(CSI_VALUE) +#define CSI_VALUE 4000000UL /* Internal oscillator CSI in Hz */ +#endif + +#if !defined(HSI_VALUE) +#define HSI_VALUE 64000000UL /* Internal oscillator HSI in Hz */ +#endif + +#if !defined(HSI48_VALUE) +#define HSI48_VALUE 48000000UL /* Value of the Internal High Speed oscillator for USB in Hz */ +#endif + +#if !defined(LSE_VALUE) +#define LSE_VALUE 32768UL /* External low speed oscillator in Hz */ +#endif + +#if !defined(LSE_STARTUP_TIMEOUT) +#define LSE_STARTUP_TIMEOUT 5000UL /* Time out for LSE start up in ms */ +#endif + +#if !defined(LSI_VALUE) +#define LSI_VALUE 32000UL /* Internal low speed oscillator in Hz */ +#endif + +#if !defined(EXTERNAL_CLOCK_VALUE) +#define EXTERNAL_CLOCK_VALUE 12288000UL /* External audio clock in Hz */ +#endif + +/* ------------------------- System Configuration -------------------------- */ +#define VDD_VALUE 3300UL /* Value of VDD in mV */ +#define TICK_INT_PRIORITY 0x0FUL /* Tick interrupt 
priority */ +#define USE_RTOS 0U +#define PREFETCH_ENABLE 0U +#define USE_HAL_ADC_REGISTER_CALLBACKS 0U +#define USE_HAL_CEC_REGISTER_CALLBACKS 0U +#define USE_HAL_COMP_REGISTER_CALLBACKS 0U +#define USE_HAL_CORDIC_REGISTER_CALLBACKS 0U +#define USE_HAL_CRYP_REGISTER_CALLBACKS 0U +#define USE_HAL_DAC_REGISTER_CALLBACKS 0U +#define USE_HAL_DCMI_REGISTER_CALLBACKS 0U +#define USE_HAL_DFSDM_REGISTER_CALLBACKS 0U +#define USE_HAL_DMA_REGISTER_CALLBACKS 0U +#define USE_HAL_DMA2D_REGISTER_CALLBACKS 0U +#define USE_HAL_DSI_REGISTER_CALLBACKS 0U +#define USE_HAL_DTS_REGISTER_CALLBACKS 0U +#define USE_HAL_ETH_REGISTER_CALLBACKS 0U +#define USE_HAL_FDCAN_REGISTER_CALLBACKS 0U +#define USE_HAL_FMAC_REGISTER_CALLBACKS 0U +#define USE_HAL_GFXMMU_REGISTER_CALLBACKS 0U +#define USE_HAL_HASH_REGISTER_CALLBACKS 0U +#define USE_HAL_HCD_REGISTER_CALLBACKS 0U +#define USE_HAL_HRTIM_REGISTER_CALLBACKS 0U +#define USE_HAL_I2C_REGISTER_CALLBACKS 0U +#define USE_HAL_I2S_REGISTER_CALLBACKS 0U +#define USE_HAL_IRDA_REGISTER_CALLBACKS 0U +#define USE_HAL_JPEG_REGISTER_CALLBACKS 0U +#define USE_HAL_LPTIM_REGISTER_CALLBACKS 0U +#define USE_HAL_LTDC_REGISTER_CALLBACKS 0U +#define USE_HAL_MDIOS_REGISTER_CALLBACKS 0U +#define USE_HAL_MMC_REGISTER_CALLBACKS 0U +#define USE_HAL_NAND_REGISTER_CALLBACKS 0U +#define USE_HAL_NOR_REGISTER_CALLBACKS 0U +#define USE_HAL_OPAMP_REGISTER_CALLBACKS 0U +#define USE_HAL_OSPI_REGISTER_CALLBACKS 0U +#define USE_HAL_OTFDEC_REGISTER_CALLBACKS 0U +#define USE_HAL_PCD_REGISTER_CALLBACKS 0U +#define USE_HAL_PSSI_REGISTER_CALLBACKS 0U +#define USE_HAL_QSPI_REGISTER_CALLBACKS 0U +#define USE_HAL_RNG_REGISTER_CALLBACKS 0U +#define USE_HAL_RTC_REGISTER_CALLBACKS 0U +#define USE_HAL_SAI_REGISTER_CALLBACKS 0U +#define USE_HAL_SD_REGISTER_CALLBACKS 0U +#define USE_HAL_SDRAM_REGISTER_CALLBACKS 0U +#define USE_HAL_SMARTCARD_REGISTER_CALLBACKS 0U +#define USE_HAL_SMBUS_REGISTER_CALLBACKS 0U +#define USE_HAL_SPDIFRX_REGISTER_CALLBACKS 0U +#define USE_HAL_SPI_REGISTER_CALLBACKS 
0U +#define USE_HAL_SRAM_REGISTER_CALLBACKS 0U +#define USE_HAL_SWPMI_REGISTER_CALLBACKS 0U +#define USE_HAL_TIM_REGISTER_CALLBACKS 0U +#define USE_HAL_UART_REGISTER_CALLBACKS 0U +#define USE_HAL_USART_REGISTER_CALLBACKS 0U +#define USE_HAL_WWDG_REGISTER_CALLBACKS 0U +#define USE_HAL_XSPI_REGISTER_CALLBACKS 0U + +/* ------------------------- SPI peripheral configuration ------------------ */ +#define USE_SPI_CRC 0U + +/* ------------------------- Assertion ------------------------------------- */ +/* #define USE_FULL_ASSERT 1U */ +#define assert_param(expr) ((void)0U) + +/* ------------------------- Ethernet Configuration ------------------------ */ +#define ETH_TX_DESC_CNT 4U +#define ETH_RX_DESC_CNT 4U +#define ETH_MAC_ADDR0 0x02U +#define ETH_MAC_ADDR1 0x00U +#define ETH_MAC_ADDR2 0x00U +#define ETH_MAC_ADDR3 0x00U +#define ETH_MAC_ADDR4 0x00U +#define ETH_MAC_ADDR5 0x00U + +/* ------------------------- Include HAL headers --------------------------- */ +/** + * @brief Include module's header file + */ + +#ifdef HAL_RCC_MODULE_ENABLED + #include "stm32h7xx_hal_rcc.h" +#endif /* HAL_RCC_MODULE_ENABLED */ + +#ifdef HAL_GPIO_MODULE_ENABLED + #include "stm32h7xx_hal_gpio.h" +#endif /* HAL_GPIO_MODULE_ENABLED */ + +#ifdef HAL_DMA_MODULE_ENABLED + #include "stm32h7xx_hal_dma.h" +#endif /* HAL_DMA_MODULE_ENABLED */ + +#ifdef HAL_CORTEX_MODULE_ENABLED + #include "stm32h7xx_hal_cortex.h" +#endif /* HAL_CORTEX_MODULE_ENABLED */ + +#ifdef HAL_EXTI_MODULE_ENABLED + #include "stm32h7xx_hal_exti.h" +#endif /* HAL_EXTI_MODULE_ENABLED */ + +#ifdef HAL_FLASH_MODULE_ENABLED + #include "stm32h7xx_hal_flash.h" +#endif /* HAL_FLASH_MODULE_ENABLED */ + +#ifdef HAL_PWR_MODULE_ENABLED + #include "stm32h7xx_hal_pwr.h" +#endif /* HAL_PWR_MODULE_ENABLED */ + +#ifdef HAL_RNG_MODULE_ENABLED + #include "stm32h7xx_hal_rng.h" +#endif /* HAL_RNG_MODULE_ENABLED */ + +/* CRYP enabled for AES_GCM only */ +#ifdef HAL_CRYP_MODULE_ENABLED + #include "stm32h7xx_hal_cryp.h" +#endif + +/* #ifdef 
HAL_HASH_MODULE_ENABLED + #include "stm32h7xx_hal_hash.h" +#endif */ + +/* Exported macro ------------------------------------------------------------*/ +#ifdef USE_FULL_ASSERT +/** + * @brief The assert_param macro is used for function's parameters check. + * @param expr: If expr is false, it calls assert_failed function + * which reports the name of the source file and the source + * line number of the call that failed. + * If expr is true, it returns no value. + * @retval None + */ + #define assert_param(expr) ((expr) ? (void)0U : assert_failed((uint8_t *)__FILE__, __LINE__)) +/* Exported functions ------------------------------------------------------- */ + void assert_failed(uint8_t *file, uint32_t line); +#else + #define assert_param(expr) ((void)0U) +#endif /* USE_FULL_ASSERT */ + +#ifdef __cplusplus +} +#endif + +#endif /* STM32H7xx_HAL_CONF_H */ + diff --git a/.github/renode-test/stm32h753/toolchain-arm-none-eabi.cmake b/.github/renode-test/stm32h753/toolchain-arm-none-eabi.cmake new file mode 100644 index 000000000..1ea559a57 --- /dev/null +++ b/.github/renode-test/stm32h753/toolchain-arm-none-eabi.cmake @@ -0,0 +1,24 @@ +set(CMAKE_SYSTEM_NAME Generic) +set(CMAKE_SYSTEM_PROCESSOR arm) + +set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) + +set(CMAKE_C_COMPILER arm-none-eabi-gcc) +set(CMAKE_CXX_COMPILER arm-none-eabi-g++) +set(CMAKE_ASM_COMPILER arm-none-eabi-gcc) + +set(CMAKE_AR arm-none-eabi-ar) +set(CMAKE_RANLIB arm-none-eabi-ranlib) + +set(CMAKE_C_STANDARD 11) + +set(CPU_FLAGS "-mcpu=cortex-m7 -mthumb -mfpu=fpv5-d16 -mfloat-abi=hard") +set(OPT_FLAGS "-Os -ffunction-sections -fdata-sections") +set(CMSIS_INCLUDES "-I/opt/cmsis-device-h7/Include -I/opt/CMSIS_5/CMSIS/Core/Include -I/opt/firmware") + +set(CMAKE_C_FLAGS_INIT "${CPU_FLAGS} ${OPT_FLAGS} ${CMSIS_INCLUDES} -DSTM32H753xx") +set(CMAKE_CXX_FLAGS_INIT "${CPU_FLAGS} ${OPT_FLAGS} ${CMSIS_INCLUDES} -DSTM32H753xx") +set(CMAKE_ASM_FLAGS_INIT "${CPU_FLAGS}") + +set(CMAKE_EXE_LINKER_FLAGS_INIT 
"-Wl,--gc-sections -static") + diff --git a/.github/renode-test/stm32h753/user_settings.h b/.github/renode-test/stm32h753/user_settings.h new file mode 100644 index 000000000..19ac73b4f --- /dev/null +++ b/.github/renode-test/stm32h753/user_settings.h @@ -0,0 +1,95 @@ +/* user_settings_renode.h - wolfSSL/wolfCrypt configuration for STM32H753 under Renode + * + * Minimal, semihosting-friendly build for Cortex-M7 / STM32H753. + * Hardware RNG and CRYPTO (AES-GCM only) are enabled via Renode's STM32H753 emulation. + * HASH is disabled as Renode doesn't implement the HASH peripheral. + */ + +#ifndef USER_SETTINGS_RENODE_H +#define USER_SETTINGS_RENODE_H + +/* ------------------------- Platform ------------------------------------- */ +#define WOLFSSL_ARM_CORTEX_M +#define WOLFSSL_STM32H7 /* STM32H7 series (includes H753) */ +#define WOLFSSL_STM32_CUBEMX /* Use STM32 HAL for CRYPTO */ +/* NO_STM32_CRYPTO is NOT defined, so CRYPTO will be enabled */ +/* Disable HASH - Renode doesn't implement HASH peripheral */ +#define NO_STM32_HASH + +/* Required for consistent math library settings (CTC_SETTINGS) */ +#define SIZEOF_LONG 4 +#define SIZEOF_LONG_LONG 8 + +/* ------------------------- Threading / OS ------------------------------- */ +#define SINGLE_THREADED + +/* ------------------------- Filesystem / I/O ----------------------------- */ +#define WOLFSSL_NO_CURRDIR +#define NO_FILESYSTEM +#define NO_WRITEV + +/* ------------------------- wolfCrypt Only ------------------------------- */ +#define WOLFCRYPT_ONLY +#define NO_DH +#define NO_DSA +/* Disable DES/3DES - Renode CRYPTO only supports AES_GCM */ +#define NO_DES +#define NO_DES3 + +/* ------------------------- AES Mode Configuration ----------------------- */ +/* Disable all AES modes except GCM - Renode CRYPTO only supports AES_GCM */ +/* NO_AES_CBC prevents HAVE_AES_CBC from being defined in settings.h */ +#define NO_AES_CBC + +/* ------------------------- RNG Configuration ---------------------------- */ +/* 
Enable STM32 hardware RNG (emulated by Renode) using direct register access */ +#define WOLFSSL_STM32_RNG_NOLIB +/* NO_STM32_RNG is NOT defined, so STM32_RNG will be auto-enabled */ +#define NO_DEV_RANDOM +#define HAVE_HASHDRBG + +/* ------------------------- Math Library --------------------------------- */ +/* Use SP Math (Single Precision) - modern, efficient, and secure */ +#define WOLFSSL_SP_MATH_ALL +#define WOLFSSL_HAVE_SP_RSA +#define WOLFSSL_HAVE_SP_DH +#define WOLFSSL_HAVE_SP_ECC +#define WOLFSSL_SP_ARM_CORTEX_M_ASM +#define SP_WORD_SIZE 32 + +/* ------------------------- Crypto Hardening ----------------------------- */ +#define WC_RSA_BLINDING +#define ECC_TIMING_RESISTANT + +/* ------------------------- Size Optimization ---------------------------- */ +#define WOLFSSL_SMALL_STACK + +/* ------------------------- Test Configuration --------------------------- */ +/* Use smaller key sizes for faster test runs in emulation */ +#define BENCH_EMBEDDED + +/* Use our own main() instead of the one in test.c */ +#define NO_MAIN_DRIVER + +/* ------------------------- Post-options.h cleanup ----------------------- */ +/* Ensure unsupported AES modes stay disabled even after options.h processing */ +/* These undefs will be processed after options.h includes, preventing + * Renode-unsupported modes from being used */ +#ifdef HAVE_AES_CBC +#undef HAVE_AES_CBC +#endif +#ifdef HAVE_AES_ECB +#undef HAVE_AES_ECB +#endif +#ifdef HAVE_AES_CTR +#undef HAVE_AES_CTR +#endif +#ifdef HAVE_AES_CFB +#undef HAVE_AES_CFB +#endif +#ifdef HAVE_AES_OFB +#undef HAVE_AES_OFB +#endif + +#endif /* USER_SETTINGS_RENODE_H */ + diff --git a/.github/workflows/arduino.yml b/.github/workflows/arduino.yml index 54b878ad4..4ed695b2f 100644 --- a/.github/workflows/arduino.yml +++ b/.github/workflows/arduino.yml @@ -59,7 +59,7 @@ on: pull_request: branches: [ '**' ] paths: - - 'github/workflows/arduino.yml' + - '.github/workflows/arduino.yml' - 'IDE/ARDUINO/**' - 'src/**' - 'wolfcrypt/**' @@ 
-122,6 +122,15 @@ jobs: REPO_OWNER: ${{ github.repository_owner }} steps: + - name: Free disk space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/local/lib/android + sudo rm -rf /opt/ghc + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo apt-get clean + df -h + - name: Checkout Repository uses: actions/checkout@v4 @@ -248,7 +257,8 @@ jobs: path: | ~/.arduino15 ~/.cache/arduino - ~/.arduino15/staging + # Exclude staging directory from cache to save space + !~/.arduino15/staging # Arduino libraries # Specific to Arduino CI Build (2 of 4) Arduinbo Release wolfSSL for Local Examples @@ -405,6 +415,9 @@ jobs: WOLFSSL_EXAMPLES_DIRECTORY="$ARDUINO_ROOT/wolfssl/examples" echo "WOLFSSL_EXAMPLES_DIRECTORY: $WOLFSSL_EXAMPLES_DIRECTORY" + # Limit the number of jobs to 1 to avoid running out of memory + export ARDUINO_CLI_MAX_JOBS=1 + echo "Change directory to Arduino examples..." pushd "$WOLFSSL_EXAMPLES_DIRECTORY" chmod +x ./compile-all-examples.sh @@ -416,3 +429,37 @@ jobs: bash ./compile-all-examples.sh ./board_list.txt "${{ matrix.fqbn }}" popd # End Compile Arduino Sketches for Various Boards + + - name: Cleanup to Save Disk Space + if: always() + run: | + echo "Disk usage before cleanup:" + df -h + echo "" + echo "Cleaning up build artifacts and temporary files..." 
+ + # Clean up Arduino build artifacts + find ~/Arduino -name "*.hex" -delete 2>/dev/null || true + find ~/Arduino -name "*.elf" -delete 2>/dev/null || true + find ~/Arduino -name "*.bin" -delete 2>/dev/null || true + find ~/Arduino -name "build" -type d -exec rm -rf {} + 2>/dev/null || true + + rm -rf ~/.arduino15/packages/esp32/tools || true + rm -rf ~/.arduino15/packages/esp32/hardware || true + rm -rf ~/.espressif || true + + # Clean up staging directories + rm -rf ~/.arduino15/staging/* || true + rm -rf ~/.cache/arduino/* || true + + # Clean up git clone of wolfssl-examples + GITHUB_WORK=$(realpath "$GITHUB_WORKSPACE/../..") + rm -rf "$GITHUB_WORK/wolfssl-examples-publish" || true + + # Clean up any temporary files in workspace + find "$GITHUB_WORKSPACE" -name "*.o" -delete 2>/dev/null || true + find "$GITHUB_WORKSPACE" -name "*.a" -delete 2>/dev/null || true + + echo "" + echo "Disk usage after cleanup:" + df -h diff --git a/.github/workflows/async.yml b/.github/workflows/async.yml index 168450a95..8a572c328 100644 --- a/.github/workflows/async.yml +++ b/.github/workflows/async.yml @@ -24,7 +24,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/bind.yml b/.github/workflows/bind.yml index c26646fea..a427ead43 100644 --- a/.github/workflows/bind.yml +++ b/.github/workflows/bind.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 9.18.0, 9.18.28, 9.18.33 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 10 needs: build_wolfssl diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 00b64013d..33688cc05 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -14,7 +14,7 @@ concurrency: jobs: codespell: if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 @@ -23,7 +23,7 @@ jobs: check_filenames: true check_hidden: true # Add comma separated list of words that occur multiple times that should be ignored (sorted alphabetically, case sensitive) - ignore_words_list: adin,aNULL,brunch,carryIn,chainG,ciph,cLen,cliKs,dout,haveA,inCreated,inOut,inout,larg,LEAPYEAR,Merget,optionA,parm,parms,repid,rIn,userA,ser,siz,te,Te, + ignore_words_list: adin,aNULL,brunch,carryIn,chainG,ciph,cLen,cliKs,dout,haveA,inCreated,inOut,inout,larg,LEAPYEAR,Merget,optionA,parm,parms,repid,rIn,userA,ser,siz,te,Te,HSI, # The exclude_file contains lines of code that should be ignored. This is useful for individual lines which have non-words that can safely be ignored. exclude_file: '.codespellexcludelines' # To skip files entirely from being processed, add it to the following list: diff --git a/.github/workflows/coverity-scan-fixes.yml b/.github/workflows/coverity-scan-fixes.yml index 9a70e080b..301df2374 100644 --- a/.github/workflows/coverity-scan-fixes.yml +++ b/.github/workflows/coverity-scan-fixes.yml @@ -10,7 +10,7 @@ on: jobs: coverity: if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 with: diff --git a/.github/workflows/curl.yml b/.github/workflows/curl.yml index 90aaa7c2d..26b7afa97 100644 --- a/.github/workflows/curl.yml +++ b/.github/workflows/curl.yml @@ -16,7 +16,7 @@ jobs: build_wolfssl: name: Build wolfSSL if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -40,7 +40,7 @@ jobs: test_curl: name: ${{ matrix.curl_ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 15 needs: build_wolfssl diff --git a/.github/workflows/cyrus-sasl.yml b/.github/workflows/cyrus-sasl.yml index 910c87122..2e5068d71 100644 --- a/.github/workflows/cyrus-sasl.yml +++ b/.github/workflows/cyrus-sasl.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -48,7 +48,7 @@ jobs: ref: [ 2.1.28 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/disable-pk-algs.yml b/.github/workflows/disable-pk-algs.yml index 123dfe221..30573ee94 100644 --- a/.github/workflows/disable-pk-algs.yml +++ b/.github/workflows/disable-pk-algs.yml @@ -36,7 +36,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/docker-Espressif.yml b/.github/workflows/docker-Espressif.yml index 384509ecd..4e79636f3 100644 --- a/.github/workflows/docker-Espressif.yml +++ b/.github/workflows/docker-Espressif.yml @@ -15,7 +15,7 @@ jobs: espressif_latest: name: latest Docker container if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 12 container: @@ -29,7 +29,7 @@ jobs: espressif_v4_4: name: v4.4 Docker container if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 container: image: espressif/idf:release-v4.4 steps: @@ -39,7 +39,7 @@ jobs: espressif_v5_0: name: v5.0 Docker container if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 container: image: espressif/idf:release-v5.0 steps: diff --git a/.github/workflows/docker-OpenWrt.yml b/.github/workflows/docker-OpenWrt.yml index 05890ffae..1d8db9c2c 100644 --- a/.github/workflows/docker-OpenWrt.yml +++ b/.github/workflows/docker-OpenWrt.yml @@ -18,7 +18,7 @@ jobs: build_library: name: Compile libwolfssl.so if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 container: @@ -42,7 +42,7 @@ jobs: compile_container: name: Compile container if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 2 needs: build_library diff --git a/.github/workflows/fil-c.yml b/.github/workflows/fil-c.yml index 3372969c3..410ba0272 100644 --- a/.github/workflows/fil-c.yml +++ b/.github/workflows/fil-c.yml @@ -28,7 +28,7 @@ jobs: # This should be a safe limit for the tests to run. timeout-minutes: 30 if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 name: ${{ matrix.config }} steps: - name: Download fil-c release diff --git a/.github/workflows/grpc.yml b/.github/workflows/grpc.yml index 4259b2a93..019c57632 100644 --- a/.github/workflows/grpc.yml +++ b/.github/workflows/grpc.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 10 steps: @@ -52,7 +52,7 @@ jobs: h2_ssl_cert_test h2_ssl_session_reuse_test name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 30 needs: build_wolfssl diff --git a/.github/workflows/haproxy.yml b/.github/workflows/haproxy.yml index 99db830f9..17f0f3f08 100644 --- a/.github/workflows/haproxy.yml +++ b/.github/workflows/haproxy.yml @@ -16,7 +16,7 @@ jobs: build_wolfssl: name: Build wolfSSL if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -40,7 +40,7 @@ jobs: test_haproxy: name: ${{ matrix.haproxy_ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 15 needs: build_wolfssl diff --git a/.github/workflows/hostap-vm.yml b/.github/workflows/hostap-vm.yml index 47e053baa..0a6056249 100644 --- a/.github/workflows/hostap-vm.yml +++ b/.github/workflows/hostap-vm.yml @@ -80,7 +80,7 @@ jobs: - name: Checkout hostap if: steps.cache.outputs.cache-hit != 'true' - run: git clone git://w1.fi/hostap.git hostap + run: git clone https://w1.fi/hostap.git hostap build_uml_linux: name: Build UML (UserMode Linux) diff --git a/.github/workflows/intelasm-c-fallback.yml b/.github/workflows/intelasm-c-fallback.yml index 49d3639bc..adbe94218 100644 --- a/.github/workflows/intelasm-c-fallback.yml +++ b/.github/workflows/intelasm-c-fallback.yml @@ -22,7 +22,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 6 steps: diff --git a/.github/workflows/ipmitool.yml b/.github/workflows/ipmitool.yml index 9c38a66e8..bbcdd9028 100644 --- a/.github/workflows/ipmitool.yml +++ b/.github/workflows/ipmitool.yml @@ -18,7 +18,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -48,11 +48,11 @@ jobs: git_ref: [ c3939dac2c060651361fc71516806f9ab8c38901 ] name: ${{ matrix.git_ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_wolfssl steps: - name: Install dependencies - run: export DEBIAN_FRONTEND=noninteractive && sudo apt-get update && sudo apt-get install -y libreadline8 + run: export DEBIAN_FRONTEND=noninteractive && sudo apt-get update && sudo apt-get install -y libreadline-dev - name: Download lib uses: actions/download-artifact@v4 with: diff --git a/.github/workflows/jwt-cpp.yml b/.github/workflows/jwt-cpp.yml index 2dcb209b5..09d1151df 100644 --- a/.github/workflows/jwt-cpp.yml +++ b/.github/workflows/jwt-cpp.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL # Just to keep it the same as the testing target if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -47,9 +47,9 @@ jobs: matrix: config: - ref: 0.7.0 - runner: ubuntu-22.04 + runner: ubuntu-24.04 - ref: 0.6.0 - runner: ubuntu-22.04 + runner: ubuntu-24.04 name: ${{ matrix.config.ref }} runs-on: ${{ matrix.config.runner }} needs: build_wolfssl diff --git a/.github/workflows/libspdm.yml b/.github/workflows/libspdm.yml index 855d3d825..098881e97 100644 --- a/.github/workflows/libspdm.yml +++ b/.github/workflows/libspdm.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -46,7 +46,7 @@ jobs: ref: [ 3.7.0 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/libvncserver.yml b/.github/workflows/libvncserver.yml index 87a772bf9..8964a57b9 100644 --- a/.github/workflows/libvncserver.yml +++ b/.github/workflows/libvncserver.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL # Just to keep it the same as the testing target if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 0.9.13, 0.9.14 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_wolfssl steps: - name: Download lib diff --git a/.github/workflows/memcached.yml b/.github/workflows/memcached.yml index bdd0c0593..128c03d47 100644 --- a/.github/workflows/memcached.yml +++ b/.github/workflows/memcached.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL # Just to keep it the same as the testing target if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Build wolfSSL uses: wolfSSL/actions-build-autotools-project@v1 @@ -48,7 +48,7 @@ jobs: - ref: 1.6.22 name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_wolfssl steps: - name: Download lib diff --git a/.github/workflows/mosquitto.yml b/.github/workflows/mosquitto.yml index 97afaf282..3e14debc3 100644 --- a/.github/workflows/mosquitto.yml +++ b/.github/workflows/mosquitto.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL # Just to keep it the same as the testing target if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -45,7 +45,7 @@ jobs: ref: [ 2.0.18 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/multi-compiler.yml b/.github/workflows/multi-compiler.yml index d2e048696..349ec385d 100644 --- a/.github/workflows/multi-compiler.yml +++ b/.github/workflows/multi-compiler.yml @@ -31,18 +31,12 @@ jobs: - CC: gcc-12 CXX: g++-12 OS: ubuntu-24.04 - - CC: clang-11 - CXX: clang++-11 - OS: ubuntu-22.04 - - CC: clang-12 - CXX: clang++-12 - OS: ubuntu-22.04 - - CC: clang-13 - CXX: clang++-13 - OS: ubuntu-22.04 - CC: clang-14 CXX: clang++-14 OS: ubuntu-24.04 + - CC: clang-19 + CXX: clang++-19 + OS: ubuntu-24.04 if: github.repository_owner == 'wolfssl' runs-on: ${{ matrix.OS }} # This should be a safe limit for the tests to run. diff --git a/.github/workflows/net-snmp.yml b/.github/workflows/net-snmp.yml index 7ce030b80..3146e7369 100644 --- a/.github/workflows/net-snmp.yml +++ b/.github/workflows/net-snmp.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -48,7 +48,7 @@ jobs: test_opts: -e 'agentxperl' name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/nginx.yml b/.github/workflows/nginx.yml index c85161a0e..cc67610f2 100644 --- a/.github/workflows/nginx.yml +++ b/.github/workflows/nginx.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -107,7 +107,7 @@ jobs: stream_proxy_ssl_verify.t name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 needs: build_wolfssl diff --git a/.github/workflows/no-malloc.yml b/.github/workflows/no-malloc.yml index f2ec8eda9..1ed247122 100644 --- a/.github/workflows/no-malloc.yml +++ b/.github/workflows/no-malloc.yml @@ -22,7 +22,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/no-tls.yml b/.github/workflows/no-tls.yml index 5fd4004b8..fb6ff9cad 100644 --- a/.github/workflows/no-tls.yml +++ b/.github/workflows/no-tls.yml @@ -22,7 +22,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/nss.yml b/.github/workflows/nss.yml index 821bc2c91..f88f20592 100644 --- a/.github/workflows/nss.yml +++ b/.github/workflows/nss.yml @@ -21,7 +21,7 @@ jobs: build_nss: name: Build nss if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 30 steps: @@ -60,7 +60,7 @@ jobs: nss_test: name: Test interop with nss if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_nss timeout-minutes: 10 steps: diff --git a/.github/workflows/ntp.yml b/.github/workflows/ntp.yml index 2acd82b22..98beed629 100644 --- a/.github/workflows/ntp.yml +++ b/.github/workflows/ntp.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 4.2.8p15, 4.2.8p17 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 10 needs: build_wolfssl diff --git a/.github/workflows/ocsp.yml b/.github/workflows/ocsp.yml index b7c8f8ef5..3cd5636d9 100644 --- a/.github/workflows/ocsp.yml +++ b/.github/workflows/ocsp.yml @@ -16,7 +16,7 @@ jobs: ocsp_stapling: name: ocsp stapling if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 10 steps: - name: Checkout wolfSSL diff --git a/.github/workflows/openldap.yml b/.github/workflows/openldap.yml index 2074b2df9..6d9c76867 100644 --- a/.github/workflows/openldap.yml +++ b/.github/workflows/openldap.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -51,7 +51,7 @@ jobs: git_ref: OPENLDAP_REL_ENG_2_6_7 name: ${{ matrix.osp_ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 20 needs: build_wolfssl diff --git a/.github/workflows/openssh.yml b/.github/workflows/openssh.yml index adbf82081..99e90b4d2 100644 --- a/.github/workflows/openssh.yml +++ b/.github/workflows/openssh.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -71,7 +71,7 @@ jobs: connection-timeout name: ${{ matrix.osp_ver }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_wolfssl steps: - name: Download lib diff --git a/.github/workflows/opensslcoexist.yml b/.github/workflows/opensslcoexist.yml index e116a2107..1b26ed947 100644 --- a/.github/workflows/opensslcoexist.yml +++ b/.github/workflows/opensslcoexist.yml @@ -23,7 +23,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/openvpn.yml b/.github/workflows/openvpn.yml index 974630145..34ea28751 100644 --- a/.github/workflows/openvpn.yml +++ b/.github/workflows/openvpn.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -46,7 +46,7 @@ jobs: ref: [ release/2.6, master ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 10 needs: build_wolfssl diff --git a/.github/workflows/os-check.yml b/.github/workflows/os-check.yml index 3329cc39c..02ff88e32 100644 --- a/.github/workflows/os-check.yml +++ b/.github/workflows/os-check.yml @@ -17,7 +17,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ ubuntu-22.04, macos-latest ] + os: [ ubuntu-24.04, macos-latest ] config: [ # Add new configs here '', @@ -87,7 +87,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ ubuntu-22.04, macos-latest ] + os: [ ubuntu-24.04, macos-latest ] user-settings: [ # Add new user_settings.h here 'examples/configs/user_settings_all.h', @@ -109,7 +109,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ ubuntu-22.04, macos-latest ] + os: [ ubuntu-24.04, macos-latest ] user-settings: [ # Add new user_settings.h here 'examples/configs/user_settings_eccnonblock.h', @@ -140,7 +140,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ ubuntu-22.04, macos-latest ] + os: [ ubuntu-24.04, macos-latest ] name: make user_setting.h (with sed) if: github.repository_owner == 'wolfssl' runs-on: ${{ matrix.os }} diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml index 2b78fc8f9..ec55f410f 100644 --- a/.github/workflows/packaging.yml +++ b/.github/workflows/packaging.yml @@ -16,7 +16,7 @@ jobs: build_wolfssl: name: Package wolfSSL if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 10 steps: diff --git a/.github/workflows/pam-ipmi.yml b/.github/workflows/pam-ipmi.yml index 22da7d6b6..78b162a3c 100644 --- a/.github/workflows/pam-ipmi.yml +++ b/.github/workflows/pam-ipmi.yml @@ -18,7 +18,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -48,7 +48,7 @@ jobs: git_ref: [ e4b13e6725abb178f62ee897fe1c0e81b06a9431 ] name: ${{ matrix.git_ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: build_wolfssl steps: - name: Install dependencies diff --git a/.github/workflows/pq-all.yml b/.github/workflows/pq-all.yml index fc32344f6..4aeaa5eb0 100644 --- a/.github/workflows/pq-all.yml +++ b/.github/workflows/pq-all.yml @@ -25,7 +25,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/psk.yml b/.github/workflows/psk.yml index 54ad8737f..5026c4bdf 100644 --- a/.github/workflows/psk.yml +++ b/.github/workflows/psk.yml @@ -24,7 +24,7 @@ jobs: ] name: make check if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 6 steps: diff --git a/.github/workflows/renode-stm32h753.yml b/.github/workflows/renode-stm32h753.yml new file mode 100644 index 000000000..9a56d39cf --- /dev/null +++ b/.github/workflows/renode-stm32h753.yml @@ -0,0 +1,271 @@ +name: Renode STM32H753 Test + +# Platform-specific configuration +# To add a new platform, create a new workflow file based on this template +# and update these variables for the target MCU +env: + PLATFORM_NAME: stm32h753 + PLATFORM_DISPLAY_NAME: STM32H753 + CMSIS_DEVICE_REPO: cmsis-device-h7 + CMSIS_DEVICE_PATH: /opt/cmsis-device-h7 + CMSIS_DEVICE_CACHE_KEY: cmsis-device-h7-v1 + STM32CUBE_REPO: STM32CubeH7 + STM32CUBE_BRANCH: v1.11.2 + STM32CUBE_PATH: /opt/STM32CubeH7 + STM32CUBE_CACHE_KEY: stm32cubeh7-v1.11.2-v1 + HAL_CONFIG_FILE: stm32h7xx_hal_conf.h + HAL_DRIVER_INC_PATH: STM32H7xx_HAL_Driver/Inc + HAL_DRIVER_SRC_PATH: STM32H7xx_HAL_Driver/Src + RENODE_PLATFORM_NAME: stm32h753 + RENODE_REPL_PATH: platforms/cpus/stm32h753.repl + RENODE_TEST_DIR: .github/renode-test/stm32h753 + +on: + push: + branches: [ main, master, develop ] + pull_request: + branches: [ main, master, develop ] + workflow_dispatch: + +jobs: + test: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout wolfSSL + uses: actions/checkout@v4 + + - name: Set up build environment + run: | + sudo apt-get update + sudo apt-get install -y --no-install-recommends \ + build-essential \ + ca-certificates \ + cmake \ + ninja-build \ + python3 \ + git \ + gcc-arm-none-eabi \ + libnewlib-arm-none-eabi \ + libstdc++-arm-none-eabi-newlib \ + wget \ + unzip + + - name: Cache CMSIS Device + id: cache-cmsis-device + uses: actions/cache@v4 + with: + path: ${{ env.CMSIS_DEVICE_PATH }} + key: ${{ env.CMSIS_DEVICE_CACHE_KEY }} + restore-keys: | + ${{ env.CMSIS_DEVICE_CACHE_KEY }}- + + - name: Cache CMSIS 5 + id: cache-cmsis-5 + uses: actions/cache@v4 + with: + path: /opt/CMSIS_5 + key: cmsis-5-v1 + restore-keys: | + cmsis-5- + + - name: Cache STM32Cube + 
id: cache-stm32cube + uses: actions/cache@v4 + with: + path: ${{ env.STM32CUBE_PATH }} + key: ${{ env.STM32CUBE_CACHE_KEY }} + restore-keys: | + ${{ env.STM32CUBE_CACHE_KEY }}- + + - name: Cache Renode + id: cache-renode + uses: actions/cache@v4 + with: + path: /opt/renode + key: renode-1.15.3-v1 + restore-keys: | + renode-1.15.3- + + - name: Install Renode dependencies + run: | + # Install Mono and other dependencies needed for Renode (always needed, even when cached) + sudo apt-get install -y --no-install-recommends \ + mono-runtime \ + libmono-cil-dev \ + screen \ + policykit-1 || true + + - name: Install Renode (if not cached) + if: steps.cache-renode.outputs.cache-hit != 'true' + run: | + # Install Renode by extracting .deb (avoids GUI dependency issues for headless use) + cd /tmp + wget -q https://github.com/renode/renode/releases/download/v1.15.3/renode_1.15.3_amd64.deb + # Extract the .deb file + dpkg-deb -x renode_1.15.3_amd64.deb /tmp/renode-extract + # Copy Renode files to system locations + sudo mkdir -p /opt/renode + sudo cp -r /tmp/renode-extract/opt/renode/* /opt/renode/ || true + sudo cp -r /tmp/renode-extract/usr/* /usr/ || true + # Create symlink for easy access + if [ -f /opt/renode/renode ]; then + sudo ln -sf /opt/renode/renode /usr/local/bin/renode + elif [ -f /usr/bin/renode ]; then + echo "Renode already in PATH at /usr/bin/renode" + fi + # Cleanup + rm -rf /tmp/renode-extract renode_1.15.3_amd64.deb + + - name: Setup Renode symlinks and permissions + run: | + # When Renode is cached, we need to recreate /usr/bin/renode wrapper script + # The /usr/bin/renode is a wrapper that checks Mono and calls /opt/renode/bin/Renode.exe + if [ -d /opt/renode ] && [ ! -x /usr/bin/renode ]; then + echo "Renode cached but /usr/bin/renode wrapper missing, recreating..." + # Create the wrapper script + sudo bash -c 'cat > /usr/bin/renode << '\''SCRIPT_EOF'\'' + #!/bin/sh + MONOVERSION=5.20 + REQUIRED_MAJOR=5 + REQUIRED_MINOR=20 + + LAUNCHER=mono + + if ! 
[ -x "$(command -v $LAUNCHER)" ] + then + echo "$LAUNCHER not found. Renode requires Mono $MONOVERSION or newer. Please refer to documentation for installation instructions. Exiting!" + exit 1 + fi + + # Check installed mono version + INSTALLED_MONO=`$LAUNCHER --version | head -n1 | cut -d'\'' '\'' -f5` + INSTALLED_MONO_MAJOR=`echo $INSTALLED_MONO | cut -d'\''.'\'' -f1` + INSTALLED_MONO_MINOR=`echo $INSTALLED_MONO | cut -d'\''.'\'' -f2` + + if [ $INSTALLED_MONO_MAJOR -lt $REQUIRED_MAJOR ] || [ $INSTALLED_MONO_MAJOR -eq $REQUIRED_MAJOR -a $INSTALLED_MONO_MINOR -lt $REQUIRED_MINOR ] + then + echo "Wrong Mono version detected: $INSTALLED_MONO. Renode requires Mono $MONOVERSION or newer. Please refer to documentation for installation instructions. Exiting!" + exit 1 + fi + + exec $LAUNCHER $MONO_OPTIONS /opt/renode/bin/Renode.exe "$@" + SCRIPT_EOF' + sudo chmod +x /usr/bin/renode + echo "Created /usr/bin/renode wrapper script" + fi + + # Also ensure /usr/local/bin/renode symlink exists + if [ -x /usr/bin/renode ] && [ ! -x /usr/local/bin/renode ]; then + sudo ln -sf /usr/bin/renode /usr/local/bin/renode + echo "Created symlink: /usr/local/bin/renode -> /usr/bin/renode" + fi + + - name: Verify Renode installation + run: | + # Verify Renode is installed and accessible + RENODE_FOUND=false + RENODE_BIN="" + + # Check various possible locations + for path in /opt/renode/renode /opt/renode/bin/renode /usr/local/bin/renode /usr/bin/renode; do + if [ -x "$path" ]; then + echo "Renode found at $path" + "$path" --version || true + RENODE_BIN="$path" + RENODE_FOUND=true + break + fi + done + + if [ "$RENODE_FOUND" != "true" ]; then + echo "ERROR: Renode binary not found or not executable!" + echo "Searching for renode..." 
+ find /opt /usr -name renode -type f 2>/dev/null | head -10 || true + echo "Checking /opt/renode contents:" + ls -la /opt/renode/ 2>/dev/null | head -10 || true + if [ -d /opt/renode ]; then + echo "Checking /opt/renode subdirectories:" + find /opt/renode -type f -name "*renode*" 2>/dev/null | head -10 || true + fi + exit 1 + fi + + + - name: Clone CMSIS Device (if not cached) + if: steps.cache-cmsis-device.outputs.cache-hit != 'true' + run: | + sudo mkdir -p /opt + sudo git clone --depth 1 https://github.com/STMicroelectronics/${{ env.CMSIS_DEVICE_REPO }}.git ${{ env.CMSIS_DEVICE_PATH }} + + - name: Clone CMSIS 5 (if not cached) + if: steps.cache-cmsis-5.outputs.cache-hit != 'true' + run: | + sudo mkdir -p /opt + sudo git clone --depth 1 https://github.com/ARM-software/CMSIS_5.git /opt/CMSIS_5 + + - name: Clone STM32Cube (if not cached) + if: steps.cache-stm32cube.outputs.cache-hit != 'true' + run: | + sudo mkdir -p /opt + sudo git clone --depth 1 --branch ${{ env.STM32CUBE_BRANCH }} --recurse-submodules https://github.com/STMicroelectronics/${{ env.STM32CUBE_REPO }}.git ${{ env.STM32CUBE_PATH }} || \ + (sudo git clone --depth 1 --branch ${{ env.STM32CUBE_BRANCH }} https://github.com/STMicroelectronics/${{ env.STM32CUBE_REPO }}.git ${{ env.STM32CUBE_PATH }} && \ + cd ${{ env.STM32CUBE_PATH }} && sudo git submodule update --init --recursive --depth 1) + + - name: Setup firmware build directory and helper files + run: | + sudo mkdir -p /opt/firmware + # Copy helper files from repository + sudo cp -r ${{ github.workspace }}/${{ env.RENODE_TEST_DIR }}/* /opt/firmware/ + # Copy HAL config to STM32Cube directory + sudo cp /opt/firmware/${{ env.HAL_CONFIG_FILE }} ${{ env.STM32CUBE_PATH }}/Drivers/${{ env.HAL_DRIVER_INC_PATH }}/ 2>/dev/null || true + sudo chmod +x /opt/firmware/entrypoint.sh + # Create .renode-root file so Renode can find platform files + # Try to find Renode installation directory and create .renode-root with proper permissions + if [ -d 
"/opt/renode/platforms" ]; then + echo "/opt/renode" | sudo tee /opt/firmware/.renode-root > /dev/null + sudo chmod 644 /opt/firmware/.renode-root + elif [ -d "/usr/lib/renode/platforms" ]; then + echo "/usr/lib/renode" | sudo tee /opt/firmware/.renode-root > /dev/null + sudo chmod 644 /opt/firmware/.renode-root + elif [ -d "/usr/share/renode/platforms" ]; then + echo "/usr/share/renode" | sudo tee /opt/firmware/.renode-root > /dev/null + sudo chmod 644 /opt/firmware/.renode-root + fi + + - name: Build wolfSSL firmware (NOT CACHED - rebuilds on every run) + env: + WOLFSSL_ROOT: /opt/wolfssl + run: | + # Copy wolfSSL source (this is NOT cached - fresh checkout each time) + sudo cp -r ${{ github.workspace }} /opt/wolfssl + # Build with CMake + cd /opt/firmware + sudo cmake -G Ninja \ + -DWOLFSSL_USER_SETTINGS=ON \ + -DUSER_SETTINGS_FILE=/opt/firmware/user_settings.h \ + -DCMAKE_TOOLCHAIN_FILE=/opt/firmware/toolchain-arm-none-eabi.cmake \ + -DCMAKE_BUILD_TYPE=Release \ + -DWOLFSSL_CRYPT_TESTS=OFF \ + -DWOLFSSL_EXAMPLES=OFF \ + -B /opt/firmware/build \ + -S /opt/firmware + sudo cmake --build /opt/firmware/build + # Verify ELF file was created and copy it to expected location + if [ -f "/opt/firmware/build/wolfcrypt_test.elf" ]; then + sudo cp /opt/firmware/build/wolfcrypt_test.elf /opt/firmware/wolfcrypt_test.elf + echo "ELF file copied to /opt/firmware/wolfcrypt_test.elf" + ls -lh /opt/firmware/wolfcrypt_test.elf + else + echo "ERROR: ELF file not found at /opt/firmware/build/wolfcrypt_test.elf" + echo "Searching for ELF files..." 
+ find /opt/firmware/build -name "*.elf" 2>/dev/null || true + exit 1 + fi + + - name: Run Renode test + run: | + # Ensure PATH includes standard binary locations for sudo + sudo env PATH="$PATH" /opt/firmware/entrypoint.sh + diff --git a/.github/workflows/rng-tools.yml b/.github/workflows/rng-tools.yml index ea4b62840..0f124e9f4 100644 --- a/.github/workflows/rng-tools.yml +++ b/.github/workflows/rng-tools.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 6.16 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/smallStackSize.yml b/.github/workflows/smallStackSize.yml index a31105fd1..bd832026b 100644 --- a/.github/workflows/smallStackSize.yml +++ b/.github/workflows/smallStackSize.yml @@ -37,7 +37,7 @@ jobs: ] name: build library if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/socat.yml b/.github/workflows/socat.yml index 91417e7a7..1484027ec 100644 --- a/.github/workflows/socat.yml +++ b/.github/workflows/socat.yml @@ -16,7 +16,7 @@ jobs: build_wolfssl: name: Build wolfSSL if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 4 steps: - name: Build wolfSSL @@ -39,7 +39,7 @@ jobs: socat_check: if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 30 needs: build_wolfssl diff --git a/.github/workflows/softhsm.yml b/.github/workflows/softhsm.yml index bb3824d17..593cd6913 100644 --- a/.github/workflows/softhsm.yml +++ b/.github/workflows/softhsm.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 10 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 2.6.1 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 20 needs: build_wolfssl diff --git a/.github/workflows/sssd.yml b/.github/workflows/sssd.yml index 82024508e..b160bf29d 100644 --- a/.github/workflows/sssd.yml +++ b/.github/workflows/sssd.yml @@ -17,7 +17,7 @@ jobs: if: github.repository_owner == 'wolfssl' name: Build wolfSSL # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 steps: @@ -47,7 +47,7 @@ jobs: ref: [ 2.9.1 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 container: image: quay.io/sssd/ci-client-devel:ubuntu-latest env: diff --git a/.github/workflows/stunnel.yml b/.github/workflows/stunnel.yml index 701a4e51b..977ac3ee5 100644 --- a/.github/workflows/stunnel.yml +++ b/.github/workflows/stunnel.yml @@ -17,7 +17,7 @@ jobs: name: Build wolfSSL if: github.repository_owner == 'wolfssl' # Just to keep it the same as the testing target - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 4 steps: @@ -46,7 +46,7 @@ jobs: ref: [ 5.67 ] name: ${{ matrix.ref }} if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 4 needs: build_wolfssl diff --git a/.github/workflows/symbol-prefixes.yml b/.github/workflows/symbol-prefixes.yml index 84a0e75e9..5073f8e93 100644 --- a/.github/workflows/symbol-prefixes.yml +++ b/.github/workflows/symbol-prefixes.yml @@ -21,7 +21,7 @@ jobs: ] name: make and analyze if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. timeout-minutes: 6 steps: diff --git a/.github/workflows/threadx.yml b/.github/workflows/threadx.yml index f93cea9c1..4cd1be57b 100644 --- a/.github/workflows/threadx.yml +++ b/.github/workflows/threadx.yml @@ -9,7 +9,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 10 steps: diff --git a/.github/workflows/wolfCrypt-Wconversion.yml b/.github/workflows/wolfCrypt-Wconversion.yml index 258902a4a..b1a7305e4 100644 --- a/.github/workflows/wolfCrypt-Wconversion.yml +++ b/.github/workflows/wolfCrypt-Wconversion.yml @@ -27,7 +27,7 @@ jobs: ] name: build library if: github.repository_owner == 'wolfssl' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # This should be a safe limit for the tests to run. 
timeout-minutes: 6 steps: diff --git a/.github/workflows/zephyr.yml b/.github/workflows/zephyr.yml index fb7f0d2b3..df1b2e1cd 100644 --- a/.github/workflows/zephyr.yml +++ b/.github/workflows/zephyr.yml @@ -42,7 +42,7 @@ jobs: make gcc gcc-multilib g++-multilib libsdl2-dev libmagic1 \ autoconf automake bison build-essential ca-certificates cargo ccache chrpath cmake \ cpio device-tree-compiler dfu-util diffstat dos2unix doxygen file flex g++ gawk gcc \ - gcovr git git-core gnupg gperf gtk-sharp2 help2man iproute2 lcov libcairo2-dev \ + gcovr git git-core gnupg gperf gtk-sharp3 help2man iproute2 lcov libcairo2-dev \ libglib2.0-dev libgtk2.0-0 liblocale-gettext-perl libncurses5-dev libpcap-dev \ libpopt0 libsdl1.2-dev libsdl2-dev libssl-dev libtool libtool-bin locales make \ net-tools ninja-build openssh-client parallel pkg-config python3-dev python3-pip \ diff --git a/.gitignore b/.gitignore index 7f83d30ed..0ef964417 100644 --- a/.gitignore +++ b/.gitignore @@ -468,3 +468,5 @@ wrapper/Ada/obj/ # Autogenerated debug trace headers wolfssl/debug-trace-error-codes.h wolfssl/debug-untrace-error-codes.h + +AGENTS.md diff --git a/.wolfssl_known_macro_extras b/.wolfssl_known_macro_extras index ac29f9e7c..0a681bd40 100644 --- a/.wolfssl_known_macro_extras +++ b/.wolfssl_known_macro_extras @@ -478,6 +478,7 @@ REDIRECTION_OUT2_KEYELMID REDIRECTION_OUT2_KEYID RENESAS_T4_USE RHEL_MAJOR +RHEL_RELEASE_CODE RTC_ALARMSUBSECONDMASK_ALL RTE_CMSIS_RTOS_RTX RTOS_MODULE_NET_AVAIL @@ -544,6 +545,11 @@ STM32WL55xx STM32_AESGCM_PARTIAL STM32_HW_CLOCK_AUTO STM32_NUTTX_RNG +STSAFE_HOST_KEY_CIPHER +STSAFE_HOST_KEY_MAC +STSAFE_I2C_BUS +STSE_CONF_ECC_BRAINPOOL_P_256 +STSE_CONF_ECC_BRAINPOOL_P_384 TASK_EXTRA_STACK_SIZE TCP_NODELAY TFM_ALREADY_SET @@ -622,6 +628,7 @@ WC_LMS_FULL_HASH WC_NO_ASYNC_SLEEP WC_NO_RNG_SIMPLE WC_NO_STATIC_ASSERT +WC_NO_VERBOSE_RNG WC_PKCS11_FIND_WITH_ID_ONLY WC_PROTECT_ENCRYPTED_MEM WC_RNG_BLOCKING @@ -788,6 +795,7 @@ WOLFSSL_MONT_RED_CT WOLFSSL_MP_COND_COPY 
WOLFSSL_MP_INVMOD_CONSTANT_TIME WOLFSSL_MULTICIRCULATE_ALTNAMELIST +WOLFSSL_NEW_PRIME_CHECK WOLFSSL_NONBLOCK_OCSP WOLFSSL_NOSHA3_384 WOLFSSL_NOT_WINDOWS_API @@ -799,6 +807,7 @@ WOLFSSL_NO_COPY_KEY WOLFSSL_NO_CRL_DATE_CHECK WOLFSSL_NO_CRL_NEXT_DATE WOLFSSL_NO_CT_MAX_MIN +WOLFSSL_NO_DEBUG_CERTS WOLFSSL_NO_DECODE_EXTRA WOLFSSL_NO_DER_TO_PEM WOLFSSL_NO_DH186 @@ -887,7 +896,6 @@ WOLFSSL_SP_INT_SQR_VOLATILE WOLFSSL_STACK_CHECK WOLFSSL_STM32F427_RNG WOLFSSL_STM32U5_DHUK -WOLFSSL_STM32_RNG_NOLIB WOLFSSL_STRONGEST_HASH_SIG WOLFSSL_STSAFE_TAKES_SLOT WOLFSSL_TELIT_M2MB @@ -1072,6 +1080,7 @@ __WATCOMC__ __WATCOM_INT64__ __XC32 __XTENSA__ +__ZEPHYR__ __aarch64__ __alpha__ __arch64__ diff --git a/CMakeLists.txt b/CMakeLists.txt index b13a79432..1ec00d6f2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -603,6 +603,11 @@ add_option(WOLFSSL_MLKEM "Enable the wolfSSL PQ ML-KEM library (default: disabled)" "no" "yes;no") +# Dilithium +add_option(WOLFSSL_DILITHIUM + "Enable the wolfSSL PQ Dilithium (ML-DSA) implementation (default: disabled)" + "no" "yes;no") + # LMS add_option(WOLFSSL_LMS "Enable the PQ LMS Stateful Hash-based Signature Scheme (default: disabled)" @@ -700,6 +705,22 @@ if (WOLFSSL_EXPERIMENTAL) message(STATUS "Looking for WOLFSSL_LMS - not found") endif() + # Checking for experimental feature: Dilithium + message(STATUS "Looking for WOLFSSL_DILITHIUM") + if (WOLFSSL_DILITHIUM) + set(WOLFSSL_FOUND_EXPERIMENTAL_FEATURE 1) + + message(STATUS "Automatically set related requirements for Dilithium:") + set_wolfssl_definitions("HAVE_DILITHIUM" RESUlT) + set_wolfssl_definitions("WOLFSSL_WC_DILITHIUM" RESUlT) + set_wolfssl_definitions("WOLFSSL_SHA3" RESUlT) + set_wolfssl_definitions("WOLFSSL_SHAKE128" RESUlT) + set_wolfssl_definitions("WOLFSSL_SHAKE256" RESUlT) + message(STATUS "Looking for WOLFSSL_DILITHIUM - found") + else() + message(STATUS "Looking for WOLFSSL_DILITHIUM - not found") + endif() + # Other experimental feature detection can be added here... 
# Were any experimental features found? Display a message. @@ -713,7 +734,9 @@ if (WOLFSSL_EXPERIMENTAL) if(WOLFSSL_OQS AND WOLFSSL_MLKEM) message(FATAL_ERROR "Error: cannot enable both WOLFSSL_OQS and WOLFSSL_MLKEM at the same time.") endif() - + if(WOLFSSL_OQS AND WOLFSSL_DILITHIUM) + message(FATAL_ERROR "Error: cannot enable both WOLFSSL_OQS and WOLFSSL_DILITHIUM at the same time.") + endif() else() # Experimental mode not enabled, but were any experimental features enabled? Error out if so: message(STATUS "Looking for WOLFSSL_EXPERIMENTAL - not found") @@ -723,6 +746,9 @@ else() if(WOLFSSL_MLKEM) message(FATAL_ERROR "Error: WOLFSSL_MLKEM requires WOLFSSL_EXPERIMENTAL at this time.") endif() + if(WOLFSSL_DILITHIUM) + message(FATAL_ERROR "Error: WOLFSSL_DILITHIUM requires WOLFSSL_EXPERIMENTAL at this time.") + endif() endif() # LMS @@ -1866,6 +1892,15 @@ if(NOT WOLFSSL_PKCS12) list(APPEND WOLFSSL_DEFINITIONS "-DNO_PKCS12") endif() +# PKCS#11 +add_option("WOLFSSL_PKCS11" + "Enable PKCS#11 (default: disabled)" + "no" "yes;no") + +if(WOLFSSL_PKCS11 AND NOT WIN32) + list(APPEND WOLFSSL_LINK_LIBS ${CMAKE_DL_LIBS}) +endif() + # PWDBASED has to come after certservice since we want it on w/o explicit on # PWDBASED @@ -1964,7 +1999,6 @@ add_option("WOLFSSL_CRYPT_TESTS_HELP" "no" "yes;no") # TODO: - LIBZ -# - PKCS#11 # - Cavium # - Cavium V # - Cavium Octeon @@ -2705,6 +2739,13 @@ if(WOLFSSL_EXAMPLES) tests/api/test_ossl_x509_str.c tests/api/test_ossl_x509_lu.c tests/api/test_ossl_pem.c + tests/api/test_ossl_rand.c + tests/api/test_ossl_obj.c + tests/api/test_ossl_p7p12.c + tests/api/test_evp_digest.c + tests/api/test_evp_cipher.c + tests/api/test_evp_pkey.c + tests/api/test_certman.c tests/api/test_tls13.c tests/srp.c tests/suites.c diff --git a/IDE/XCODE/Benchmark/wolfBench.xcodeproj/project.pbxproj b/IDE/XCODE/Benchmark/wolfBench.xcodeproj/project.pbxproj index 5f2fd2cf5..0da0f0757 100644 --- a/IDE/XCODE/Benchmark/wolfBench.xcodeproj/project.pbxproj +++ 
b/IDE/XCODE/Benchmark/wolfBench.xcodeproj/project.pbxproj @@ -112,7 +112,7 @@ 9D2E31E3291CE4800082B941 /* dtls.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = dtls.c; path = ../../../src/dtls.c; sourceTree = ""; }; 9D2E31E6291CE4AC0082B941 /* dtls13.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = dtls13.c; path = ../../../src/dtls13.c; sourceTree = ""; }; 9D2E31E8291CE5CB0082B941 /* kdf.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = kdf.c; path = ../../../wolfcrypt/src/kdf.c; sourceTree = ""; }; - A46FE14C2493E8F500A25BE7 /* armv8-chacha-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "armv8-chacha-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-chacha-asm.S"; sourceTree = ""; }; + A46FE14C2493E8F500A25BE7 /* armv8-chacha-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-chacha-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-chacha-asm.S"; sourceTree = ""; }; A46FE14D2493E8F600A25BE7 /* sp_int.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = sp_int.c; path = ../../../wolfcrypt/src/sp_int.c; sourceTree = ""; }; A46FE1512493E8F600A25BE7 /* sp_cortexm.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = sp_cortexm.c; path = ../../../wolfcrypt/src/sp_cortexm.c; sourceTree = ""; }; A46FE1522493E8F600A25BE7 /* blake2s.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = blake2s.c; path = ../../../wolfcrypt/src/blake2s.c; sourceTree = ""; }; @@ -201,10 +201,10 @@ A4ADF8CE1FCE0C5500A06E90 /* coding.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = coding.c; path = ../../../wolfcrypt/src/coding.c; sourceTree = ""; }; A4ADF8D01FCE0C5500A06E90 /* ge_low_mem.c */ = 
{isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ge_low_mem.c; path = ../../../wolfcrypt/src/ge_low_mem.c; sourceTree = ""; }; A4DFEC0C1FD4CAA300A7BB33 /* benchmark.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = benchmark.c; path = ../../../wolfcrypt/benchmark/benchmark.c; sourceTree = ""; }; - A4DFEC0E1FD4CB8500A7BB33 /* armv8-sha256-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "armv8-sha256-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-sha256-asm.S"; sourceTree = ""; }; - A4DFEC0F1FD4CB8500A7BB33 /* armv8-aes-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "armv8-aes-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-aes-asm.S"; sourceTree = ""; }; + A4DFEC0E1FD4CB8500A7BB33 /* armv8-sha256-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-sha256-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-sha256-asm.S"; sourceTree = ""; }; + A4DFEC0F1FD4CB8500A7BB33 /* armv8-aes-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-aes-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-aes-asm.S"; sourceTree = ""; }; A4DFEC3B1FD6B9CC00A7BB33 /* test.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = test.c; path = ../../../wolfcrypt/test/test.c; sourceTree = ""; }; - CB81DE1C24C9284700B98DA6 /* armv8-poly1305-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "armv8-poly1305-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-poly1305-asm.S"; sourceTree = ""; }; + CB81DE1C24C9284700B98DA6 /* armv8-poly1305-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-poly1305-asm.S"; path = 
"../../../wolfcrypt/src/port/arm/armv8-poly1305-asm.S"; sourceTree = ""; }; CB81DE1E24C93EC000B98DA6 /* armv8-curve25519.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-curve25519.S"; path = "../../../wolfcrypt/src/port/arm/armv8-curve25519.S"; sourceTree = ""; }; CB81DE2224C93FB300B98DA6 /* armv8-sha512-asm.S */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "armv8-sha512-asm.S"; path = "../../../wolfcrypt/src/port/arm/armv8-sha512-asm.S"; sourceTree = ""; }; /* End PBXFileReference section */ diff --git a/LICENSING b/LICENSING index 14b0801ac..a0b2ca4a0 100644 --- a/LICENSING +++ b/LICENSING @@ -17,6 +17,10 @@ Fetchmail OpenVPN +SWUpdate + +RPCS3 + For our users who cannot use wolfSSL under GPLv3, a commercial license to wolfSSL and wolfCrypt is available. diff --git a/README b/README index 619f1f37d..14bc22b32 100644 --- a/README +++ b/README @@ -7,8 +7,9 @@ and feature set. It is commonly used in standard operating environments as well because of its royalty-free pricing and excellent cross platform support. wolfSSL supports industry standards up to the current TLS 1.3 and DTLS 1.3 levels, is up to 20 times smaller than OpenSSL, and offers progressive ciphers -such as ChaCha20, Curve25519, and Blake2b. User benchmarking and feedback -reports dramatically better performance when using wolfSSL over OpenSSL. +such as ChaCha20, Curve25519, BLAKE2b/BLAKE2s and Post-Quantum TLS 1.3 groups. +User benchmarking and feedback reports dramatically better performance when +using wolfSSL over OpenSSL. wolfSSL is powered by the wolfCrypt library. Two versions of the wolfCrypt cryptography library have been FIPS 140-2 validated (Certificate #2425 and diff --git a/README.md b/README.md index f555f08b2..94b71b812 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,8 @@ standard operating environments as well because of its royalty-free pricing and excellent cross platform support. 
wolfSSL supports industry standards up to the current [TLS 1.3](https://www.wolfssl.com/tls13) and DTLS 1.3, is up to 20 times smaller than OpenSSL, and offers progressive ciphers such as ChaCha20, -Curve25519, Blake2b and Post-Quantum TLS 1.3 groups. User benchmarking and -feedback reports dramatically better performance when using wolfSSL over +Curve25519, BLAKE2b/BLAKE2s and Post-Quantum TLS 1.3 groups. User benchmarking +and feedback reports dramatically better performance when using wolfSSL over OpenSSL. wolfSSL is powered by the wolfCrypt cryptography library. Two versions of diff --git a/bsdkm/Makefile b/bsdkm/Makefile index 46ff5ea39..dd6bbcbd7 100644 --- a/bsdkm/Makefile +++ b/bsdkm/Makefile @@ -5,8 +5,14 @@ WOLFSSL_DIR=../ CFLAGS+=-I${WOLFSSL_DIR} CFLAGS+=-DWOLFSSL_IGNORE_FILE_WARN -DHAVE_CONFIG_H -DNO_MAIN_DRIVER -# debug printing -# CFLAGS+=-DWOLFSSL_BSDKM_VERBOSE_DEBUG +# +# debug options +# verbose printing: +# CFLAGS+=-DWOLFSSL_BSDKM_VERBOSE_DEBUG +# +# print memory mallocs / frees: +# CFLAGS+=-DWOLFSSL_BSDKM_MEMORY_DEBUG +# CFLAGS+=$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) # FreeBSD make does not support GNU make's patsubst and related. Filter diff --git a/bsdkm/README.md b/bsdkm/README.md new file mode 100644 index 000000000..b84c2588f --- /dev/null +++ b/bsdkm/README.md @@ -0,0 +1,102 @@ +# wolfSSL bsdkm (bsd kernel module) + +libwolfssl supports building as a FreeBSD kernel module (`libwolfssl.ko`). +When loaded, wolfCrypt is made available to the rest of the kernel, allowing +other loadable modules to link to wolfCrypt. + +Supported features: +- wolfCrypt in kernel. +- FIPS-wolfcrypt. + +Planned features: +- crypto acceleration: AES-NI, AVX, etc. +- kernel opencrypto driver registration. +- full wolfSSL in kernel (kernel TLS). + +## Building and Installing + +Build bsdkm with: + +```sh +./configure --enable-freebsdkm --enable-cryptonly && make +``` + +The default freebsdkm build assumes kernel source tree root at `/usr/src/sys/`. 
+Use `--with-kernel-source=PATH` to configure a different path. + +Assuming you are targeting your native system, install with: + +```sh +sudo kldload bsdkm/libwolfssl.ko +``` + +You should see it now: +```sh +kldstat -m libwolfssl +Id Refs Name +509 1 libwolfssl +``` + +Unload with: +```sh +sudo kldunload libwolfssl +``` + +### options + +| freebsdkm option | description | +| :------------------------------- | :--------------------------------------- | +| --with-bsd-export-syms=LIST | Export list of symbols as global.
. Options are 'all', 'none', or
comma separated list of symbols. | +| --with-kernel-source=PATH | Path to kernel tree root (default `/usr/src/sys`) | + +### FIPS + +Building with FIPS is largely the same, with the additional step of +configuring a fips hash. + +1. Build bsdkm (the `fips_hash` here is a placeholder): + +```sh +fips_hash=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef +./configure --enable-freebsdkm --enable-cryptonly --enable-fips=v6 \ + CFLAGS="-DWOLFCRYPT_FIPS_CORE_HASH_VALUE=$fips_hash" && make +``` + +2. Attempt first install. This is expected to fail, because the hash was a +placeholder. +```sh +$ sudo kldload bsdkm/libwolfssl.ko +kldload: an error occurred while loading module bsdkm/libwolfssl.ko. Please check dmesg(8) for more details. +``` + +3. Check dmesg output for the updated hash value (yours will be different). +```sh +$ dmesg | tail -n5 +In-core integrity hash check failure. +Rebuild with "WOLFCRYPT_FIPS_CORE_HASH_VALUE=3B144A08F291DBA536324646BBD127447B8F222D29A135780E330351E0DF9F0F". +error: wc_RunAllCast_fips failed at shutdown with return value 19 +info: libwolfssl unloaded +module_register_init: MOD_LOAD (libwolfssl_fips, 0xffffffff842c28d0, 0) error 85 +``` + +4. Repeat steps 1-2 with the new hash value. The load should succeed now. + +``` +$ kldstat -m libwolfssl_fips +Id Refs Name +523 1 libwolfssl_fips +``` + +Unload with +``` +sudo kldunload libwolfssl +``` + +On unload, the FIPS self-test will run a final time and print its status +to system message buffer: + +``` +info: wolfCrypt FIPS re-self-test succeeded at unload: all algorithms re-verified. +info: libwolfssl unloaded +``` + diff --git a/bsdkm/bsdkm_wc_port.h b/bsdkm/bsdkm_wc_port.h index ead4a4c97..2a5524d5f 100644 --- a/bsdkm/bsdkm_wc_port.h +++ b/bsdkm/bsdkm_wc_port.h @@ -37,11 +37,28 @@ #include #endif /* !CHAR_BIT*/ +#define NO_THREAD_LS +#define NO_ATTRIBUTE_CONSTRUCTOR + +/* and TIME(3) are userspace only in FreeBSD. + * Use a small wrapper around time_second instead. 
*/ +#include +static inline time_t wolfkmod_time(time_t * tloc) { + time_t _now = time_second; + if (tloc) { + *tloc = _now; + } + return _now; +} +#define XTIME wolfkmod_time + /* needed to prevent wolfcrypt/src/asn.c version shadowing * extern global version from /usr/src/sys/sys/systm.h */ #define version wc_version -#define wc_km_printf printf +/* printf and logging defines */ +#define wc_km_printf printf +#define WOLFSSL_DEBUG_PRINTF_FN printf /* str and char utility functions */ #define XATOI(s) ({ \ @@ -51,7 +68,7 @@ _xatoi_ret = 0; \ } \ (int)_xatoi_ret; \ - }) +}) #if !defined(XMALLOC_OVERRIDE) #error bsdkm requires XMALLOC_OVERRIDE @@ -60,21 +77,44 @@ /* use malloc and free from /usr/include/sys/malloc.h */ extern struct malloc_type M_WOLFSSL[1]; -#define XMALLOC(s, h, t) \ - ({(void)(h); (void)(t); malloc(s, M_WOLFSSL, M_WAITOK | M_ZERO);}) +#if defined(WOLFSSL_BSDKM_MEMORY_DEBUG) + #define XMALLOC(s, h, t) ({ \ + (void)(h); (void)(t); \ + void * _ptr = malloc(s, M_WOLFSSL, M_WAITOK | M_ZERO); \ + printf("info: malloc: %p, M_WOLFSSL, %zu\n", _ptr, (size_t) s); \ + (void *)_ptr; \ + }) -#ifdef WOLFSSL_XFREE_NO_NULLNESS_CHECK - #define XFREE(p, h, t) \ - ({(void)(h); (void)(t); free(p, M_WOLFSSL);}) + #define XFREE(p, h, t) ({ \ + void* _xp; (void)(h); (void)(t); _xp = (p); \ + printf("info: free: %p, M_WOLFSSL\n", p); \ + if(_xp) free(_xp, M_WOLFSSL); \ + }) #else - #define XFREE(p, h, t) \ - ({void* _xp; (void)(h); (void)(t); _xp = (p); \ - if(_xp) free(_xp, M_WOLFSSL);}) -#endif + #define XMALLOC(s, h, t) ({ \ + (void)(h); (void)(t); \ + void * _ptr = malloc(s, M_WOLFSSL, M_WAITOK | M_ZERO); \ + (void *)_ptr; \ + }) + + #define XFREE(p, h, t) ({ \ + void* _xp; (void)(h); (void)(t); _xp = (p); \ + if(_xp) free(_xp, M_WOLFSSL); \ + }) +#endif /* WOLFSSL_BSDKM_DEBUG_MEMORY */ #if !defined(SINGLE_THREADED) #define WC_MUTEX_OPS_INLINE + /* Copied from wc_port.h */ + #if defined(HAVE_FIPS) && !defined(WOLFSSL_API_PREFIX_MAP) + /* For FIPS keep the function 
names the same */ + #define wc_InitMutex InitMutex + #define wc_FreeMutex FreeMutex + #define wc_LockMutex LockMutex + #define wc_UnLockMutex UnLockMutex + #endif /* HAVE_FIPS */ + typedef struct wolfSSL_Mutex { struct mtx lock; } wolfSSL_Mutex; @@ -106,12 +146,18 @@ extern struct malloc_type M_WOLFSSL[1]; #if defined(WOLFSSL_HAVE_ATOMIC_H) && !defined(WOLFSSL_NO_ATOMICS) #include - typedef volatile int wolfSSL_Atomic_Int; + typedef volatile int wolfSSL_Atomic_Int; typedef volatile unsigned int wolfSSL_Atomic_Uint; #define WOLFSSL_ATOMIC_INITIALIZER(x) (x) #define WOLFSSL_ATOMIC_LOAD(x) (int)atomic_load_acq_int(&(x)) #define WOLFSSL_ATOMIC_STORE(x, v) atomic_store_rel_int(&(x), (v)) #define WOLFSSL_ATOMIC_OPS + + #if defined(HAVE_FIPS) + /* There is no corresponding ATOMIC_INIT macro in FreeBSD. + * The FreeBSD equivalent is just an integer initialization. */ + #define ATOMIC_INIT(x) (x) + #endif #endif /* WOLFSSL_HAVE_ATOMIC_H && !WOLFSSL_NO_ATOMICS */ #endif /* WOLFSSL_BSDKM */ diff --git a/bsdkm/include.am b/bsdkm/include.am index 4dfc4a636..896a5447c 100644 --- a/bsdkm/include.am +++ b/bsdkm/include.am @@ -4,5 +4,6 @@ EXTRA_DIST += m4/ax_bsdkm.m4 \ bsdkm/Makefile \ + bsdkm/README.md \ bsdkm/wolfkmod.c \ bsdkm/bsdkm_wc_port.h diff --git a/bsdkm/wolfkmod.c b/bsdkm/wolfkmod.c index 41420a096..bb03e8ebb 100644 --- a/bsdkm/wolfkmod.c +++ b/bsdkm/wolfkmod.c @@ -33,10 +33,20 @@ #else #include #endif + +#ifdef HAVE_FIPS + #ifdef USE_CONTESTMUTEX + #error USE_CONTESTMUTEX is incompatible with WOLFSSL_BSDKM + #endif + #include +#endif /* HAVE_FIPS */ + #if !defined(NO_CRYPT_TEST) #include #endif +#include + MALLOC_DEFINE(M_WOLFSSL, "libwolfssl", "wolfSSL kernel memory"); static int wolfkmod_init(void); @@ -44,47 +54,140 @@ static int wolfkmod_cleanup(void); static int wolfkmod_load(void); static int wolfkmod_unload(void); +#ifdef HAVE_FIPS + #define WOLFKMOD_FIPS_ERR_MSG(hash) ({ \ + printf("In-core integrity hash check failure.\n"); \ + if ((hash)) \ + printf("Rebuild 
with \"WOLFCRYPT_FIPS_CORE_HASH_VALUE=%s\".\n", \ + hash); \ + else \ + printf("error: could not compute new hash. " \ + "Contact customer support.\n"); \ + }) + + static void wolfkmod_fips_cb(int ok, int err, const char * hash) + { + if ((!ok) || (err != 0)) { + printf("error: libwolfssl FIPS error: %s\n", + wc_GetErrorString(err)); + } + + if (err == WC_NO_ERR_TRACE(IN_CORE_FIPS_E)) { + WOLFKMOD_FIPS_ERR_MSG(hash); + } + } +#endif /* HAVE_FIPS */ + static int wolfkmod_init(void) { - int ret = 0; + int error = 0; + + #ifdef HAVE_FIPS + error = wolfCrypt_SetCb_fips(wolfkmod_fips_cb); + if (error != 0) { + printf("error: wolfCrypt_SetCb_fips failed: %s\n", + wc_GetErrorString(error)); + return (ECANCELED); + } + + fipsEntry(); + + error = wolfCrypt_GetStatus_fips(); + if (error != 0) { + printf("error: wolfCrypt_GetStatus_fips failed: %d: %s\n", + error, wc_GetErrorString(error)); + if (error == WC_NO_ERR_TRACE(IN_CORE_FIPS_E)) { + const char *newhash = wolfCrypt_GetCoreHash_fips(); + WOLFKMOD_FIPS_ERR_MSG(newhash); + } + return (ECANCELED); + } + #endif /* HAVE_FIPS */ + + #ifdef WC_RNG_SEED_CB + error = wc_SetSeed_Cb(WC_GENERATE_SEED_DEFAULT); + if (error < 0) { + printf("error: wc_SetSeed_Cb failed: %d\n", error); + return (ECANCELED); + } + #endif /* WC_RNG_SEED_CB */ #ifdef WOLFCRYPT_ONLY - ret = wolfCrypt_Init(); - if (ret != 0) { - printf("error: wolfCrypt_Init failed: %s\n", wc_GetErrorString(ret)); + error = wolfCrypt_Init(); + if (error != 0) { + printf("error: wolfCrypt_Init failed: %s\n", wc_GetErrorString(error)); return (ECANCELED); } #else - ret = wolfSSL_Init(); - if (ret != WOLFSSL_SUCCESS) { - printf("error: wolfSSL_Init failed: %s\n", wc_GetErrorString(ret)); + error = wolfSSL_Init(); + if (error != WOLFSSL_SUCCESS) { + printf("error: wolfSSL_Init failed: %s\n", wc_GetErrorString(error)); return (ECANCELED); } + #endif /* WOLFCRYPT_ONLY */ + + #ifdef HAVE_FIPS + error = wc_RunAllCast_fips(); + if (error != 0) { + printf("error: wc_RunAllCast_fips 
failed with " + "return value %d\n", error); + return (ECANCELED); + } + else { + printf("info: FIPS 140-3 wolfCrypt-fips v%d.%d.%d%s%s startup " + "self-test succeeded.\n", + #ifdef HAVE_FIPS_VERSION_MAJOR + HAVE_FIPS_VERSION_MAJOR, + #else + HAVE_FIPS_VERSION, #endif + #ifdef HAVE_FIPS_VERSION_MINOR + HAVE_FIPS_VERSION_MINOR, + #else + 0, + #endif + #ifdef HAVE_FIPS_VERSION_PATCH + HAVE_FIPS_VERSION_PATCH, + #else + 0, + #endif + #ifdef HAVE_FIPS_VERSION_PORT + "-", + HAVE_FIPS_VERSION_PORT + #else + "", + "" + #endif + ); + } + #endif /* HAVE_FIPS */ return (0); } static int wolfkmod_cleanup(void) { - int ret = 0; + int error = 0; #ifdef WOLFCRYPT_ONLY - ret = wolfCrypt_Cleanup(); - if (ret != 0) { - printf("error: wolfCrypt_Cleanup failed: %s\n", wc_GetErrorString(ret)); + error = wolfCrypt_Cleanup(); + if (error != 0) { + printf("error: wolfCrypt_Cleanup failed: %s\n", + wc_GetErrorString(error)); return (ECANCELED); } #else - ret = wolfSSL_Cleanup(); - if (ret != WOLFSSL_SUCCESS) { - printf("error: wolfSSL_Cleanup failed: %s\n", wc_GetErrorString(ret)); + error = wolfSSL_Cleanup(); + if (error != WOLFSSL_SUCCESS) { + printf("error: wolfSSL_Cleanup failed: %s\n", + wc_GetErrorString(error)); return (ECANCELED); } #endif /* WOLFCRYPT_ONLY */ #if defined(WOLFSSL_BSDKM_VERBOSE_DEBUG) - printf("info: libwolfssl " LIBWOLFSSL_VERSION_STRING " cleanup complete.\n"); + printf("info: libwolfssl " LIBWOLFSSL_VERSION_STRING + " cleanup complete.\n"); #endif /* WOLFSSL_BSDKM_VERBOSE_DEBUG */ return (0); @@ -92,23 +195,21 @@ static int wolfkmod_cleanup(void) static int wolfkmod_load(void) { - int ret = 0; + int error = 0; - ret = wolfkmod_init(); - if (ret != 0) { + error = wolfkmod_init(); + if (error != 0) { return (ECANCELED); } #ifndef NO_CRYPT_TEST - ret = wolfcrypt_test(NULL); - if (ret != 0) { - printf("error: wolfcrypt test failed with return code: %d\n", ret); + error = wolfcrypt_test(NULL); + if (error != 0) { + printf("error: wolfcrypt test failed: %d\n", 
error); (void)wolfkmod_cleanup(); return (ECANCELED); } - #if defined(WOLFSSL_BSDKM_VERBOSE_DEBUG) printf("info: wolfCrypt self-test passed.\n"); - #endif /* WOLFSSL_BSDKM_VERBOSE_DEBUG */ #endif /* NO_CRYPT_TEST */ /** @@ -123,52 +224,83 @@ static int wolfkmod_load(void) static int wolfkmod_unload(void) { - int ret = 0; + int error = 0; - ret = wolfkmod_cleanup(); + #ifdef HAVE_FIPS + error = wc_RunAllCast_fips(); + if (error != 0) { + printf("error: wc_RunAllCast_fips failed at shutdown with " + "return value %d\n", error); + } + else + printf("info: wolfCrypt FIPS re-self-test succeeded at unload: " + "all algorithms re-verified.\n"); + #endif + + error = wolfkmod_cleanup(); /** * todo: unregister wolfcrypt algs here with crypto_unregister_all * and related. * */ - if (ret == 0) { + if (error == 0) { printf("info: libwolfssl unloaded\n"); } - return (ret); + return (error); } +#if defined(WOLFSSL_BSDKM_VERBOSE_DEBUG) +static const char * wolfkmod_event_to_str(modeventtype_t what) +{ + switch (what) { + case MOD_LOAD: + return "MOD_LOAD"; + case MOD_UNLOAD: + return "MOD_UNLOAD"; + case MOD_SHUTDOWN: + return "MOD_SHUTDOWN"; + case MOD_QUIESCE: + return "MOD_QUIESCE"; + } +} +#endif /* WOLFSSL_BSDKM_VERBOSE_DEBUG */ + /* see /usr/include/sys/module.h for more info. 
*/ static int wolfkmod_event(struct module * m, int what, void * arg) { - int ret = 0; + int error = 0; + #if defined(WOLFSSL_BSDKM_VERBOSE_DEBUG) + printf("info: wolfkmod_event: %s\n", wolfkmod_event_to_str(what)); + #endif /* WOLFSSL_BSDKM_VERBOSE_DEBUG */ switch (what) { case MOD_LOAD: - ret = wolfkmod_load(); + error = wolfkmod_load(); break; case MOD_UNLOAD: - ret = wolfkmod_unload(); + error = wolfkmod_unload(); break; case MOD_SHUTDOWN: case MOD_QUIESCE: default: - #if defined(WOLFSSL_BSDKM_VERBOSE_DEBUG) - printf("info: not implemented: %d\n", what); - #endif /* WOLFSSL_BSDKM_VERBOSE_DEBUG */ - ret = EOPNOTSUPP; + error = EOPNOTSUPP; } (void)m; (void)arg; - return (ret); + return (error); } static moduledata_t libwolfmod = { + #ifdef HAVE_FIPS + "libwolfssl_fips", /* module name */ + #else "libwolfssl", /* module name */ + #endif /* HAVE_FIPS */ wolfkmod_event, /* module event handler */ NULL /* extra data, unused */ }; diff --git a/certs/crl/bad_time_fmt.pem b/certs/crl/bad_time_fmt.pem new file mode 100644 index 000000000..589771d8f --- /dev/null +++ b/certs/crl/bad_time_fmt.pem @@ -0,0 +1,13 @@ +-----BEGIN X509 CRL----- +MIIB7DCB1QIBATANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJVUzELMAkGA1UE +CAwCVVMxCzAJBgNVBAcMAlVTMQswCQYDVQQKDAJVUzELMAkGA1UEAwwCVVMxCzAJ +BgNVBAsMAlVTGA0yNDAxMjMwMDAwMDBaGA0zNDAxMjAwMDAwMDBaMDUwMwIUHIAC +LvgfJAXulqYS3LYf4KxwHl4XDTI1MDMxMzAyNDQ0MFowDDAKBgNVHRUEAwoBBqAc +MBowGAYDVR0UBBECDxnP/97adO3y9qRGDM7hQDANBgkqhkiG9w0BAQsFAAOCAQEA +aDY9jBdAJiAujUkaLYLVtzNWF/0SxD5CB4dYIcZMqtPKLn5ykcxkXvnRbVihJ+Kn +AAv9Fkn5iwj77EGwxNjyZktQ4gAmcMhCTBEcAHbmi92tHttot9Sr44+CN+0NaaQD +OflIeVw7Zir90TWufjScy8/e7FkVm+aD5CicrbJWqoe21pB1Q1jS49iNrZzqZ2vw +HLiqNAzpecxwUih/YPe5+CBk5Nq4vICeieGVC/JO9r5SkdDwWQTl0I3kSK6n4Jh7 +53FmIen80F2ZZuZu4/fhJ7C4rlr6W9i6FrK06s5mk1PeYFHKhCkwI8wp8cIudJQD +lLsK2u4CTcuTKdbDLsszYA== +-----END X509 CRL----- diff --git a/certs/crl/extra-crls/large_crlnum.pem b/certs/crl/extra-crls/large_crlnum.pem new file mode 100644 index 000000000..8b5d79745 
--- /dev/null +++ b/certs/crl/extra-crls/large_crlnum.pem @@ -0,0 +1,43 @@ +Certificate Revocation List (CRL): + Version 2 (0x1) + Signature Algorithm: sha256WithRSAEncryption + Issuer: C=US, ST=Montana, L=Bozeman, O=Sawtooth, OU=Consulting, CN=www.wolfssl.com, emailAddress=info@wolfssl.com + Last Update: Jan 8 07:15:25 2026 GMT + Next Update: Oct 4 07:15:25 2028 GMT + CRL extensions: + X509v3 CRL Number: + 0xD8AFADA7F08B38E6178BD0E5CD7B0DF80071BA74 +Revoked Certificates: + Serial Number: 01 + Revocation Date: Jan 8 07:15:25 2026 GMT + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 0c:45:a0:2e:ba:ad:28:48:eb:61:29:a6:fa:d0:76:8c:96:bb: + 1a:9a:79:90:05:06:78:8e:d2:f6:4d:6d:4c:75:62:d2:b2:91: + f8:e4:59:a9:db:6f:e6:58:fe:f9:2e:7a:67:a7:01:a3:68:ee: + b1:23:a6:25:2a:85:84:3d:bf:86:bf:6d:d5:a6:2d:03:8e:d1: + ac:0f:73:4c:47:ea:fb:75:2e:85:1f:dc:fa:5e:b2:eb:d1:f4: + 75:e9:ae:a9:90:6e:ec:c9:05:db:61:39:30:a8:4e:c3:d2:ce: + 77:2d:ba:bf:fd:74:dc:c6:41:db:65:c4:83:66:9c:91:60:43: + 57:a3:52:bb:9c:b7:fa:30:d3:01:89:7f:5e:c8:06:0a:34:1b: + 77:ce:e8:b4:85:c5:6e:63:50:f3:88:cc:e3:54:7b:29:5c:08: + 4a:7b:35:b4:3f:01:2e:c5:93:4f:7c:7a:17:bf:0d:bd:be:3e: + a9:1b:ef:a0:9c:bc:78:9e:91:99:91:e7:38:63:f1:24:86:02: + 63:81:cb:67:3a:f7:3c:5c:45:87:54:f4:9a:16:25:a2:e5:bd: + ee:7e:9a:28:c0:db:4e:bc:4a:0d:c2:5f:14:ea:9c:8a:42:db: + d2:1d:27:b8:d2:3c:57:4a:bf:46:4a:95:ac:7f:f4:47:22:dd: + d5:dc:52:3f +-----BEGIN X509 CRL----- +MIICGTCCAQECAQEwDQYJKoZIhvcNAQELBQAwgZQxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIDAdNb250YW5hMRAwDgYDVQQHDAdCb3plbWFuMREwDwYDVQQKDAhTYXd0b290 +aDETMBEGA1UECwwKQ29uc3VsdGluZzEYMBYGA1UEAwwPd3d3LndvbGZzc2wuY29t +MR8wHQYJKoZIhvcNAQkBFhBpbmZvQHdvbGZzc2wuY29tFw0yNjAxMDgwNzE1MjVa +Fw0yODEwMDQwNzE1MjVaMBQwEgIBARcNMjYwMTA4MDcxNTI1WqAiMCAwHgYDVR0U +BBcCFQDYr62n8Is45heL0OXNew34AHG6dDANBgkqhkiG9w0BAQsFAAOCAQEADEWg +LrqtKEjrYSmm+tB2jJa7Gpp5kAUGeI7S9k1tTHVi0rKR+ORZqdtv5lj++S56Z6cB +o2jusSOmJSqFhD2/hr9t1aYtA47RrA9zTEfq+3UuhR/c+l6y69H0demuqZBu7MkF 
+22E5MKhOw9LOdy26v/103MZB22XEg2ackWBDV6NSu5y3+jDTAYl/XsgGCjQbd87o +tIXFbmNQ84jM41R7KVwISns1tD8BLsWTT3x6F78Nvb4+qRvvoJy8eJ6RmZHnOGPx +JIYCY4HLZzr3PFxFh1T0mhYlouW97n6aKMDbTrxKDcJfFOqcikLb0h0nuNI8V0q/ +RkqVrH/0RyLd1dxSPw== +-----END X509 CRL----- diff --git a/certs/crl/extra-crls/large_crlnum2.pem b/certs/crl/extra-crls/large_crlnum2.pem new file mode 100644 index 000000000..162b44c0f --- /dev/null +++ b/certs/crl/extra-crls/large_crlnum2.pem @@ -0,0 +1,43 @@ +Certificate Revocation List (CRL): + Version 2 (0x1) + Signature Algorithm: sha256WithRSAEncryption + Issuer: C=US, ST=Montana, L=Bozeman, O=Sawtooth, OU=Consulting, CN=www.wolfssl.com, emailAddress=info@wolfssl.com + Last Update: Jan 8 07:15:25 2026 GMT + Next Update: Oct 4 07:15:25 2028 GMT + CRL extensions: + X509v3 CRL Number: + 0x8BC28C3B3F7A6344CD464A9FDC837F2009DEB94FD3 +Revoked Certificates: + Serial Number: 01 + Revocation Date: Jan 8 07:15:25 2026 GMT + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 47:71:aa:8d:29:11:90:57:c9:70:78:a5:de:40:ee:c3:da:81: + 68:d0:20:09:af:5b:5f:30:f9:69:14:ff:8a:cf:46:0d:e8:0d: + 45:df:1d:49:ce:05:01:28:a5:34:50:b6:cb:54:9d:a1:42:6c: + f6:e2:66:de:be:e4:90:55:c1:83:e5:4c:26:96:43:29:39:84: + ad:68:3c:0d:5a:d4:e7:ba:7c:21:e9:a1:c2:0c:ad:6f:0c:32: + 71:81:9f:df:7d:c3:0d:92:a4:6f:43:9f:8f:b7:ef:2d:6d:92: + a6:17:cb:c7:4c:2e:3b:a5:2b:2c:74:fa:d1:be:6d:dc:19:04: + d6:b6:56:6c:26:94:8e:13:15:29:12:fe:1a:a4:73:55:df:a5: + c8:d3:d5:99:4a:c6:be:64:1f:90:a9:d8:94:d1:3b:b1:0e:ff: + e4:81:d0:e5:a4:8a:a7:a9:82:fb:a6:86:be:e7:e1:a8:b5:0d: + 87:bb:76:5b:0e:05:1f:d4:82:3c:68:99:ec:ae:ae:8e:4a:72: + cf:3f:8a:7f:b0:a2:69:d9:8c:68:7d:2f:3e:54:e9:fb:70:cf: + d4:ed:1b:61:68:33:4f:93:9b:5f:5e:e9:de:e8:51:66:fd:c8: + 35:40:a0:7d:42:bd:d7:f4:96:cd:c8:72:14:84:cd:f5:19:8c: + a0:5a:b7:72 +-----BEGIN X509 CRL----- +MIICGjCCAQICAQEwDQYJKoZIhvcNAQELBQAwgZQxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIDAdNb250YW5hMRAwDgYDVQQHDAdCb3plbWFuMREwDwYDVQQKDAhTYXd0b290 
+aDETMBEGA1UECwwKQ29uc3VsdGluZzEYMBYGA1UEAwwPd3d3LndvbGZzc2wuY29t +MR8wHQYJKoZIhvcNAQkBFhBpbmZvQHdvbGZzc2wuY29tFw0yNjAxMDgwNzE1MjVa +Fw0yODEwMDQwNzE1MjVaMBQwEgIBARcNMjYwMTA4MDcxNTI1WqAjMCEwHwYDVR0U +BBgCFgCLwow7P3pjRM1GSp/cg38gCd65T9MwDQYJKoZIhvcNAQELBQADggEBAEdx +qo0pEZBXyXB4pd5A7sPagWjQIAmvW18w+WkU/4rPRg3oDUXfHUnOBQEopTRQtstU +naFCbPbiZt6+5JBVwYPlTCaWQyk5hK1oPA1a1Oe6fCHpocIMrW8MMnGBn999ww2S +pG9Dn4+37y1tkqYXy8dMLjulKyx0+tG+bdwZBNa2VmwmlI4TFSkS/hqkc1XfpcjT +1ZlKxr5kH5Cp2JTRO7EO/+SB0OWkiqepgvumhr7n4ai1DYe7dlsOBR/Ugjxomeyu +ro5Kcs8/in+womnZjGh9Lz5U6ftwz9TtG2FoM0+Tm19e6d7oUWb9yDVAoH1Cvdf0 +ls3IchSEzfUZjKBat3I= +-----END X509 CRL----- diff --git a/certs/crl/gencrls.sh b/certs/crl/gencrls.sh index 9a1c67f16..3fcff5640 100755 --- a/certs/crl/gencrls.sh +++ b/certs/crl/gencrls.sh @@ -219,4 +219,26 @@ openssl crl -in crl_rsapss.pem -text > tmp check_result $? mv tmp crl_rsapss.pem +echo "Step 29 large CRL number( = 20 octets )" +echo d8afada7f08b38e6178bd0e5cd7b0df80071ba74 > crlnumber +openssl ca -config ../renewcerts/wolfssl.cnf -gencrl -crldays 1000 -out extra-crls/large_crlnum.pem -keyfile ../ca-key.pem -cert ../ca-cert.pem +check_result $? + +# metadata +echo "Step 29" +openssl crl -in extra-crls/large_crlnum.pem -text > tmp +check_result $? +mv tmp extra-crls/large_crlnum.pem + +echo "Step 30 large CRL number( > 20 octets )" +echo 8bc28c3b3f7a6344cd464a9fdc837f2009deb94fd3 > crlnumber +openssl ca -config ../renewcerts/wolfssl.cnf -gencrl -crldays 1000 -out extra-crls/large_crlnum2.pem -keyfile ../ca-key.pem -cert ../ca-cert.pem +check_result $? + +# metadata +echo "Step 30" +openssl crl -in extra-crls/large_crlnum2.pem -text > tmp +check_result $? 
+mv tmp extra-crls/large_crlnum2.pem + exit 0 diff --git a/certs/crl/include.am b/certs/crl/include.am index d3194933a..6f7f6f26b 100644 --- a/certs/crl/include.am +++ b/certs/crl/include.am @@ -16,7 +16,8 @@ EXTRA_DIST += \ certs/crl/wolfssl.cnf \ certs/crl/crl.der \ certs/crl/crl2.der \ - certs/crl/crl_rsapss.pem + certs/crl/crl_rsapss.pem \ + certs/crl/bad_time_fmt.pem EXTRA_DIST += \ certs/crl/crl.revoked \ diff --git a/certs/external/README.txt b/certs/external/README.txt index c213e6081..fdbf9199d 100644 --- a/certs/external/README.txt +++ b/certs/external/README.txt @@ -1,3 +1,2 @@ -ca_collection.pem contains the two possible Root CA's that login.live.com can -return, either the Baltimore Cyber Trust Root CA or the DigiCert Global Sign -Root CA. +ca_collection.pem contains the Root CA certificates that login.live.com can +return: DigiCert Global Root CA and DigiCert Global Root G2. diff --git a/certs/external/ca_collection.pem b/certs/external/ca_collection.pem index c76d6c605..934b04253 100644 --- a/certs/external/ca_collection.pem +++ b/certs/external/ca_collection.pem @@ -1,63 +1,3 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: - 08:3b:e0:56:90:42:46:b1:a1:75:6a:c9:59:91:c7:4a - Signature Algorithm: sha1WithRSAEncryption - Issuer: C = US, O = DigiCert Inc, OU = www.digicert.com, CN = DigiCert Global Root CA - Validity - Not Before: Nov 10 00:00:00 2006 GMT - Not After : Nov 10 00:00:00 2031 GMT - Subject: C = US, O = DigiCert Inc, OU = www.digicert.com, CN = DigiCert Global Root CA - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - RSA Public-Key: (2048 bit) - Modulus: - 00:e2:3b:e1:11:72:de:a8:a4:d3:a3:57:aa:50:a2: - 8f:0b:77:90:c9:a2:a5:ee:12:ce:96:5b:01:09:20: - cc:01:93:a7:4e:30:b7:53:f7:43:c4:69:00:57:9d: - e2:8d:22:dd:87:06:40:00:81:09:ce:ce:1b:83:bf: - df:cd:3b:71:46:e2:d6:66:c7:05:b3:76:27:16:8f: - 7b:9e:1e:95:7d:ee:b7:48:a3:08:da:d6:af:7a:0c: - 39:06:65:7f:4a:5d:1f:bc:17:f8:ab:be:ee:28:d7: - 
74:7f:7a:78:99:59:85:68:6e:5c:23:32:4b:bf:4e: - c0:e8:5a:6d:e3:70:bf:77:10:bf:fc:01:f6:85:d9: - a8:44:10:58:32:a9:75:18:d5:d1:a2:be:47:e2:27: - 6a:f4:9a:33:f8:49:08:60:8b:d4:5f:b4:3a:84:bf: - a1:aa:4a:4c:7d:3e:cf:4f:5f:6c:76:5e:a0:4b:37: - 91:9e:dc:22:e6:6d:ce:14:1a:8e:6a:cb:fe:cd:b3: - 14:64:17:c7:5b:29:9e:32:bf:f2:ee:fa:d3:0b:42: - d4:ab:b7:41:32:da:0c:d4:ef:f8:81:d5:bb:8d:58: - 3f:b5:1b:e8:49:28:a2:70:da:31:04:dd:f7:b2:16: - f2:4c:0a:4e:07:a8:ed:4a:3d:5e:b5:7f:a3:90:c3: - af:27 - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Key Usage: critical - Digital Signature, Certificate Sign, CRL Sign - X509v3 Basic Constraints: critical - CA:TRUE - X509v3 Subject Key Identifier: - 03:DE:50:35:56:D1:4C:BB:66:F0:A3:E2:1B:1B:C3:97:B2:3D:D1:55 - X509v3 Authority Key Identifier: - keyid:03:DE:50:35:56:D1:4C:BB:66:F0:A3:E2:1B:1B:C3:97:B2:3D:D1:55 - - Signature Algorithm: sha1WithRSAEncryption - cb:9c:37:aa:48:13:12:0a:fa:dd:44:9c:4f:52:b0:f4:df:ae: - 04:f5:79:79:08:a3:24:18:fc:4b:2b:84:c0:2d:b9:d5:c7:fe: - f4:c1:1f:58:cb:b8:6d:9c:7a:74:e7:98:29:ab:11:b5:e3:70: - a0:a1:cd:4c:88:99:93:8c:91:70:e2:ab:0f:1c:be:93:a9:ff: - 63:d5:e4:07:60:d3:a3:bf:9d:5b:09:f1:d5:8e:e3:53:f4:8e: - 63:fa:3f:a7:db:b4:66:df:62:66:d6:d1:6e:41:8d:f2:2d:b5: - ea:77:4a:9f:9d:58:e2:2b:59:c0:40:23:ed:2d:28:82:45:3e: - 79:54:92:26:98:e0:80:48:a8:37:ef:f0:d6:79:60:16:de:ac: - e8:0e:cd:6e:ac:44:17:38:2f:49:da:e1:45:3e:2a:b9:36:53: - cf:3a:50:06:f7:2e:e8:c4:57:49:6c:61:21:18:d5:04:ad:78: - 3c:2c:3a:80:6b:a7:eb:af:15:14:e9:d8:89:c1:b9:38:6c:e2: - 91:6c:8a:ff:64:b9:77:25:57:30:c0:1b:24:a3:e1:dc:e9:df: - 47:7c:b5:b4:24:08:05:30:ec:2d:bd:0b:bf:45:bf:50:b9:a9: - f3:eb:98:01:12:ad:c8:88:c6:98:34:5f:8d:0a:3c:c6:e9:d5: - 95:95:6d:de -----BEGIN CERTIFICATE----- MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 @@ -80,3 +20,26 @@ PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls 
YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= -----END CERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- diff --git a/cmake/functions.cmake b/cmake/functions.cmake index d2d683ca8..a011faf58 100644 --- a/cmake/functions.cmake +++ b/cmake/functions.cmake @@ -201,6 +201,9 @@ function(generate_build_flags) if(WOLFSSL_MLKEM OR WOLFSSL_USER_SETTINGS) set(BUILD_WC_MLKEM "yes" PARENT_SCOPE) endif() + if(WOLFSSL_DILITHIUM OR WOLFSSL_USER_SETTINGS) + set(BUILD_DILITHIUM "yes" PARENT_SCOPE) + endif() if(WOLFSSL_OQS OR WOLFSSL_USER_SETTINGS) set(BUILD_FALCON "yes" PARENT_SCOPE) set(BUILD_SPHINCS "yes" PARENT_SCOPE) @@ -389,6 +392,10 @@ 
function(generate_lib_src_list LIB_SOURCES) if(BUILD_INTELASM) list(APPEND LIB_SOURCES wolfcrypt/src/aes_gcm_asm.S) + list(APPEND LIB_SOURCES wolfcrypt/src/sha3_asm.S) + elseif(BUILD_ARMASM) + list(APPEND LIB_SOURCES wolfcrypt/src/port/arm/armv8-sha3-asm_c.c) + list(APPEND LIB_SOURCES wolfcrypt/src/port/arm/armv8-sha3-asm.S) endif() endif() @@ -563,11 +570,13 @@ function(generate_lib_src_list LIB_SOURCES) if(BUILD_ARMASM_INLINE) list(APPEND LIB_SOURCES wolfcrypt/src/port/arm/armv8-sha256.c - wolfcrypt/src/port/arm/armv8-32-sha256-asm_c.c) + wolfcrypt/src/port/arm/armv8-32-sha256-asm_c.c + wolfcrypt/src/port/arm/armv8-sha256-asm_c.c) else() list(APPEND LIB_SOURCES - wolfcrypt/src/port/arm/armv8-sha256-asm.S - wolfcrypt/src/port/arm/armv8-32-sha256-asm.S) + wolfcrypt/src/port/arm/armv8-sha256.c + wolfcrypt/src/port/arm/armv8-32-sha256-asm.S + wolfcrypt/src/port/arm/armv8-sha256-asm.S) endif() if(BUILD_ARMASM_INLINE AND BUILD_ARM_THUMB) list(APPEND LIB_SOURCES @@ -990,6 +999,10 @@ function(generate_lib_src_list LIB_SOURCES) if(BUILD_DILITHIUM) list(APPEND LIB_SOURCES wolfcrypt/src/dilithium.c) + + if(BUILD_INTELASM) + list(APPEND LIB_SOURCES wolfcrypt/src/wc_mldsa_asm.S) + endif() endif() if(BUILD_WC_MLKEM) diff --git a/cmake/options.h.in b/cmake/options.h.in index 7446b1b83..d01b2c794 100644 --- a/cmake/options.h.in +++ b/cmake/options.h.in @@ -96,6 +96,8 @@ extern "C" { #cmakedefine HAVE_CURVE448 #undef HAVE_DH_DEFAULT_PARAMS #cmakedefine HAVE_DH_DEFAULT_PARAMS +#undef HAVE_DILITHIUM +#cmakedefine HAVE_DILITHIUM #undef HAVE_ECC #cmakedefine HAVE_ECC #undef HAVE_ECH @@ -354,6 +356,8 @@ extern "C" { #cmakedefine WOLFSSL_TLS13 #undef WOLFSSL_USE_ALIGN #cmakedefine WOLFSSL_USE_ALIGN +#undef WOLFSSL_USER_SETTINGS +#cmakedefine WOLFSSL_USER_SETTINGS #undef WOLFSSL_USER_SETTINGS_ASM #cmakedefine WOLFSSL_USER_SETTINGS_ASM #undef WOLFSSL_W64_WRAPPER @@ -370,6 +374,8 @@ extern "C" { #cmakedefine WOLFSSL_HAVE_MLKEM #undef WOLFSSL_WC_MLKEM #cmakedefine WOLFSSL_WC_MLKEM +#undef 
WOLFSSL_WC_DILITHIUM +#cmakedefine WOLFSSL_WC_DILITHIUM #undef NO_WOLFSSL_STUB #cmakedefine NO_WOLFSSL_STUB #undef HAVE_ECC_SECPR2 diff --git a/configure.ac b/configure.ac index a2e98dd35..975bb0418 100644 --- a/configure.ac +++ b/configure.ac @@ -314,6 +314,11 @@ AC_ARG_ENABLE([32bit], [ ENABLED_32BIT=no ] ) +if test "$ENABLED_32BIT" = "yes" +then + AM_CFLAGS="$AM_CFLAGS -DWC_32BIT_CPU" +fi + # 16-bit compiler support AC_ARG_ENABLE([16bit], [AS_HELP_STRING([--enable-16bit],[Enables 16-bit support (default: disabled)])], @@ -830,6 +835,11 @@ then AM_CFLAGS="$AM_CFLAGS -DXMALLOC_OVERRIDE -DWOLFCRYPT_ONLY" AM_CFLAGS="$AM_CFLAGS -DNO_ASN_TIME" + if test "$ax_enable_debug" = "yes"; then + AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_BSDKM_VERBOSE_DEBUG" + AM_CFLAGS="$AM_CFLAGS -DNO_WOLFSSL_DEBUG_CERTS" + fi + if test "$KERNEL_ROOT" = ""; then AC_PATH_DEFAULT_BSDKM_SOURCE KERNEL_ROOT="$DEFAULT_BSDKM_ROOT" @@ -936,9 +946,21 @@ AC_ARG_ENABLE([fasthugemath], [ ENABLED_FASTHUGEMATH=no ] ) +# ssl bump build +AC_ARG_ENABLE([bump], + [AS_HELP_STRING([--enable-bump],[Enable SSL Bump build (default: disabled)])], + [ ENABLED_BUMP=$enableval ], + [ ENABLED_BUMP=no ] + ) + if test "$ENABLED_BUMP" = "yes" then - ENABLED_FASTHUGEMATH="yes" + AM_CFLAGS="$AM_CFLAGS -DLARGE_STATIC_BUFFERS -DWOLFSSL_CERT_GEN -DWOLFSSL_KEY_GEN -DHUGE_SESSION_CACHE -DWOLFSSL_DER_LOAD -DWOLFSSL_ALT_NAMES -DWOLFSSL_TEST_CERT" + DEFAULT_MAX_CLASSIC_ASYM_KEY_BITS=4096 + if test "$ENABLED_SP_MATH" = "no" && test "$ENABLED_SP_MATH_ALL" = "no" + then + ENABLED_FASTHUGEMATH="yes" + fi fi if test "$ENABLED_FASTHUGEMATH" = "yes" @@ -946,7 +968,8 @@ then ENABLED_FASTMATH="yes" fi -if test "$host_cpu" = "x86_64" || test "$host_cpu" = "amd64" +if (test "$host_cpu" = "x86_64" || test "$host_cpu" = "amd64") && + test "$ENABLED_32BIT" != "yes" then AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_X86_64_BUILD" fi @@ -1428,6 +1451,7 @@ then test "$enable_md5" = "" && enable_md5=yes test "$enable_anon" = "" && enable_anon=yes test "$enable_ssh" = "" && 
test "$enable_hmac" != "no" && enable_ssh=yes + test "$enable_rng_bank" = "" && enable_rng_bank=yes # the compiler optimizer generates a weird out-of-bounds bss reference for # find_hole() in the FP_ECC implementation. @@ -2208,6 +2232,19 @@ then AM_CFLAGS="$AM_CFLAGS -DWC_NO_RNG" fi +AC_ARG_ENABLE([rng-bank], + [AS_HELP_STRING([--enable-rng-bank],[Enable compiling and using RNG banks (default: disabled)])], + [ ENABLED_RNG_BANK=$enableval ], + [ ENABLED_RNG_BANK=$KERNEL_MODE_DEFAULTS ] + ) + +if test "$ENABLED_RNG_BANK" = "yes" +then + AS_IF([test "$ENABLED_RNG" = "no"], + AC_MSG_ERROR([--enable-rng-bank requires --enable-rng])) + AM_CFLAGS="$AM_CFLAGS -DWC_RNG_BANK_SUPPORT" +fi + # DTLS-SCTP AC_ARG_ENABLE([sctp], @@ -2469,13 +2506,6 @@ AC_ARG_ENABLE([qt], [ ENABLED_QT=no ] ) -# ssl bump build -AC_ARG_ENABLE([bump], - [AS_HELP_STRING([--enable-bump],[Enable SSL Bump build (default: disabled)])], - [ ENABLED_BUMP=$enableval ], - [ ENABLED_BUMP=no ] - ) - # SNIFFER AC_ARG_ENABLE([sniffer], [AS_HELP_STRING([--enable-sniffer],[Enable wolfSSL sniffer support (default: disabled)])], @@ -2779,14 +2809,6 @@ then AM_CFLAGS="$AM_CFLAGS -DFORTRESS -DWOLFSSL_ALWAYS_VERIFY_CB -DWOLFSSL_AES_COUNTER -DWOLFSSL_AES_DIRECT -DWOLFSSL_DER_LOAD -DWOLFSSL_KEY_GEN" fi - -if test "$ENABLED_BUMP" = "yes" -then - AM_CFLAGS="$AM_CFLAGS -DLARGE_STATIC_BUFFERS -DWOLFSSL_CERT_GEN -DWOLFSSL_KEY_GEN -DHUGE_SESSION_CACHE -DWOLFSSL_DER_LOAD -DWOLFSSL_ALT_NAMES -DWOLFSSL_TEST_CERT" - DEFAULT_MAX_CLASSIC_ASYM_KEY_BITS=4096 -fi - - # lean TLS build (TLS 1.2 client only (no client auth), ECC256, AES128 and SHA256 w/o Shamir) AC_ARG_ENABLE([leantls], [AS_HELP_STRING([--enable-leantls],[Enable Lean TLS build (default: disabled)])], @@ -3389,6 +3411,13 @@ then ENABLED_ARMASM_CRYPTO=no ;; sha256-small) + case $host_cpu in + *arm*) + ;; + *) + AC_MSG_ERROR([SHA256 small option only available on 32-bit ARM CPU.]) + break;; + esac ENABLED_ARMASM_SHA256_SMALL=yes ;; sha512-crypto | sha3-crypto) @@ -3458,8 
+3487,25 @@ then esac ENABLED_ARMASM_BARRIER_DETECT=yes ;; + aes-block-dup) + case $host_cpu in + *arm*) + ;; + *) + AC_MSG_ERROR([AES assembly option only available on 32-bit ARM CPU.]) + break;; + esac + ENABLED_ARMASM_AES_BLOCK_INLINE=yes + ;; *) - AC_MSG_ERROR([Invalid choice of ARM asm inclusions (yes, sha512-crypto, sha3-crypto): $ENABLED_ARMASM.]) + case $host_cpu in + *aarch64*) + AC_MSG_ERROR([Invalid choice of ARM asm inclusions (yes, inline, no-crypto, sha512-crypto, sha3-crypto, no-sha512-crypto, no-sha3-crypto, barrier-sb, barrier-detect): $ENABLED_ARMASM.]) + break;; + *arm*) + AC_MSG_ERROR([Invalid choice of ARM asm inclusions (yes, inline, no-crypto, sha256-small, aes-block-dup): $ENABLED_ARMASM.]) + break;; + esac break;; esac done @@ -3624,6 +3670,9 @@ fi if test "$ENABLED_ARMASM_INLINE" = "yes"; then AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_ARMASM_INLINE" fi +if test "$ENABLED_ARMASM_AES_BLOCK_INLINE" = "yes"; then + AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_ARMASM_AES_BLOCK_INLINE" +fi # RISC-V Assembly AC_ARG_ENABLE([riscv-asm], @@ -3721,6 +3770,9 @@ then inline) ENABLED_PPC32_ASM_INLINE=yes ;; + inline-reg) + ENABLED_PPC32_ASM_INLINE_REG=yes + ;; small) ENABLED_PPC32_ASM_SMALL=yes ;; @@ -3738,7 +3790,7 @@ then AC_MSG_NOTICE([32-bit PowerPC assembly for SHA-256]) ENABLED_PPC32_ASM=yes fi -if test "$ENABLED_PPC32_ASM_INLINE" = "yes"; then +if test "$ENABLED_PPC32_ASM_INLINE" = "yes" || test "$ENABLED_PPC32_ASM_INLINE_REG" = "yes"; then AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_PPC32_ASM_INLINE" else AM_CCASFLAGS="$AM_CCASFLAGS -DWOLFSSL_PPC32_ASM" @@ -3954,12 +4006,17 @@ then fi # AMD RDSEED -AC_ARG_ENABLE([amdrand], - [AS_HELP_STRING([--enable-amdrand],[Enable AMD rdseed as preferred RNG seeding source (default: disabled)])], +AC_ARG_ENABLE([amdrdseed], + [AS_HELP_STRING([--enable-amdrdseed],[Enable AMD rdseed as preferred RNG seeding source (default: disabled)])], [ ENABLED_AMDRDSEED=$enableval ], [ ENABLED_AMDRDSEED=no ] ) +AC_ARG_ENABLE([amdrand], + 
[AS_HELP_STRING([--enable-amdrand],[Enable AMD rdseed as preferred RNG seeding source (default: disabled)])], + [ ENABLED_AMDRDSEED=$enableval ] + ) + if test "$ENABLED_AMDRDSEED" = "yes" then AM_CFLAGS="$AM_CFLAGS -DHAVE_AMD_RDSEED" @@ -7835,7 +7892,7 @@ fi if test "$ENABLED_HAPROXY" = "yes" then AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_HAPROXY -DOPENSSL_COMPATIBLE_DEFAULTS" - AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_SIGNER_DER_CERT" + AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_SIGNER_DER_CERT -DWOLFSSL_KEEP_RNG_SEED_FD_OPEN" # --enable-all defines its own DEFAULT_MAX_CLASSIC_ASYM_KEY_BITS if test -z "$DEFAULT_MAX_CLASSIC_ASYM_KEY_BITS" then @@ -10287,6 +10344,12 @@ then AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_CURVE25519_USE_ED25519" AM_CCASFLAGS="$AM_CCASFLAGS -DWOLFSSL_CURVE25519_USE_ED25519" fi + if test "$ENABLED_CURVE25519" = "not-ed" + then + AM_CFLAGS="$AM_CFLAGS -DWOLFSSL_CURVE25519_NOT_USE_ED25519" + AM_CCASFLAGS="$AM_CCASFLAGS -DWOLFSSL_CURVE25519_NOT_USE_ED25519" + fi + AM_CFLAGS="$AM_CFLAGS -DHAVE_CURVE25519" AM_CCASFLAGS="$AM_CCASFLAGS -DHAVE_CURVE25519" @@ -10861,6 +10924,11 @@ fi LIB_SOCKET_NSL AX_HARDEN_CC_COMPILER_FLAGS +# -Wdeprecated-enum-enum-conversion is on by default in C++20, but conflicts with +# our use of enum constructs to define fungible constants. 
+AX_CHECK_COMPILE_FLAG([-Werror -Wno-deprecated-enum-enum-conversion], + [AX_APPEND_FLAG([-Wno-deprecated-enum-enum-conversion], [AM_CFLAGS])]) + case $host_os in mingw*) # if mingw then link to ws2_32 for sockets, and crypt32 @@ -11006,6 +11074,7 @@ AM_CONDITIONAL([BUILD_ARM_64],[test "$ENABLED_ARM_64" = "yes" || test "$ENABLED AM_CONDITIONAL([BUILD_RISCV_ASM],[test "x$ENABLED_RISCV_ASM" = "xyes"]) AM_CONDITIONAL([BUILD_PPC32_ASM],[test "x$ENABLED_PPC32_ASM" = "xyes"]) AM_CONDITIONAL([BUILD_PPC32_ASM_INLINE],[test "x$ENABLED_PPC32_ASM_INLINE" = "xyes"]) +AM_CONDITIONAL([BUILD_PPC32_ASM_INLINE_REG],[test "x$ENABLED_PPC32_ASM_INLINE_REG" = "xyes"]) AM_CONDITIONAL([BUILD_XILINX],[test "x$ENABLED_XILINX" = "xyes"]) AM_CONDITIONAL([BUILD_AESNI],[test "x$ENABLED_AESNI" = "xyes"]) AM_CONDITIONAL([BUILD_INTELASM],[test "x$ENABLED_INTELASM" = "xyes"]) @@ -11042,6 +11111,7 @@ AM_CONDITIONAL([BUILD_ECCSI],[test "x$ENABLED_ECCSI" = "xyes" || test "x$ENABLED AM_CONDITIONAL([BUILD_SAKKE],[test "x$ENABLED_SAKKE" = "xyes" || test "x$ENABLED_USERSETTINGS" = "xyes"]) AM_CONDITIONAL([BUILD_MEMORY],[test "x$ENABLED_MEMORY" = "xyes" || test "x$ENABLED_USERSETTINGS" = "xyes"]) AM_CONDITIONAL([BUILD_MEMUSE],[test "x$ENABLED_ENTROPY_MEMUSE" = "xyes" || test "x$ENABLED_USERSETTINGS" = "xyes"]) +AM_CONDITIONAL([BUILD_RNG_BANK],[test "$ENABLED_RNG_BANK" = "yes" || test "$ENABLED_USERSETTINGS" = "yes"]) AM_CONDITIONAL([BUILD_RSA],[test "x$ENABLED_RSA" = "xyes" || test "x$ENABLED_USERSETTINGS" = "xyes"]) AM_CONDITIONAL([BUILD_DH],[test "x$ENABLED_DH" != "xno" || test "x$ENABLED_USERSETTINGS" = "xyes"]) AM_CONDITIONAL([BUILD_ASN],[test "x$ENABLED_ASN" != "xno" || test "x$ENABLED_RSA" = "xyes" || test "x$ENABLED_USERSETTINGS" = "xyes"]) @@ -11677,6 +11747,10 @@ if test "$ENABLED_PPC32_ASM_INLINE" = "yes" then ENABLED_PPC32_ASM="inline C" fi +if test "$ENABLED_PPC32_ASM_INLINE_REG" = "yes" +then + ENABLED_PPC32_ASM="inline C Reg" +fi echo " * PPC32 ASM $ENABLED_PPC32_ASM" echo " * Write 
duplicate: $ENABLED_WRITEDUP" echo " * Xilinx Hardware Acc.: $ENABLED_XILINX" diff --git a/devin_lifeguard.yaml b/devin_lifeguard.yaml deleted file mode 100644 index c3834eeaa..000000000 --- a/devin_lifeguard.yaml +++ /dev/null @@ -1,145 +0,0 @@ -rules: - - name: no-void-functions - trigger: >- - All functions must return a value. Avoid using void return types to ensure - error values can be propagated upstream. - solution: >- - Change the function to return an appropriate error code or result instead - of void. Ensure all return paths provide a meaningful value. - - name: avoid-recursion - trigger: >- - Recursion is not allowed. Prefer iterative solutions to reduce stack usage - and prevent potential stack overflows. - solution: >- - Refactor the recursive function into an iterative one using loops or other - control structures. - - name: use-forcezero - trigger: >- - Sensitive data such as private keys must be zeroized using `ForceZero()` - to prevent the compiler from optimizing away the zeroization. - solution: >- - Replace `memset` or similar functions with `ForceZero(variable, size)` to - ensure sensitive data is properly cleared from memory. - - name: check-all-return-codes - trigger: >- - Every return code from function calls must be checked to handle errors - appropriately and prevent unexpected behavior. - solution: >- - After each function call, add error handling logic to check the return - value and respond accordingly. - - name: no-memory-leaks - trigger: >- - Memory or resources allocated must have a clear path to being released to - prevent memory leaks. - solution: >- - Ensure that every allocation has a corresponding free or release call. Use - resource management patterns to handle allocations and deallocations. - - name: do-not-change-external-apis - trigger: >- - External facing APIs should not be altered. Instead of modifying an - existing API, create a new version with the necessary parameters. 
- solution: >- - If additional parameters are needed, create a new function (e.g., `f_ex(a, - b)`) and have the original function (`f(a)`) call the new one with default - or null parameters. - - name: limit-stack-usage - trigger: >- - Functions should not use more than 100 bytes of stack. Excessive stack - usage can lead to stack overflows and reduced performance. - solution: >- - Apply the `WOLFSSL_SMALL_STACK` pattern by dynamically allocating large - variables to minimize stack usage within the function. - - name: prefer-constant-time - trigger: >- - Implement algorithms in constant time to prevent timing attacks and ensure - security. - solution: >- - Review and refactor algorithms to ensure their execution time does not - depend on input values. Use constant-time libraries or functions where - applicable. - - name: use-sizeof - trigger: >- - Avoid hard-coded numeric values for sizes. Use `sizeof()` to ensure - portability and maintainability. - solution: >- - Replace hard-coded sizes with `sizeof(type)` to automatically adapt to - changes in type sizes. - - name: use-typedefs-not-stdint - trigger: >- - Use `byte`, `word16`, `word32` instead of standard integer types like - `uint32_t` to maintain consistency across the codebase. - solution: >- - Replace instances of `uint32_t` and similar types with the designated - typedefs such as `word32`. - - name: use-c-style-comments - trigger: >- - Only C-style comments (`/* */`) are allowed in C code. C++ style comments - (`//`) should not be used. - solution: >- - Replace all `//` comments with `/* */` to adhere to the project's - commenting standards. - - name: pointer-null-check - trigger: >- - Always check for null pointers using the `ptr != NULL` pattern to prevent - dereferencing null pointers. - solution: >- - Add a condition to verify that the pointer is not null before using it, - e.g., `if (ptr != NULL) { /* use ptr */ }`. 
- - name: declare-const-pointers - trigger: >- - Pointer parameters that are not modified within a function should be - declared as `const` to enhance code safety and clarity. - solution: >- - Add the `const` keyword to pointer parameters that are not intended to be - modified, e.g., `const void *ptr`. - - name: struct-member-order - trigger: >- - Struct members should be ordered in descending size to optimize memory - alignment and reduce padding. - solution: >- - Reorder the members of the struct so that larger data types are declared - before smaller ones. - - name: no-always-success-stubs - trigger: >- - when implementing a stub function that is not fully developed, returning - success unconditionally can hide real logic and debugging information - solution: >- - either implement the stub with real logic or return an appropriate error - code to indicate "not yet implemented," so that failures are not silently - ignored - - name: free-allocated-memory - trigger: |- - allocating memory but forgetting to free it on all code paths - or using functions that allocate buffers without a corresponding free - solution: >- - for every XMALLOC call, ensure there's a matching XFREE on every return - path - - if handing ownership off, confirm the new owner also properly frees it - - name: check-return-codes - trigger: >- - calling library functions that return non-zero in case of error, but not - checking or handling those return values - solution: >- - always verify and handle function return codes - - if ret != 0, do not continue silently; either propagate the error or - handle it - - name: handle-partial-writes - trigger: >- - calling a write function (e.g., wolfSSL_write_ex) that may write only part - of the data, returning fewer bytes than requested or a particular status - solution: >- - if partial writes are possible, loop until the entire buffer is written or - an error occurs - - do not assume a single call wrote or accepted all bytes - - name: 
manage-ephemeral-objects-correctly - trigger: >- - generating or importing ephemeral objects (e.g., ephemeral keys, ephemeral - certs) and forgetting to finalize or free them, or double-freeing them - solution: >- - coordinate ephemeral object ownership carefully - - ensure ephemeral structures are freed once no longer needed, and avoid - reusing pointers after free diff --git a/doc/dox_comments/header_files-ja/rsa.h b/doc/dox_comments/header_files-ja/rsa.h index dc46e5c7f..80fa88ae9 100644 --- a/doc/dox_comments/header_files-ja/rsa.h +++ b/doc/dox_comments/header_files-ja/rsa.h @@ -307,8 +307,8 @@ int wc_RsaSSL_Sign(const byte* in, word32 inLen, byte* out, \brief メッセージがRSAキーによって署名されたことを検証するために使用されます。出力は入力と同じバイト配列を使用します。 - \return >0 テキストの長さ。 - \return <0 エラーが発生しました。 + \return `>0` ダイジェストの長さ。 + \return `<0` エラーが発生しました。 \param in 復号されるバイト配列。 \param inLen 入力バッファの長さ。 @@ -344,7 +344,7 @@ int wc_RsaSSL_VerifyInline(byte* in, word32 inLen, byte** out, \brief メッセージがキーによって署名されたことを検証するために使用されます。 - \return Success エラーがない場合のテキストの長さ。 + \return Success エラーがない場合のダイジェストの長さ。 \return MEMORY_E メモリ例外。 \param in 復号されるバイト配列。 diff --git a/doc/dox_comments/header_files/aes.h b/doc/dox_comments/header_files/aes.h index 281029854..194f833dc 100644 --- a/doc/dox_comments/header_files/aes.h +++ b/doc/dox_comments/header_files/aes.h @@ -1974,3 +1974,1718 @@ int wc_AesCtsDecryptUpdate(Aes* aes, byte* out, word32* outSz, \sa wc_AesCtsEncryptFinal */ int wc_AesCtsDecryptFinal(Aes* aes, byte* out, word32* outSz); + + +/*! + \ingroup AES + \brief This function encrypts data using AES CFB-1 mode (1-bit + feedback). It processes data one bit at a time, making it suitable + for bit-oriented applications. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. 
+ + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store encrypted data + \param in pointer to the input buffer containing data to encrypt + (packed to left, e.g., 101 is 0x90) + \param sz size of input in bits + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte plaintext[1] = { 0x90 }; // bits 101 + byte ciphertext[1]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesCfb1Encrypt(&aes, ciphertext, plaintext, 3); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCfb1Decrypt + \sa wc_AesCfb8Encrypt +*/ +int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function encrypts data using AES CFB-8 mode (8-bit + feedback). It processes data one byte at a time, making it suitable + for byte-oriented stream encryption. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. + + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store encrypted data + \param in pointer to the input buffer containing data to encrypt + \param sz size of input in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte plaintext[10] = { }; // data to encrypt + byte ciphertext[10]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesCfb8Encrypt(&aes, ciphertext, plaintext, 10); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCfb8Decrypt + \sa wc_AesCfb1Encrypt +*/ +int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! 
+ \ingroup AES + \brief This function decrypts data using AES CFB-1 mode (1-bit + feedback). It processes data one bit at a time, making it suitable + for bit-oriented applications. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. + + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store decrypted data + \param in pointer to the input buffer containing data to decrypt + \param sz size of input in bits + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte ciphertext[1] = { }; // encrypted bits + byte plaintext[1]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesCfb1Decrypt(&aes, plaintext, ciphertext, 3); + if (ret != 0) { + // decryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCfb1Encrypt + \sa wc_AesCfb8Decrypt +*/ +int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function decrypts data using AES CFB-8 mode (8-bit + feedback). It processes data one byte at a time, making it suitable + for byte-oriented stream decryption. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. 
+ + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store decrypted data + \param in pointer to the input buffer containing data to decrypt + \param sz size of input in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte ciphertext[10] = { }; // encrypted data + byte plaintext[10]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesCfb8Decrypt(&aes, plaintext, ciphertext, 10); + if (ret != 0) { + // decryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCfb8Encrypt + \sa wc_AesCfb1Decrypt +*/ +int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function encrypts data using AES OFB mode (Output + Feedback). OFB mode turns a block cipher into a stream cipher by + encrypting the IV and XORing with plaintext. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. + + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store encrypted data + \param in pointer to the input buffer containing data to encrypt + \param sz size of input in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte plaintext[100] = { }; // data to encrypt + byte ciphertext[100]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesOfbEncrypt(&aes, ciphertext, plaintext, 100); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesOfbDecrypt + \sa wc_AesSetKey +*/ +int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function decrypts data using AES OFB mode (Output + Feedback). 
In OFB mode, encryption and decryption are the same + operation. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. + + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store decrypted data + \param in pointer to the input buffer containing data to decrypt + \param sz size of input in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + byte ciphertext[100] = { }; // encrypted data + byte plaintext[100]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + int ret = wc_AesOfbDecrypt(&aes, plaintext, ciphertext, 100); + if (ret != 0) { + // decryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesOfbEncrypt + \sa wc_AesSetKey +*/ +int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function encrypts data using AES ECB mode (Electronic + Codebook). Warning: ECB mode is not recommended for most use cases + as it does not provide semantic security. Each block is encrypted + independently. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. 
+ + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store encrypted data + \param in pointer to the input buffer containing data to encrypt + \param sz size of input in bytes (must be multiple of AES_BLOCK_SIZE) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte plaintext[32] = { }; // data to encrypt + byte ciphertext[32]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, NULL, AES_ENCRYPTION); + int ret = wc_AesEcbEncrypt(&aes, ciphertext, plaintext, 32); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesEcbDecrypt + \sa wc_AesSetKey +*/ +int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function decrypts data using AES ECB mode (Electronic + Codebook). Warning: ECB mode is not recommended for most use cases + as it does not provide semantic security. Each block is decrypted + independently. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL. + \return Other negative values on error. + + \param aes pointer to the AES structure containing the key + \param out pointer to the output buffer to store decrypted data + \param in pointer to the input buffer containing data to decrypt + \param sz size of input in bytes (must be multiple of AES_BLOCK_SIZE) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte ciphertext[32] = { }; // encrypted data + byte plaintext[32]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, NULL, AES_DECRYPTION); + int ret = wc_AesEcbDecrypt(&aes, plaintext, ciphertext, 32); + if (ret != 0) { + // decryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesEcbEncrypt + \sa wc_AesSetKey +*/ +int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz); + +/*! + \ingroup AES + \brief This function sets the key and IV for AES CTR mode. 
It + initializes the AES structure for counter mode encryption or + decryption. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, key, or iv is NULL, or if key length + is invalid. + + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer (16, 24, or 32 bytes) + \param len length of the key in bytes + \param iv pointer to the initialization vector (16 bytes) + \param dir cipher direction (always use AES_ENCRYPTION for CTR mode) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[16] = { }; // initialization vector + + wc_AesInit(&aes, NULL, INVALID_DEVID); + int ret = wc_AesCtrSetKey(&aes, key, 16, iv, AES_ENCRYPTION); + if (ret != 0) { + // failed to set key + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCtrEncrypt + \sa wc_AesSetKey +*/ +int wc_AesCtrSetKey(Aes* aes, const byte* key, word32 len, const byte* iv, + int dir); + +/*! + \ingroup AES + \brief This function sets the key for AES GCM with an extended key + update parameter. It allows for key updates in certain hardware + implementations. + + \note This function is currently only available when building with + Xilinx hardware acceleration. It requires one of the following build + options: WOLFSSL_XILINX_CRYPT (for Xilinx SecureIP integration) or + WOLFSSL_AFALG_XILINX_AES (for Xilinx AF_ALG support). This API may + be exposed for additional build configurations in the future. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or key is NULL, or if key length is invalid. 
+ + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer (16, 24, or 32 bytes) + \param len length of the key in bytes + \param kup key update parameter for hardware implementations + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + + wc_AesInit(&aes, NULL, INVALID_DEVID); + int ret = wc_AesGcmSetKey_ex(&aes, key, 16, 0); + if (ret != 0) { + // failed to set key + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmSetKey + \sa wc_AesGcmInit +*/ +int wc_AesGcmSetKey_ex(Aes* aes, const byte* key, word32 len, word32 kup); + +/*! + \ingroup AES + \brief This function initializes an AES GCM cipher with key and IV. + It can be called with NULL key to only set the IV, or with NULL IV + to only set the key. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or if parameters are invalid. + \return MEMORY_E If dynamic memory allocation fails. + + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer, or NULL to skip key setting + \param len length of the key in bytes + \param iv pointer to the IV/nonce buffer, or NULL to skip IV setting + \param ivSz length of the IV/nonce in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // 96-bit nonce + + wc_AesInit(&aes, NULL, INVALID_DEVID); + int ret = wc_AesGcmInit(&aes, key, 16, iv, 12); + if (ret != 0) { + // failed to initialize + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmSetKey + \sa wc_AesGcmEncrypt +*/ +int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv, + word32 ivSz); + +/*! + \ingroup AES + \brief This function initializes an AES GCM cipher for encryption. + It is a convenience wrapper around wc_AesGcmInit for encryption + operations. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or if parameters are invalid. 
+ + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer, or NULL to skip key setting + \param len length of the key in bytes + \param iv pointer to the IV/nonce buffer, or NULL to skip IV setting + \param ivSz length of the IV/nonce in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // 96-bit nonce + + wc_AesInit(&aes, NULL, INVALID_DEVID); + int ret = wc_AesGcmEncryptInit(&aes, key, 16, iv, 12); + if (ret != 0) { + // failed to initialize + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmInit + \sa wc_AesGcmEncryptUpdate +*/ +int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, + const byte* iv, word32 ivSz); + +/*! + \ingroup AES + \brief This function initializes an AES GCM cipher for encryption and + outputs the IV. This is useful when part of the IV is generated + internally. Must call wc_AesGcmSetIV() before this function to set + the fixed part of the IV. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, ivOut is NULL, or if ivOutSz doesn't + match the cached nonce size. + + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer, or NULL to skip key setting + \param len length of the key in bytes + \param ivOut pointer to buffer to receive the complete IV + \param ivOutSz length of the IV output buffer in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte ivFixed[4] = { }; // fixed part of IV + byte ivOut[12]; + WC_RNG rng; + + wc_InitRng(&rng); + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmSetIV(&aes, 12, ivFixed, 4, &rng); + int ret = wc_AesGcmEncryptInit_ex(&aes, key, 16, ivOut, 12); + if (ret != 0) { + // failed to initialize + } + wc_AesFree(&aes); + wc_FreeRng(&rng); + \endcode + + \sa wc_AesGcmSetIV + \sa wc_AesGcmEncryptUpdate +*/ +int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, + byte* ivOut, word32 ivOutSz); + +/*! 
+ \ingroup AES + \brief This function performs an update step of AES GCM encryption. + It processes plaintext and/or additional authentication data (AAD) + in a streaming fashion. + + All the AAD must be passed to update before the plaintext. + The last part of AAD can be passed with the first part of plaintext. + + Must set key and IV before calling this function. + Must call wc_AesGcmInit() before calling this function. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or a length is non-zero but + buffer is NULL. + + \param aes pointer to the AES structure + \param out pointer to buffer to store ciphertext (can be NULL if sz=0) + \param in pointer to plaintext to encrypt (can be NULL if sz=0) + \param sz length of plaintext in bytes + \param authIn pointer to additional authentication data (can be NULL) + \param authInSz length of AAD in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte plaintext[100] = { }; // data + byte ciphertext[100]; + byte aad[20] = { }; // additional data + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmInit(&aes, key, 16, iv, 12); + int ret = wc_AesGcmEncryptUpdate(&aes, ciphertext, plaintext, 100, + aad, 20); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmInit + \sa wc_AesGcmEncryptInit + \sa wc_AesGcmEncryptFinal +*/ +int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, + const byte* authIn, word32 authInSz); + +/*! + \ingroup AES + \brief This function finalizes AES GCM encryption and generates the + authentication tag. This must be called after all data has been + processed with wc_AesGcmEncryptUpdate. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or authTag is NULL, or if authTagSz is + invalid. 
+ + \param aes pointer to the AES structure + \param authTag pointer to buffer to store the authentication tag + \param authTagSz length of the authentication tag in bytes (typically + 12 or 16) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte plaintext[100] = { }; // data + byte ciphertext[100]; + byte authTag[16]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmEncryptInit(&aes, key, 16, iv, 12); + wc_AesGcmEncryptUpdate(&aes, ciphertext, plaintext, 100, NULL, 0); + int ret = wc_AesGcmEncryptFinal(&aes, authTag, 16); + if (ret != 0) { + // failed to generate tag + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmEncryptUpdate + \sa wc_AesGcmDecryptFinal +*/ +int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz); + +/*! + \ingroup AES + \brief This function initializes an AES GCM cipher for decryption. + It is a convenience wrapper around wc_AesGcmInit for decryption + operations. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or if parameters are invalid. + + \param aes pointer to the AES structure to initialize + \param key pointer to the key buffer, or NULL to skip key setting + \param len length of the key in bytes + \param iv pointer to the IV/nonce buffer, or NULL to skip IV setting + \param ivSz length of the IV/nonce in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // 96-bit nonce + + wc_AesInit(&aes, NULL, INVALID_DEVID); + int ret = wc_AesGcmDecryptInit(&aes, key, 16, iv, 12); + if (ret != 0) { + // failed to initialize + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmInit + \sa wc_AesGcmDecryptUpdate +*/ +int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, + const byte* iv, word32 ivSz); + +/*! + \ingroup AES + \brief This function performs an update step of AES GCM decryption. + It processes ciphertext and/or additional authentication data (AAD) + in a streaming fashion. 
+ + All the AAD must be passed to update before the ciphertext. + The last part of AAD can be passed with the first part of ciphertext. + + Must set key and IV before calling this function. + Must call wc_AesGcmInit() before calling this function. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or a length is non-zero but + buffer is NULL. + + \param aes pointer to the AES structure + \param out pointer to buffer to store plaintext (can be NULL if sz=0) + \param in pointer to ciphertext to decrypt (can be NULL if sz=0) + \param sz length of ciphertext in bytes + \param authIn pointer to additional authentication data (can be NULL) + \param authInSz length of AAD in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte ciphertext[100] = { }; // encrypted data + byte plaintext[100]; + byte aad[20] = { }; // additional data + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmInit(&aes, key, 16, iv, 12); + int ret = wc_AesGcmDecryptUpdate(&aes, plaintext, ciphertext, 100, + aad, 20); + if (ret != 0) { + // decryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmInit + \sa wc_AesGcmDecryptInit + \sa wc_AesGcmDecryptFinal +*/ +int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, + const byte* authIn, word32 authInSz); + +/*! + \ingroup AES + \brief This function finalizes AES GCM decryption and verifies the + authentication tag. This must be called after all data has been + processed with wc_AesGcmDecryptUpdate. + + \return 0 On success. + \return AES_GCM_AUTH_E If authentication tag verification fails. + \return BAD_FUNC_ARG If aes or authTag is NULL, or if authTagSz is + invalid. 
+ + \param aes pointer to the AES structure + \param authTag pointer to the authentication tag to verify + \param authTagSz length of the authentication tag in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte ciphertext[100] = { }; // encrypted data + byte plaintext[100]; + byte authTag[16] = { }; // received tag + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmDecryptInit(&aes, key, 16, iv, 12); + wc_AesGcmDecryptUpdate(&aes, plaintext, ciphertext, 100, NULL, 0); + int ret = wc_AesGcmDecryptFinal(&aes, authTag, 16); + if (ret != 0) { + // authentication failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmDecryptUpdate + \sa wc_AesGcmEncryptFinal +*/ +int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz); + +/*! + \ingroup AES + \brief This function sets an external IV for AES GCM. This allows + using an IV that was generated externally or received from another + source. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or iv is NULL, or if ivSz is invalid. + + \param aes pointer to the AES structure + \param iv pointer to the IV/nonce buffer + \param ivSz length of the IV/nonce in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // external nonce + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmSetKey(&aes, key, 16); + int ret = wc_AesGcmSetExtIV(&aes, iv, 12); + if (ret != 0) { + // failed to set IV + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesGcmSetIV + \sa wc_AesGcmInit +*/ +int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz); + +/*! + \ingroup AES + \brief This function sets the IV for AES GCM with optional random + generation. It can generate part of the IV using an RNG, which is + useful for ensuring IV uniqueness. + + \return 0 On success. + \return BAD_FUNC_ARG If aes is NULL, or if parameters are invalid. + \return Other negative values on RNG or other errors. 
+ + \param aes pointer to the AES structure + \param ivSz total length of the IV/nonce in bytes + \param ivFixed pointer to the fixed part of the IV (can be NULL) + \param ivFixedSz length of the fixed part in bytes + \param rng pointer to initialized RNG for generating random part + (can be NULL if ivFixedSz equals ivSz) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte ivFixed[4] = { }; // fixed part + WC_RNG rng; + + wc_InitRng(&rng); + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmSetKey(&aes, key, 16); + int ret = wc_AesGcmSetIV(&aes, 12, ivFixed, 4, &rng); + if (ret != 0) { + // failed to set IV + } + wc_AesFree(&aes); + wc_FreeRng(&rng); + \endcode + + \sa wc_AesGcmSetExtIV + \sa wc_AesGcmEncryptInit_ex +*/ +int wc_AesGcmSetIV(Aes* aes, word32 ivSz, const byte* ivFixed, + word32 ivFixedSz, WC_RNG* rng); + +/*! + \ingroup AES + \brief This function performs AES GCM encryption with extended + parameters, including IV output. This is a one-shot encryption + function that outputs the generated IV. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param aes pointer to the AES structure + \param out pointer to buffer to store ciphertext + \param in pointer to plaintext to encrypt + \param sz length of plaintext in bytes + \param ivOut pointer to buffer to receive the IV + \param ivOutSz length of the IV output buffer in bytes + \param authTag pointer to buffer to store authentication tag + \param authTagSz length of authentication tag in bytes + \param authIn pointer to additional authentication data + \param authInSz length of AAD in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte ivFixed[4] = { }; // fixed part + byte ivOut[12]; + byte plaintext[100] = { }; // data + byte ciphertext[100]; + byte authTag[16]; + WC_RNG rng; + + wc_InitRng(&rng); + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesGcmSetKey(&aes, key, 16); + wc_AesGcmSetIV(&aes, 12, ivFixed, 4, &rng); + int ret = wc_AesGcmEncrypt_ex(&aes, ciphertext, plaintext, 100, + ivOut, 12, authTag, 16, NULL, 0); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + wc_FreeRng(&rng); + \endcode + + \sa wc_AesGcmEncrypt + \sa wc_AesGcmSetIV +*/ +int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz, + byte* ivOut, word32 ivOutSz, byte* authTag, + word32 authTagSz, const byte* authIn, + word32 authInSz); + +/*! + \ingroup AES + \brief This function performs GMAC (Galois Message Authentication Code) + generation. GMAC is essentially AES-GCM with no plaintext, used for + authentication only. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key buffer + \param keySz length of the key in bytes (16, 24, or 32) + \param iv pointer to the IV/nonce buffer + \param ivSz length of the IV/nonce in bytes + \param authIn pointer to data to authenticate + \param authInSz length of data to authenticate in bytes + \param authTag pointer to buffer to store authentication tag + \param authTagSz length of authentication tag in bytes + \param rng pointer to initialized RNG (can be NULL if IV is complete) + + _Example_ + \code + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte data[100] = { }; // data to authenticate + byte authTag[16]; + + int ret = wc_Gmac(key, 16, iv, 12, data, 100, authTag, 16, NULL); + if (ret != 0) { + // GMAC generation failed + } + \endcode + + \sa wc_GmacVerify + \sa wc_AesGcmEncrypt +*/ +int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz, + const byte* authIn, word32 authInSz, byte* authTag, + word32 authTagSz, WC_RNG* rng); + +/*! + \ingroup AES + \brief This function verifies a GMAC (Galois Message Authentication + Code). It computes the GMAC and compares it with the provided tag. + + \return 0 On successful verification. + \return AES_GCM_AUTH_E If authentication tag verification fails. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key buffer + \param keySz length of the key in bytes (16, 24, or 32) + \param iv pointer to the IV/nonce buffer + \param ivSz length of the IV/nonce in bytes + \param authIn pointer to data to authenticate + \param authInSz length of data to authenticate in bytes + \param authTag pointer to the authentication tag to verify + \param authTagSz length of authentication tag in bytes + + _Example_ + \code + byte key[16] = { }; // 128-bit key + byte iv[12] = { }; // nonce + byte data[100] = { }; // data to authenticate + byte authTag[16] = { }; // received tag + + int ret = wc_GmacVerify(key, 16, iv, 12, data, 100, authTag, 16); + if (ret != 0) { + // GMAC verification failed + } + \endcode + + \sa wc_Gmac + \sa wc_AesGcmDecrypt +*/ +int wc_GmacVerify(const byte* key, word32 keySz, const byte* iv, + word32 ivSz, const byte* authIn, word32 authInSz, + const byte* authTag, word32 authTagSz); + +/*! + \ingroup AES + \brief This function sets the nonce for AES CCM mode. The nonce must + be set before encryption or decryption operations. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or nonce is NULL, or if nonceSz is invalid. + + \param aes pointer to the AES structure + \param nonce pointer to the nonce buffer + \param nonceSz length of the nonce in bytes (7-13 bytes for CCM) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte nonce[12] = { }; // nonce + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesCcmSetKey(&aes, key, 16); + int ret = wc_AesCcmSetNonce(&aes, nonce, 12); + if (ret != 0) { + // failed to set nonce + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCcmEncrypt + \sa wc_AesCcmSetKey +*/ +int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz); + +/*! + \ingroup AES + \brief This function performs AES CCM encryption with extended + parameters, including nonce output. This is useful when part of the + nonce is generated internally. + + \return 0 On success. 
+ \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. + + \param aes pointer to the AES structure + \param out pointer to buffer to store ciphertext + \param in pointer to plaintext to encrypt + \param sz length of plaintext in bytes + \param ivOut pointer to buffer to receive the nonce + \param ivOutSz length of the nonce output buffer in bytes + \param authTag pointer to buffer to store authentication tag + \param authTagSz length of authentication tag in bytes + \param authIn pointer to additional authentication data + \param authInSz length of AAD in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + byte nonce[12]; + byte plaintext[100] = { }; // data + byte ciphertext[100]; + byte authTag[16]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesCcmSetKey(&aes, key, 16); + int ret = wc_AesCcmEncrypt_ex(&aes, ciphertext, plaintext, 100, + nonce, 12, authTag, 16, NULL, 0); + if (ret != 0) { + // encryption failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesCcmEncrypt + \sa wc_AesCcmSetNonce +*/ +int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz, + byte* ivOut, word32 ivOutSz, byte* authTag, + word32 authTagSz, const byte* authIn, + word32 authInSz); + +/*! + \ingroup AES + \brief This function wraps a key using AES Key Wrap algorithm + (RFC 3394). This is commonly used to securely transport + cryptographic keys. + + \return Length of wrapped key in bytes on success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key-encryption key + \param keySz length of the key-encryption key in bytes + \param in pointer to the key to wrap + \param inSz length of the key to wrap in bytes + \param out pointer to buffer to store wrapped key + \param outSz size of output buffer in bytes + \param iv pointer to IV (typically NULL to use default) + + _Example_ + \code + byte kek[16] = { }; // key-encryption key + byte keyToWrap[16] = { }; // key to wrap + byte wrappedKey[24]; + + int wrappedLen = wc_AesKeyWrap(kek, 16, keyToWrap, 16, wrappedKey, + 24, NULL); + if (wrappedLen <= 0) { + // key wrap failed + } + \endcode + + \sa wc_AesKeyUnWrap + \sa wc_AesKeyWrap_ex +*/ +int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, + word32 inSz, byte* out, word32 outSz, const byte* iv); + +/*! + \ingroup AES + \brief This function wraps a key using AES Key Wrap algorithm with + an initialized AES structure. This allows reusing the same AES + structure for multiple wrap operations. + + \return Length of wrapped key in bytes on success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param aes pointer to initialized AES structure + \param in pointer to the key to wrap + \param inSz length of the key to wrap in bytes + \param out pointer to buffer to store wrapped key + \param outSz size of output buffer in bytes + \param iv pointer to IV (typically NULL to use default) + + _Example_ + \code + Aes aes; + byte kek[16] = { }; // key-encryption key + byte keyToWrap[16] = { }; // key to wrap + byte wrappedKey[24]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, kek, 16, NULL, AES_ENCRYPTION); + int wrappedLen = wc_AesKeyWrap_ex(&aes, keyToWrap, 16, wrappedKey, + 24, NULL); + if (wrappedLen <= 0) { + // key wrap failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesKeyWrap + \sa wc_AesKeyUnWrap_ex +*/ +int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out, + word32 outSz, const byte* iv); + +/*! + \ingroup AES + \brief This function unwraps a key using AES Key Unwrap algorithm + (RFC 3394). This is used to securely receive cryptographic keys + that were wrapped. + + \return Length of unwrapped key in bytes on success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key-encryption key + \param keySz length of the key-encryption key in bytes + \param in pointer to the wrapped key + \param inSz length of the wrapped key in bytes + \param out pointer to buffer to store unwrapped key + \param outSz size of output buffer in bytes + \param iv pointer to IV (typically NULL to use default) + + _Example_ + \code + byte kek[16] = { }; // key-encryption key + byte wrappedKey[24] = { }; // wrapped key + byte unwrappedKey[16]; + + int unwrappedLen = wc_AesKeyUnWrap(kek, 16, wrappedKey, 24, + unwrappedKey, 16, NULL); + if (unwrappedLen <= 0) { + // key unwrap failed + } + \endcode + + \sa wc_AesKeyWrap + \sa wc_AesKeyUnWrap_ex +*/ +int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, + word32 inSz, byte* out, word32 outSz, const byte* iv); + +/*! + \ingroup AES + \brief This function unwraps a key using AES Key Unwrap algorithm + with an initialized AES structure. This allows reusing the same AES + structure for multiple unwrap operations. + + \return Length of unwrapped key in bytes on success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param aes pointer to initialized AES structure + \param in pointer to the wrapped key + \param inSz length of the wrapped key in bytes + \param out pointer to buffer to store unwrapped key + \param outSz size of output buffer in bytes + \param iv pointer to IV (typically NULL to use default) + + _Example_ + \code + Aes aes; + byte kek[16] = { }; // key-encryption key + byte wrappedKey[24] = { }; // wrapped key + byte unwrappedKey[16]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, kek, 16, NULL, AES_ENCRYPTION); + int unwrappedLen = wc_AesKeyUnWrap_ex(&aes, wrappedKey, 24, + unwrappedKey, 16, NULL); + if (unwrappedLen <= 0) { + // key unwrap failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesKeyUnWrap + \sa wc_AesKeyWrap_ex +*/ +int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out, + word32 outSz, const byte* iv); + +/*! + \ingroup AES + \brief This function encrypts multiple consecutive sectors using AES XTS + mode. It processes multiple sectors in sequence, automatically + incrementing the sector number for each sector. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL, or if sectorSz is 0, + or if sz is less than AES_BLOCK_SIZE. + \return Other negative values on error. 
+ + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store encrypted data + \param in pointer to plaintext data to encrypt + \param sz total length of data in bytes + \param sector starting sector number for the tweak + \param sectorSz size of each sector in bytes + + _Example_ + \code + XtsAes aes; + byte key[32] = { }; // 256-bit key + byte plaintext[1024] = { }; // data + byte ciphertext[1024]; + + wc_AesXtsSetKey(&aes, key, 32, AES_ENCRYPTION, NULL, INVALID_DEVID); + int ret = wc_AesXtsEncryptConsecutiveSectors(&aes, ciphertext, + plaintext, 1024, 0, 512); + if (ret != 0) { + // encryption failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsDecryptConsecutiveSectors + \sa wc_AesXtsEncryptSector +*/ +int wc_AesXtsEncryptConsecutiveSectors(XtsAes* aes, byte* out, + const byte* in, word32 sz, + word64 sector, word32 sectorSz); + +/*! + \ingroup AES + \brief This function decrypts multiple consecutive sectors using AES XTS + mode. It processes multiple sectors in sequence, automatically + incrementing the sector number for each sector. + + \return 0 On success. + \return BAD_FUNC_ARG If aes, out, or in is NULL, or if sectorSz is 0, + or if sz is less than AES_BLOCK_SIZE. + \return Other negative values on error. 
+ + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store decrypted data + \param in pointer to ciphertext data to decrypt + \param sz total length of data in bytes + \param sector starting sector number for the tweak + \param sectorSz size of each sector in bytes + + _Example_ + \code + XtsAes aes; + byte key[32] = { }; // 256-bit key + byte ciphertext[1024] = { }; // encrypted data + byte plaintext[1024]; + + wc_AesXtsSetKey(&aes, key, 32, AES_DECRYPTION, NULL, INVALID_DEVID); + int ret = wc_AesXtsDecryptConsecutiveSectors(&aes, plaintext, + ciphertext, 1024, 0, 512); + if (ret != 0) { + // decryption failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsEncryptConsecutiveSectors + \sa wc_AesXtsDecryptSector +*/ +int wc_AesXtsDecryptConsecutiveSectors(XtsAes* aes, byte* out, + const byte* in, word32 sz, + word64 sector, word32 sectorSz); + +/*! + \ingroup AES + \brief This function initializes streaming AES XTS encryption. It sets + up the context for processing data in multiple update calls. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + + \param aes pointer to the XtsAes structure + \param i pointer to the tweak/IV buffer + \param iSz length of the tweak/IV in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + + wc_AesXtsSetKey(&aes, key, 32, AES_ENCRYPTION, NULL, INVALID_DEVID); + int ret = wc_AesXtsEncryptInit(&aes, tweak, 16, &stream); + if (ret != 0) { + // initialization failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsEncryptUpdate + \sa wc_AesXtsEncryptFinal +*/ +int wc_AesXtsEncryptInit(XtsAes* aes, const byte* i, word32 iSz, + struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function initializes streaming AES XTS decryption. 
It sets + up the context for processing data in multiple update calls. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + + \param aes pointer to the XtsAes structure + \param i pointer to the tweak/IV buffer + \param iSz length of the tweak/IV in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + + wc_AesXtsSetKey(&aes, key, 32, AES_DECRYPTION, NULL, INVALID_DEVID); + int ret = wc_AesXtsDecryptInit(&aes, tweak, 16, &stream); + if (ret != 0) { + // initialization failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsDecryptUpdate + \sa wc_AesXtsDecryptFinal +*/ +int wc_AesXtsDecryptInit(XtsAes* aes, const byte* i, word32 iSz, + struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function performs an update step of streaming AES XTS + encryption. It processes a chunk of data and can be called multiple + times. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. 
+ + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store encrypted data + \param in pointer to plaintext data to encrypt + \param sz length of data in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + byte plaintext[100] = { }; // data + byte ciphertext[100]; + + wc_AesXtsSetKey(&aes, key, 32, AES_ENCRYPTION, NULL, INVALID_DEVID); + wc_AesXtsEncryptInit(&aes, tweak, 16, &stream); + int ret = wc_AesXtsEncryptUpdate(&aes, ciphertext, plaintext, 100, + &stream); + if (ret != 0) { + // encryption failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsEncryptInit + \sa wc_AesXtsEncryptFinal +*/ +int wc_AesXtsEncryptUpdate(XtsAes* aes, byte* out, const byte* in, + word32 sz, struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function performs an update step of streaming AES XTS + decryption. It processes a chunk of data and can be called multiple + times. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. 
+ + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store decrypted data + \param in pointer to ciphertext data to decrypt + \param sz length of data in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + byte ciphertext[100] = { }; // encrypted data + byte plaintext[100]; + + wc_AesXtsSetKey(&aes, key, 32, AES_DECRYPTION, NULL, INVALID_DEVID); + wc_AesXtsDecryptInit(&aes, tweak, 16, &stream); + int ret = wc_AesXtsDecryptUpdate(&aes, plaintext, ciphertext, 100, + &stream); + if (ret != 0) { + // decryption failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsDecryptInit + \sa wc_AesXtsDecryptFinal +*/ +int wc_AesXtsDecryptUpdate(XtsAes* aes, byte* out, const byte* in, + word32 sz, struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function finalizes streaming AES XTS encryption. It + processes any remaining data and completes the encryption operation. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store final encrypted data + \param in pointer to final plaintext data to encrypt + \param sz length of final data in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + byte plaintext[50] = { }; // final data + byte ciphertext[50]; + + wc_AesXtsSetKey(&aes, key, 32, AES_ENCRYPTION, NULL, INVALID_DEVID); + wc_AesXtsEncryptInit(&aes, tweak, 16, &stream); + // ... update calls ... 
+ int ret = wc_AesXtsEncryptFinal(&aes, ciphertext, plaintext, 50, + &stream); + if (ret != 0) { + // finalization failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsEncryptUpdate + \sa wc_AesXtsEncryptInit +*/ +int wc_AesXtsEncryptFinal(XtsAes* aes, byte* out, const byte* in, + word32 sz, struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function finalizes streaming AES XTS decryption. It + processes any remaining data and completes the decryption operation. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + + \param aes pointer to the XtsAes structure + \param out pointer to buffer to store final decrypted data + \param in pointer to final ciphertext data to decrypt + \param sz length of final data in bytes + \param stream pointer to XtsAesStreamData structure for streaming state + + _Example_ + \code + XtsAes aes; + struct XtsAesStreamData stream; + byte key[32] = { }; // 256-bit key + byte tweak[16] = { }; // tweak value + byte ciphertext[50] = { }; // final encrypted data + byte plaintext[50]; + + wc_AesXtsSetKey(&aes, key, 32, AES_DECRYPTION, NULL, INVALID_DEVID); + wc_AesXtsDecryptInit(&aes, tweak, 16, &stream); + // ... update calls ... + int ret = wc_AesXtsDecryptFinal(&aes, plaintext, ciphertext, 50, + &stream); + if (ret != 0) { + // finalization failed + } + wc_AesXtsFree(&aes); + \endcode + + \sa wc_AesXtsDecryptUpdate + \sa wc_AesXtsDecryptInit +*/ +int wc_AesXtsDecryptFinal(XtsAes* aes, byte* out, const byte* in, + word32 sz, struct XtsAesStreamData *stream); + +/*! + \ingroup AES + \brief This function retrieves the key size from an initialized AES + structure. It returns the size of the key currently set in the AES + object. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or keySize is NULL. 
+ + \param aes pointer to the AES structure + \param keySize pointer to word32 to store the key size in bytes + + _Example_ + \code + Aes aes; + byte key[16] = { }; // 128-bit key + word32 keySize; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, 16, NULL, AES_ENCRYPTION); + int ret = wc_AesGetKeySize(&aes, &keySize); + if (ret == 0) { + // keySize now contains 16 + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesSetKey + \sa wc_AesInit +*/ +int wc_AesGetKeySize(Aes* aes, word32* keySize); + +/*! + \ingroup AES + \brief This function initializes an AES structure with an ID. This is + useful for tracking or identifying specific AES instances in + applications that manage multiple AES contexts. + + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or id is NULL, or if len is invalid. + + \param aes pointer to the AES structure to initialize + \param id pointer to the ID buffer + \param len length of the ID in bytes + \param heap pointer to heap hint for memory allocation (can be NULL) + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software) + + _Example_ + \code + Aes aes; + byte id[8] = { }; // unique identifier + + int ret = wc_AesInit_Id(&aes, id, 8, NULL, INVALID_DEVID); + if (ret != 0) { + // initialization failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesInit + \sa wc_AesInit_Label +*/ +int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, + int devId); + +/*! + \ingroup AES + \brief This function initializes an AES structure with a label string. + This is useful for tracking or identifying specific AES instances with + human-readable names. + + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or label is NULL. 
+ + \param aes pointer to the AES structure to initialize + \param label pointer to the null-terminated label string + \param heap pointer to heap hint for memory allocation (can be NULL) + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software) + + _Example_ + \code + Aes aes; + + int ret = wc_AesInit_Label(&aes, "MyAESContext", NULL, INVALID_DEVID); + if (ret != 0) { + // initialization failed + } + wc_AesFree(&aes); + \endcode + + \sa wc_AesInit + \sa wc_AesInit_Id +*/ +int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId); + +/*! + \ingroup AES + \brief This function allocates and initializes a new AES structure. + It returns a pointer to the allocated structure, which must be freed + with wc_AesDelete when no longer needed. These New/Delete functions + are exposed to support allocation of the structure using dynamic memory + to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return Pointer to allocated Aes structure on success. + \return NULL on allocation failure. + + \param heap pointer to heap hint for memory allocation (can be NULL) + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software) + \param result_code pointer to int to store result code (can be NULL) + + _Example_ + \code + int result; + Aes* aes = wc_AesNew(NULL, INVALID_DEVID, &result); + if (aes == NULL || result != 0) { + // allocation or initialization failed + } + // use aes... + wc_AesDelete(aes, &aes); + \endcode + + \sa wc_AesDelete + \sa wc_AesInit +*/ +Aes* wc_AesNew(void* heap, int devId, int *result_code); + +/*! + \ingroup AES + \brief This function frees an AES structure that was allocated with + wc_AesNew. It also sets the pointer to NULL to prevent use-after-free. 
+ These New/Delete functions are exposed to support allocation of the + structure using dynamic memory to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return 0 On success. + \return BAD_FUNC_ARG If aes or aes_p is NULL. + + \param aes pointer to the AES structure to free + \param aes_p pointer to the AES pointer (will be set to NULL) + + _Example_ + \code + Aes* aes = wc_AesNew(NULL, INVALID_DEVID, NULL); + if (aes != NULL) { + // use aes... + int ret = wc_AesDelete(aes, &aes); + // aes is now NULL + } + \endcode + + \sa wc_AesNew + \sa wc_AesFree +*/ +int wc_AesDelete(Aes* aes, Aes** aes_p); + +/*! + \ingroup AES + \brief This function performs AES-SIV (Synthetic IV) encryption with + extended parameters. AES-SIV provides nonce-misuse resistance and + deterministic authenticated encryption. + + \return 0 On success. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key buffer (32, 48, or 64 bytes for SIV) + \param keySz length of the key in bytes + \param assoc pointer to array of associated data structures + \param numAssoc number of associated data items + \param nonce pointer to the nonce buffer (can be NULL) + \param nonceSz length of the nonce in bytes + \param in pointer to plaintext to encrypt + \param inSz length of plaintext in bytes + \param siv pointer to buffer to store the SIV (16 bytes) + \param out pointer to buffer to store ciphertext + + _Example_ + \code + byte key[32] = { }; // 256-bit key for AES-128-SIV + AesSivAssoc assoc[1]; + byte aad[20] = { }; // associated data + byte nonce[12] = { }; // nonce + byte plaintext[100] = { }; // data + byte siv[16]; + byte ciphertext[100]; + + assoc[0].data = aad; + assoc[0].sz = 20; + + int ret = wc_AesSivEncrypt_ex(key, 32, assoc, 1, nonce, 12, + plaintext, 100, siv, ciphertext); + if (ret != 0) { + // encryption failed + } + \endcode + + \sa wc_AesSivDecrypt_ex + \sa wc_AesSivEncrypt +*/ +int wc_AesSivEncrypt_ex(const byte* key, word32 keySz, + const AesSivAssoc* assoc, word32 numAssoc, + const byte* nonce, word32 nonceSz, const byte* in, + word32 inSz, byte* siv, byte* out); + +/*! + \ingroup AES + \brief This function performs AES-SIV (Synthetic IV) decryption with + extended parameters. It verifies the SIV and decrypts the ciphertext. + + \return 0 On successful decryption and verification. + \return AES_SIV_AUTH_E If SIV verification fails. + \return BAD_FUNC_ARG If parameters are invalid. + \return Other negative values on error. 
+ + \param key pointer to the key buffer (32, 48, or 64 bytes for SIV) + \param keySz length of the key in bytes + \param assoc pointer to array of associated data structures + \param numAssoc number of associated data items + \param nonce pointer to the nonce buffer (can be NULL) + \param nonceSz length of the nonce in bytes + \param in pointer to ciphertext to decrypt + \param inSz length of ciphertext in bytes + \param siv pointer to the SIV to verify (16 bytes) + \param out pointer to buffer to store plaintext + + _Example_ + \code + byte key[32] = { }; // 256-bit key for AES-128-SIV + AesSivAssoc assoc[1]; + byte aad[20] = { }; // associated data + byte nonce[12] = { }; // nonce + byte ciphertext[100] = { }; // encrypted data + byte siv[16] = { }; // received SIV + byte plaintext[100]; + + assoc[0].data = aad; + assoc[0].sz = 20; + + int ret = wc_AesSivDecrypt_ex(key, 32, assoc, 1, nonce, 12, + ciphertext, 100, siv, plaintext); + if (ret != 0) { + // decryption or verification failed + } + \endcode + + \sa wc_AesSivEncrypt_ex + \sa wc_AesSivDecrypt +*/ +int wc_AesSivDecrypt_ex(const byte* key, word32 keySz, + const AesSivAssoc* assoc, word32 numAssoc, + const byte* nonce, word32 nonceSz, const byte* in, + word32 inSz, byte* siv, byte* out); diff --git a/doc/dox_comments/header_files/arc4.h b/doc/dox_comments/header_files/arc4.h index bcc2d47a3..3bbcced77 100644 --- a/doc/dox_comments/header_files/arc4.h +++ b/doc/dox_comments/header_files/arc4.h @@ -57,3 +57,57 @@ int wc_Arc4Process(Arc4* arc4, byte* out, const byte* in, word32 length); \sa wc_Arc4Process */ int wc_Arc4SetKey(Arc4* arc4, const byte* key, word32 length); + +/*! + \ingroup ARC4 + \brief This function initializes an ARC4 structure for use with + asynchronous cryptographic operations. It sets up the heap hint and + device ID for hardware acceleration support. + + \return 0 On success. + \return BAD_FUNC_ARG If arc4 is NULL. 
+ + \param arc4 pointer to the Arc4 structure to initialize + \param heap pointer to heap hint for memory allocation (can be NULL) + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software) + + _Example_ + \code + Arc4 arc4; + int ret = wc_Arc4Init(&arc4, NULL, INVALID_DEVID); + if (ret != 0) { + // initialization failed + } + // use arc4 for encryption/decryption + wc_Arc4Free(&arc4); + \endcode + + \sa wc_Arc4SetKey + \sa wc_Arc4Free +*/ +int wc_Arc4Init(Arc4* arc4, void* heap, int devId); + +/*! + \ingroup ARC4 + \brief This function frees an ARC4 structure, releasing any resources + allocated for asynchronous cryptographic operations. It should be + called when the ARC4 structure is no longer needed. + + \return none No return value. + + \param arc4 pointer to the Arc4 structure to free + + _Example_ + \code + Arc4 arc4; + wc_Arc4Init(&arc4, NULL, INVALID_DEVID); + wc_Arc4SetKey(&arc4, key, keyLen); + // use arc4 for encryption/decryption + wc_Arc4Free(&arc4); + \endcode + + \sa wc_Arc4Init + \sa wc_Arc4SetKey +*/ +void wc_Arc4Free(Arc4* arc4); diff --git a/doc/dox_comments/header_files/ascon.h b/doc/dox_comments/header_files/ascon.h index 3aab14fc9..4d0c0b540 100644 --- a/doc/dox_comments/header_files/ascon.h +++ b/doc/dox_comments/header_files/ascon.h @@ -466,4 +466,109 @@ int wc_AsconAEAD128_DecryptUpdate(wc_AsconAEAD128* a, byte* out, const byte* in, */ int wc_AsconAEAD128_DecryptFinal(wc_AsconAEAD128* a, const byte* tag); +/*! + \ingroup ASCON + \brief This function allocates and initializes a new Ascon Hash256 + context. The returned context must be freed with wc_AsconHash256_Free + when no longer needed. + + \return Pointer to allocated wc_AsconHash256 structure on success. + \return NULL on allocation or initialization failure. 
+ + _Example_ + \code + wc_AsconHash256* hash = wc_AsconHash256_New(); + if (hash == NULL) { + // handle allocation error + } + byte data[32] = { }; // data to hash + wc_AsconHash256_Update(hash, data, sizeof(data)); + byte digest[ASCON_HASH256_SZ]; + wc_AsconHash256_Final(hash, digest); + wc_AsconHash256_Free(hash); + \endcode + + \sa wc_AsconHash256_Free + \sa wc_AsconHash256_Init +*/ +wc_AsconHash256* wc_AsconHash256_New(void); + +/*! + \ingroup ASCON + \brief This function frees an Ascon Hash256 context that was allocated + with wc_AsconHash256_New. It clears the context before freeing to + prevent information leakage. + + \return none No return value. + + \param a pointer to the wc_AsconHash256 structure to free + + _Example_ + \code + wc_AsconHash256* hash = wc_AsconHash256_New(); + if (hash != NULL) { + // use hash context + wc_AsconHash256_Free(hash); + } + \endcode + + \sa wc_AsconHash256_New + \sa wc_AsconHash256_Clear +*/ +void wc_AsconHash256_Free(wc_AsconHash256* a); + +/*! + \ingroup ASCON + \brief This function clears an Ascon Hash256 context by zeroing all + internal state. This should be called to securely erase sensitive + data from memory. + + \return none No return value. + + \param a pointer to the wc_AsconHash256 structure to clear + + _Example_ + \code + wc_AsconHash256 hash; + wc_AsconHash256_Init(&hash); + byte data[32] = { }; // data to hash + wc_AsconHash256_Update(&hash, data, sizeof(data)); + byte digest[ASCON_HASH256_SZ]; + wc_AsconHash256_Final(&hash, digest); + wc_AsconHash256_Clear(&hash); + \endcode + + \sa wc_AsconHash256_Init + \sa wc_AsconHash256_Free +*/ +void wc_AsconHash256_Clear(wc_AsconHash256* a); + +/*! + \ingroup ASCON + \brief This function allocates and initializes a new Ascon AEAD128 + context. The returned context must be freed with wc_AsconAEAD128_Free + when no longer needed. + + \return Pointer to allocated wc_AsconAEAD128 structure on success. + \return NULL on allocation or initialization failure.
+ + _Example_ + \code + wc_AsconAEAD128* aead = wc_AsconAEAD128_New(); + if (aead == NULL) { + // handle allocation error + } + byte key[ASCON_AEAD128_KEY_SZ] = { }; // key + byte nonce[ASCON_AEAD128_NONCE_SZ] = { }; // nonce + wc_AsconAEAD128_SetKey(aead, key); + wc_AsconAEAD128_SetNonce(aead, nonce); + // perform encryption/decryption + wc_AsconAEAD128_Free(aead); + \endcode + + \sa wc_AsconAEAD128_Free + \sa wc_AsconAEAD128_Init +*/ +wc_AsconAEAD128* wc_AsconAEAD128_New(void); + diff --git a/doc/dox_comments/header_files/asn.h b/doc/dox_comments/header_files/asn.h index e69de29bb..a26ac6e53 100644 --- a/doc/dox_comments/header_files/asn.h +++ b/doc/dox_comments/header_files/asn.h @@ -0,0 +1,239 @@ +/*! + \ingroup ASN + \brief This function converts BER (Basic Encoding Rules) formatted data + to DER (Distinguished Encoding Rules) format. BER allows indefinite + length encoding while DER requires definite lengths. This function + calculates definite lengths for all indefinite length items. + + \return 0 On success. + \return ASN_PARSE_E If the BER data is invalid. + \return BAD_FUNC_ARG If ber or derSz are NULL. + \return BUFFER_E If der is not NULL and derSz is too small. + + \param ber pointer to the buffer containing BER formatted data + \param berSz size of the BER data in bytes + \param der pointer to buffer to store DER formatted data (can be NULL + to calculate required size) + \param derSz pointer to size of der buffer; updated with actual size + needed or used + + \note This API is not public by default. Define WOLFSSL_PUBLIC_ASN to + expose APIs marked WOLFSSL_ASN_API. + + _Example_ + \code + byte ber[256] = { }; // BER encoded data + byte der[256]; + word32 derSz = sizeof(der); + + int ret = wc_BerToDer(ber, sizeof(ber), der, &derSz); + if (ret == 0) { + // der now contains DER formatted data of length derSz + } + \endcode + + \sa wc_EncodeObjectId +*/ +int wc_BerToDer(const byte* ber, word32 berSz, byte* der, word32* derSz); + +/*! 
+ \ingroup ASN + \brief This function frees a linked list of alternative names + (DNS_entry structures). It deallocates each node and its associated + name string, IP string, and RID string if present. + + \return none No return value. + + \param altNames pointer to the head of the alternative names linked list + \param heap pointer to heap hint for memory deallocation (can be NULL) + + \note This API is not public by default. Define WOLFSSL_PUBLIC_ASN to + expose APIs marked WOLFSSL_ASN_API. + + _Example_ + \code + DNS_entry* altNames = NULL; + // populate altNames with certificate alternative names + + FreeAltNames(altNames, NULL); + // altNames list is now freed + \endcode + + \sa AltNameNew +*/ +void FreeAltNames(DNS_entry* altNames, void* heap); + +/*! + \ingroup ASN + \brief This function sets an extended callback for handling unknown + certificate extensions during certificate parsing. The callback + receives additional context information compared to the basic + callback. + + \return 0 On success. + \return BAD_FUNC_ARG If cert is NULL. + + \param cert pointer to the DecodedCert structure + \param cb callback function to handle unknown extensions + \param ctx context pointer passed to the callback + + \note This API is not public by default. Define WOLFSSL_PUBLIC_ASN to + expose APIs marked WOLFSSL_ASN_API. + + _Example_ + \code + DecodedCert cert; + + int UnknownExtCallback(const byte* oid, word32 oidSz, int crit, + const byte* der, word32 derSz, void* ctx) { + // handle unknown extension + return 0; + } + + wc_InitDecodedCert(&cert, derCert, derCertSz, NULL); + wc_SetUnknownExtCallbackEx(&cert, UnknownExtCallback, myContext); + wc_ParseCert(&cert, CERT_TYPE, NO_VERIFY, NULL); + \endcode + + \sa wc_SetUnknownExtCallback + \sa wc_InitDecodedCert +*/ +int wc_SetUnknownExtCallbackEx(DecodedCert* cert, + wc_UnknownExtCallbackEx cb, void *ctx); + +/*! + \ingroup ASN + \brief This function verifies the signature on a certificate using a + certificate manager. 
It checks that the certificate is properly + signed by a trusted CA. + + \return 0 On successful signature verification. + \return ASN_SIG_CONFIRM_E If signature verification fails. + \return Other negative values on error. + + \param cert pointer to the DER encoded certificate + \param certSz size of the certificate in bytes + \param heap pointer to heap hint for memory allocation (can be NULL) + \param cm pointer to certificate manager containing trusted CAs + + _Example_ + \code + byte cert[2048] = { }; // DER encoded certificate + word32 certSz = sizeof(cert); + WOLFSSL_CERT_MANAGER* cm; + + cm = wolfSSL_CertManagerNew(); + wolfSSL_CertManagerLoadCA(cm, "ca-cert.pem", NULL); + + int ret = wc_CheckCertSignature(cert, certSz, NULL, cm); + if (ret == 0) { + // certificate signature is valid + } + wolfSSL_CertManagerFree(cm); + \endcode + + \sa wolfSSL_CertManagerNew + \sa wolfSSL_CertManagerLoadCA +*/ +int wc_CheckCertSignature(const byte* cert, word32 certSz, void* heap, + void* cm); + +/*! + \ingroup ASN + \brief This function encodes an array of word16 values into an ASN.1 + Object Identifier (OID) in DER format. OIDs are used to identify + algorithms, extensions, and other objects in certificates and + cryptographic protocols. + + \return 0 On success. + \return BAD_FUNC_ARG If in, inSz, or outSz are invalid. + \return BUFFER_E If out is not NULL and outSz is too small. 
+ + \param in pointer to array of word16 values representing OID components + \param inSz number of components in the OID + \param out pointer to buffer to store encoded OID (can be NULL to + calculate size) + \param outSz pointer to size of out buffer; updated with actual size + + _Example_ + \code + word16 oid[] = {1, 2, 840, 113549, 1, 1, 11}; // sha256WithRSAEncryption + byte encoded[32]; + word32 encodedSz = sizeof(encoded); + + int ret = wc_EncodeObjectId(oid, sizeof(oid)/sizeof(word16), + encoded, &encodedSz); + if (ret == 0) { + // encoded contains DER encoded OID + } + \endcode + + \sa wc_BerToDer +*/ +int wc_EncodeObjectId(const word16* in, word32 inSz, byte* out, + word32* outSz); + +/*! + \ingroup ASN + \brief This function sets the algorithm identifier in DER format. It + encodes the algorithm OID and optional parameters based on the + algorithm type and curve size. + + \return Length of the encoded algorithm identifier on success. + \return 0 On error (the return type is word32, so errors cannot be + negative). + + \param algoOID algorithm object identifier constant + \param output pointer to buffer to store encoded algorithm ID + \param type type of encoding (oidSigType, oidHashType, etc.) + \param curveSz size of the curve for ECC algorithms (0 for non-ECC) + + _Example_ + \code + byte algId[32]; + word32 len; + + len = SetAlgoID(CTC_SHA256wRSA, algId, oidSigType, 0); + if (len > 0) { + // algId contains encoded algorithm identifier + } + \endcode + + \sa wc_EncodeObjectId +*/ +word32 SetAlgoID(int algoOID, byte* output, int type, int curveSz); + +/*! + \ingroup ASN + \brief This function decodes a DER encoded Diffie-Hellman public key. + It extracts the public key value from the DER encoding and stores it + in the DhKey structure. + + \return 0 On success. + \return BAD_FUNC_ARG If input, inOutIdx, key, or inSz are invalid. + \return ASN_PARSE_E If the DER encoding is invalid. + \return Other negative values on error.
+ + \param input pointer to buffer containing DER encoded public key + \param inOutIdx pointer to index in buffer; updated to end of key + \param key pointer to DhKey structure to store decoded public key + \param inSz size of the input buffer + + _Example_ + \code + byte derKey[256] = { }; // DER encoded DH public key + word32 idx = 0; + DhKey key; + + wc_InitDhKey(&key); + int ret = wc_DhPublicKeyDecode(derKey, &idx, &key, sizeof(derKey)); + if (ret == 0) { + // key now contains the decoded public key + } + wc_FreeDhKey(&key); + \endcode + + \sa wc_InitDhKey + \sa wc_DhKeyDecode +*/ +int wc_DhPublicKeyDecode(const byte* input, word32* inOutIdx, DhKey* key, + word32 inSz); diff --git a/doc/dox_comments/header_files/asn_public.h b/doc/dox_comments/header_files/asn_public.h index aa8ed94d3..b4c537ec7 100644 --- a/doc/dox_comments/header_files/asn_public.h +++ b/doc/dox_comments/header_files/asn_public.h @@ -53,6 +53,31 @@ int wc_InitCert(Cert* cert); */ Cert* wc_CertNew(void* heap); +/*! + \ingroup ASN + \brief Initializes certificate with heap hint and device ID. + + \return 0 on success + \return BAD_FUNC_ARG if cert is NULL + + \param cert Cert structure to initialize + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + + _Example_ + \code + Cert myCert; + int ret = wc_InitCert_ex(&myCert, NULL, INVALID_DEVID); + if (ret != 0) { + // error initializing cert + } + \endcode + + \sa wc_InitCert + \sa wc_MakeCert_ex +*/ +int wc_InitCert_ex(Cert* cert, void* heap, int devId); + /*! \ingroup ASN @@ -127,6 +152,248 @@ void wc_CertFree(Cert* cert); int wc_MakeCert(Cert* cert, byte* derBuffer, word32 derSz, RsaKey* rsaKey, ecc_key* eccKey, WC_RNG* rng); +/*! + \ingroup ASN + \brief Makes certificate with generic key type support. 
+ + \return Size of certificate on success + \return MEMORY_E if memory allocation fails + \return BUFFER_E if buffer too small + \return Other error codes on failure + + \param cert Initialized cert structure + \param derBuffer Buffer for generated certificate + \param derSz Size of derBuffer + \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.) + \param key Pointer to key structure + \param rng Random number generator + + _Example_ + \code + Cert myCert; + wc_InitCert(&myCert); + byte derCert[4096]; + RsaKey key; + WC_RNG rng; + int certSz = wc_MakeCert_ex(&myCert, derCert, sizeof(derCert), + RSA_TYPE, &key, &rng); + \endcode + + \sa wc_MakeCert + \sa wc_SignCert_ex +*/ +int wc_MakeCert_ex(Cert* cert, byte* derBuffer, word32 derSz, + int keyType, void* key, WC_RNG* rng); + +/*! + \ingroup ASN + \brief Makes certificate request with generic key type support. + + \return Size of certificate request on success + \return MEMORY_E if memory allocation fails + \return BUFFER_E if buffer too small + \return Other error codes on failure + + \param cert Initialized cert structure + \param derBuffer Buffer for generated certificate request + \param derSz Size of derBuffer + \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.) + \param key Pointer to key structure + + _Example_ + \code + Cert myCert; + wc_InitCert(&myCert); + byte derCert[4096]; + ecc_key key; + int certSz = wc_MakeCertReq_ex(&myCert, derCert, sizeof(derCert), + ECC_TYPE, &key); + \endcode + + \sa wc_MakeCertReq + \sa wc_SignCert_ex +*/ +int wc_MakeCertReq_ex(Cert* cert, byte* derBuffer, word32 derSz, + int keyType, void* key); + +/*! + \ingroup ASN + \brief Signs certificate with generic key type support.
+ + \return New size of certificate with signature on success + \return MEMORY_E if memory allocation fails + \return BUFFER_E if buffer too small + \return Other error codes on failure + + \param requestSz Size of certificate body to sign + \param sType Signature type + \param buf Buffer containing certificate to sign + \param buffSz Total size of buffer + \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.) + \param key Pointer to key structure + \param rng Random number generator + + _Example_ + \code + Cert myCert; + byte derCert[4096]; + RsaKey key; + WC_RNG rng; + // Initialize cert and set fields (issuer, subject, dates, etc.) + wc_InitCert(&myCert); + // ... set myCert fields ... + // Generate certificate body (TBS - To Be Signed) + int bodySz = wc_MakeCert_ex(&myCert, derCert, sizeof(derCert), + RSA_TYPE, &key, &rng); + if (bodySz > 0) { + // bodySz is the size of the unsigned certificate body + // Sign the certificate body and append signature + int certSz = wc_SignCert_ex(bodySz, CTC_SHA256wRSA, + derCert, sizeof(derCert), RSA_TYPE, + &key, &rng); + // derCert now contains complete signed certificate of size certSz + } + \endcode + + \sa wc_SignCert + \sa wc_MakeCert_ex +*/ +int wc_SignCert_ex(int requestSz, int sType, byte* buf, word32 buffSz, + int keyType, void* key, WC_RNG* rng); + +/*! + \ingroup ASN + \brief Makes signature with bit string encoding. This function is used + for dual algorithm certificate signing, where an alternative signature + is created using a secondary key algorithm (e.g., a post-quantum algorithm + alongside a traditional algorithm). + + \note This API is only available when WOLFSSL_DUAL_ALG_CERTS is defined, + which enables support for dual algorithm certificates used in Post-Quantum + cryptography to provide hybrid signing with both traditional and PQ + algorithms. 
+ + \return Size of signature on success + \return Negative on error + + \param sig Output buffer for signature + \param sigSz Size of signature buffer + \param sType Signature type + \param buf Data to sign (typically the TBS - To Be Signed - + certificate data) + \param bufSz Size of data + \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.) + \param key Pointer to key structure + \param rng Random number generator + + _Example_ + \code + byte sig[512], data[256]; + RsaKey key; + WC_RNG rng; + int sigSz = wc_MakeSigWithBitStr(sig, sizeof(sig), CTC_SHA256wRSA, + data, sizeof(data), RSA_TYPE, + &key, &rng); + \endcode + + \sa wc_SignCert_ex + \sa wc_GeneratePreTBS +*/ +int wc_MakeSigWithBitStr(byte *sig, int sigSz, int sType, byte* buf, + word32 bufSz, int keyType, void* key, + WC_RNG* rng); + +/*! + \ingroup ASN + \brief Gets certificate validity dates. + + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + + \param cert Certificate structure + \param before Output for notBefore date + \param after Output for notAfter date + + _Example_ + \code + Cert myCert; + struct tm beforeDate, afterDate; + int ret = wc_GetCertDates(&myCert, &beforeDate, &afterDate); + \endcode + + \sa wc_InitCert +*/ +int wc_GetCertDates(Cert* cert, struct tm* before, struct tm* after); + +/*! + \ingroup ASN + \brief Extracts date information from certificate date field. This + function parses an ASN.1 encoded date (including tag and length) and + returns a pointer to the raw date value bytes, the ASN.1 time type, + and the length of the date value. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + \return ASN_PARSE_E if date parsing fails + + \param certDate Certificate date buffer containing ASN.1 encoded date + (tag + length + value) + \param certDateSz Size of certificate date buffer + \param date Output pointer set to the raw date value bytes (without + tag/length) + \param format Output byte indicating ASN.1 time type: ASN_UTC_TIME + (0x17) or ASN_GENERALIZED_TIME (0x18) + \param length Output length of the raw date value in bytes + + _Example_ + \code + const byte* certDate; + const byte* date; + byte format; + int length; + int ret = wc_GetDateInfo(certDate, certDateSz, &date, + &format, &length); + if (ret == 0) { + // date points to raw time bytes, format indicates UTC or + // Generalized time, length is the number of date value bytes + } + \endcode + + \sa wc_GetCertDates + \sa wc_GetDateAsCalendarTime +*/ +int wc_GetDateInfo(const byte* certDate, int certDateSz, + const byte** date, byte* format, int* length); + +/*! + \ingroup ASN + \brief Converts certificate date to calendar time structure. + + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + \return ASN_TIME_E if time conversion fails + + \param date Date buffer + \param length Length of date buffer + \param format Date format (ASN_UTC_TIME or ASN_GENERALIZED_TIME) + \param timearg Pointer to tm structure to fill + + _Example_ + \code + const byte* date; + int length; + byte format; + struct tm timeInfo; + int ret = wc_GetDateAsCalendarTime(date, length, format, + &timeInfo); + \endcode + + \sa wc_GetDateInfo + \sa wc_GetCertDates +*/ +int wc_GetDateAsCalendarTime(const byte* date, int length, + byte format, struct tm* timearg); + /*! \ingroup ASN @@ -898,6 +1165,32 @@ int wc_SetDatesBuffer(Cert* cert, const byte* der, int derSz); int wc_SetAuthKeyIdFromPublicKey(Cert *cert, RsaKey *rsakey, ecc_key *eckey); +/*! + \ingroup ASN + \brief Sets authority key ID from public key with generic key type. 
+
+    \return 0 on success
+    \return BAD_FUNC_ARG if parameters invalid
+    \return MEMORY_E if memory allocation fails
+
+    \param cert Certificate structure
+    \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.)
+    \param key Pointer to key structure
+
+    _Example_
+    \code
+    Cert myCert;
+    wc_InitCert(&myCert);
+    RsaKey key;
+    int ret = wc_SetAuthKeyIdFromPublicKey_ex(&myCert, RSA_TYPE,
+                                              &key);
+    \endcode
+
+    \sa wc_SetAuthKeyIdFromPublicKey
+*/
+int wc_SetAuthKeyIdFromPublicKey_ex(Cert *cert, int keyType,
+                                    void* key);
+
 /*!
     \ingroup ASN
@@ -989,6 +1282,33 @@ int wc_SetAuthKeyId(Cert *cert, const char* file);
 int wc_SetSubjectKeyIdFromPublicKey(Cert *cert, RsaKey *rsakey,
                                     ecc_key *eckey);
 
+/*!
+    \ingroup ASN
+    \brief Sets subject key ID from public key with generic key type.
+
+    \return 0 on success
+    \return BAD_FUNC_ARG if parameters invalid
+    \return MEMORY_E if memory allocation fails
+    \return PUBLIC_KEY_E if error getting public key
+
+    \param cert Certificate structure
+    \param keyType Key type (RSA_TYPE, ECC_TYPE, ED25519_TYPE, etc.)
+    \param key Pointer to key structure
+
+    _Example_
+    \code
+    Cert myCert;
+    wc_InitCert(&myCert);
+    ecc_key key;
+    int ret = wc_SetSubjectKeyIdFromPublicKey_ex(&myCert, ECC_TYPE,
+                                                 &key);
+    \endcode
+
+    \sa wc_SetSubjectKeyIdFromPublicKey
+*/
+int wc_SetSubjectKeyIdFromPublicKey_ex(Cert *cert, int keyType,
+                                       void* key);
+
 /*!
     \ingroup ASN
@@ -1053,6 +1373,58 @@ int wc_SetSubjectKeyId(Cert *cert, const char* file);
 */
 int wc_SetKeyUsage(Cert *cert, const char *value);
 
+/*!
+    \ingroup ASN
+    \brief Sets extended key usage using comma-delimited string.
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + \return MEMORY_E if memory allocation fails + + \param cert Certificate structure + \param value Comma-delimited string of extended key usage values + + _Example_ + \code + Cert myCert; + wc_InitCert(&myCert); + int ret = wc_SetExtKeyUsage(&myCert, + "serverAuth,clientAuth"); + \endcode + + \sa wc_SetKeyUsage + \sa wc_SetExtKeyUsageOID +*/ +int wc_SetExtKeyUsage(Cert *cert, const char *value); + +/*! + \ingroup ASN + \brief Sets extended key usage using OID string. + + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + \return MEMORY_E if memory allocation fails + + \param cert Certificate structure + \param oid OID string + \param sz Length of OID string + \param idx Index for multiple OIDs + \param heap Heap hint for memory allocation + + _Example_ + \code + Cert myCert; + wc_InitCert(&myCert); + const char* oid = "1.3.6.1.5.5.7.3.1"; + int ret = wc_SetExtKeyUsageOID(&myCert, oid, strlen(oid), + 0, NULL); + \endcode + + \sa wc_SetExtKeyUsage +*/ +int wc_SetExtKeyUsageOID(Cert *cert, const char *oid, word32 sz, + byte idx, void* heap); + /*! \ingroup ASN @@ -1084,6 +1456,31 @@ int wc_SetKeyUsage(Cert *cert, const char *value); int wc_PemPubKeyToDer(const char* fileName, unsigned char* derBuf, int derSz); +/*! + \ingroup ASN + \brief Loads PEM public key from file to DER buffer. + + \return 0 on success + \return negative on error + + \param fileName Path to PEM file + \param der Pointer to DerBuffer pointer to allocate + + _Example_ + \code + DerBuffer* der = NULL; + int ret = wc_PemPubKeyToDer_ex("pubkey.pem", &der); + if (ret == 0) { + // Use der->buffer and der->length + wc_FreeDer(&der); + } + \endcode + + \sa wc_PemPubKeyToDer + \sa wc_FreeDer +*/ +int wc_PemPubKeyToDer_ex(const char* fileName, DerBuffer** der); + /*! 
\ingroup ASN @@ -1116,6 +1513,111 @@ int wc_PemPubKeyToDer(const char* fileName, int wc_PubKeyPemToDer(const unsigned char* pem, int pemSz, unsigned char* buff, int buffSz); +/*! + \ingroup ASN + \brief Gets PEM header and footer strings for given type. + + \return 0 on success + \return BAD_FUNC_ARG if parameters invalid + + \param type PEM type (CERT_TYPE, PRIVATEKEY_TYPE, etc.) + \param header Pointer to header string pointer + \param footer Pointer to footer string pointer + + _Example_ + \code + const char* header; + const char* footer; + int ret = wc_PemGetHeaderFooter(CERT_TYPE, &header, &footer); + \endcode + + \sa wc_PemToDer +*/ +int wc_PemGetHeaderFooter(int type, const char** header, + const char** footer); + +/*! + \ingroup ASN + \brief Allocates DER buffer with specified length and type. + + \return 0 on success + \return BAD_FUNC_ARG if pDer is NULL + \return MEMORY_E if allocation fails + + \param pDer Pointer to DerBuffer pointer to allocate + \param length Length of buffer to allocate + \param type Buffer type for tracking + \param heap Heap hint for memory allocation + + _Example_ + \code + DerBuffer* der = NULL; + int ret = wc_AllocDer(&der, 1024, CERT_TYPE, NULL); + if (ret == 0) { + // Use der->buffer + wc_FreeDer(&der); + } + \endcode + + \sa wc_FreeDer +*/ +int wc_AllocDer(DerBuffer** pDer, word32 length, int type, + void* heap); + +/*! + \ingroup ASN + \brief Frees DER buffer allocated by wc_AllocDer or wc_PemToDer. + + \param pDer Pointer to DerBuffer pointer to free + + _Example_ + \code + DerBuffer* der = NULL; + wc_AllocDer(&der, 1024, CERT_TYPE, NULL); + // Use der + wc_FreeDer(&der); + \endcode + + \sa wc_AllocDer + \sa wc_PemToDer +*/ +void wc_FreeDer(DerBuffer** pDer); + +/*! + \ingroup ASN + \brief Converts PEM to DER format with encryption info support. + + \return 0 on success + \return negative on error + + \param buff PEM buffer + \param longSz Size of PEM buffer + \param type PEM type (CERT_TYPE, PRIVATEKEY_TYPE, etc.) 
+ \param pDer Pointer to DerBuffer pointer to allocate + \param heap Heap hint for memory allocation + \param info Encryption info for encrypted PEM + \param keyFormat Pointer to store key format + + _Example_ + \code + const unsigned char* pem; + DerBuffer* der = NULL; + EncryptedInfo info; + int keyFormat; + int ret = wc_PemToDer(pem, pemSz, PRIVATEKEY_TYPE, &der, + NULL, &info, &keyFormat); + if (ret == 0) { + wc_FreeDer(&der); + } + \endcode + + \sa wc_PemCertToDer + \sa wc_FreeDer +*/ +int wc_PemToDer(const unsigned char* buff, long longSz, int type, + DerBuffer** pDer, void* heap, EncryptedInfo* info, + int* keyFormat); + /*! \ingroup ASN @@ -1309,6 +1811,138 @@ int wc_KeyPemToDer(const unsigned char* pem, int pemSz, int wc_CertPemToDer(const unsigned char* pem, int pemSz, unsigned char* buff, int buffSz, int type); +/*! + \ingroup ASN + \brief Loads PEM certificate from file to DER buffer. + + \return 0 on success + \return negative on error + + \param fileName Path to PEM certificate file + \param der Pointer to DerBuffer pointer to allocate + + _Example_ + \code + DerBuffer* der = NULL; + int ret = wc_PemCertToDer_ex("cert.pem", &der); + if (ret == 0) { + // Use der->buffer and der->length + wc_FreeDer(&der); + } + \endcode + + \sa wc_CertPemToDer + \sa wc_FreeDer +*/ +int wc_PemCertToDer_ex(const char* fileName, DerBuffer** der); + +/*! + \ingroup ASN + \brief Adds PKCS padding to buffer for RSA encryption. + + \return Padded size on success + \return 0 on error + + \param buf Buffer to pad + \param sz Current size of data in buffer + \param blockSz Block size for padding + + _Example_ + \code + byte buffer[256]; + word32 dataSz = 100; + word32 paddedSz = wc_PkcsPad(buffer, dataSz, 256); + \endcode + + \sa wc_RsaPublicEncrypt +*/ +word32 wc_PkcsPad(byte* buf, word32 sz, word32 blockSz); + +/*! + \ingroup RSA + \brief Decodes RSA public key and extracts modulus and exponent. 
+ + \return 0 on success + \return negative on error + + \param input DER encoded RSA public key buffer + \param inOutIdx Pointer to index in buffer + \param inSz Size of input buffer + \param n Pointer to modulus pointer + \param nSz Pointer to modulus size + \param e Pointer to exponent pointer + \param eSz Pointer to exponent size + + _Example_ + \code + const byte* n; + const byte* e; + word32 nSz, eSz, idx = 0; + int ret = wc_RsaPublicKeyDecode_ex(derBuf, &idx, derSz, + &n, &nSz, &e, &eSz); + \endcode + + \sa wc_RsaPublicKeyDecode +*/ +int wc_RsaPublicKeyDecode_ex(const byte* input, word32* inOutIdx, + word32 inSz, const byte** n, word32* nSz, + const byte** e, word32* eSz); + +/*! + \ingroup RSA + \brief Calculates DER encoded RSA public key size. + + \return Size on success + \return negative on error + + \param key RSA key structure + \param with_header Include sequence header if non-zero + + _Example_ + \code + RsaKey key; + int derSz = wc_RsaPublicKeyDerSize(&key, 1); + \endcode + + \sa wc_RsaKeyToDer +*/ +int wc_RsaPublicKeyDerSize(RsaKey* key, int with_header); + +/*! + \ingroup RSA + \brief Validates DER encoded RSA private key format. This function + validates the ASN.1 syntax and structure of the RSA private key + (sequences, integer tags, and lengths) without loading the key values + into an RsaKey structure. It does not perform mathematical validation + of the RSA key parameters (e.g., checking if p and q are prime, or if + the key components satisfy RSA mathematical relationships). 
+ + \return 0 on success (valid ASN.1 structure) + \return ASN_PARSE_E if ASN.1 parsing fails + \return ASN_RSA_KEY_E if RSA key structure is invalid + \return BAD_FUNC_ARG if parameters are invalid + + \param input DER encoded RSA private key buffer + \param inOutIdx Pointer to index in buffer (updated on success) + \param keySz Pointer to store modulus size in bytes + \param inSz Size of input buffer + + _Example_ + \code + word32 idx = 0; + int keySz; + int ret = wc_RsaPrivateKeyValidate(derBuf, &idx, &keySz, + derSz); + if (ret == 0) { + // ASN.1 structure is valid, keySz contains modulus size + } + \endcode + + \sa wc_RsaPrivateKeyDecode +*/ +int wc_RsaPrivateKeyValidate(const byte* input, word32* inOutIdx, + int* keySz, word32 inSz); + /*! \ingroup CertsKeys @@ -1332,6 +1966,145 @@ int wc_CertPemToDer(const unsigned char* pem, int pemSz, int wc_GetPubKeyDerFromCert(struct DecodedCert* cert, byte* derKey, word32* derKeySz); +/*! + \ingroup DSA + \brief Decodes DSA parameters from DER format. + + \return 0 on success + \return negative on error + + \param input DER encoded DSA parameters buffer + \param inOutIdx Pointer to index in buffer + \param key DSA key structure to store parameters + \param inSz Size of input buffer + + _Example_ + \code + DsaKey key; + word32 idx = 0; + int ret = wc_DsaParamsDecode(derBuf, &idx, &key, derSz); + \endcode + + \sa wc_DsaKeyToParamsDer +*/ +int wc_DsaParamsDecode(const byte* input, word32* inOutIdx, + DsaKey* key, word32 inSz); + +/*! + \ingroup DSA + \brief Encodes DSA parameters to DER format. + + \return Size on success + \return negative on error + + \param key DSA key structure with parameters + \param output Buffer for DER encoded parameters + \param inLen Size of output buffer + + _Example_ + \code + DsaKey key; + byte der[1024]; + int derSz = wc_DsaKeyToParamsDer(&key, der, sizeof(der)); + \endcode + + \sa wc_DsaParamsDecode +*/ +int wc_DsaKeyToParamsDer(DsaKey* key, byte* output, word32 inLen); + +/*! 
+ \ingroup DSA + \brief Encodes DSA parameters to DER with size output. + + \return 0 on success + \return negative on error + + \param key DSA key structure with parameters + \param output Buffer for DER encoded parameters + \param inLen Pointer to buffer size (in/out) + + _Example_ + \code + DsaKey key; + byte der[1024]; + word32 derSz = sizeof(der); + int ret = wc_DsaKeyToParamsDer_ex(&key, der, &derSz); + \endcode + + \sa wc_DsaKeyToParamsDer +*/ +int wc_DsaKeyToParamsDer_ex(DsaKey* key, byte* output, + word32* inLen); + +/*! + \ingroup DH + \brief Encodes DH parameters to DER format. + + \return 0 on success + \return negative on error + + \param key DH key structure with parameters + \param out Buffer for DER encoded parameters + \param outSz Pointer to buffer size (in/out) + + _Example_ + \code + DhKey key; + byte der[1024]; + word32 derSz = sizeof(der); + int ret = wc_DhParamsToDer(&key, der, &derSz); + \endcode + + \sa wc_DhKeyToDer +*/ +int wc_DhParamsToDer(DhKey* key, byte* out, word32* outSz); + +/*! + \ingroup DH + \brief Encodes DH public key to DER format. + + \return 0 on success + \return negative on error + + \param key DH key structure with public key + \param out Buffer for DER encoded public key + \param outSz Pointer to buffer size (in/out) + + _Example_ + \code + DhKey key; + byte der[1024]; + word32 derSz = sizeof(der); + int ret = wc_DhPubKeyToDer(&key, der, &derSz); + \endcode + + \sa wc_DhKeyToDer +*/ +int wc_DhPubKeyToDer(DhKey* key, byte* out, word32* outSz); + +/*! + \ingroup DH + \brief Encodes DH private key to DER format. 
+ + \return 0 on success + \return negative on error + + \param key DH key structure with private key + \param out Buffer for DER encoded private key + \param outSz Pointer to buffer size (in/out) + + _Example_ + \code + DhKey key; + byte der[1024]; + word32 derSz = sizeof(der); + int ret = wc_DhPrivKeyToDer(&key, der, &derSz); + \endcode + + \sa wc_DhKeyToDer +*/ +int wc_DhPrivKeyToDer(DhKey* key, byte* out, word32* outSz); + /*! \ingroup ASN @@ -1392,6 +2165,117 @@ int wc_GetPubKeyDerFromCert(struct DecodedCert* cert, int wc_EccPrivateKeyDecode(const byte* input, word32* inOutIdx, ecc_key* key, word32 inSz); +/*! + \ingroup ECC + \brief Encodes ECC private key to DER format. + + \return Size on success + \return negative on error + + \param key ECC key structure with private key + \param output Buffer for DER encoded private key + \param inLen Size of output buffer + + _Example_ + \code + ecc_key key; + byte der[1024]; + int derSz = wc_EccPrivateKeyToDer(&key, der, sizeof(der)); + \endcode + + \sa wc_EccPrivateKeyDecode +*/ +int wc_EccPrivateKeyToDer(ecc_key* key, byte* output, + word32 inLen); + +/*! + \ingroup ECC + \brief Calculates DER encoded ECC key size. + + \return Size on success + \return negative on error + + \param key ECC key structure + \param pub Non-zero to include public key + + _Example_ + \code + ecc_key key; + int derSz = wc_EccKeyDerSize(&key, 1); + \endcode + + \sa wc_EccPrivateKeyToDer +*/ +int wc_EccKeyDerSize(ecc_key* key, int pub); + +/*! + \ingroup ECC + \brief Encodes ECC private key to PKCS#8 format. 
+ + \return Size on success + \return negative on error + + \param key ECC key structure with private key + \param output Buffer for PKCS#8 encoded key + \param inLen Pointer to buffer size (in/out) + + _Example_ + \code + ecc_key key; + byte pkcs8[1024]; + word32 pkcs8Sz = sizeof(pkcs8); + int ret = wc_EccPrivateKeyToPKCS8(&key, pkcs8, &pkcs8Sz); + \endcode + + \sa wc_EccPrivateKeyToDer +*/ +int wc_EccPrivateKeyToPKCS8(ecc_key* key, byte* output, + word32* inLen); + +/*! + \ingroup ECC + \brief Encodes ECC key pair to PKCS#8 format. + + \return Size on success + \return negative on error + + \param key ECC key structure with key pair + \param output Buffer for PKCS#8 encoded key + \param inLen Pointer to buffer size (in/out) + + _Example_ + \code + ecc_key key; + byte pkcs8[1024]; + word32 pkcs8Sz = sizeof(pkcs8); + int ret = wc_EccKeyToPKCS8(&key, pkcs8, &pkcs8Sz); + \endcode + + \sa wc_EccPrivateKeyToPKCS8 +*/ +int wc_EccKeyToPKCS8(ecc_key* key, byte* output, + word32* inLen); + +/*! + \ingroup ECC + \brief Calculates DER encoded ECC public key size. + + \return Size on success + \return negative on error + + \param key ECC key structure + \param with_AlgCurve Include algorithm and curve if non-zero + + _Example_ + \code + ecc_key key; + int derSz = wc_EccPublicKeyDerSize(&key, 1); + \endcode + + \sa wc_EccPublicKeyToDer +*/ +int wc_EccPublicKeyDerSize(ecc_key* key, int with_AlgCurve); + /*! \ingroup ASN @@ -1770,6 +2654,345 @@ int wc_Curve25519PublicKeyToDer(curve25519_key* key, byte* output, word32 inLen, int wc_Curve25519KeyToDer(curve25519_key* key, byte* output, word32 inLen, int withAlg); +/*! + \ingroup Ed25519 + \brief Decodes Ed25519 private key from DER format. 
+ + \return 0 on success + \return negative on error + + \param input DER encoded Ed25519 private key buffer + \param inOutIdx Pointer to index in buffer + \param key Ed25519 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + ed25519_key key; + word32 idx = 0; + int ret = wc_Ed25519PrivateKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Ed25519PrivateKeyToDer +*/ +int wc_Ed25519PrivateKeyDecode(const byte* input, word32* inOutIdx, + ed25519_key* key, word32 inSz); + +/*! + \ingroup Ed25519 + \brief Decodes Ed25519 public key from DER format. + + \return 0 on success + \return negative on error + + \param input DER encoded Ed25519 public key buffer + \param inOutIdx Pointer to index in buffer + \param key Ed25519 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + ed25519_key key; + word32 idx = 0; + int ret = wc_Ed25519PublicKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Ed25519PublicKeyToDer +*/ +int wc_Ed25519PublicKeyDecode(const byte* input, word32* inOutIdx, + ed25519_key* key, word32 inSz); + +/*! + \ingroup Ed25519 + \brief Encodes Ed25519 key to DER format. + + \return Size on success + \return negative on error + + \param key Ed25519 key structure + \param output Buffer for DER encoded key + \param inLen Size of output buffer + + _Example_ + \code + ed25519_key key; + byte der[1024]; + int derSz = wc_Ed25519KeyToDer(&key, der, sizeof(der)); + \endcode + + \sa wc_Ed25519PrivateKeyToDer +*/ +int wc_Ed25519KeyToDer(const ed25519_key* key, byte* output, + word32 inLen); + +/*! + \ingroup Ed25519 + \brief Encodes Ed25519 private key to DER format. 
+ + \return Size on success + \return negative on error + + \param key Ed25519 key structure with private key + \param output Buffer for DER encoded private key + \param inLen Size of output buffer + + _Example_ + \code + ed25519_key key; + byte der[1024]; + int derSz = wc_Ed25519PrivateKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Ed25519PrivateKeyDecode +*/ +int wc_Ed25519PrivateKeyToDer(const ed25519_key* key, byte* output, + word32 inLen); + +/*! + \ingroup Ed25519 + \brief Encodes Ed25519 public key to DER format. + + \return Size on success + \return negative on error + + \param key Ed25519 key structure with public key + \param output Buffer for DER encoded public key + \param inLen Size of output buffer + + _Example_ + \code + ed25519_key key; + byte der[1024]; + int derSz = wc_Ed25519PublicKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Ed25519PublicKeyDecode +*/ +int wc_Ed25519PublicKeyToDer(const ed25519_key* key, byte* output, + int inLen); + +/*! + \ingroup Ed448 + \brief Decodes Ed448 private key from DER format. + + \return 0 on success + \return negative on error + + \param input DER encoded Ed448 private key buffer + \param inOutIdx Pointer to index in buffer + \param key Ed448 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + ed448_key key; + word32 idx = 0; + int ret = wc_Ed448PrivateKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Ed448PrivateKeyToDer +*/ +int wc_Ed448PrivateKeyDecode(const byte* input, word32* inOutIdx, + ed448_key* key, word32 inSz); + +/*! + \ingroup Ed448 + \brief Decodes Ed448 public key from DER format. 
+ + \return 0 on success + \return negative on error + + \param input DER encoded Ed448 public key buffer + \param inOutIdx Pointer to index in buffer + \param key Ed448 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + ed448_key key; + word32 idx = 0; + int ret = wc_Ed448PublicKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Ed448PublicKeyToDer +*/ +int wc_Ed448PublicKeyDecode(const byte* input, word32* inOutIdx, + ed448_key* key, word32 inSz); + +/*! + \ingroup Ed448 + \brief Encodes Ed448 key to DER format. + + \return Size on success + \return negative on error + + \param key Ed448 key structure + \param output Buffer for DER encoded key + \param inLen Size of output buffer + + _Example_ + \code + ed448_key key; + byte der[1024]; + int derSz = wc_Ed448KeyToDer(&key, der, sizeof(der)); + \endcode + + \sa wc_Ed448PrivateKeyToDer +*/ +int wc_Ed448KeyToDer(ed448_key* key, byte* output, word32 inLen); + +/*! + \ingroup Ed448 + \brief Encodes Ed448 private key to DER format. + + \return Size on success + \return negative on error + + \param key Ed448 key structure with private key + \param output Buffer for DER encoded private key + \param inLen Size of output buffer + + _Example_ + \code + ed448_key key; + byte der[1024]; + int derSz = wc_Ed448PrivateKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Ed448PrivateKeyDecode +*/ +int wc_Ed448PrivateKeyToDer(ed448_key* key, byte* output, + word32 inLen); + +/*! + \ingroup Ed448 + \brief Encodes Ed448 public key to DER format. + + \return Size on success + \return negative on error + + \param key Ed448 key structure with public key + \param output Buffer for DER encoded public key + \param inLen Size of output buffer + + _Example_ + \code + ed448_key key; + byte der[1024]; + int derSz = wc_Ed448PublicKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Ed448PublicKeyDecode +*/ +int wc_Ed448PublicKeyToDer(ed448_key* key, byte* output, + int inLen); + +/*! 
+ \ingroup Curve448 + \brief Decodes Curve448 private key from DER format. + + \return 0 on success + \return negative on error + + \param input DER encoded Curve448 private key buffer + \param inOutIdx Pointer to index in buffer + \param key Curve448 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + curve448_key key; + word32 idx = 0; + int ret = wc_Curve448PrivateKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Curve448PrivateKeyToDer +*/ +int wc_Curve448PrivateKeyDecode(const byte* input, word32* inOutIdx, + curve448_key* key, word32 inSz); + +/*! + \ingroup Curve448 + \brief Decodes Curve448 public key from DER format. + + \return 0 on success + \return negative on error + + \param input DER encoded Curve448 public key buffer + \param inOutIdx Pointer to index in buffer + \param key Curve448 key structure to store key + \param inSz Size of input buffer + + _Example_ + \code + curve448_key key; + word32 idx = 0; + int ret = wc_Curve448PublicKeyDecode(derBuf, &idx, &key, + derSz); + \endcode + + \sa wc_Curve448PublicKeyToDer +*/ +int wc_Curve448PublicKeyDecode(const byte* input, word32* inOutIdx, + curve448_key* key, word32 inSz); + +/*! + \ingroup Curve448 + \brief Encodes Curve448 private key to DER format. + + \return Size on success + \return negative on error + + \param key Curve448 key structure with private key + \param output Buffer for DER encoded private key + \param inLen Size of output buffer + + _Example_ + \code + curve448_key key; + byte der[1024]; + int derSz = wc_Curve448PrivateKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Curve448PrivateKeyDecode +*/ +int wc_Curve448PrivateKeyToDer(curve448_key* key, byte* output, + word32 inLen); + +/*! + \ingroup Curve448 + \brief Encodes Curve448 public key to DER format. 
+ + \return Size on success + \return negative on error + + \param key Curve448 key structure with public key + \param output Buffer for DER encoded public key + \param inLen Size of output buffer + + _Example_ + \code + curve448_key key; + byte der[1024]; + int derSz = wc_Curve448PublicKeyToDer(&key, der, + sizeof(der)); + \endcode + + \sa wc_Curve448PublicKeyDecode +*/ +int wc_Curve448PublicKeyToDer(curve448_key* key, byte* output, + word32 inLen); + /*! \ingroup ASN @@ -2013,11 +3236,318 @@ int wc_EncryptPKCS8Key(byte* key, word32 keySz, byte* out, int pbeOid, int encAlgId, byte* salt, word32 saltSz, int itt, WC_RNG* rng, void* heap); +/*! + \ingroup ASN + \brief Encrypts PKCS#8 key with extended parameters. + + \return Size on success + \return negative on error + + \param key Private key buffer + \param keySz Size of private key + \param out Output buffer for encrypted key + \param outSz Pointer to output buffer size (in/out) + \param password Password for encryption + \param passwordSz Password length + \param vPKCS PKCS version + \param pbeOid PBE algorithm OID + \param encAlgId Encryption algorithm ID + \param salt Salt buffer + \param saltSz Salt size + \param itt Iteration count + \param rng Random number generator + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + + _Example_ + \code + byte key[256], encrypted[512]; + word32 encSz = sizeof(encrypted); + WC_RNG rng; + int ret = wc_EncryptPKCS8Key_ex(key, keySz, encrypted, + &encSz, "password", 8, + PKCS5, PBES2, AES256CBCb, + NULL, 0, 2048, &rng, NULL, + INVALID_DEVID); + \endcode + + \sa wc_EncryptPKCS8Key +*/ +int wc_EncryptPKCS8Key_ex(byte* key, word32 keySz, byte* out, + word32* outSz, const char* password, + int passwordSz, int vPKCS, int pbeOid, + int encAlgId, byte* salt, word32 saltSz, + int itt, WC_RNG* rng, void* heap, + int devId); + +/*! + \ingroup ASN + \brief Gets current time for certificate operations. 
+
+    \return 0 on success
+    \return negative on error
+
+    \param timePtr Pointer to time buffer
+    \param timeSize Size of time buffer
+
+    _Example_
+    \code
+    time_t currentTime;
+    int ret = wc_GetTime(&currentTime, sizeof(currentTime));
+    \endcode
+
+    \sa wc_GetDateInfo
+*/
+int wc_GetTime(void* timePtr, word32 timeSize);
+
+/*!
+    \ingroup ASN
+    \brief Gets encryption info from encrypted PEM.
+
+    \return 0 on success
+    \return negative on error
+
+    \param info EncryptedInfo structure to populate
+    \param cipherName Cipher name string
+
+    _Example_
+    \code
+    EncryptedInfo info;
+    int ret = wc_EncryptedInfoGet(&info, "AES-256-CBC");
+    \endcode
+
+    \sa wc_PemToDer
+*/
+int wc_EncryptedInfoGet(EncryptedInfo* info,
+                        const char* cipherName);
+
+/*!
+    \ingroup ASN
+    \brief Parses PIV certificate format.
+
+    \return 0 on success
+    \return negative on error
+
+    \param cert PIV certificate structure to populate
+    \param buf Buffer containing PIV certificate
+    \param totalSz Size of buffer
+
+    _Example_
+    \code
+    wc_CertPIV cert;
+    int ret = wc_ParseCertPIV(&cert, pivBuf, pivSz);
+    \endcode
+
+    \sa wc_InitDecodedCert
+*/
+int wc_ParseCertPIV(wc_CertPIV* cert, const byte* buf,
+                    word32 totalSz);
+
+/*!
+    \ingroup ASN
+    \brief Extracts subject public key info from certificate.
+
+    \return Size on success
+    \return negative on error
+
+    \param certDer DER encoded certificate buffer
+    \param certDerSz Size of certificate
+    \param pubKeyDer Output buffer for public key
+    \param pubKeyDerSz Pointer to output buffer size (in/out)
+
+    _Example_
+    \code
+    byte pubKey[1024];
+    word32 pubKeySz = sizeof(pubKey);
+    int ret = wc_GetSubjectPubKeyInfoDerFromCert(certDer,
+                                                 certSz,
+                                                 pubKey,
+                                                 &pubKeySz);
+    \endcode
+
+    \sa wc_GetPubKeyDerFromCert
+*/
+int wc_GetSubjectPubKeyInfoDerFromCert(const byte* certDer,
+                                       word32 certDerSz,
+                                       byte* pubKeyDer,
+                                       word32* pubKeyDerSz);
+
+/*!
+    \ingroup ASN
+    \brief Extracts UUID from certificate.
+ + \return 0 on success + \return negative on error + + \param cert Decoded certificate structure + \param uuid Output buffer for UUID + \param uuidSz Pointer to UUID buffer size (in/out) + + _Example_ + \code + DecodedCert cert; + byte uuid[16]; + int uuidSz = sizeof(uuid); + int ret = wc_GetUUIDFromCert(&cert, uuid, &uuidSz); + \endcode + + \sa wc_ParseCert +*/ +int wc_GetUUIDFromCert(struct DecodedCert* cert, + byte* uuid, int* uuidSz); + +/*! + \ingroup ASN + \brief Extracts FASCN from certificate. + + \return 0 on success + \return negative on error + + \param cert Decoded certificate structure + \param fascn Output buffer for FASCN + \param fascnSz Pointer to FASCN buffer size (in/out) + + _Example_ + \code + DecodedCert cert; + byte fascn[25]; + int fascnSz = sizeof(fascn); + int ret = wc_GetFASCNFromCert(&cert, fascn, &fascnSz); + \endcode + + \sa wc_ParseCert +*/ +int wc_GetFASCNFromCert(struct DecodedCert* cert, + byte* fascn, int* fascnSz); + +/*! + \ingroup ASN + \brief Generates the pre-TBS (To Be Signed) certificate data from a + decoded certificate. The TBS portion is the certificate data that gets + signed by the certificate authority. This function is used in dual + algorithm certificate creation where the TBS data needs to be extracted + for signing with an alternative algorithm (e.g., a post-quantum algorithm). + + \note This API is only available when WOLFSSL_DUAL_ALG_CERTS is defined, + which enables support for dual algorithm certificates used in Post-Quantum + cryptography to provide hybrid signing with both traditional and PQ + algorithms. 
+ + \return Size of the pre-TBS data on success + \return Negative error code on failure + + \param cert Decoded certificate structure containing the certificate to + extract TBS data from + \param der Output buffer for the pre-TBS DER-encoded data + \param derSz Size of output buffer in bytes + + _Example_ + \code + DecodedCert cert; + byte preTbs[2048]; + int ret = wc_GeneratePreTBS(&cert, preTbs, sizeof(preTbs)); + if (ret > 0) { + // ret contains the size of the pre-TBS data + // preTbs can now be signed with an alternative algorithm + } + \endcode + + \sa wc_MakeCert + \sa wc_MakeSigWithBitStr +*/ +int wc_GeneratePreTBS(struct DecodedCert* cert, byte *der, + int derSz); + +/*! + \ingroup ASN + \brief Initializes decoded attribute certificate structure. + + \return void + + \param acert Attribute certificate structure to initialize + \param heap Heap hint for memory allocation + + _Example_ + \code + DecodedAcert acert; + wc_InitDecodedAcert(&acert, NULL); + \endcode + + \sa wc_FreeDecodedAcert +*/ +void wc_InitDecodedAcert(struct DecodedAcert* acert, + void* heap); + +/*! + \ingroup ASN + \brief Frees decoded attribute certificate structure. + + \return void + + \param acert Attribute certificate structure to free + + _Example_ + \code + DecodedAcert acert; + wc_InitDecodedAcert(&acert, NULL); + wc_FreeDecodedAcert(&acert); + \endcode + + \sa wc_InitDecodedAcert +*/ +void wc_FreeDecodedAcert(struct DecodedAcert * acert); + +/*! + \ingroup ASN + \brief Parses X.509 attribute certificate. + + \return 0 on success + \return negative on error + + \param acert Decoded attribute certificate structure + \param verify Non-zero to verify signature + + _Example_ + \code + DecodedAcert acert; + wc_InitDecodedAcert(&acert, NULL); + int ret = wc_ParseX509Acert(&acert, 1); + \endcode + + \sa wc_VerifyX509Acert +*/ +int wc_ParseX509Acert(struct DecodedAcert* acert, int verify); + +/*! + \ingroup ASN + \brief Verifies X.509 attribute certificate. 
+
+ \return 0 on success
+ \return negative on error
+
+ \param acert Attribute certificate buffer
+ \param acertSz Size of attribute certificate
+ \param issuerCert Issuer certificate buffer
+ \param issuerCertSz Size of issuer certificate
+ \param cm Certificate manager
+
+ _Example_
+ \code
+ int ret = wc_VerifyX509Acert(acertBuf, acertSz,
+ issuerBuf, issuerSz, cm);
+ \endcode
+
+ \sa wc_ParseX509Acert
+*/
+int wc_VerifyX509Acert(const byte* acert, word32 acertSz,
+ const byte* issuerCert,
+ word32 issuerCertSz, void* cm);
+
 /*!
     \ingroup ASN
     \brief This function takes an encrypted PKCS#8 DER key and decrypts it to
-    PKCS#8 unencrypted DER. Undoes the encryption done by wc_EncryptPKCS8Key.
+    PKCS#8 unencrypted DER. Undoes the encryption done by wc_EncryptPKCS8Key.
     See RFC5208. The input buffer is overwritten with the decrypted data.

     \return The length of the decrypted buffer on success.

@@ -2524,3 +4054,24 @@ int wc_Asn1_SetFile(Asn1* asn1, XFILE file);
  */
 int wc_Asn1_PrintAll(Asn1* asn1, Asn1PrintOptions* opts, unsigned char* data,
                      word32 len);
+
+/*!
+ \ingroup ASN
+ \brief Sets OID to name callback for ASN.1 parsing.
+ + \return 0 on success + \return negative on error + + \param asn1 ASN.1 structure + \param nameCb Callback function to convert OID to name + + _Example_ + \code + Asn1 asn1; + int ret = wc_Asn1_SetOidToNameCb(&asn1, myOidToNameCb); + \endcode + + \sa wc_Asn1_PrintAll +*/ +int wc_Asn1_SetOidToNameCb(Asn1* asn1, Asn1OidToNameCb nameCb); + diff --git a/doc/dox_comments/header_files/blake2.h b/doc/dox_comments/header_files/blake2.h index 71a858cdf..57768d254 100644 --- a/doc/dox_comments/header_files/blake2.h +++ b/doc/dox_comments/header_files/blake2.h @@ -14,7 +14,7 @@ \code Blake2b b2b; // initialize Blake2b structure with 64 byte digest - wc_InitBlake2b(&b2b, 64); + wc_InitBlake2b(&b2b, WC_BLAKE2B_DIGEST_SIZE); \endcode \sa wc_Blake2bUpdate @@ -41,13 +41,13 @@ int wc_InitBlake2b(Blake2b* b2b, word32 digestSz); int ret; Blake2b b2b; // initialize Blake2b structure with 64 byte digest - wc_InitBlake2b(&b2b, 64); + wc_InitBlake2b(&b2b, WC_BLAKE2B_DIGEST_SIZE); byte plain[] = { // initialize input }; ret = wc_Blake2bUpdate(&b2b, plain, sizeof(plain)); - if( ret != 0) { - // error updating blake2b + if (ret != 0) { + // error updating blake2b } \endcode @@ -78,14 +78,14 @@ int wc_Blake2bUpdate(Blake2b* b2b, const byte* data, word32 sz); \code int ret; Blake2b b2b; - byte hash[64]; + byte hash[WC_BLAKE2B_DIGEST_SIZE]; // initialize Blake2b structure with 64 byte digest - wc_InitBlake2b(&b2b, 64); + wc_InitBlake2b(&b2b, WC_BLAKE2B_DIGEST_SIZE); ... // call wc_Blake2bUpdate to add data to hash - ret = wc_Blake2bFinal(&b2b, hash, 64); - if( ret != 0) { - // error generating blake2b hash + ret = wc_Blake2bFinal(&b2b, hash, WC_BLAKE2B_DIGEST_SIZE); + if (ret != 0) { + // error generating blake2b hash } \endcode @@ -93,3 +93,326 @@ int wc_Blake2bUpdate(Blake2b* b2b, const byte* data, word32 sz); \sa wc_Blake2bUpdate */ int wc_Blake2bFinal(Blake2b* b2b, byte* final, word32 requestSz); + +/*! 
+ \ingroup BLAKE2
+
+ \brief Initialize an HMAC-BLAKE2b message authentication code computation.
+
+ \return 0 Returned upon successfully initializing the HMAC-BLAKE2b MAC
+ computation.
+
+ \param b2b Blake2b structure to be used for the MAC computation.
+ \param key pointer to the key
+ \param key_len length of the key
+
+ _Example_
+ \code
+ Blake2b b2b;
+ int ret;
+ byte key[] = {4, 5, 6};
+ ret = wc_Blake2bHmacInit(&b2b, key, sizeof(key));
+ if (ret != 0) {
+ // error generating HMAC-BLAKE2b
+ }
+ \endcode
+*/
+int wc_Blake2bHmacInit(Blake2b * b2b,
+ const byte * key, size_t key_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Update an HMAC-BLAKE2b message authentication code computation with
+ additional input data.
+
+ \return 0 Returned upon successfully updating the HMAC-BLAKE2b MAC
+ computation.
+
+ \param b2b Blake2b structure to be used for the MAC computation.
+ \param in pointer to the input data
+ \param in_len length of the input data
+
+ _Example_
+ \code
+ Blake2b b2b;
+ int ret;
+ byte key[] = {4, 5, 6};
+ byte data[] = {1, 2, 3};
+ ret = wc_Blake2bHmacInit(&b2b, key, sizeof(key));
+ ret = wc_Blake2bHmacUpdate(&b2b, data, sizeof(data));
+ \endcode
+*/
+int wc_Blake2bHmacUpdate(Blake2b * b2b,
+ const byte * in, size_t in_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Finalize an HMAC-BLAKE2b message authentication code computation.
+
+ \return 0 Returned upon successfully finalizing the HMAC-BLAKE2b MAC
+ computation.
+
+ \param b2b Blake2b structure to be used for the MAC computation.
+
+ \param key pointer to the key
+ \param key_len length of the key
+ \param out output buffer to store computed MAC
+ \param out_len length of output buffer
+
+ _Example_
+ \code
+ Blake2b b2b;
+ int ret;
+ byte key[] = {4, 5, 6};
+ byte data[] = {1, 2, 3};
+ byte mac[WC_BLAKE2B_DIGEST_SIZE];
+ ret = wc_Blake2bHmacInit(&b2b, key, sizeof(key));
+ ret = wc_Blake2bHmacUpdate(&b2b, data, sizeof(data));
+ ret = wc_Blake2bHmacFinal(&b2b, key, sizeof(key), mac, sizeof(mac));
+ \endcode
+*/
+int wc_Blake2bHmacFinal(Blake2b * b2b,
+ const byte * key, size_t key_len,
+ byte * out, size_t out_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Compute the HMAC-BLAKE2b message authentication code of the given
+ input data using the given key.
+
+ \return 0 Returned upon successfully computing the HMAC-BLAKE2b MAC.
+
+ \param in pointer to the input data
+ \param in_len length of the input data
+ \param key pointer to the key
+ \param key_len length of the key
+ \param out output buffer to store computed MAC
+ \param out_len length of output buffer
+
+ _Example_
+ \code
+ int ret;
+ byte mac[WC_BLAKE2B_DIGEST_SIZE];
+ byte data[] = {1, 2, 3};
+ byte key[] = {4, 5, 6};
+ ret = wc_Blake2bHmac(data, sizeof(data), key, sizeof(key), mac, sizeof(mac));
+ if (ret != 0) {
+ // error generating HMAC-BLAKE2b
+ }
+ \endcode
+*/
+int wc_Blake2bHmac(const byte * in, size_t in_len,
+ const byte * key, size_t key_len,
+ byte * out, size_t out_len);
+
+
+/*!
+ \ingroup BLAKE2
+
+ \brief This function initializes a Blake2s structure for use with the
+ Blake2 hash function.
+
+ \return 0 Returned upon successfully initializing the Blake2s structure and
+ setting the digest size.
+ + \param b2s pointer to the Blake2s structure to initialize + \param digestSz length of the blake 2 digest to implement + + _Example_ + \code + Blake2s b2s; + // initialize Blake2s structure with 32 byte digest + wc_InitBlake2s(&b2s, WC_BLAKE2S_DIGEST_SIZE); + \endcode + + \sa wc_Blake2sUpdate +*/ +int wc_InitBlake2s(Blake2s* b2s, word32 digestSz); + +/*! + \ingroup BLAKE2 + + \brief This function updates the Blake2s hash with the given input data. + This function should be called after wc_InitBlake2s, and repeated until + one is ready for the final hash: wc_Blake2sFinal. + + \return 0 Returned upon successfully update the Blake2s structure with + the given data + \return -1 Returned if there is a failure while compressing the input data + + \param b2s pointer to the Blake2s structure to update + \param data pointer to a buffer containing the data to append + \param sz length of the input data to append + + _Example_ + \code + int ret; + Blake2s b2s; + // initialize Blake2s structure with 32 byte digest + wc_InitBlake2s(&b2s, WC_BLAKE2S_DIGEST_SIZE); + + byte plain[] = { // initialize input }; + + ret = wc_Blake2sUpdate(&b2s, plain, sizeof(plain)); + if (ret != 0) { + // error updating blake2s + } + \endcode + + \sa wc_InitBlake2s + \sa wc_Blake2sFinal +*/ +int wc_Blake2sUpdate(Blake2s* b2s, const byte* data, word32 sz); + +/*! + \ingroup BLAKE2 + + \brief This function computes the Blake2s hash of the previously supplied + input data. The output hash will be of length requestSz, or, if + requestSz==0, the digestSz of the b2s structure. This function should be + called after wc_InitBlake2s and wc_Blake2sUpdate has been processed for + each piece of input data desired. + + \return 0 Returned upon successfully computing the Blake2s hash + \return -1 Returned if there is a failure while parsing the Blake2s hash + + \param b2s pointer to the Blake2s structure to update + \param final pointer to a buffer in which to store the blake2s hash. 
+ Should be of length requestSz
+ \param requestSz length of the digest to compute. When this is zero,
+ b2s->digestSz will be used instead
+
+ _Example_
+ \code
+ int ret;
+ Blake2s b2s;
+ byte hash[WC_BLAKE2S_DIGEST_SIZE];
+ // initialize Blake2s structure with 32 byte digest
+ wc_InitBlake2s(&b2s, WC_BLAKE2S_DIGEST_SIZE);
+ ... // call wc_Blake2sUpdate to add data to hash
+
+ ret = wc_Blake2sFinal(&b2s, hash, WC_BLAKE2S_DIGEST_SIZE);
+ if (ret != 0) {
+ // error generating blake2s hash
+ }
+ \endcode
+
+ \sa wc_InitBlake2s
+ \sa wc_Blake2sUpdate
+*/
+int wc_Blake2sFinal(Blake2s* b2s, byte* final, word32 requestSz);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Initialize an HMAC-BLAKE2s message authentication code computation.
+
+ \return 0 Returned upon successfully initializing the HMAC-BLAKE2s MAC
+ computation.
+
+ \param b2s Blake2s structure to be used for the MAC computation.
+ \param key pointer to the key
+ \param key_len length of the key
+
+ _Example_
+ \code
+ Blake2s b2s;
+ int ret;
+ byte key[] = {4, 5, 6};
+ ret = wc_Blake2sHmacInit(&b2s, key, sizeof(key));
+ if (ret != 0) {
+ // error generating HMAC-BLAKE2s
+ }
+ \endcode
+*/
+int wc_Blake2sHmacInit(Blake2s * b2s,
+ const byte * key, size_t key_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Update an HMAC-BLAKE2s message authentication code computation with
+ additional input data.
+
+ \return 0 Returned upon successfully updating the HMAC-BLAKE2s MAC
+ computation.
+
+ \param b2s Blake2s structure to be used for the MAC computation.
+ \param in pointer to the input data
+ \param in_len length of the input data
+
+ _Example_
+ \code
+ Blake2s b2s;
+ int ret;
+ byte key[] = {4, 5, 6};
+ byte data[] = {1, 2, 3};
+ ret = wc_Blake2sHmacInit(&b2s, key, sizeof(key));
+ ret = wc_Blake2sHmacUpdate(&b2s, data, sizeof(data));
+ \endcode
+*/
+int wc_Blake2sHmacUpdate(Blake2s * b2s,
+ const byte * in, size_t in_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief Finalize an HMAC-BLAKE2s message authentication code computation.
+
+ \return 0 Returned upon successfully finalizing the HMAC-BLAKE2s MAC
+ computation.
+
+ \param b2s Blake2s structure to be used for the MAC computation.
+ \param key pointer to the key
+ \param key_len length of the key
+ \param out output buffer to store computed MAC
+ \param out_len length of output buffer
+
+ _Example_
+ \code
+ Blake2s b2s;
+ int ret;
+ byte key[] = {4, 5, 6};
+ byte data[] = {1, 2, 3};
+ byte mac[WC_BLAKE2S_DIGEST_SIZE];
+ ret = wc_Blake2sHmacInit(&b2s, key, sizeof(key));
+ ret = wc_Blake2sHmacUpdate(&b2s, data, sizeof(data));
+ ret = wc_Blake2sHmacFinal(&b2s, key, sizeof(key), mac, sizeof(mac));
+ \endcode
+*/
+int wc_Blake2sHmacFinal(Blake2s * b2s,
+ const byte * key, size_t key_len,
+ byte * out, size_t out_len);
+
+/*!
+ \ingroup BLAKE2
+
+ \brief This function computes the HMAC-BLAKE2s message authentication code
+ of the given input data using the given key.
+
+ \return 0 Returned upon successfully computing the HMAC-BLAKE2s MAC.
+
+ \param in pointer to the input data
+ \param in_len length of the input data
+ \param key pointer to the key
+ \param key_len length of the key
+ \param out output buffer to store computed MAC
+ \param out_len length of output buffer
+
+ _Example_
+ \code
+ int ret;
+ byte mac[WC_BLAKE2S_DIGEST_SIZE];
+ byte data[] = {1, 2, 3};
+ byte key[] = {4, 5, 6};
+ ret = wc_Blake2sHmac(data, sizeof(data), key, sizeof(key), mac, sizeof(mac));
+ if (ret != 0) {
+ // error generating HMAC-BLAKE2s
+ }
+ \endcode
+*/
+int wc_Blake2sHmac(const byte * in, size_t in_len,
+ const byte * key, size_t key_len,
+ byte * out, size_t out_len);
diff --git a/doc/dox_comments/header_files/chacha.h b/doc/dox_comments/header_files/chacha.h
index 3daacd85b..890b82702 100644
--- a/doc/dox_comments/header_files/chacha.h
+++ b/doc/dox_comments/header_files/chacha.h
@@ -97,3 +97,44 @@ int wc_Chacha_Process(ChaCha* ctx, byte* cipher, const byte* plain,
     \sa wc_Chacha_Process
 */
 int wc_Chacha_SetKey(ChaCha* ctx, const byte* key, word32
keySz); + +/*! + \ingroup ChaCha + \brief This function sets the key and nonce for an XChaCha cipher + context. XChaCha extends ChaCha20 to use a 192-bit nonce instead of + 96 bits, providing better security for applications that need to + encrypt many messages with the same key. + + \return 0 On success. + \return BAD_FUNC_ARG If ctx, key, or nonce is NULL, or if keySz is + invalid, or if nonceSz is not XCHACHA_NONCE_BYTES (24 bytes). + \return Other negative values on error. + + \param ctx pointer to the ChaCha structure to initialize + \param key pointer to the key buffer (16 or 32 bytes) + \param keySz length of the key in bytes (16 or 32) + \param nonce pointer to the nonce buffer (must be 24 bytes) + \param nonceSz length of the nonce in bytes (must be 24) + \param counter initial block counter value (usually 0) + + _Example_ + \code + ChaCha ctx; + byte key[32] = { }; // 256-bit key + byte nonce[24] = { }; // 192-bit nonce + byte plaintext[100] = { }; // data to encrypt + byte ciphertext[100]; + + int ret = wc_XChacha_SetKey(&ctx, key, 32, nonce, 24, 0); + if (ret != 0) { + // error setting XChaCha key + } + wc_Chacha_Process(&ctx, ciphertext, plaintext, 100); + \endcode + + \sa wc_Chacha_SetKey + \sa wc_Chacha_SetIV + \sa wc_Chacha_Process +*/ +int wc_XChacha_SetKey(ChaCha *ctx, const byte *key, word32 keySz, + const byte *nonce, word32 nonceSz, word32 counter); diff --git a/doc/dox_comments/header_files/chacha20_poly1305.h b/doc/dox_comments/header_files/chacha20_poly1305.h index 53e2f5ef2..661940746 100644 --- a/doc/dox_comments/header_files/chacha20_poly1305.h +++ b/doc/dox_comments/header_files/chacha20_poly1305.h @@ -122,3 +122,273 @@ int wc_ChaCha20Poly1305_Decrypt( const byte* inCiphertext, word32 inCiphertextLen, const byte inAuthTag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE], byte* outPlaintext); + +/*! + \ingroup ChaCha20Poly1305 + \brief Compares two authentication tags in constant time to prevent + timing attacks. 
+ + \return 0 If tags match + \return MAC_CMP_FAILED_E If tags do not match + + \param authTag First authentication tag + \param authTagChk Second authentication tag to compare + + _Example_ + \code + byte tag1[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]; + byte tag2[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]; + + int ret = wc_ChaCha20Poly1305_CheckTag(tag1, tag2); + if (ret != 0) { + // tags do not match + } + \endcode + + \sa wc_ChaCha20Poly1305_Decrypt +*/ +int wc_ChaCha20Poly1305_CheckTag( + const byte authTag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE], + const byte authTagChk[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]); + +/*! + \ingroup ChaCha20Poly1305 + \brief Initializes a ChaChaPoly_Aead structure for incremental + encryption or decryption operations. + + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + + \param aead Pointer to ChaChaPoly_Aead structure to initialize + \param inKey 32-byte encryption key + \param inIV 12-byte initialization vector + \param isEncrypt 1 for encryption, 0 for decryption + + _Example_ + \code + ChaChaPoly_Aead aead; + byte key[CHACHA20_POLY1305_AEAD_KEYSIZE]; + byte iv[CHACHA20_POLY1305_AEAD_IV_SIZE]; + + int ret = wc_ChaCha20Poly1305_Init(&aead, key, iv, 1); + if (ret != 0) { + // error initializing + } + \endcode + + \sa wc_ChaCha20Poly1305_UpdateAad + \sa wc_ChaCha20Poly1305_UpdateData + \sa wc_ChaCha20Poly1305_Final +*/ +int wc_ChaCha20Poly1305_Init(ChaChaPoly_Aead* aead, + const byte inKey[CHACHA20_POLY1305_AEAD_KEYSIZE], + const byte inIV[CHACHA20_POLY1305_AEAD_IV_SIZE], + int isEncrypt); + +/*! + \ingroup ChaCha20Poly1305 + \brief Updates the AEAD context with additional authenticated data + (AAD). Must be called after Init and before UpdateData. 
+ + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + + \param aead Pointer to initialized ChaChaPoly_Aead structure + \param inAAD Additional authenticated data + \param inAADLen Length of AAD in bytes + + _Example_ + \code + ChaChaPoly_Aead aead; + byte aad[]; // AAD data + + wc_ChaCha20Poly1305_Init(&aead, key, iv, 1); + int ret = wc_ChaCha20Poly1305_UpdateAad(&aead, aad, sizeof(aad)); + if (ret != 0) { + // error updating AAD + } + \endcode + + \sa wc_ChaCha20Poly1305_Init + \sa wc_ChaCha20Poly1305_UpdateData +*/ +int wc_ChaCha20Poly1305_UpdateAad(ChaChaPoly_Aead* aead, + const byte* inAAD, word32 inAADLen); + +/*! + \ingroup ChaCha20Poly1305 + \brief Encrypts or decrypts data incrementally. Can be called + multiple times to process data in chunks. + + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + + \param aead Pointer to initialized ChaChaPoly_Aead structure + \param inData Input data (plaintext or ciphertext) + \param outData Output buffer for result + \param dataLen Length of data to process + + _Example_ + \code + ChaChaPoly_Aead aead; + byte plain[]; // plaintext + byte cipher[sizeof(plain)]; + + wc_ChaCha20Poly1305_Init(&aead, key, iv, 1); + wc_ChaCha20Poly1305_UpdateAad(&aead, aad, aadLen); + int ret = wc_ChaCha20Poly1305_UpdateData(&aead, plain, + cipher, sizeof(plain)); + \endcode + + \sa wc_ChaCha20Poly1305_Init + \sa wc_ChaCha20Poly1305_Final +*/ +int wc_ChaCha20Poly1305_UpdateData(ChaChaPoly_Aead* aead, + const byte* inData, byte* outData, word32 dataLen); + +/*! + \ingroup ChaCha20Poly1305 + \brief Finalizes the AEAD operation and generates the + authentication tag. 
+ + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + + \param aead Pointer to ChaChaPoly_Aead structure + \param outAuthTag Buffer to store 16-byte authentication tag + + _Example_ + \code + ChaChaPoly_Aead aead; + byte authTag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]; + + wc_ChaCha20Poly1305_Init(&aead, key, iv, 1); + wc_ChaCha20Poly1305_UpdateAad(&aead, aad, aadLen); + wc_ChaCha20Poly1305_UpdateData(&aead, plain, cipher, plainLen); + int ret = wc_ChaCha20Poly1305_Final(&aead, authTag); + \endcode + + \sa wc_ChaCha20Poly1305_Init + \sa wc_ChaCha20Poly1305_UpdateData +*/ +int wc_ChaCha20Poly1305_Final(ChaChaPoly_Aead* aead, + byte outAuthTag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]); + +/*! + \ingroup ChaCha20Poly1305 + \brief Initializes XChaCha20-Poly1305 AEAD with extended nonce. + XChaCha20 uses a 24-byte nonce instead of 12-byte. + + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + + \param aead Pointer to ChaChaPoly_Aead structure + \param ad Additional authenticated data + \param ad_len Length of AAD + \param inKey Encryption key + \param inKeySz Key size (must be 32) + \param inIV Initialization vector + \param inIVSz IV size (must be 24 for XChaCha20) + \param isEncrypt 1 for encryption, 0 for decryption + + _Example_ + \code + ChaChaPoly_Aead aead; + byte key[32]; + byte iv[24]; + byte aad[]; // AAD + + int ret = wc_XChaCha20Poly1305_Init(&aead, aad, sizeof(aad), + key, 32, iv, 24, 1); + \endcode + + \sa wc_XChaCha20Poly1305_Encrypt + \sa wc_XChaCha20Poly1305_Decrypt +*/ +int wc_XChaCha20Poly1305_Init(ChaChaPoly_Aead* aead, + const byte *ad, word32 ad_len, + const byte *inKey, word32 inKeySz, + const byte *inIV, word32 inIVSz, + int isEncrypt); + +/*! + \ingroup ChaCha20Poly1305 + \brief One-shot XChaCha20-Poly1305 encryption with 24-byte nonce. 
+ + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + \return BUFFER_E If dst_space is insufficient + + \param dst Output buffer for ciphertext and tag + \param dst_space Size of output buffer + \param src Input plaintext + \param src_len Length of plaintext + \param ad Additional authenticated data + \param ad_len Length of AAD + \param nonce 24-byte nonce + \param nonce_len Nonce length (must be 24) + \param key 32-byte encryption key + \param key_len Key length (must be 32) + + _Example_ + \code + byte key[32], nonce[24]; + byte plain[]; // plaintext + byte cipher[sizeof(plain) + 16]; + + int ret = wc_XChaCha20Poly1305_Encrypt(cipher, sizeof(cipher), + plain, sizeof(plain), + NULL, 0, nonce, 24, + key, 32); + \endcode + + \sa wc_XChaCha20Poly1305_Decrypt +*/ +int wc_XChaCha20Poly1305_Encrypt(byte *dst, size_t dst_space, + const byte *src, size_t src_len, + const byte *ad, size_t ad_len, + const byte *nonce, size_t nonce_len, + const byte *key, size_t key_len); + +/*! + \ingroup ChaCha20Poly1305 + \brief One-shot XChaCha20-Poly1305 decryption with 24-byte nonce. 
+ + \return 0 On success + \return BAD_FUNC_ARG If parameters are invalid + \return BUFFER_E If dst_space is insufficient + \return MAC_CMP_FAILED_E If authentication fails + + \param dst Output buffer for plaintext + \param dst_space Size of output buffer + \param src Input ciphertext with tag + \param src_len Length of ciphertext plus tag + \param ad Additional authenticated data + \param ad_len Length of AAD + \param nonce 24-byte nonce + \param nonce_len Nonce length (must be 24) + \param key 32-byte decryption key + \param key_len Key length (must be 32) + + _Example_ + \code + byte key[32], nonce[24]; + byte cipher[]; // ciphertext + tag + byte plain[sizeof(cipher) - 16]; + + int ret = wc_XChaCha20Poly1305_Decrypt(plain, sizeof(plain), + cipher, sizeof(cipher), + NULL, 0, nonce, 24, + key, 32); + if (ret == MAC_CMP_FAILED_E) { + // authentication failed + } + \endcode + + \sa wc_XChaCha20Poly1305_Encrypt +*/ +int wc_XChaCha20Poly1305_Decrypt(byte *dst, size_t dst_space, + const byte *src, size_t src_len, + const byte *ad, size_t ad_len, + const byte *nonce, size_t nonce_len, + const byte *key, size_t key_len); diff --git a/doc/dox_comments/header_files/cmac.h b/doc/dox_comments/header_files/cmac.h index 989d2ed42..4de7810a8 100644 --- a/doc/dox_comments/header_files/cmac.h +++ b/doc/dox_comments/header_files/cmac.h @@ -206,3 +206,82 @@ int wc_AesCmacVerify(const byte* check, word32 checkSz, \endcode */ int wc_CMAC_Grow(Cmac* cmac, const byte* in, int inSz); + +/*! + \ingroup CMAC + \brief Single shot AES-CMAC generation with extended parameters + including heap and device ID. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + + \param cmac Pointer to Cmac structure (can be NULL for one-shot) + \param out Buffer to store MAC output + \param outSz Pointer to output size (in/out) + \param in Input data to authenticate + \param inSz Length of input data + \param key AES key + \param keySz Key size (16, 24, or 32 bytes) + \param heap Heap hint for memory allocation (can be NULL) + \param devId Device ID for hardware acceleration (use + INVALID_DEVID for software) + + _Example_ + \code + byte mac[AES_BLOCK_SIZE]; + word32 macSz = sizeof(mac); + byte key[16], msg[64]; + + int ret = wc_AesCmacGenerate_ex(NULL, mac, &macSz, msg, + sizeof(msg), key, sizeof(key), + NULL, INVALID_DEVID); + \endcode + + \sa wc_AesCmacGenerate + \sa wc_AesCmacVerify_ex +*/ +int wc_AesCmacGenerate_ex(Cmac *cmac, byte* out, word32* outSz, + const byte* in, word32 inSz, + const byte* key, word32 keySz, + void* heap, int devId); + +/*! + \ingroup CMAC + \brief Single shot AES-CMAC verification with extended parameters + including heap and device ID. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + \return MAC_CMP_FAILED_E if MAC verification fails + + \param cmac Pointer to Cmac structure (can be NULL for one-shot) + \param check Expected MAC value to verify + \param checkSz Size of expected MAC + \param in Input data to authenticate + \param inSz Length of input data + \param key AES key + \param keySz Key size (16, 24, or 32 bytes) + \param heap Heap hint for memory allocation (can be NULL) + \param devId Device ID for hardware acceleration (use + INVALID_DEVID for software) + + _Example_ + \code + byte mac[AES_BLOCK_SIZE]; + byte key[16], msg[64]; + + int ret = wc_AesCmacVerify_ex(NULL, mac, sizeof(mac), msg, + sizeof(msg), key, sizeof(key), + NULL, INVALID_DEVID); + if (ret == MAC_CMP_FAILED_E) { + // MAC verification failed + } + \endcode + + \sa wc_AesCmacVerify + \sa wc_AesCmacGenerate_ex +*/ +int wc_AesCmacVerify_ex(Cmac* cmac, const byte* check, word32 checkSz, + const byte* in, word32 inSz, + const byte* key, word32 keySz, + void* heap, int devId); diff --git a/doc/dox_comments/header_files/coding.h b/doc/dox_comments/header_files/coding.h index 60677ab1d..e1d7d5e9f 100644 --- a/doc/dox_comments/header_files/coding.h +++ b/doc/dox_comments/header_files/coding.h @@ -235,3 +235,43 @@ int Base16_Decode(const byte* in, word32 inLen, byte* out, word32* outLen); */ int Base16_Encode(const byte* in, word32 inLen, byte* out, word32* outLen); + +/*! + \ingroup Base_Encoding + \brief This function decodes Base64 encoded input without using + constant-time operations. This is faster than the constant-time + version but may be vulnerable to timing attacks. Use only when + timing attacks are not a concern. + + \return 0 On successfully decoding the Base64 encoded input. + \return BAD_FUNC_ARG If the output buffer is too small to store the + decoded input. + \return ASN_INPUT_E If a character in the input buffer falls outside + of the Base64 range or if there is an invalid line ending. 
+ \return BUFFER_E If running out of buffer while decoding. + + \param in pointer to the input buffer to decode + \param inLen length of the input buffer to decode + \param out pointer to the output buffer to store decoded message + \param outLen pointer to length of output buffer; updated with bytes + written + + _Example_ + \code + byte encoded[] = "SGVsbG8gV29ybGQ="; // "Hello World" in Base64 + byte decoded[64]; + word32 outLen = sizeof(decoded); + + int ret = Base64_Decode_nonCT(encoded, sizeof(encoded)-1, decoded, + &outLen); + if (ret != 0) { + // error decoding input + } + // decoded now contains "Hello World" + \endcode + + \sa Base64_Decode + \sa Base64_Encode +*/ +int Base64_Decode_nonCT(const byte* in, word32 inLen, byte* out, + word32* outLen); diff --git a/doc/dox_comments/header_files/compress.h b/doc/dox_comments/header_files/compress.h index d17111cda..2d225ee47 100644 --- a/doc/dox_comments/header_files/compress.h +++ b/doc/dox_comments/header_files/compress.h @@ -70,3 +70,131 @@ int wc_Compress(byte* out, word32 outSz, const byte* in, word32 inSz, word32 fla \sa wc_Compress */ int wc_DeCompress(byte* out, word32 outSz, const byte* in, word32 inSz); + +/*! + \ingroup Compression + \brief This function compresses the given input data using Huffman + coding with extended parameters. This is similar to wc_Compress but + allows specification of compression flags and window bits for more + control over the compression process. 
+ + \return On successfully compressing the input data, returns the + number of bytes stored in the output buffer + \return COMPRESS_INIT_E Returned if there is an error initializing + the stream for compression + \return COMPRESS_E Returned if an error occurs during compression + + \param out pointer to the output buffer in which to store the + compressed data + \param outSz size available in the output buffer for storage + \param in pointer to the buffer containing the message to compress + \param inSz size of the input message to compress + \param flags flags to control how compression operates + \param windowBits the base two logarithm of the window size (8..15) + + _Example_ + \code + byte message[] = { // initialize text to compress }; + byte compressed[(sizeof(message) + sizeof(message) * .001 + 12)]; + word32 flags = 0; + word32 windowBits = 15; // 32KB window + + int ret = wc_Compress_ex(compressed, sizeof(compressed), message, + sizeof(message), flags, windowBits); + if (ret < 0) { + // error compressing data + } + \endcode + + \sa wc_Compress + \sa wc_DeCompress_ex +*/ +int wc_Compress_ex(byte* out, word32 outSz, const byte* in, word32 inSz, + word32 flags, word32 windowBits); + +/*! + \ingroup Compression + \brief This function decompresses the given compressed data using + Huffman coding with extended parameters. This is similar to + wc_DeCompress but allows specification of window bits for more + control over the decompression process. 
+ + \return On successfully decompressing the input data, returns the + number of bytes stored in the output buffer + \return COMPRESS_INIT_E Returned if there is an error initializing + the stream for decompression + \return COMPRESS_E Returned if an error occurs during decompression + + \param out pointer to the output buffer in which to store the + decompressed data + \param outSz size available in the output buffer for storage + \param in pointer to the buffer containing the message to decompress + \param inSz size of the input message to decompress + \param windowBits the base two logarithm of the window size (8..15) + + _Example_ + \code + byte compressed[] = { // initialize compressed message }; + byte decompressed[MAX_MESSAGE_SIZE]; + int windowBits = 15; + + int ret = wc_DeCompress_ex(decompressed, sizeof(decompressed), + compressed, sizeof(compressed), + windowBits); + if (ret < 0) { + // error decompressing data + } + \endcode + + \sa wc_DeCompress + \sa wc_Compress_ex +*/ +int wc_DeCompress_ex(byte* out, word32 outSz, const byte* in, word32 inSz, + int windowBits); + +/*! + \ingroup Compression + \brief This function decompresses the given compressed data using + Huffman coding with dynamic memory allocation. The output buffer is + allocated dynamically and the caller is responsible for freeing it. 
+ + \return On successfully decompressing the input data, returns the + number of bytes stored in the output buffer + \return COMPRESS_INIT_E Returned if there is an error initializing + the stream for decompression + \return COMPRESS_E Returned if an error occurs during decompression + \return MEMORY_E Returned if memory allocation fails + + \param out pointer to pointer that will be set to the allocated + output buffer + \param max maximum size to allocate for output buffer + \param memoryType type of memory to allocate (DYNAMIC_TYPE_TMP_BUFFER) + \param in pointer to the buffer containing the message to decompress + \param inSz size of the input message to decompress + \param windowBits the base two logarithm of the window size (8..15) + \param heap heap hint for memory allocation (can be NULL) + + _Example_ + \code + byte compressed[] = { // initialize compressed message }; + byte* decompressed = NULL; + int max = 1024 * 1024; // 1MB max + + int ret = wc_DeCompressDynamic(&decompressed, max, + DYNAMIC_TYPE_TMP_BUFFER, compressed, + sizeof(compressed), 15, NULL); + if (ret < 0) { + // error decompressing data + } + else { + // use decompressed data + XFREE(decompressed, NULL, DYNAMIC_TYPE_TMP_BUFFER); + } + \endcode + + \sa wc_DeCompress + \sa wc_DeCompress_ex +*/ +int wc_DeCompressDynamic(byte** out, int max, int memoryType, + const byte* in, word32 inSz, int windowBits, + void* heap); diff --git a/doc/dox_comments/header_files/cryptocb.h b/doc/dox_comments/header_files/cryptocb.h index 35cc88ef2..145d8c9ff 100644 --- a/doc/dox_comments/header_files/cryptocb.h +++ b/doc/dox_comments/header_files/cryptocb.h @@ -109,3 +109,74 @@ int wc_CryptoCb_RegisterDevice(int devId, CryptoDevCallbackFunc cb, void* ctx); \sa wolfSSL_CTX_SetDevId */ void wc_CryptoCb_UnRegisterDevice(int devId); + +/*! + \ingroup CryptoCb + \brief This function returns the default device ID for crypto + callbacks. 
This is useful when you want to get the device ID that + was set as the default for the library. + + \return The default device ID, or INVALID_DEVID if no default is set. + + _Example_ + \code + int devId = wc_CryptoCb_DefaultDevID(); + if (devId != INVALID_DEVID) { + // default device ID is set + } + \endcode + + \sa wc_CryptoCb_RegisterDevice + \sa wc_CryptoCb_UnRegisterDevice +*/ +int wc_CryptoCb_DefaultDevID(void); + +/*! + \ingroup CryptoCb + \brief This function sets a callback for finding crypto devices. + The callback is invoked when a device ID needs to be resolved to + a device context. This is useful for dynamic device management. + + \return none No returns. + + \param cb callback function with prototype: + typedef void* (*CryptoDevCallbackFind)(int devId); + + _Example_ + \code + void* myDeviceFindCb(int devId) { + // lookup device context by ID + return deviceContext; + } + + wc_CryptoCb_SetDeviceFindCb(myDeviceFindCb); + \endcode + + \sa wc_CryptoCb_RegisterDevice +*/ +void wc_CryptoCb_SetDeviceFindCb(CryptoDevCallbackFind cb); + +/*! + \ingroup CryptoCb + \brief This function converts a wc_CryptoInfo structure to a + human-readable string for debugging purposes. The string is printed + to stdout and describes the cryptographic operation being performed. + + \return none No returns. 
+ + \param info pointer to the wc_CryptoInfo structure to convert + + _Example_ + \code + int myCryptoCb(int devId, wc_CryptoInfo* info, void* ctx) { + // print debug info about the operation + wc_CryptoCb_InfoString(info); + + // handle the operation + return CRYPTOCB_UNAVAILABLE; + } + \endcode + + \sa wc_CryptoCb_RegisterDevice +*/ +void wc_CryptoCb_InfoString(wc_CryptoInfo* info); diff --git a/doc/dox_comments/header_files/curve25519.h b/doc/dox_comments/header_files/curve25519.h index 1c12300bc..3c9b68f39 100644 --- a/doc/dox_comments/header_files/curve25519.h +++ b/doc/dox_comments/header_files/curve25519.h @@ -46,7 +46,7 @@ int wc_curve25519_make_key(WC_RNG* rng, int keysize, curve25519_key* key); \brief This function computes a shared secret key given a secret private key and a received public key. It stores the generated secret key in the - buffer out and assigns the variable of the secret key to outlen. Only + buffer out and assigns the length of the secret key to outlen. Only supports big endian. \return 0 Returned on successfully computing a shared secret key. @@ -93,7 +93,7 @@ int wc_curve25519_shared_secret(curve25519_key* private_key, \brief This function computes a shared secret key given a secret private key and a received public key. It stores the generated secret key in the - buffer out and assigns the variable of the secret key to outlen. Supports + buffer out and assigns the length of the secret key to outlen. Supports both big and little endian. \return 0 Returned on successfully computing a shared secret key. @@ -361,7 +361,7 @@ int wc_curve25519_import_private_raw_ex(const byte* priv, word32 privSz, \return 0 Returned on successfully exporting the private key from the curve25519_key structure. \return BAD_FUNC_ARG Returned if any input parameters are NULL. - \return ECC_BAD_ARG_E Returned if wc_curve25519_size() is not equal to key. + \return ECC_BAD_ARG_E Returned if *outLen is less than wc_curve25519_size(). 
 \param [in] key Pointer to the structure from which to export the key. \param [out] out Pointer to the buffer in which to store the exported key. @@ -372,7 +372,7 @@ int wc_curve25519_import_private_raw_ex(const byte* priv, word32 privSz, \code int ret; byte priv[32]; - int privSz; + word32 privSz; curve25519_key key; // initialize and make key @@ -402,7 +402,7 @@ int wc_curve25519_export_private_raw(curve25519_key* key, byte* out, \return 0 Returned on successfully exporting the private key from the curve25519_key structure. \return BAD_FUNC_ARG Returned if any input parameters are NULL. - \return ECC_BAD_ARG_E Returned if wc_curve25519_size() is not equal to key. + \return ECC_BAD_ARG_E Returned if *outLen is less than wc_curve25519_size(). \param [in] key Pointer to the structure from which to export the key. \param [out] out Pointer to the buffer in which to store the exported key. @@ -416,7 +416,7 @@ int wc_curve25519_export_private_raw(curve25519_key* key, byte* out, int ret; byte priv[32]; - int privSz; + word32 privSz; curve25519_key key; // initialize and make key ret = wc_curve25519_export_private_raw_ex(&key, priv, &privSz, @@ -656,7 +656,7 @@ int wc_curve25519_export_public_ex(curve25519_key* key, byte* out, \return ECC_BAD_ARG_E Returned if privSz is less than CURVE25519_KEY_SIZE or pubSz is less than CURVE25519_PUB_KEY_SIZE. - \param [in] key Pointer to the curve448_key structure in from which to + \param [in] key Pointer to the curve25519_key structure from which to export the key pair. \param [out] priv Pointer to the buffer in which to store the private key. \param [in,out] privSz On in, is the size of the priv buffer in bytes. @@ -702,7 +702,7 @@ int wc_curve25519_export_key_raw(curve25519_key* key, \return ECC_BAD_ARG_E Returned if privSz is less than CURVE25519_KEY_SIZE or pubSz is less than CURVE25519_PUB_KEY_SIZE. 
- \param [in] key Pointer to the curve448_key structure in from which to + \param [in] key Pointer to the curve25519_key structure from which to export the key pair. \param [out] priv Pointer to the buffer in which to store the private key. \param [in,out] privSz On in, is the size of the priv buffer in bytes. @@ -725,7 +725,7 @@ int wc_curve25519_export_key_raw(curve25519_key* key, curve25519_key key; // initialize and make key - ret = wc_curve25519_export_key_raw_ex(&key,priv, &privSz, pub, &pubSz, + ret = wc_curve25519_export_key_raw_ex(&key, priv, &privSz, pub, &pubSz, EC25519_BIG_ENDIAN); if (ret != 0) { // error exporting key @@ -769,3 +769,326 @@ int wc_curve25519_export_key_raw_ex(curve25519_key* key, */ int wc_curve25519_size(curve25519_key* key); + +/*! + \ingroup Curve25519 + \brief This function generates a Curve25519 public key from a given + private key. This is a lower-level function that operates directly + on byte buffers rather than curve25519_key structures. + + \return 0 On successfully generating the public key + \return ECC_BAD_ARG_E If the key sizes are invalid + \return BAD_FUNC_ARG If any input parameters are NULL + + \param public_size Size of the public key buffer (must be 32) + \param pub Pointer to buffer to store the public key + \param private_size Size of the private key (must be 32) + \param priv Pointer to buffer containing the private key + + _Example_ + \code + byte priv[CURVE25519_KEYSIZE]; + byte pub[CURVE25519_KEYSIZE]; + + // initialize priv with private key + int ret = wc_curve25519_make_pub(sizeof(pub), pub, sizeof(priv), + priv); + if (ret != 0) { + // error generating public key + } + \endcode + + \sa wc_curve25519_make_key + \sa wc_curve25519_make_pub_blind +*/ +int wc_curve25519_make_pub(int public_size, byte* pub, int private_size, + const byte* priv); + +/*! + \ingroup Curve25519 + \brief This function generates a Curve25519 public key from a given + private key with blinding to resist side-channel attacks. 
This adds + randomization to the scalar multiplication operation. + + \return 0 On successfully generating the public key + \return ECC_BAD_ARG_E If the key sizes are invalid + \return BAD_FUNC_ARG If any input parameters are NULL + + \param public_size Size of the public key buffer (must be 32) + \param pub Pointer to buffer to store the public key + \param private_size Size of the private key (must be 32) + \param priv Pointer to buffer containing the private key + \param rng Pointer to initialized RNG for blinding + + _Example_ + \code + WC_RNG rng; + byte priv[CURVE25519_KEYSIZE]; + byte pub[CURVE25519_KEYSIZE]; + + wc_InitRng(&rng); + // initialize priv with private key + int ret = wc_curve25519_make_pub_blind(sizeof(pub), pub, + sizeof(priv), priv, &rng); + if (ret != 0) { + // error generating public key + } + \endcode + + \sa wc_curve25519_make_pub + \sa wc_curve25519_generic_blind +*/ +int wc_curve25519_make_pub_blind(int public_size, byte* pub, + int private_size, const byte* priv, + WC_RNG* rng); + +/*! + \ingroup Curve25519 + \brief This function performs a generic Curve25519 scalar + multiplication with a custom basepoint. This allows computing + scalar * basepoint for any basepoint, not just the standard + generator. 
+ + \return 0 On successfully computing the result + \return ECC_BAD_ARG_E If the sizes are invalid + \return BAD_FUNC_ARG If any input parameters are NULL + + \param public_size Size of the output buffer (must be 32) + \param pub Pointer to buffer to store the result + \param private_size Size of the scalar (must be 32) + \param priv Pointer to buffer containing the scalar + \param basepoint_size Size of the basepoint (must be 32) + \param basepoint Pointer to buffer containing the basepoint + + _Example_ + \code + byte scalar[CURVE25519_KEYSIZE]; + byte basepoint[CURVE25519_KEYSIZE]; + byte result[CURVE25519_KEYSIZE]; + + // initialize scalar and basepoint + int ret = wc_curve25519_generic(sizeof(result), result, + sizeof(scalar), scalar, + sizeof(basepoint), basepoint); + if (ret != 0) { + // error computing result + } + \endcode + + \sa wc_curve25519_shared_secret + \sa wc_curve25519_generic_blind +*/ +int wc_curve25519_generic(int public_size, byte* pub, int private_size, + const byte* priv, int basepoint_size, + const byte* basepoint); + +/*! + \ingroup Curve25519 + \brief This function performs a generic Curve25519 scalar + multiplication with a custom basepoint and blinding to resist + side-channel attacks. 
+ + \return 0 On successfully computing the result + \return ECC_BAD_ARG_E If the sizes are invalid + \return BAD_FUNC_ARG If any input parameters are NULL + + \param public_size Size of the output buffer (must be 32) + \param pub Pointer to buffer to store the result + \param private_size Size of the scalar (must be 32) + \param priv Pointer to buffer containing the scalar + \param basepoint_size Size of the basepoint (must be 32) + \param basepoint Pointer to buffer containing the basepoint + \param rng Pointer to initialized RNG for blinding + + _Example_ + \code + WC_RNG rng; + byte scalar[CURVE25519_KEYSIZE]; + byte basepoint[CURVE25519_KEYSIZE]; + byte result[CURVE25519_KEYSIZE]; + + wc_InitRng(&rng); + // initialize scalar and basepoint + int ret = wc_curve25519_generic_blind(sizeof(result), result, + sizeof(scalar), scalar, + sizeof(basepoint), basepoint, + &rng); + \endcode + + \sa wc_curve25519_generic + \sa wc_curve25519_make_pub_blind +*/ +int wc_curve25519_generic_blind(int public_size, byte* pub, + int private_size, const byte* priv, + int basepoint_size, const byte* basepoint, + WC_RNG* rng); + +/*! + \ingroup Curve25519 + \brief This function generates a Curve25519 private key using the + given random number generator. This is a lower-level function that + generates only the private key bytes. 
+ + \return 0 On successfully generating the private key + \return ECC_BAD_ARG_E If keysize is invalid + \return BAD_FUNC_ARG If any input parameters are NULL + \return RNG_FAILURE_E If random number generation fails + + \param rng Pointer to initialized RNG + \param keysize Size of the key to generate (must be 32) + \param priv Pointer to buffer to store the private key + + _Example_ + \code + WC_RNG rng; + byte priv[CURVE25519_KEYSIZE]; + + wc_InitRng(&rng); + int ret = wc_curve25519_make_priv(&rng, sizeof(priv), priv); + if (ret != 0) { + // error generating private key + } + \endcode + + \sa wc_curve25519_make_key + \sa wc_curve25519_make_pub +*/ +int wc_curve25519_make_priv(WC_RNG* rng, int keysize, byte* priv); + +/*! + \ingroup Curve25519 + \brief This function initializes a Curve25519 key with extended + parameters, allowing specification of custom heap and device ID + for hardware acceleration. + + \return 0 On successfully initializing the key + \return BAD_FUNC_ARG If key is NULL + + \param key Pointer to the curve25519_key structure to initialize + \param heap Pointer to heap hint for memory allocation (can be + NULL) + \param devId Device ID for hardware acceleration (use + INVALID_DEVID for software only) + + _Example_ + \code + curve25519_key key; + void* heap = NULL; + int devId = INVALID_DEVID; + + int ret = wc_curve25519_init_ex(&key, heap, devId); + if (ret != 0) { + // error initializing key + } + \endcode + + \sa wc_curve25519_init + \sa wc_curve25519_free +*/ +int wc_curve25519_init_ex(curve25519_key* key, void* heap, int devId); + +/*! + \ingroup Curve25519 + \brief This function sets the RNG to be used with a Curve25519 + key. This is useful for operations that require randomness such + as blinded scalar multiplication. 
+ + \return 0 On successfully setting the RNG + \return BAD_FUNC_ARG If key or rng is NULL + + \param key Pointer to the curve25519_key structure + \param rng Pointer to initialized RNG + + _Example_ + \code + WC_RNG rng; + curve25519_key key; + + wc_InitRng(&rng); + wc_curve25519_init(&key); + int ret = wc_curve25519_set_rng(&key, &rng); + if (ret != 0) { + // error setting RNG + } + \endcode + + \sa wc_curve25519_init + \sa wc_curve25519_make_key +*/ +int wc_curve25519_set_rng(curve25519_key* key, WC_RNG* rng); + +/*! + \ingroup Curve25519 + \brief This function allocates and initializes a new Curve25519 + key structure with extended parameters. The caller is responsible + for freeing the key with wc_curve25519_delete. These New/Delete + functions are exposed to support allocation of the structure using + dynamic memory to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return Pointer to newly allocated curve25519_key on success + \return NULL on failure + + \param heap Pointer to heap hint for memory allocation (can be + NULL) + \param devId Device ID for hardware acceleration (use + INVALID_DEVID for software only) + \param result_code Pointer to store result code (0 on success) + + _Example_ + \code + int ret; + curve25519_key* key; + + key = wc_curve25519_new(NULL, INVALID_DEVID, &ret); + if (key == NULL || ret != 0) { + // error allocating key + } + // use key + wc_curve25519_delete(key, &key); + \endcode + + \sa wc_curve25519_delete + \sa wc_curve25519_init_ex +*/ +curve25519_key* wc_curve25519_new(void* heap, int devId, + int *result_code); + +/*! + \ingroup Curve25519 + \brief This function frees a Curve25519 key structure that was + allocated with wc_curve25519_new and sets the pointer to NULL. 
+ These New/Delete functions are exposed to support allocation of the + structure using dynamic memory to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return 0 On successfully freeing the key + \return BAD_FUNC_ARG If key or key_p is NULL + + \param key Pointer to the curve25519_key structure to free + \param key_p Pointer to the key pointer (will be set to NULL) + + _Example_ + \code + int ret; + curve25519_key* key; + + key = wc_curve25519_new(NULL, INVALID_DEVID, &ret); + // use key + ret = wc_curve25519_delete(key, &key); + if (ret != 0) { + // error freeing key + } + // key is now NULL + \endcode + + \sa wc_curve25519_new + \sa wc_curve25519_free +*/ +int wc_curve25519_delete(curve25519_key* key, curve25519_key** key_p); diff --git a/doc/dox_comments/header_files/curve448.h b/doc/dox_comments/header_files/curve448.h index 8e0e125a0..23ff7253c 100644 --- a/doc/dox_comments/header_files/curve448.h +++ b/doc/dox_comments/header_files/curve448.h @@ -766,3 +766,37 @@ int wc_curve448_export_key_raw_ex(curve448_key* key, */ int wc_curve448_size(curve448_key* key); + +/*! + \ingroup Curve448 + \brief This function generates a Curve448 public key from a given + private key. It computes the public key by performing scalar + multiplication of the base point with the private key. + + \return 0 On success. + \return ECC_BAD_ARG_E If public_size is not CURVE448_PUB_KEY_SIZE or + if private_size is not CURVE448_KEY_SIZE. + \return BAD_FUNC_ARG If pub or priv is NULL. 
+ + \param public_size size of the public key buffer (must be 56 bytes) + \param pub pointer to buffer to store the generated public key + \param private_size size of the private key (must be 56 bytes) + \param priv pointer to the private key buffer + + _Example_ + \code + byte priv[CURVE448_KEY_SIZE] = { }; // private key + byte pub[CURVE448_PUB_KEY_SIZE]; + + int ret = wc_curve448_make_pub(CURVE448_PUB_KEY_SIZE, pub, + CURVE448_KEY_SIZE, priv); + if (ret != 0) { + // error generating public key + } + \endcode + + \sa wc_curve448_make_key + \sa wc_curve448_import_private +*/ +int wc_curve448_make_pub(int public_size, byte* pub, int private_size, + const byte* priv); diff --git a/doc/dox_comments/header_files/des3.h b/doc/dox_comments/header_files/des3.h index ded1a0406..9b071ad75 100644 --- a/doc/dox_comments/header_files/des3.h +++ b/doc/dox_comments/header_files/des3.h @@ -330,3 +330,124 @@ int wc_Des3_CbcEncrypt(Des3* des, byte* out, */ int wc_Des3_CbcDecrypt(Des3* des, byte* out, const byte* in,word32 sz); + +/*! + \ingroup 3DES + \brief This function decrypts the input ciphertext and stores the + resulting plaintext in the output buffer. It uses DES encryption + with Electronic Codebook (ECB) mode. Warning: In nearly all use + cases ECB mode is considered to be less secure. Please avoid using + ECB APIs directly whenever possible. 
+ + \return 0 On successfully decrypting the given ciphertext + + \param des pointer to the Des structure to use for decryption + \param out pointer to the buffer in which to store the decrypted + plaintext + \param in pointer to the input buffer containing the ciphertext + \param sz length of the ciphertext to decrypt + + _Example_ + \code + Des dec; + byte cipher[]; // ciphertext to decrypt + byte plain[sizeof(cipher)]; + + wc_Des_SetKey(&dec, key, iv, DES_DECRYPTION); + if (wc_Des_EcbDecrypt(&dec, plain, cipher, sizeof(cipher)) != 0) { + // error decrypting message + } + \endcode + + \sa wc_Des_SetKey + \sa wc_Des_EcbEncrypt +*/ +int wc_Des_EcbDecrypt(Des* des, byte* out, const byte* in, word32 sz); + +/*! + \ingroup 3DES + \brief This function decrypts the input ciphertext and stores the + resulting plaintext in the output buffer. It uses Triple DES (3DES) + encryption with Electronic Codebook (ECB) mode. Warning: In nearly + all use cases ECB mode is considered to be less secure. Please + avoid using ECB APIs directly whenever possible. + + \return 0 On successfully decrypting the given ciphertext + + \param des pointer to the Des3 structure to use for decryption + \param out pointer to the buffer in which to store the decrypted + plaintext + \param in pointer to the input buffer containing the ciphertext + \param sz length of the ciphertext to decrypt + + _Example_ + \code + Des3 dec; + byte cipher[]; // ciphertext to decrypt + byte plain[sizeof(cipher)]; + + wc_Des3_SetKey(&dec, key, iv, DES_DECRYPTION); + if (wc_Des3_EcbDecrypt(&dec, plain, cipher, sizeof(cipher)) != 0) { + // error decrypting message + } + \endcode + + \sa wc_Des3_SetKey + \sa wc_Des3_EcbEncrypt +*/ +int wc_Des3_EcbDecrypt(Des3* des, byte* out, const byte* in, word32 sz); + +/*! + \ingroup 3DES + \brief This function initializes a Des3 structure for use with + hardware acceleration and custom memory management. 
This is an + extended version of the standard initialization that allows + specification of heap hints and device IDs. + + \return 0 On successfully initializing the Des3 structure + \return BAD_FUNC_ARG If des3 is NULL + + \param des3 pointer to the Des3 structure to initialize + \param heap pointer to heap hint for memory allocation (can be NULL) + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software only) + + _Example_ + \code + Des3 des; + void* heap = NULL; + int devId = INVALID_DEVID; + + if (wc_Des3Init(&des, heap, devId) != 0) { + // error initializing Des3 structure + } + \endcode + + \sa wc_Des3_SetKey + \sa wc_Des3Free +*/ +int wc_Des3Init(Des3* des3, void* heap, int devId); + +/*! + \ingroup 3DES + \brief This function frees a Des3 structure and releases any + resources allocated for it. This should be called when finished + using the Des3 structure to prevent memory leaks. + + \return none No returns. + + \param des3 pointer to the Des3 structure to free + + _Example_ + \code + Des3 des; + wc_Des3Init(&des, NULL, INVALID_DEVID); + wc_Des3_SetKey(&des, key, iv, DES_ENCRYPTION); + // use des for encryption/decryption + wc_Des3Free(&des); + \endcode + + \sa wc_Des3Init + \sa wc_Des3_SetKey +*/ +void wc_Des3Free(Des3* des3); diff --git a/doc/dox_comments/header_files/dh.h b/doc/dox_comments/header_files/dh.h index d2a3868c7..a7caf644a 100644 --- a/doc/dox_comments/header_files/dh.h +++ b/doc/dox_comments/header_files/dh.h @@ -274,9 +274,348 @@ int wc_DhParamsLoad(const byte* input, word32 inSz, byte* p, /*! \ingroup Diffie-Hellman + \brief Encodes DH parameters to DER format for OpenSSL compatibility. - \brief This function returns ... and requires that HAVE_FFDHE_2048 be - defined. 
+ \return Length of DER encoding on success + \return Negative on error + + \param dh DH parameters to encode + \param out Output buffer pointer (if *out is NULL, allocates buffer) + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + unsigned char* der = NULL; + int derSz = wolfSSL_i2d_DHparams(dh, &der); + if (derSz > 0) { + // use der buffer + XFREE(der, NULL, DYNAMIC_TYPE_OPENSSL); + } + \endcode + + \sa wolfSSL_DH_new +*/ +int wolfSSL_i2d_DHparams(const WOLFSSL_DH *dh, unsigned char **out); + +/*! + \ingroup Diffie-Hellman + \brief Allocates and initializes a new DH structure for OpenSSL + compatibility. + + \return Pointer to WOLFSSL_DH on success + \return NULL on failure + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + if (dh == NULL) { + // error allocating DH + } + // use dh + wolfSSL_DH_free(dh); + \endcode + + \sa wolfSSL_DH_free + \sa wolfSSL_DH_generate_key +*/ +WOLFSSL_DH* wolfSSL_DH_new(void); + +/*! + \ingroup Diffie-Hellman + \brief Creates a new DH structure with named group parameters. + + \return Pointer to WOLFSSL_DH on success + \return NULL on failure + + \param nid Named group identifier (e.g., NID_ffdhe2048) + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new_by_nid(NID_ffdhe2048); + if (dh == NULL) { + // error creating DH with named group + } + \endcode + + \sa wolfSSL_DH_new +*/ +WOLFSSL_DH* wolfSSL_DH_new_by_nid(int nid); + +/*! + \ingroup Diffie-Hellman + \brief Frees a DH structure. + + \param dh DH structure to free + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + // use dh + wolfSSL_DH_free(dh); + \endcode + + \sa wolfSSL_DH_new +*/ +void wolfSSL_DH_free(WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Duplicates a DH structure. 
+ + \return Pointer to new WOLFSSL_DH on success + \return NULL on failure + + \param dh DH structure to duplicate + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + WOLFSSL_DH* dhCopy = wolfSSL_DH_dup(dh); + \endcode + + \sa wolfSSL_DH_new +*/ +WOLFSSL_DH* wolfSSL_DH_dup(WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Increments reference count for DH structure. + + \return 1 on success + \return 0 on failure + + \param dh DH structure to increment reference + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + int ret = wolfSSL_DH_up_ref(dh); + \endcode + + \sa wolfSSL_DH_free +*/ +int wolfSSL_DH_up_ref(WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Validates DH parameters. + + \return 1 on success + \return 0 on failure + + \param dh DH parameters to check + \param codes Output for validation error codes + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + int codes; + int ret = wolfSSL_DH_check(dh, &codes); + if (ret != 1 || codes != 0) { + // validation failed + } + \endcode + + \sa wolfSSL_DH_generate_key +*/ +int wolfSSL_DH_check(const WOLFSSL_DH *dh, int *codes); + +/*! + \ingroup Diffie-Hellman + \brief Returns size of DH key in bytes. + + \return Key size in bytes on success + \return -1 on failure + + \param dh DH structure + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + int size = wolfSSL_DH_size(dh); + \endcode + + \sa wolfSSL_DH_new +*/ +int wolfSSL_DH_size(WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Generates DH public/private key pair. + + \return 1 on success + \return 0 on failure + + \param dh DH structure with parameters set + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + // set p and g parameters + int ret = wolfSSL_DH_generate_key(dh); + if (ret != 1) { + // key generation failed + } + \endcode + + \sa wolfSSL_DH_compute_key +*/ +int wolfSSL_DH_generate_key(WOLFSSL_DH* dh); + +/*! 
+ \ingroup Diffie-Hellman + \brief Computes shared secret from peer's public key. + + \return Length of shared secret on success + \return -1 on failure + + \param key Output buffer for shared secret + \param pub Peer's public key + \param dh DH structure with private key + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + wolfSSL_DH_generate_key(dh); + byte secret[256]; + WOLFSSL_BIGNUM* peerPub = NULL; // peer's public key + int secretSz = wolfSSL_DH_compute_key(secret, peerPub, dh); + \endcode + + \sa wolfSSL_DH_generate_key +*/ +int wolfSSL_DH_compute_key(unsigned char* key, + const WOLFSSL_BIGNUM* pub, WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Computes shared secret with zero-padding to DH size. + + \return Length of shared secret on success + \return -1 on failure + + \param key Output buffer for shared secret + \param otherPub Peer's public key + \param dh DH structure with private key + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + wolfSSL_DH_generate_key(dh); + byte secret[256]; + WOLFSSL_BIGNUM* peerPub = NULL; + int secretSz = wolfSSL_DH_compute_key_padded(secret, peerPub, dh); + \endcode + + \sa wolfSSL_DH_compute_key +*/ +int wolfSSL_DH_compute_key_padded(unsigned char* key, + const WOLFSSL_BIGNUM* otherPub, + WOLFSSL_DH* dh); + +/*! + \ingroup Diffie-Hellman + \brief Loads DH parameters from DER buffer. + + \return WOLFSSL_SUCCESS on success + \return WOLFSSL_FAILURE on failure + + \param dh DH structure to load into + \param derBuf DER-encoded DH parameters + \param derSz Size of DER buffer + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + byte derBuf[256]; + int ret = wolfSSL_DH_LoadDer(dh, derBuf, sizeof(derBuf)); + \endcode + + \sa wolfSSL_DH_new +*/ +int wolfSSL_DH_LoadDer(WOLFSSL_DH* dh, const unsigned char* derBuf, + int derSz); + +/*! + \ingroup Diffie-Hellman + \brief Sets optional private key length. 
+ + \return 1 on success + \return 0 on failure + + \param dh DH structure + \param len Private key length in bits + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + int ret = wolfSSL_DH_set_length(dh, 256); + \endcode + + \sa wolfSSL_DH_generate_key +*/ +int wolfSSL_DH_set_length(WOLFSSL_DH* dh, long len); + +/*! + \ingroup Diffie-Hellman + \brief Sets DH parameters p, q, and g. + + \return 1 on success + \return 0 on failure + + \param dh DH structure + \param p Prime modulus (takes ownership) + \param q Subgroup order (takes ownership, can be NULL) + \param g Generator (takes ownership) + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_new(); + WOLFSSL_BIGNUM *p = wolfSSL_BN_new(); + WOLFSSL_BIGNUM *g = wolfSSL_BN_new(); + // set p and g values + int ret = wolfSSL_DH_set0_pqg(dh, p, NULL, g); + \endcode + + \sa wolfSSL_DH_generate_key +*/ +int wolfSSL_DH_set0_pqg(WOLFSSL_DH *dh, WOLFSSL_BIGNUM *p, + WOLFSSL_BIGNUM *q, WOLFSSL_BIGNUM *g); + +/*! + \ingroup Diffie-Hellman + \brief Returns DH parameters for 2048-bit MODP group with 256-bit + subgroup. + + \return Pointer to WOLFSSL_DH on success + \return NULL on failure + + _Example_ + \code + WOLFSSL_DH* dh = wolfSSL_DH_get_2048_256(); + if (dh == NULL) { + // error getting standard group + } + \endcode + + \sa wolfSSL_DH_new_by_nid +*/ +WOLFSSL_DH* wolfSSL_DH_get_2048_256(void); + +/*! + \ingroup Diffie-Hellman + \brief Returns FFDHE 2048-bit group parameters. + + \return Pointer to DhParams structure + \return NULL if not compiled with HAVE_FFDHE_2048 + + _Example_ + \code + const DhParams* params = wc_Dh_ffdhe2048_Get(); + if (params != NULL) { + // use params + } + \endcode \sa wc_Dh_ffdhe3072_Get \sa wc_Dh_ffdhe4096_Get @@ -287,9 +626,18 @@ const DhParams* wc_Dh_ffdhe2048_Get(void); /*! \ingroup Diffie-Hellman + \brief Returns FFDHE 3072-bit group parameters. - \brief This function returns ... and requires that HAVE_FFDHE_3072 be - defined. 
+ \return Pointer to DhParams structure + \return NULL if not compiled with HAVE_FFDHE_3072 + + _Example_ + \code + const DhParams* params = wc_Dh_ffdhe3072_Get(); + if (params != NULL) { + // use params + } + \endcode \sa wc_Dh_ffdhe2048_Get \sa wc_Dh_ffdhe4096_Get @@ -300,9 +648,18 @@ const DhParams* wc_Dh_ffdhe3072_Get(void); /*! \ingroup Diffie-Hellman + \brief Returns FFDHE 4096-bit group parameters. - \brief This function returns ... and requires that HAVE_FFDHE_4096 be - defined. + \return Pointer to DhParams structure + \return NULL if not compiled with HAVE_FFDHE_4096 + + _Example_ + \code + const DhParams* params = wc_Dh_ffdhe4096_Get(); + if (params != NULL) { + // use params + } + \endcode \sa wc_Dh_ffdhe2048_Get \sa wc_Dh_ffdhe3072_Get @@ -313,9 +670,18 @@ const DhParams* wc_Dh_ffdhe4096_Get(void); /*! \ingroup Diffie-Hellman + \brief Returns FFDHE 6144-bit group parameters. - \brief This function returns ... and requires that HAVE_FFDHE_6144 be - defined. + \return Pointer to DhParams structure + \return NULL if not compiled with HAVE_FFDHE_6144 + + _Example_ + \code + const DhParams* params = wc_Dh_ffdhe6144_Get(); + if (params != NULL) { + // use params + } + \endcode \sa wc_Dh_ffdhe2048_Get \sa wc_Dh_ffdhe3072_Get @@ -326,9 +692,18 @@ const DhParams* wc_Dh_ffdhe6144_Get(void); /*! \ingroup Diffie-Hellman + \brief Returns FFDHE 8192-bit group parameters. - \brief This function returns ... and requires that HAVE_FFDHE_8192 be - defined. + \return Pointer to DhParams structure + \return NULL if not compiled with HAVE_FFDHE_8192 + + _Example_ + \code + const DhParams* params = wc_Dh_ffdhe8192_Get(); + if (params != NULL) { + // use params + } + \endcode \sa wc_Dh_ffdhe2048_Get \sa wc_Dh_ffdhe3072_Get @@ -337,6 +712,291 @@ const DhParams* wc_Dh_ffdhe6144_Get(void); */ const DhParams* wc_Dh_ffdhe8192_Get(void); +/*! + \ingroup Diffie-Hellman + \brief Initializes DH key with heap hint and device ID. 
+ + \return 0 on success + \return BAD_FUNC_ARG if key is NULL + + \param key DH key to initialize + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + + _Example_ + \code + DhKey key; + int ret = wc_InitDhKey_ex(&key, NULL, INVALID_DEVID); + if (ret != 0) { + // error initializing key + } + \endcode + + \sa wc_InitDhKey + \sa wc_FreeDhKey +*/ +int wc_InitDhKey_ex(DhKey* key, void* heap, int devId); + +/*! + \ingroup Diffie-Hellman + \brief Computes shared secret with constant-time operations. + + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + \return BUFFER_E if output buffer too small + + \param key DH key with parameters + \param agree Output buffer for shared secret + \param agreeSz Input: buffer size, Output: secret size + \param priv Private key + \param privSz Private key size + \param otherPub Peer's public key + \param pubSz Peer's public key size + + _Example_ + \code + DhKey key; + byte agree[256], priv[256], pub[256]; + word32 agreeSz = sizeof(agree); + int ret = wc_DhAgree_ct(&key, agree, &agreeSz, priv, + sizeof(priv), pub, sizeof(pub)); + \endcode + + \sa wc_DhAgree +*/ +int wc_DhAgree_ct(DhKey* key, byte* agree, word32 *agreeSz, + const byte* priv, word32 privSz, + const byte* otherPub, word32 pubSz); + +/*! + \ingroup Diffie-Hellman + \brief Sets DH key to use named group parameters. + + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + + \param key DH key to configure + \param name Named group identifier + + _Example_ + \code + DhKey key; + wc_InitDhKey(&key); + int ret = wc_DhSetNamedKey(&key, WC_FFDHE_2048); + \endcode + + \sa wc_DhGetNamedKeyParamSize +*/ +int wc_DhSetNamedKey(DhKey* key, int name); + +/*! + \ingroup Diffie-Hellman + \brief Gets parameter sizes for named group. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + + \param name Named group identifier + \param p Output for prime size + \param g Output for generator size + \param q Output for subgroup order size + + _Example_ + \code + word32 pSz, gSz, qSz; + int ret = wc_DhGetNamedKeyParamSize(WC_FFDHE_2048, &pSz, &gSz, + &qSz); + \endcode + + \sa wc_DhSetNamedKey +*/ +int wc_DhGetNamedKeyParamSize(int name, word32* p, word32* g, + word32* q); + +/*! + \ingroup Diffie-Hellman + \brief Gets minimum key size for named group. + + \return Minimum key size in bits + \return 0 if invalid name + + \param name Named group identifier + + _Example_ + \code + word32 minSize = wc_DhGetNamedKeyMinSize(WC_FFDHE_2048); + \endcode + + \sa wc_DhSetNamedKey +*/ +word32 wc_DhGetNamedKeyMinSize(int name); + +/*! + \ingroup Diffie-Hellman + \brief Compares parameters against named group. + + \return 0 if parameters match named group + \return Non-zero if parameters don't match + + \param name Named group identifier + \param noQ 1 to skip q comparison + \param p Prime modulus + \param pSz Prime size + \param g Generator + \param gSz Generator size + \param q Subgroup order + \param qSz Subgroup order size + + _Example_ + \code + byte p[256], g[256]; + int ret = wc_DhCmpNamedKey(WC_FFDHE_2048, 1, p, sizeof(p), + g, sizeof(g), NULL, 0); + \endcode + + \sa wc_DhSetNamedKey +*/ +int wc_DhCmpNamedKey(int name, int noQ, const byte* p, word32 pSz, + const byte* g, word32 gSz, const byte* q, + word32 qSz); + +/*! + \ingroup Diffie-Hellman + \brief Copies named group parameters to buffers. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + \return BUFFER_E if buffers too small + + \param name Named group identifier + \param p Output buffer for prime + \param pSz Input: buffer size, Output: prime size + \param g Output buffer for generator + \param gSz Input: buffer size, Output: generator size + \param q Output buffer for subgroup order + \param qSz Input: buffer size, Output: subgroup order size + + _Example_ + \code + byte p[512], g[512], q[512]; + word32 pSz = sizeof(p), gSz = sizeof(g), qSz = sizeof(q); + int ret = wc_DhCopyNamedKey(WC_FFDHE_2048, p, &pSz, g, &gSz, + q, &qSz); + \endcode + + \sa wc_DhSetNamedKey +*/ +int wc_DhCopyNamedKey(int name, byte* p, word32* pSz, byte* g, + word32* gSz, byte* q, word32* qSz); + +/*! + \ingroup Diffie-Hellman + \brief Generates public key from private key. + + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + + \param key DH key with parameters set + \param priv Private key + \param privSz Private key size + \param pub Output buffer for public key + \param pubSz Input: buffer size, Output: public key size + + _Example_ + \code + DhKey key; + byte priv[256], pub[256]; + word32 pubSz = sizeof(pub); + int ret = wc_DhGeneratePublic(&key, priv, sizeof(priv), pub, + &pubSz); + \endcode + + \sa wc_DhGenerateKeyPair +*/ +int wc_DhGeneratePublic(DhKey* key, byte* priv, word32 privSz, + byte* pub, word32* pubSz); + +/*! + \ingroup Diffie-Hellman + \brief Imports private and/or public key into DH key. 
+ + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + + \param key DH key to import into + \param priv Private key (can be NULL) + \param privSz Private key size + \param pub Public key (can be NULL) + \param pubSz Public key size + + _Example_ + \code + DhKey key; + byte priv[256], pub[256]; + int ret = wc_DhImportKeyPair(&key, priv, sizeof(priv), pub, + sizeof(pub)); + \endcode + + \sa wc_DhExportKeyPair +*/ +int wc_DhImportKeyPair(DhKey* key, const byte* priv, word32 privSz, + const byte* pub, word32 pubSz); + +/*! + \ingroup Diffie-Hellman + \brief Exports private and public key from DH key. + + \return 0 on success + \return BAD_FUNC_ARG if parameters are invalid + \return BUFFER_E if buffers too small + + \param key DH key to export from + \param priv Output buffer for private key + \param pPrivSz Input: buffer size, Output: private key size + \param pub Output buffer for public key + \param pPubSz Input: buffer size, Output: public key size + + _Example_ + \code + DhKey key; + byte priv[256], pub[256]; + word32 privSz = sizeof(priv), pubSz = sizeof(pub); + int ret = wc_DhExportKeyPair(&key, priv, &privSz, pub, &pubSz); + \endcode + + \sa wc_DhImportKeyPair +*/ +int wc_DhExportKeyPair(DhKey* key, byte* priv, word32* pPrivSz, + byte* pub, word32* pPubSz); + +/*! + \ingroup Diffie-Hellman + \brief Validates public key value. + + \return 0 if public key is valid + \return BAD_FUNC_ARG if parameters are invalid + \return MP_VAL if public key is invalid + + \param prime Prime modulus + \param primeSz Prime size + \param pub Public key to validate + \param pubSz Public key size + + _Example_ + \code + byte prime[256], pub[256]; + int ret = wc_DhCheckPubValue(prime, sizeof(prime), pub, + sizeof(pub)); + if (ret != 0) { + // invalid public key + } + \endcode + + \sa wc_DhCheckPubKey +*/ +int wc_DhCheckPubValue(const byte* prime, word32 primeSz, + const byte* pub, word32 pubSz); + /*! 
\ingroup Diffie-Hellman diff --git a/doc/dox_comments/header_files/dsa.h b/doc/dox_comments/header_files/dsa.h index d66ccbcb5..f60200265 100644 --- a/doc/dox_comments/header_files/dsa.h +++ b/doc/dox_comments/header_files/dsa.h @@ -340,3 +340,298 @@ int wc_MakeDsaKey(WC_RNG *rng, DsaKey *dsa); \sa wc_InitDsaKey */ int wc_MakeDsaParameters(WC_RNG *rng, int modulus_size, DsaKey *dsa); +/*! + \ingroup DSA + \brief Initializes DSA key with heap hint. + + \return 0 on success + \return negative on failure + + \param key DSA key structure + \param h Heap hint for memory allocation + + _Example_ + \code + DsaKey key; + int ret = wc_InitDsaKey_h(&key, NULL); + \endcode + + \sa wc_InitDsaKey +*/ +int wc_InitDsaKey_h(DsaKey* key, void* h); + +/*! + \ingroup DSA + \brief Signs digest with extended parameters. + + \return 0 on success + \return negative on failure + + \param digest Digest to sign + \param digestSz Digest size + \param out Output signature buffer + \param key DSA key + \param rng Random number generator + + _Example_ + \code + byte digest[WC_SHA_DIGEST_SIZE]; + byte sig[40]; + WC_RNG rng; + int ret = wc_DsaSign_ex(digest, sizeof(digest), sig, &key, + &rng); + \endcode + + \sa wc_DsaSign +*/ +int wc_DsaSign_ex(const byte* digest, word32 digestSz, byte* out, + DsaKey* key, WC_RNG* rng); + +/*! + \ingroup DSA + \brief Verifies signature with extended parameters. + + \return 0 on success + \return negative on failure + + \param digest Digest + \param digestSz Digest size + \param sig Signature buffer + \param key DSA key + \param answer Verification result + + _Example_ + \code + byte digest[WC_SHA_DIGEST_SIZE]; + byte sig[40]; + int answer; + int ret = wc_DsaVerify_ex(digest, sizeof(digest), sig, &key, + &answer); + \endcode + + \sa wc_DsaVerify +*/ +int wc_DsaVerify_ex(const byte* digest, word32 digestSz, + const byte* sig, DsaKey* key, int* answer); + +/*! + \ingroup DSA + \brief Sets DSA public key in output buffer. 
+ + \return Size on success + \return negative on failure + + \param output Output buffer + \param key DSA key + \param outLen Output buffer length + \param with_header Include header flag + + _Example_ + \code + byte output[256]; + int ret = wc_SetDsaPublicKey(output, &key, sizeof(output), 1); + \endcode + + \sa wc_DsaKeyToPublicDer +*/ +int wc_SetDsaPublicKey(byte* output, DsaKey* key, int outLen, + int with_header); + +/*! + \ingroup DSA + \brief Converts DSA key to public DER format. + + \return Size on success + \return negative on failure + + \param key DSA key + \param output Output buffer + \param inLen Output buffer length + + _Example_ + \code + DsaKey key; + WC_RNG rng; + byte output[256]; + + // Initialize key and RNG + wc_InitDsaKey(&key); + wc_InitRng(&rng); + + // Generate DSA key or import existing key + wc_MakeDsaKey(&rng, &key); + + // Convert to public DER format + int ret = wc_DsaKeyToPublicDer(&key, output, sizeof(output)); + if (ret > 0) { + // output contains DER encoded public key of size ret + } + + wc_FreeDsaKey(&key); + wc_FreeRng(&rng); + \endcode + + \sa wc_SetDsaPublicKey +*/ +int wc_DsaKeyToPublicDer(DsaKey* key, byte* output, word32 inLen); + +/*! + \ingroup DSA + \brief Imports DSA parameters from raw format. The parameters p, q, and + g must be provided as ASCII hexadecimal strings (without 0x prefix). + These represent the DSA domain parameters: p is the prime modulus, q is + the prime divisor (subgroup order), and g is the generator. 
+ + \return 0 on success + \return negative on failure + + \param dsa DSA key structure (must be initialized) + \param p P parameter as ASCII hex string (prime modulus) + \param q Q parameter as ASCII hex string (prime divisor/subgroup order) + \param g G parameter as ASCII hex string (generator) + + _Example_ + \code + DsaKey dsa; + wc_InitDsaKey(&dsa); + + // DSA parameters as ASCII hexadecimal strings (example values) + const char* pStr = "E0A67598CD1B763BC98C8ABB333E5DDA0CD3AA0E5E1F" + "B5BA8A7B4EABC10BA338FAE06DD4B90FDA70D7CF0CB0" + "C638BE3341BEC0AF8A7330A3307DED2299A0EE606DF0" + "35177A239C34A912C202AA5F83B9C4A7CF0235B5316B" + "FC6EFB9A248411258B30B839AF172440F32563056CB6" + "7A861158DDD90E6A894C72A5BBEF9E286C6B"; + const char* qStr = "E950511EAB424B9A19A2AEB4E159B7844C589C4F"; + const char* gStr = "D29D5121B0423C2769AB21843E5A3240FF19CACC792D" + "C6E7925E6D1A4E6E4E3D119A3D133C8D3C8C8C8C8C8C" + "8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C" + "8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C"; + + int ret = wc_DsaImportParamsRaw(&dsa, pStr, qStr, gStr); + if (ret == 0) { + // DSA parameters successfully imported + // Can now use dsa for key generation or signing + } + wc_FreeDsaKey(&dsa); + \endcode + + \sa wc_DsaImportParamsRawCheck + \sa wc_InitDsaKey +*/ +int wc_DsaImportParamsRaw(DsaKey* dsa, const char* p, const char* q, + const char* g); + +/*! + \ingroup DSA + \brief Imports DSA parameters from raw format with optional validation. + The parameters p, q, and g must be provided as ASCII hexadecimal strings + (without 0x prefix). The trusted parameter controls whether the prime p + is validated: when trusted=1, prime checking is skipped (use when + parameters come from a trusted source); when trusted=0, performs full + primality testing on p (recommended for untrusted sources). 
+ + \return 0 on success + \return DH_CHECK_PUB_E if p fails primality test (when trusted=0) + \return negative on other failures + + \param dsa DSA key structure (must be initialized) + \param p P parameter as ASCII hex string (prime modulus) + \param q Q parameter as ASCII hex string (prime divisor/subgroup order) + \param g G parameter as ASCII hex string (generator) + \param trusted If 1, skip prime validation (trusted source); if 0, + perform full primality test on p + \param rng Random number generator (required when trusted=0 for + primality testing) + + _Example_ + \code + DsaKey dsa; + WC_RNG rng; + + // Initialize DSA key and RNG + wc_InitDsaKey(&dsa); + wc_InitRng(&rng); + + // DSA parameters as ASCII hexadecimal strings + const char* pStr = "E0A67598CD1B763BC98C8ABB333E5DDA0CD3AA0E5E1F" + "B5BA8A7B4EABC10BA338FAE06DD4B90FDA70D7CF0CB0" + "C638BE3341BEC0AF8A7330A3307DED2299A0EE606DF0" + "35177A239C34A912C202AA5F83B9C4A7CF0235B5316B" + "FC6EFB9A248411258B30B839AF172440F32563056CB6" + "7A861158DDD90E6A894C72A5BBEF9E286C6B"; + const char* qStr = "E950511EAB424B9A19A2AEB4E159B7844C589C4F"; + const char* gStr = "D29D5121B0423C2769AB21843E5A3240FF19CACC792D" + "C6E7925E6D1A4E6E4E3D119A3D133C8D3C8C8C8C8C8C" + "8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C" + "8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C8C"; + + // Import with validation (trusted=0 performs primality test on p) + int ret = wc_DsaImportParamsRawCheck(&dsa, pStr, qStr, gStr, 0, + &rng); + if (ret == 0) { + // Parameters imported and validated successfully + } + + wc_FreeDsaKey(&dsa); + wc_FreeRng(&rng); + \endcode + + \sa wc_DsaImportParamsRaw + \sa wc_InitDsaKey +*/ +int wc_DsaImportParamsRawCheck(DsaKey* dsa, const char* p, + const char* q, const char* g, int trusted, WC_RNG* rng); + +/*! + \ingroup DSA + \brief Exports DSA parameters to raw format. 
+ + \return 0 on success + \return negative on failure + + \param dsa DSA key structure + \param p P parameter buffer + \param pSz P parameter size (in/out) + \param q Q parameter buffer + \param qSz Q parameter size (in/out) + \param g G parameter buffer + \param gSz G parameter size (in/out) + + _Example_ + \code + byte p[256], q[32], g[256]; + word32 pSz = sizeof(p), qSz = sizeof(q), gSz = sizeof(g); + int ret = wc_DsaExportParamsRaw(&dsa, p, &pSz, q, &qSz, g, + &gSz); + \endcode + + \sa wc_DsaImportParamsRaw +*/ +int wc_DsaExportParamsRaw(DsaKey* dsa, byte* p, word32* pSz, byte* q, + word32* qSz, byte* g, word32* gSz); + +/*! + \ingroup DSA + \brief Exports DSA key to raw format. + + \return 0 on success + \return negative on failure + + \param dsa DSA key structure + \param x Private key buffer + \param xSz Private key size (in/out) + \param y Public key buffer + \param ySz Public key size (in/out) + + _Example_ + \code + byte x[32], y[256]; + word32 xSz = sizeof(x), ySz = sizeof(y); + int ret = wc_DsaExportKeyRaw(&dsa, x, &xSz, y, &ySz); + \endcode + + \sa wc_DsaImportParamsRaw +*/ +int wc_DsaExportKeyRaw(DsaKey* dsa, byte* x, word32* xSz, byte* y, + word32* ySz); diff --git a/doc/dox_comments/header_files/ecc.h b/doc/dox_comments/header_files/ecc.h index 5becaf5a9..7f77bd244 100644 --- a/doc/dox_comments/header_files/ecc.h +++ b/doc/dox_comments/header_files/ecc.h @@ -543,12 +543,12 @@ int wc_ecc_sign_hash_ex(const byte* in, word32 inlen, WC_RNG* rng, \ingroup ECC \brief This function verifies the ECC signature of a hash to ensure - authenticity. It returns the answer through stat, with 1 corresponding + authenticity. It returns the answer through res, with 1 corresponding to a valid signature, and 0 corresponding to an invalid signature. \return 0 Returned upon successfully performing the signature verification. Note: This does not mean that the signature is verified. 
- The authenticity information is stored instead in stat + The authenticity information is stored instead in res \return BAD_FUNC_ARG Returned any of the input parameters evaluate to NULL \return MEMORY_E Returned if there is an error allocating memory \return MP_INIT_E may be returned if there is an error while computing @@ -579,7 +579,7 @@ int wc_ecc_sign_hash_ex(const byte* in, word32 inlen, WC_RNG* rng, \param hash pointer to the buffer containing the hash of the message verified \param hashlen length of the hash of the message verified - \param stat pointer to the result of the verification. 1 indicates the + \param res pointer to the result of the verification. 1 indicates the message was successfully verified \param key pointer to a public ECC key with which to verify the signature @@ -605,14 +605,14 @@ int wc_ecc_sign_hash_ex(const byte* in, word32 inlen, WC_RNG* rng, */ int wc_ecc_verify_hash(const byte* sig, word32 siglen, const byte* hash, - word32 hashlen, int* stat, ecc_key* key); + word32 hashlen, int* res, ecc_key* key); /*! \ingroup ECC - \brief Verify an ECC signature. Result is written to stat. + \brief Verify an ECC signature. Result is written to res. 1 is valid, 0 is invalid. - Note: Do not use the return value to test for valid. Only use stat. + Note: Do not use the return value to test for valid. Only use res. 
\return MP_OKAY If successful (even if the signature is not valid) \return ECC_BAD_ARG_E Returns if arguments are null or if @@ -623,20 +623,20 @@ int wc_ecc_verify_hash(const byte* sig, word32 siglen, const byte* hash, \param s The signature S component to verify \param hash The hash (message digest) that was signed \param hashlen The length of the hash (octets) - \param stat Result of signature, 1==valid, 0==invalid + \param res Result of signature, 1==valid, 0==invalid \param key The corresponding public ECC key _Example_ \code mp_int r; mp_int s; - int stat; + int res; byte hash[] = { Some hash } ecc_key key; - if(wc_ecc_verify_hash_ex(&r, &s, hash, hashlen, &stat, &key) == MP_OKAY) + if(wc_ecc_verify_hash_ex(&r, &s, hash, hashlen, &res, &key) == MP_OKAY) { - // Check stat + // Check res } \endcode @@ -644,7 +644,7 @@ int wc_ecc_verify_hash(const byte* sig, word32 siglen, const byte* hash, */ int wc_ecc_verify_hash_ex(mp_int *r, mp_int *s, const byte* hash, - word32 hashlen, int* stat, ecc_key* key); + word32 hashlen, int* res, ecc_key* key); /*! \ingroup ECC @@ -983,6 +983,7 @@ int wc_ecc_point_is_at_infinity(ecc_point *p); \param k The multiplicand. \param G Base point to multiply. \param R Destination of product. + \param a ECC curve parameter a. \param modulus The modulus for the curve. \param map If non-zero maps the point back to affine coordinates, otherwise it's left in jacobian-montgomery form. @@ -997,7 +998,10 @@ int wc_ecc_point_is_at_infinity(ecc_point *p); // Setup other arguments mp_int multiplicand; mp_int modulus; + mp_int a; int map; + int rc; + rc = wc_ecc_mulmod(&multiplicand, base, destination, &a, &modulus, map); \endcode \sa none @@ -1779,7 +1783,7 @@ int wc_ecc_ctx_set_algo(ecEncCtx* ctx, byte encAlgo, byte kdfAlgo, \sa wc_ecc_ctx_set_kdf_salt */ -const byte* wc_ecc_ctx_get_own_salt(ecEncCtx*); +const byte* wc_ecc_ctx_get_own_salt(ecEncCtx* ctx); /*! 
\ingroup ECC @@ -2143,3 +2147,1244 @@ int wc_ecc_set_nonblock(ecc_key *key, ecc_nb_ctx_t* ctx); \endcode */ int wc_ecc_set_curve(ecc_key *key, int keysize, int curve_id); + +/*! + \ingroup ECC + \brief Gets private key mp_int from ECC key. + + \return mp_int pointer on success + \return NULL on failure + + \param key ECC key structure + + _Example_ + \code + ecc_key key; + mp_int* priv = wc_ecc_key_get_priv(&key); + \endcode + + \sa wc_ecc_init +*/ +mp_int* wc_ecc_key_get_priv(ecc_key* key); + +/*! + \ingroup ECC + \brief Allocates and initializes new ECC key. + + \return ecc_key pointer on success + \return NULL on failure + + \param heap Heap hint for memory allocation + + _Example_ + \code + ecc_key* key = wc_ecc_key_new(NULL); + if (key != NULL) { + // use key + wc_ecc_key_free(key); + } + \endcode + + \sa wc_ecc_key_free +*/ +ecc_key* wc_ecc_key_new(void* heap); + +/*! + \ingroup ECC + \brief Returns number of supported ECC curve sets. + + \return Number of curve sets + + _Example_ + \code + size_t count = wc_ecc_get_sets_count(); + \endcode + + \sa wc_ecc_get_curve_params +*/ +size_t wc_ecc_get_sets_count(void); + +/*! + \ingroup ECC + \brief Gets curve name from curve ID. + + \return Curve name string on success + \return NULL on failure + + \param curve_id Curve identifier + + _Example_ + \code + const char* name = wc_ecc_get_name(ECC_SECP256R1); + \endcode + + \sa wc_ecc_get_curve_id +*/ +const char* wc_ecc_get_name(int curve_id); + +/*! + \ingroup ECC + \brief Makes ECC key with extended options. 
+ + \return 0 on success + \return negative on error + + \param rng Random number generator + \param keysize Key size in bytes + \param key ECC key structure + \param curve_id Curve identifier + \param flags Additional flags + + _Example_ + \code + WC_RNG rng; + ecc_key key; + int ret = wc_ecc_make_key_ex2(&rng, 32, &key, + ECC_SECP256R1, 0); + \endcode + + \sa wc_ecc_make_key_ex +*/ +int wc_ecc_make_key_ex2(WC_RNG* rng, int keysize, ecc_key* key, + int curve_id, int flags); + +/*! + \ingroup ECC + \brief Checks if point is on curve. + + \return 1 if point is on curve + \return 0 if not on curve + \return negative on error + + \param ecp ECC point + \param a Curve parameter a + \param b Curve parameter b + \param prime Curve prime + + _Example_ + \code + ecc_point* point; + mp_int a, b, prime; + int ret = wc_ecc_is_point(point, &a, &b, &prime); + \endcode + + \sa wc_ecc_point_is_on_curve +*/ +int wc_ecc_is_point(ecc_point* ecp, mp_int* a, mp_int* b, + mp_int* prime); + +/*! + \ingroup ECC + \brief Gets generator point for curve. + + \return 0 on success + \return negative on error + + \param ecp ECC point to store generator + \param curve_idx Curve index + + _Example_ + \code + ecc_point* gen = wc_ecc_new_point(); + int ret = wc_ecc_get_generator(gen, 0); + \endcode + + \sa wc_ecc_get_curve_params +*/ +int wc_ecc_get_generator(ecc_point* ecp, int curve_idx); + +/*! + \ingroup ECC + \brief Sets deterministic signing mode. + + \return 0 on success + \return negative on error + + \param key ECC key + \param flag Enable/disable flag + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_set_deterministic(&key, 1); + \endcode + + \sa wc_ecc_set_deterministic_ex +*/ +int wc_ecc_set_deterministic(ecc_key* key, byte flag); + +/*! + \ingroup ECC + \brief Sets deterministic signing with hash type. 
+ + \return 0 on success + \return negative on error + + \param key ECC key + \param flag Enable/disable flag + \param hashType Hash algorithm type + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_set_deterministic_ex(&key, 1, WC_HASH_TYPE_SHA256); + \endcode + + \sa wc_ecc_set_deterministic +*/ +int wc_ecc_set_deterministic_ex(ecc_key* key, byte flag, + enum wc_HashType hashType); + +/*! + \ingroup ECC + \brief Generates deterministic k value for signing. + + \return 0 on success + \return negative on error + + \param hash Hash value + \param hashSz Hash size + \param hashType Hash algorithm type + \param priv Private key + \param k Output k value + \param order Curve order + \param heap Heap hint + + _Example_ + \code + byte hash[32]; + mp_int priv, k, order; + int ret = wc_ecc_gen_deterministic_k(hash, 32, + WC_HASH_TYPE_SHA256, + &priv, &k, &order, NULL); + \endcode + + \sa wc_ecc_sign_set_k +*/ +int wc_ecc_gen_deterministic_k(const byte* hash, word32 hashSz, + enum wc_HashType hashType, mp_int* priv, mp_int* k, + mp_int* order, void* heap); + +/*! + \ingroup ECC + \brief Sets k value for signing. + + \return 0 on success + \return negative on error + + \param k K value buffer + \param klen K value length + \param key ECC key + + _Example_ + \code + byte k[32]; + ecc_key key; + int ret = wc_ecc_sign_set_k(k, sizeof(k), &key); + \endcode + + \sa wc_ecc_gen_deterministic_k +*/ +int wc_ecc_sign_set_k(const byte* k, word32 klen, ecc_key* key); + +/*! + \ingroup ECC + \brief Initializes ECC key with ID. + + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. 
+ + \return 0 on success + \return negative on error + + \param key ECC key + \param id ID buffer + \param len ID length + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + ecc_key key; + unsigned char id[] = "mykey"; + int ret = wc_ecc_init_id(&key, id, sizeof(id), NULL, + INVALID_DEVID); + \endcode + + \sa wc_ecc_init_label +*/ +int wc_ecc_init_id(ecc_key* key, unsigned char* id, int len, + void* heap, int devId); + +/*! + \ingroup ECC + \brief Initializes ECC key with label. + + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. + + \return 0 on success + \return negative on error + + \param key ECC key + \param label Label string + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_init_label(&key, "mykey", NULL, + INVALID_DEVID); + \endcode + + \sa wc_ecc_init_id +*/ +int wc_ecc_init_label(ecc_key* key, const char* label, void* heap, + int devId); + +/*! + \ingroup ECC + \brief Sets flags on ECC key. + + \return 0 on success + \return negative on error + + \param key ECC key + \param flags Flags to set + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_set_flags(&key, WC_ECC_FLAG_COFACTOR); + \endcode + + \sa wc_ecc_init +*/ +int wc_ecc_set_flags(ecc_key* key, word32 flags); + +/*! + \ingroup ECC + \brief Initializes fixed-point cache. + + \return none No returns + + _Example_ + \code + wc_ecc_fp_init(); + \endcode + + \sa wc_ecc_init +*/ +void wc_ecc_fp_init(void); + +/*! + \ingroup ECC + \brief Sets RNG for ECC key. + + \return 0 on success + \return negative on error + + \param key ECC key + \param rng Random number generator + + _Example_ + \code + ecc_key key; + WC_RNG rng; + int ret = wc_ecc_set_rng(&key, &rng); + \endcode + + \sa wc_ecc_make_key +*/ +int wc_ecc_set_rng(ecc_key* key, WC_RNG* rng); + +/*! + \ingroup ECC + \brief Gets curve index from curve ID. 
+ + \return Curve index on success + \return negative on error + + \param curve_id Curve identifier + + _Example_ + \code + int idx = wc_ecc_get_curve_idx(ECC_SECP256R1); + \endcode + + \sa wc_ecc_get_curve_id +*/ +int wc_ecc_get_curve_idx(int curve_id); + +/*! + \ingroup ECC + \brief Gets curve ID from curve index. + + \return Curve ID on success + \return negative on error + + \param curve_idx Curve index + + _Example_ + \code + int id = wc_ecc_get_curve_id(0); + \endcode + + \sa wc_ecc_get_curve_idx +*/ +int wc_ecc_get_curve_id(int curve_idx); + +/*! + \ingroup ECC + \brief Gets curve size from curve ID. + + \return Key size in bytes on success + \return negative on error + + \param curve_id Curve identifier + + _Example_ + \code + int size = wc_ecc_get_curve_size_from_id(ECC_SECP256R1); + \endcode + + \sa wc_ecc_get_curve_id +*/ +int wc_ecc_get_curve_size_from_id(int curve_id); + +/*! + \ingroup ECC + \brief Gets curve index from curve name. + + \return Curve index on success + \return negative on error + + \param curveName Curve name string + + _Example_ + \code + int idx = wc_ecc_get_curve_idx_from_name("SECP256R1"); + \endcode + + \sa wc_ecc_get_name +*/ +int wc_ecc_get_curve_idx_from_name(const char* curveName); + +/*! + \ingroup ECC + \brief Gets curve size from curve name. + + \return Key size in bytes on success + \return negative on error + + \param curveName Curve name string + + _Example_ + \code + int size = wc_ecc_get_curve_size_from_name("SECP256R1"); + \endcode + + \sa wc_ecc_get_curve_idx_from_name +*/ +int wc_ecc_get_curve_size_from_name(const char* curveName); + +/*! + \ingroup ECC + \brief Gets curve ID from curve name. + + \return Curve ID on success + \return negative on error + + \param curveName Curve name string + + _Example_ + \code + int id = wc_ecc_get_curve_id_from_name("SECP256R1"); + \endcode + + \sa wc_ecc_get_name +*/ +int wc_ecc_get_curve_id_from_name(const char* curveName); + +/*! 
+ \ingroup ECC + \brief Gets curve ID from curve parameters. + + \return Curve ID on success + \return negative on error + + \param fieldSize Field size + \param prime Prime modulus + \param primeSz Prime size + \param Af Curve parameter A + \param AfSz A size + \param Bf Curve parameter B + \param BfSz B size + \param order Curve order + \param orderSz Order size + \param Gx Generator X coordinate + \param GxSz Gx size + \param Gy Generator Y coordinate + \param GySz Gy size + \param cofactor Curve cofactor + + _Example_ + \code + int id = wc_ecc_get_curve_id_from_params(256, prime, 32, + Af, 32, Bf, 32, + order, 32, Gx, 32, + Gy, 32, 1); + \endcode + + \sa wc_ecc_get_curve_params +*/ +int wc_ecc_get_curve_id_from_params(int fieldSize, + const byte* prime, word32 primeSz, const byte* Af, word32 AfSz, + const byte* Bf, word32 BfSz, const byte* order, word32 orderSz, + const byte* Gx, word32 GxSz, const byte* Gy, word32 GySz, + int cofactor); + +/*! + \ingroup ECC + \brief Gets curve ID from domain parameters. + + \return Curve ID on success + \return negative on error + + \param dp Domain parameters + + _Example_ + \code + const ecc_set_type* dp; + int id = wc_ecc_get_curve_id_from_dp_params(dp); + \endcode + + \sa wc_ecc_get_curve_params +*/ +int wc_ecc_get_curve_id_from_dp_params(const ecc_set_type* dp); + +/*! + \ingroup ECC + \brief Gets curve ID from OID. + + \return Curve ID on success + \return negative on error + + \param oid OID buffer + \param len OID length + + _Example_ + \code + byte oid[] = {0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x03, 0x01, 0x07}; + int id = wc_ecc_get_curve_id_from_oid(oid, sizeof(oid)); + \endcode + + \sa wc_ecc_get_oid +*/ +int wc_ecc_get_curve_id_from_oid(const byte* oid, word32 len); + +/*! + \ingroup ECC + \brief Gets curve parameters from curve index. 
+
+ \return ecc_set_type pointer on success
+ \return NULL on failure
+
+ \param curve_idx Curve index
+
+ _Example_
+ \code
+ const ecc_set_type* params = wc_ecc_get_curve_params(0);
+ \endcode
+
+ \sa wc_ecc_get_curve_idx
+*/
+const ecc_set_type* wc_ecc_get_curve_params(int curve_idx);
+
+/*!
+ \ingroup ECC
+ \brief Allocates new ECC point.
+
+ \return ecc_point pointer on success
+ \return NULL on failure
+
+ _Example_
+ \code
+ ecc_point* point = wc_ecc_new_point();
+ if (point != NULL) {
+ // use point
+ wc_ecc_del_point(point);
+ }
+ \endcode
+
+ \sa wc_ecc_del_point
+*/
+ecc_point* wc_ecc_new_point(void);
+
+/*!
+ \ingroup ECC
+ \brief Allocates new ECC point with heap hint.
+
+ \return ecc_point pointer on success
+ \return NULL on failure
+
+ \param h Heap hint
+
+ _Example_
+ \code
+ ecc_point* point = wc_ecc_new_point_h(NULL);
+ if (point != NULL) {
+ // use point
+ wc_ecc_del_point_h(point, NULL);
+ }
+ \endcode
+
+ \sa wc_ecc_del_point_h
+*/
+ecc_point* wc_ecc_new_point_h(void* h);
+
+/*!
+ \ingroup ECC
+ \brief Frees ECC point with heap hint.
+
+ \return none No returns
+
+ \param p ECC point to free
+ \param h Heap hint
+
+ _Example_
+ \code
+ ecc_point* point = wc_ecc_new_point_h(NULL);
+ // use point
+ wc_ecc_del_point_h(point, NULL);
+ \endcode
+
+ \sa wc_ecc_new_point_h
+*/
+void wc_ecc_del_point_h(ecc_point* p, void* h);
+
+/*!
+ \ingroup ECC
+ \brief Securely zeros ECC point.
+
+ \return none No returns
+
+ \param p ECC point to zero
+
+ _Example_
+ \code
+ ecc_point* point = wc_ecc_new_point();
+ // use point for sensitive data, then wipe it
+ wc_ecc_forcezero_point(point);
+ wc_ecc_del_point(point);
+ \endcode
+
+ \sa wc_ecc_del_point
+*/
+void wc_ecc_forcezero_point(ecc_point* p);
+
+/*!
+ \ingroup ECC
+ \brief Checks if point is on curve.
+ + \return 1 if on curve + \return 0 if not on curve + \return negative on error + + \param p ECC point + \param curve_idx Curve index + + _Example_ + \code + ecc_point* point; + int ret = wc_ecc_point_is_on_curve(point, 0); + \endcode + + \sa wc_ecc_is_point +*/ +int wc_ecc_point_is_on_curve(ecc_point *p, int curve_idx); + +/*! + \ingroup ECC + \brief Imports X9.63 format with curve ID. + + \return 0 on success + \return negative on error + + \param in Input buffer + \param inLen Input length + \param key ECC key + \param curve_id Curve identifier + + _Example_ + \code + byte x963[65]; + ecc_key key; + int ret = wc_ecc_import_x963_ex(x963, sizeof(x963), &key, + ECC_SECP256R1); + \endcode + + \sa wc_ecc_import_x963 +*/ +int wc_ecc_import_x963_ex(const byte* in, word32 inLen, + ecc_key* key, int curve_id); + +/*! + \ingroup ECC + \brief Imports private key with curve ID. + + \return 0 on success + \return negative on error + + \param priv Private key buffer + \param privSz Private key size + \param pub Public key buffer + \param pubSz Public key size + \param key ECC key + \param curve_id Curve identifier + + _Example_ + \code + byte priv[32], pub[65]; + ecc_key key; + int ret = wc_ecc_import_private_key_ex(priv, 32, pub, 65, + &key, ECC_SECP256R1); + \endcode + + \sa wc_ecc_import_private_key +*/ +int wc_ecc_import_private_key_ex(const byte* priv, word32 privSz, + const byte* pub, word32 pubSz, ecc_key* key, int curve_id); + +/*! + \ingroup ECC + \brief Converts raw r,s to signature. 
+ + \return 0 on success + \return negative on error + + \param r R value buffer + \param rSz R value size + \param s S value buffer + \param sSz S value size + \param out Output signature buffer + \param outlen Output signature length + + _Example_ + \code + byte r[32], s[32], sig[72]; + word32 sigLen = sizeof(sig); + int ret = wc_ecc_rs_raw_to_sig(r, 32, s, 32, sig, &sigLen); + \endcode + + \sa wc_ecc_sig_to_rs +*/ +int wc_ecc_rs_raw_to_sig(const byte* r, word32 rSz, const byte* s, + word32 sSz, byte* out, word32* outlen); + +/*! + \ingroup ECC + \brief Converts signature to raw r,s. + + \return 0 on success + \return negative on error + + \param sig Signature buffer + \param sigLen Signature length + \param r R value buffer + \param rLen R value length + \param s S value buffer + \param sLen S value length + + _Example_ + \code + byte sig[72], r[32], s[32]; + word32 rLen = 32, sLen = 32; + int ret = wc_ecc_sig_to_rs(sig, 72, r, &rLen, s, &sLen); + \endcode + + \sa wc_ecc_rs_raw_to_sig +*/ +int wc_ecc_sig_to_rs(const byte* sig, word32 sigLen, byte* r, + word32* rLen, byte* s, word32* sLen); + +/*! + \ingroup ECC + \brief Imports raw key with curve ID. + + \return 0 on success + \return negative on error + + \param key ECC key + \param qx X coordinate string + \param qy Y coordinate string + \param d Private key string + \param curve_id Curve identifier + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_import_raw_ex(&key, qxStr, qyStr, dStr, + ECC_SECP256R1); + \endcode + + \sa wc_ecc_import_raw +*/ +int wc_ecc_import_raw_ex(ecc_key* key, const char* qx, + const char* qy, const char* d, int curve_id); + +/*! + \ingroup ECC + \brief Imports unsigned key with curve ID. 
+ + \return 0 on success + \return negative on error + + \param key ECC key + \param qx X coordinate buffer + \param qy Y coordinate buffer + \param d Private key buffer + \param curve_id Curve identifier + + _Example_ + \code + ecc_key key; + byte qx[32], qy[32], d[32]; + int ret = wc_ecc_import_unsigned(&key, qx, qy, d, + ECC_SECP256R1); + \endcode + + \sa wc_ecc_import_raw_ex +*/ +int wc_ecc_import_unsigned(ecc_key* key, const byte* qx, + const byte* qy, const byte* d, int curve_id); + +/*! + \ingroup ECC + \brief Exports key with encoding type. + + \return 0 on success + \return negative on error + + \param key ECC key + \param qx X coordinate buffer + \param qxLen X coordinate length + \param qy Y coordinate buffer + \param qyLen Y coordinate length + \param d Private key buffer + \param dLen Private key length + \param encType Encoding type + + _Example_ + \code + ecc_key key; + byte qx[32], qy[32], d[32]; + word32 qxLen = 32, qyLen = 32, dLen = 32; + int ret = wc_ecc_export_ex(&key, qx, &qxLen, qy, &qyLen, + d, &dLen, 0); + \endcode + + \sa wc_ecc_export_public_raw +*/ +int wc_ecc_export_ex(ecc_key* key, byte* qx, word32* qxLen, + byte* qy, word32* qyLen, byte* d, word32* dLen, int encType); + +/*! + \ingroup ECC + \brief Exports public key in raw format. + + \return 0 on success + \return negative on error + + \param key ECC key + \param qx X coordinate buffer + \param qxLen X coordinate length + \param qy Y coordinate buffer + \param qyLen Y coordinate length + + _Example_ + \code + ecc_key key; + byte qx[32], qy[32]; + word32 qxLen = 32, qyLen = 32; + int ret = wc_ecc_export_public_raw(&key, qx, &qxLen, qy, + &qyLen); + \endcode + + \sa wc_ecc_export_private_raw +*/ +int wc_ecc_export_public_raw(ecc_key* key, byte* qx, + word32* qxLen, byte* qy, word32* qyLen); + +/*! + \ingroup ECC + \brief Exports private key in raw format. 
+ + \return 0 on success + \return negative on error + + \param key ECC key + \param qx X coordinate buffer + \param qxLen X coordinate length + \param qy Y coordinate buffer + \param qyLen Y coordinate length + \param d Private key buffer + \param dLen Private key length + + _Example_ + \code + ecc_key key; + byte qx[32], qy[32], d[32]; + word32 qxLen = 32, qyLen = 32, dLen = 32; + int ret = wc_ecc_export_private_raw(&key, qx, &qxLen, qy, + &qyLen, d, &dLen); + \endcode + + \sa wc_ecc_export_public_raw +*/ +int wc_ecc_export_private_raw(ecc_key* key, byte* qx, + word32* qxLen, byte* qy, word32* qyLen, byte* d, word32* dLen); + +/*! + \ingroup ECC + \brief Exports point in DER format with compression. + + \return Size on success + \return negative on error + + \param curve_idx Curve index + \param point ECC point + \param out Output buffer + \param outLen Output length + \param compressed Compression flag + + _Example_ + \code + ecc_point* point; + byte out[65]; + word32 outLen = sizeof(out); + int ret = wc_ecc_export_point_der_ex(0, point, out, &outLen, + 0); + \endcode + + \sa wc_ecc_export_point_der +*/ +int wc_ecc_export_point_der_ex(const int curve_idx, + ecc_point* point, byte* out, word32* outLen, int compressed); + +/*! + \ingroup ECC + \brief Imports point from DER format. + + \return 0 on success + \return negative on error + + \param in Input buffer + \param inLen Input length + \param curve_idx Curve index + \param point ECC point + \param shortKeySize Short key size flag + + _Example_ + \code + byte der[65]; + ecc_point* point = wc_ecc_new_point(); + int ret = wc_ecc_import_point_der_ex(der, sizeof(der), 0, + point, 0); + \endcode + + \sa wc_ecc_import_point_der +*/ +int wc_ecc_import_point_der_ex(const byte* in, word32 inLen, + const int curve_idx, ecc_point* point, int shortKeySize); + +/*! + \ingroup ECC + \brief Gets OID for curve. 
+ + \return 0 on success + \return negative on error + + \param oidSum OID sum + \param oid OID buffer pointer + \param oidSz OID size pointer + + _Example_ + \code + const byte* oid; + word32 oidSz; + int ret = wc_ecc_get_oid(0x2A8648CE3D030107, &oid, &oidSz); + \endcode + + \sa wc_ecc_get_curve_id_from_oid +*/ +int wc_ecc_get_oid(word32 oidSum, const byte** oid, word32* oidSz); + +/*! + \ingroup ECC + \brief Sets custom curve parameters. + + \return 0 on success + \return negative on error + + \param key ECC key + \param dp Domain parameters + + _Example_ + \code + ecc_key key; + ecc_set_type dp; + int ret = wc_ecc_set_custom_curve(&key, &dp); + \endcode + + \sa wc_ecc_get_curve_params +*/ +int wc_ecc_set_custom_curve(ecc_key* key, const ecc_set_type* dp); + +/*! + \ingroup ECC + \brief Creates new ECC encryption context. + + \return ecEncCtx pointer on success + \return NULL on failure + + \param flags Context flags + \param rng Random number generator + + _Example_ + \code + WC_RNG rng; + ecEncCtx* ctx = wc_ecc_ctx_new(0, &rng); + \endcode + + \sa wc_ecc_ctx_free +*/ +ecEncCtx* wc_ecc_ctx_new(int flags, WC_RNG* rng); + +/*! + \ingroup ECC + \brief Creates new ECC encryption context with heap. + + \return ecEncCtx pointer on success + \return NULL on failure + + \param flags Context flags + \param rng Random number generator + \param heap Heap hint + + _Example_ + \code + WC_RNG rng; + ecEncCtx* ctx = wc_ecc_ctx_new_ex(0, &rng, NULL); + \endcode + + \sa wc_ecc_ctx_new +*/ +ecEncCtx* wc_ecc_ctx_new_ex(int flags, WC_RNG* rng, void* heap); + +/*! + \ingroup ECC + \brief Resets ECC encryption context. + + \return 0 on success + \return negative on error + + \param ctx ECC encryption context + \param rng Random number generator + + _Example_ + \code + ecEncCtx* ctx; + WC_RNG rng; + int ret = wc_ecc_ctx_reset(ctx, &rng); + \endcode + + \sa wc_ecc_ctx_new +*/ +int wc_ecc_ctx_reset(ecEncCtx* ctx, WC_RNG* rng); + +/*! + \ingroup ECC + \brief Gets own salt from context. 
+ + \return Salt pointer on success + \return NULL on failure + + \param ctx ECC encryption context + + _Example_ + \code + ecEncCtx* ctx; + const byte* salt = wc_ecc_ctx_get_own_salt(ctx); + \endcode + + \sa wc_ecc_ctx_set_own_salt +*/ +const byte* wc_ecc_ctx_get_own_salt(ecEncCtx* ctx); + +/*! + \ingroup ECC + \brief Sets own salt in context. + + \return 0 on success + \return negative on error + + \param ctx ECC encryption context + \param salt Salt buffer + \param sz Salt size + + _Example_ + \code + ecEncCtx* ctx; + byte salt[16]; + int ret = wc_ecc_ctx_set_own_salt(ctx, salt, sizeof(salt)); + \endcode + + \sa wc_ecc_ctx_get_own_salt +*/ +int wc_ecc_ctx_set_own_salt(ecEncCtx* ctx, const byte* salt, + word32 sz); + +/*! + \ingroup ECC + \brief X9.63 Key Derivation Function. + + \return 0 on success + \return negative on error + + \param type Hash type + \param secret Shared secret + \param secretSz Secret size + \param sinfo Shared info + \param sinfoSz Shared info size + \param out Output buffer + \param outSz Output size + + _Example_ + \code + byte secret[32], sinfo[10], out[32]; + int ret = wc_X963_KDF(WC_HASH_TYPE_SHA256, secret, 32, + sinfo, 10, out, 32); + \endcode + + \sa wc_ecc_shared_secret +*/ +int wc_X963_KDF(enum wc_HashType type, const byte* secret, + word32 secretSz, const byte* sinfo, word32 sinfoSz, + byte* out, word32 outSz); + +/*! + \ingroup ECC + \brief Initializes curve cache. + + \return 0 on success + \return negative on error + + _Example_ + \code + int ret = wc_ecc_curve_cache_init(); + \endcode + + \sa wc_ecc_curve_cache_free +*/ +int wc_ecc_curve_cache_init(void); + +/*! + \ingroup ECC + \brief Frees curve cache. + + \return none No returns + + _Example_ + \code + wc_ecc_curve_cache_free(); + \endcode + + \sa wc_ecc_curve_cache_init +*/ +void wc_ecc_curve_cache_free(void); + +/*! + \ingroup ECC + \brief Generates random k value. 
+ + \return 0 on success + \return negative on error + + \param rng Random number generator + \param size Key size + \param k Output k value + \param order Curve order + + _Example_ + \code + WC_RNG rng; + mp_int k, order; + int ret = wc_ecc_gen_k(&rng, 32, &k, &order); + \endcode + + \sa wc_ecc_sign_hash +*/ +int wc_ecc_gen_k(WC_RNG* rng, int size, mp_int* k, mp_int* order); + +/*! + \ingroup ECC + \brief Sets remote handle for hardware. + + \return 0 on success + \return negative on error + + \param key ECC key + \param handle Remote handle + + _Example_ + \code + ecc_key key; + remote_handle64 handle = 0x1234; + int ret = wc_ecc_set_handle(&key, handle); + \endcode + + \sa wc_ecc_init +*/ +int wc_ecc_set_handle(ecc_key* key, remote_handle64 handle); + +/*! + \ingroup ECC + \brief Uses key ID for hardware. + + \return 0 on success + \return negative on error + + \param key ECC key + \param keyId Key identifier + \param flags Flags + + _Example_ + \code + ecc_key key; + int ret = wc_ecc_use_key_id(&key, 1, 0); + \endcode + + \sa wc_ecc_get_key_id +*/ +int wc_ecc_use_key_id(ecc_key* key, word32 keyId, word32 flags); + +/*! + \ingroup ECC + \brief Gets key ID from hardware key. + + \return 0 on success + \return negative on error + + \param key ECC key + \param keyId Key identifier pointer + + _Example_ + \code + ecc_key key; + word32 keyId; + int ret = wc_ecc_get_key_id(&key, &keyId); + \endcode + + \sa wc_ecc_use_key_id +*/ +int wc_ecc_get_key_id(ecc_key* key, word32* keyId); diff --git a/doc/dox_comments/header_files/ed25519.h b/doc/dox_comments/header_files/ed25519.h index 977a8e4a9..13443c6fa 100644 --- a/doc/dox_comments/header_files/ed25519.h +++ b/doc/dox_comments/header_files/ed25519.h @@ -1087,3 +1087,218 @@ int wc_ed25519_pub_size(const ed25519_key* key); */ int wc_ed25519_sig_size(const ed25519_key* key); +/*! + \ingroup ED25519 + \brief Signs message with extended parameters. 
+ + \return 0 on success + \return negative on failure + + \param in Input message + \param inLen Input message length + \param out Output signature buffer + \param outLen Output signature length pointer + \param key Ed25519 key + \param type Signature type + \param context Context buffer + \param contextLen Context length + + _Example_ + \code + byte msg[] = "message"; + byte sig[ED25519_SIG_SIZE]; + word32 sigLen = sizeof(sig); + int ret = wc_ed25519_sign_msg_ex(msg, sizeof(msg), sig, &sigLen, + &key, Ed25519, NULL, 0); + \endcode + + \sa wc_ed25519_sign_msg +*/ +int wc_ed25519_sign_msg_ex(const byte* in, word32 inLen, byte* out, + word32 *outLen, ed25519_key* key, byte type, const byte* context, + byte contextLen); + +/*! + \ingroup ED25519 + \brief Verifies signature with extended parameters. + + \return 0 on success + \return negative on failure + + \param sig Signature buffer + \param sigLen Signature length + \param msg Message buffer + \param msgLen Message length + \param res Verification result pointer + \param key Ed25519 key + \param type Signature type + \param context Context buffer + \param contextLen Context length + + _Example_ + \code + byte msg[] = "message"; + byte sig[ED25519_SIG_SIZE]; + int res; + int ret = wc_ed25519_verify_msg_ex(sig, sizeof(sig), msg, + sizeof(msg), &res, &key, + Ed25519, NULL, 0); + \endcode + + \sa wc_ed25519_verify_msg +*/ +int wc_ed25519_verify_msg_ex(const byte* sig, word32 sigLen, + const byte* msg, word32 msgLen, int* res, ed25519_key* key, + byte type, const byte* context, byte contextLen); + +/*! + \ingroup ED25519 + \brief Initializes streaming verification. 
+ + \return 0 on success + \return negative on failure + + \param sig Signature buffer + \param sigLen Signature length + \param key Ed25519 key + \param type Signature type + \param context Context buffer + \param contextLen Context length + + _Example_ + \code + byte sig[ED25519_SIG_SIZE]; + int ret = wc_ed25519_verify_msg_init(sig, sizeof(sig), &key, + Ed25519, NULL, 0); + \endcode + + \sa wc_ed25519_verify_msg_update + \sa wc_ed25519_verify_msg_final +*/ +int wc_ed25519_verify_msg_init(const byte* sig, word32 sigLen, + ed25519_key* key, byte type, const byte* context, + byte contextLen); + +/*! + \ingroup ED25519 + \brief Updates streaming verification with message segment. + + \return 0 on success + \return negative on failure + + \param msgSegment Message segment buffer + \param msgSegmentLen Message segment length + \param key Ed25519 key + + _Example_ + \code + byte msgPart[] = "part"; + int ret = wc_ed25519_verify_msg_update(msgPart, sizeof(msgPart), + &key); + \endcode + + \sa wc_ed25519_verify_msg_init + \sa wc_ed25519_verify_msg_final +*/ +int wc_ed25519_verify_msg_update(const byte* msgSegment, + word32 msgSegmentLen, ed25519_key* key); + +/*! + \ingroup ED25519 + \brief Finalizes streaming verification. + + \return 0 on success + \return negative on failure + + \param sig Signature buffer + \param sigLen Signature length + \param res Verification result pointer + \param key Ed25519 key + + _Example_ + \code + byte sig[ED25519_SIG_SIZE]; + int res; + int ret = wc_ed25519_verify_msg_final(sig, sizeof(sig), &res, + &key); + \endcode + + \sa wc_ed25519_verify_msg_init + \sa wc_ed25519_verify_msg_update +*/ +int wc_ed25519_verify_msg_final(const byte* sig, word32 sigLen, + int* res, ed25519_key* key); + +/*! + \ingroup ED25519 + \brief Initializes Ed25519 key with extended parameters. 
+ + \return 0 on success + \return negative on failure + + \param key Ed25519 key structure + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + + _Example_ + \code + ed25519_key key; + int ret = wc_ed25519_init_ex(&key, NULL, INVALID_DEVID); + \endcode + + \sa wc_ed25519_init +*/ +int wc_ed25519_init_ex(ed25519_key* key, void* heap, int devId); + +/*! + \ingroup ED25519 + \brief Allocates and initializes new Ed25519 key. These New/Delete + functions are exposed to support allocation of the structure using + dynamic memory to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return ed25519_key pointer on success + \return NULL on failure + + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + \param result_code Result code pointer + + _Example_ + \code + int result; + ed25519_key* key = wc_ed25519_new(NULL, INVALID_DEVID, &result); + \endcode + + \sa wc_ed25519_delete +*/ +ed25519_key* wc_ed25519_new(void* heap, int devId, int *result_code); + +/*! + \ingroup ED25519 + \brief Frees and deletes Ed25519 key. These New/Delete functions are + exposed to support allocation of the structure using dynamic memory + to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. 
+ + \return 0 on success + \return negative on failure + + \param key Ed25519 key to delete + \param key_p Pointer to key pointer (set to NULL after delete) + + _Example_ + \code + ed25519_key* key = wc_ed25519_new(NULL, INVALID_DEVID, NULL); + int ret = wc_ed25519_delete(key, &key); + \endcode + + \sa wc_ed25519_new +*/ +int wc_ed25519_delete(ed25519_key* key, ed25519_key** key_p); diff --git a/doc/dox_comments/header_files/iotsafe.h b/doc/dox_comments/header_files/iotsafe.h index 46bd2e5da..31d97838e 100644 --- a/doc/dox_comments/header_files/iotsafe.h +++ b/doc/dox_comments/header_files/iotsafe.h @@ -288,6 +288,43 @@ int wc_iotsafe_ecc_import_public(ecc_key *key, byte key_id); */ int wc_iotsafe_ecc_export_public(ecc_key *key, byte key_id); +/*! + \ingroup IoTSafe + \brief Export an ECC 256-bit public key, from ecc_key object to a + writable public-key slot into the IoT-Safe applet. Equivalent to + wc_iotsafe_ecc_export_public, except that it can be invoked with a + key ID of two or more bytes. + + \return 0 upon success + \return < 0 in case of failure + + \param key the ecc_key object containing the key to be exported + \param key_id pointer to the key id in the IoT-Safe applet where + the public key will be stored + \param id_size the key id size in bytes + + _Example_ + \code + ecc_key key; + word16 keyId = 0x0302; + + wc_ecc_init(&key); + wc_ecc_make_key(&rng, 32, &key); + + int ret = wc_iotsafe_ecc_export_public_ex(&key, (byte*)&keyId, + sizeof(keyId)); + if (ret != 0) { + // error exporting public key + } + \endcode + + \sa wc_iotsafe_ecc_export_public + \sa wc_iotsafe_ecc_import_public_ex + \sa wc_iotsafe_ecc_export_private_ex +*/ +int wc_iotsafe_ecc_export_public_ex(ecc_key *key, byte *key_id, + word16 id_size); + /*! 
\ingroup IoTSafe @@ -462,4 +499,4 @@ int wc_iotsafe_ecc_gen_k(byte key_id); \sa wc_iotsafe_ecc_sign_hash_ex \sa wc_iotsafe_ecc_verify_hash_ex */ -int wc_iotsafe_ecc_gen_k(byte key_id); +int wc_iotsafe_ecc_gen_k_ex(byte *key_id, word16 id_size); diff --git a/doc/dox_comments/header_files/pkcs7.h b/doc/dox_comments/header_files/pkcs7.h index 925c5223b..69d927c69 100644 --- a/doc/dox_comments/header_files/pkcs7.h +++ b/doc/dox_comments/header_files/pkcs7.h @@ -877,3 +877,1329 @@ int wc_PKCS7_DecodeOneSymmetricKeyAttribute(const byte * osk, */ int wc_PKCS7_DecodeOneSymmetricKeyKey(const byte * osk, word32 oskSz, const byte ** key, word32 * keySz); + +/*! + \ingroup PKCS7 + \brief Creates new PKCS7 structure. + + \return Pointer to new PKCS7 structure on success + \return NULL on error + + \param none No parameters + + _Example_ + \code + PKCS7* pkcs7 = wolfSSL_PKCS7_new(); + if (pkcs7 != NULL) { + // use pkcs7 + wolfSSL_PKCS7_free(pkcs7); + } + \endcode + + \sa wolfSSL_PKCS7_free +*/ +PKCS7* wolfSSL_PKCS7_new(void); + +/*! + \ingroup PKCS7 + \brief Creates new PKCS7_SIGNED structure. + + \return Pointer to new PKCS7_SIGNED structure on success + \return NULL on error + + \param none No parameters + + _Example_ + \code + PKCS7_SIGNED* p7 = wolfSSL_PKCS7_SIGNED_new(); + if (p7 != NULL) { + // use p7 + wolfSSL_PKCS7_SIGNED_free(p7); + } + \endcode + + \sa wolfSSL_PKCS7_SIGNED_free +*/ +PKCS7_SIGNED* wolfSSL_PKCS7_SIGNED_new(void); + +/*! + \ingroup PKCS7 + \brief Frees PKCS7 structure. + + \return none No returns + + \param p7 PKCS7 structure to free + + _Example_ + \code + PKCS7* pkcs7 = wolfSSL_PKCS7_new(); + wolfSSL_PKCS7_free(pkcs7); + \endcode + + \sa wolfSSL_PKCS7_new +*/ +void wolfSSL_PKCS7_free(PKCS7* p7); + +/*! + \ingroup PKCS7 + \brief Frees PKCS7_SIGNED structure. 
+ + \return none No returns + + \param p7 PKCS7_SIGNED structure to free + + _Example_ + \code + PKCS7_SIGNED* p7 = wolfSSL_PKCS7_SIGNED_new(); + wolfSSL_PKCS7_SIGNED_free(p7); + \endcode + + \sa wolfSSL_PKCS7_SIGNED_new +*/ +void wolfSSL_PKCS7_SIGNED_free(PKCS7_SIGNED* p7); + +/*! + \ingroup PKCS7 + \brief Decodes DER-encoded PKCS7 structure. + + \return Pointer to decoded PKCS7 structure on success + \return NULL on error + + \param p7 Pointer to PKCS7 pointer (can be NULL) + \param in Pointer to DER-encoded data + \param len Length of DER data + + _Example_ + \code + PKCS7* p7 = NULL; + const unsigned char* der = ...; // DER data + p7 = wolfSSL_d2i_PKCS7(&p7, &der, derLen); + \endcode + + \sa wolfSSL_i2d_PKCS7 +*/ +PKCS7* wolfSSL_d2i_PKCS7(PKCS7** p7, const unsigned char** in, int len); + +/*! + \ingroup PKCS7 + \brief Decodes PKCS7 from BIO. + + \return Pointer to decoded PKCS7 structure on success + \return NULL on error + + \param bio BIO to read from + \param p7 Pointer to PKCS7 pointer (can be NULL) + + _Example_ + \code + WOLFSSL_BIO* bio = wolfSSL_BIO_new_file("pkcs7.der", "rb"); + PKCS7* p7 = wolfSSL_d2i_PKCS7_bio(bio, NULL); + \endcode + + \sa wolfSSL_i2d_PKCS7_bio +*/ +PKCS7* wolfSSL_d2i_PKCS7_bio(WOLFSSL_BIO* bio, PKCS7** p7); + +/*! + \ingroup PKCS7 + \brief Encodes PKCS7 to BIO. + + \return Length written on success + \return negative on error + + \param bio BIO to write to + \param p7 PKCS7 structure to encode + + _Example_ + \code + WOLFSSL_BIO* bio = wolfSSL_BIO_new(wolfSSL_BIO_s_mem()); + int ret = wolfSSL_i2d_PKCS7_bio(bio, p7); + \endcode + + \sa wolfSSL_d2i_PKCS7_bio +*/ +int wolfSSL_i2d_PKCS7_bio(WOLFSSL_BIO *bio, PKCS7 *p7); + +/*! + \ingroup PKCS7 + \brief Encodes PKCS7 to DER. 
+ + \return Length written on success + \return negative on error + + \param p7 PKCS7 structure to encode + \param out Pointer to output buffer pointer + + _Example_ + \code + unsigned char* der = NULL; + int len = wolfSSL_i2d_PKCS7(p7, &der); + \endcode + + \sa wolfSSL_d2i_PKCS7 +*/ +int wolfSSL_i2d_PKCS7(PKCS7 *p7, unsigned char **out); + +/*! + \ingroup PKCS7 + \brief Creates signed PKCS7 message. + + \return Pointer to signed PKCS7 structure on success + \return NULL on error + + \param signer Signer certificate + \param pkey Private key + \param certs Additional certificates + \param in Input data BIO + \param flags Operation flags + + _Example_ + \code + PKCS7* p7 = wolfSSL_PKCS7_sign(cert, pkey, NULL, bio, 0); + \endcode + + \sa wolfSSL_PKCS7_verify +*/ +PKCS7* wolfSSL_PKCS7_sign(WOLFSSL_X509* signer, WOLFSSL_EVP_PKEY* pkey, + WOLFSSL_STACK* certs, WOLFSSL_BIO* in, int flags); + +/*! + \ingroup PKCS7 + \brief Verifies signed PKCS7 message. + + \return 1 on success + \return 0 or negative on error + + \param p7 PKCS7 structure to verify + \param certs Certificate stack + \param store Certificate store + \param in Input data BIO + \param out Output BIO + \param flags Operation flags + + _Example_ + \code + int ret = wolfSSL_PKCS7_verify(p7, NULL, store, NULL, out, 0); + \endcode + + \sa wolfSSL_PKCS7_sign +*/ +int wolfSSL_PKCS7_verify(PKCS7* p7, WOLFSSL_STACK* certs, + WOLFSSL_X509_STORE* store, WOLFSSL_BIO* in, + WOLFSSL_BIO* out, int flags); + +/*! + \ingroup PKCS7 + \brief Finalizes PKCS7 structure with data. + + \return 1 on success + \return 0 or negative on error + + \param pkcs7 PKCS7 structure + \param in Input data BIO + \param flags Operation flags + + _Example_ + \code + int ret = wolfSSL_PKCS7_final(pkcs7, bio, 0); + \endcode + + \sa wolfSSL_PKCS7_sign +*/ +int wolfSSL_PKCS7_final(PKCS7* pkcs7, WOLFSSL_BIO* in, int flags); + +/*! + \ingroup PKCS7 + \brief Encodes certificates into PKCS7. 
+ + \return 1 on success + \return 0 or negative on error + + \param p7 PKCS7 structure + \param certs Certificate stack + \param out Output BIO + + _Example_ + \code + int ret = wolfSSL_PKCS7_encode_certs(p7, certs, bio); + \endcode + + \sa wolfSSL_PKCS7_to_stack +*/ +int wolfSSL_PKCS7_encode_certs(PKCS7* p7, WOLFSSL_STACK* certs, + WOLFSSL_BIO* out); + +/*! + \ingroup PKCS7 + \brief Converts PKCS7 certificates to stack. + + \return Pointer to certificate stack on success + \return NULL on error + + \param pkcs7 PKCS7 structure + + _Example_ + \code + WOLFSSL_STACK* certs = wolfSSL_PKCS7_to_stack(pkcs7); + \endcode + + \sa wolfSSL_PKCS7_encode_certs +*/ +WOLFSSL_STACK* wolfSSL_PKCS7_to_stack(PKCS7* pkcs7); + +/*! + \ingroup PKCS7 + \brief Gets signer certificates from PKCS7. + + \return Pointer to signer certificate stack on success + \return NULL on error + + \param p7 PKCS7 structure + \param certs Certificate stack + \param flags Operation flags + + _Example_ + \code + WOLFSSL_STACK* signers = wolfSSL_PKCS7_get0_signers(p7, NULL, 0); + \endcode + + \sa wolfSSL_PKCS7_verify +*/ +WOLFSSL_STACK* wolfSSL_PKCS7_get0_signers(PKCS7* p7, WOLFSSL_STACK* certs, + int flags); + +/*! + \ingroup PKCS7 + \brief Writes PKCS7 to BIO in PEM format. + + \return 1 on success + \return 0 or negative on error + + \param bio Output BIO + \param p7 PKCS7 structure + + _Example_ + \code + int ret = wolfSSL_PEM_write_bio_PKCS7(bio, p7); + \endcode + + \sa wolfSSL_SMIME_write_PKCS7 +*/ +int wolfSSL_PEM_write_bio_PKCS7(WOLFSSL_BIO* bio, PKCS7* p7); + +/*! + \ingroup PKCS7 + \brief Reads S/MIME PKCS7 from BIO. + + \return Pointer to PKCS7 structure on success + \return NULL on error + + \param in Input BIO + \param bcont Pointer to content BIO pointer + + _Example_ + \code + WOLFSSL_BIO* cont = NULL; + PKCS7* p7 = wolfSSL_SMIME_read_PKCS7(bio, &cont); + \endcode + + \sa wolfSSL_SMIME_write_PKCS7 +*/ +PKCS7* wolfSSL_SMIME_read_PKCS7(WOLFSSL_BIO* in, WOLFSSL_BIO** bcont); + +/*! 
+ \ingroup PKCS7 + \brief Writes PKCS7 to BIO in S/MIME format. + + \return 1 on success + \return 0 or negative on error + + \param out Output BIO + \param pkcs7 PKCS7 structure + \param in Input data BIO + \param flags Operation flags + + _Example_ + \code + int ret = wolfSSL_SMIME_write_PKCS7(out, pkcs7, in, 0); + \endcode + + \sa wolfSSL_SMIME_read_PKCS7 +*/ +int wolfSSL_SMIME_write_PKCS7(WOLFSSL_BIO* out, PKCS7* pkcs7, + WOLFSSL_BIO* in, int flags); + +/*! + \ingroup PKCS7 + \brief Creates new wc_PKCS7 structure. + + \return Pointer to new wc_PKCS7 structure on success + \return NULL on error + + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_PKCS7* pkcs7 = wc_PKCS7_New(NULL, INVALID_DEVID); + \endcode + + \sa wc_PKCS7_Init +*/ +wc_PKCS7* wc_PKCS7_New(void* heap, int devId); + +/*! + \ingroup PKCS7 + \brief Sets unknown extension callback. + + \return none No returns + + \param pkcs7 PKCS7 structure + \param cb Callback function + + _Example_ + \code + wc_PKCS7_SetUnknownExtCallback(pkcs7, myCallback); + \endcode + + \sa wc_PKCS7_Init +*/ +void wc_PKCS7_SetUnknownExtCallback(wc_PKCS7* pkcs7, + wc_UnknownExtCallback cb); + +/*! + \ingroup PKCS7 + \brief Initializes wc_PKCS7 structure. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_PKCS7 pkcs7; + int ret = wc_PKCS7_Init(&pkcs7, NULL, INVALID_DEVID); + \endcode + + \sa wc_PKCS7_New +*/ +int wc_PKCS7_Init(wc_PKCS7* pkcs7, void* heap, int devId); + +/*! + \ingroup PKCS7 + \brief Adds certificate to PKCS7. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param der DER-encoded certificate + \param derSz Certificate size + + _Example_ + \code + int ret = wc_PKCS7_AddCertificate(&pkcs7, cert, certSz); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_AddCertificate(wc_PKCS7* pkcs7, byte* der, word32 derSz); + +/*! 
+ \ingroup PKCS7 + \brief Gets attribute value from PKCS7. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param oid Attribute OID + \param oidSz OID size + \param out Output buffer + \param outSz Output buffer size pointer + + _Example_ + \code + byte value[256]; + word32 valueSz = sizeof(value); + int ret = wc_PKCS7_GetAttributeValue(&pkcs7, oid, oidSz, value, + &valueSz); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_GetAttributeValue(wc_PKCS7* pkcs7, const byte* oid, + word32 oidSz, byte* out, word32* outSz); + +/*! + \ingroup PKCS7 + \brief Sets signer identifier type. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param type Identifier type + + _Example_ + \code + int ret = wc_PKCS7_SetSignerIdentifierType(&pkcs7, CMS_SKID); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_SetSignerIdentifierType(wc_PKCS7* pkcs7, int type); + +/*! + \ingroup PKCS7 + \brief Sets content type. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param contentType Content type OID + \param sz OID size + + _Example_ + \code + int ret = wc_PKCS7_SetContentType(&pkcs7, DATA, sizeof(DATA)); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_SetContentType(wc_PKCS7* pkcs7, byte* contentType, word32 sz); + +/*! + \ingroup PKCS7 + \brief Gets padding size for block cipher. + + \return Padding size + + \param inputSz Input size + \param blockSz Block size + + _Example_ + \code + int padSz = wc_PKCS7_GetPadSize(dataSz, AES_BLOCK_SIZE); + \endcode + + \sa wc_PKCS7_PadData +*/ +int wc_PKCS7_GetPadSize(word32 inputSz, word32 blockSz); + +/*! + \ingroup PKCS7 + \brief Pads data for block cipher. 
\return Padded data size on success
+ + \return none No returns + + \param pkcs7 PKCS7 structure + \param flag Allow degenerate flag + + _Example_ + \code + wc_PKCS7_AllowDegenerate(&pkcs7, 1); + \endcode + + \sa wc_PKCS7_Init +*/ +void wc_PKCS7_AllowDegenerate(wc_PKCS7* pkcs7, word16 flag); + +/*! + \ingroup PKCS7 + \brief Gets signer subject identifier. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param out Output buffer + \param outSz Output buffer size pointer + + _Example_ + \code + byte sid[256]; + word32 sidSz = sizeof(sid); + int ret = wc_PKCS7_GetSignerSID(&pkcs7, sid, &sidSz); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_GetSignerSID(wc_PKCS7* pkcs7, byte* out, word32* outSz); + +/*! + \ingroup PKCS7 + \brief Encodes signed FirmwarePackageData. + + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param privateKey Private key + \param privateKeySz Private key size + \param signOID Signature algorithm OID + \param hashOID Hash algorithm OID + \param content Content data + \param contentSz Content size + \param signedAttribs Signed attributes + \param signedAttribsSz Signed attributes count + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeSignedFPD(&pkcs7, key, keySz, RSAk, SHAh, + data, dataSz, NULL, 0, out, outSz); + \endcode + + \sa wc_PKCS7_EncodeSignedData +*/ +int wc_PKCS7_EncodeSignedFPD(wc_PKCS7* pkcs7, byte* privateKey, + word32 privateKeySz, int signOID, int hashOID, + byte* content, word32 contentSz, + PKCS7Attrib* signedAttribs, + word32 signedAttribsSz, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Encodes signed encrypted FirmwarePackageData. 
+ + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param encryptKey Encryption key + \param encryptKeySz Encryption key size + \param privateKey Private key + \param privateKeySz Private key size + \param encryptOID Encryption algorithm OID + \param signOID Signature algorithm OID + \param hashOID Hash algorithm OID + \param content Content data + \param contentSz Content size + \param unprotectedAttribs Unprotected attributes + \param unprotectedAttribsSz Unprotected attributes count + \param signedAttribs Signed attributes + \param signedAttribsSz Signed attributes count + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeSignedEncryptedFPD(&pkcs7, encKey, encKeySz, + key, keySz, AES256CBCb, + RSAk, SHAh, data, dataSz, + NULL, 0, NULL, 0, out, + outSz); + \endcode + + \sa wc_PKCS7_EncodeSignedFPD +*/ +int wc_PKCS7_EncodeSignedEncryptedFPD(wc_PKCS7* pkcs7, byte* encryptKey, + word32 encryptKeySz, byte* privateKey, + word32 privateKeySz, int encryptOID, + int signOID, int hashOID, byte* content, + word32 contentSz, + PKCS7Attrib* unprotectedAttribs, + word32 unprotectedAttribsSz, + PKCS7Attrib* signedAttribs, + word32 signedAttribsSz, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Encodes signed compressed FirmwarePackageData. 
+ + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param privateKey Private key + \param privateKeySz Private key size + \param signOID Signature algorithm OID + \param hashOID Hash algorithm OID + \param content Content data + \param contentSz Content size + \param signedAttribs Signed attributes + \param signedAttribsSz Signed attributes count + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeSignedCompressedFPD(&pkcs7, key, keySz, RSAk, + SHAh, data, dataSz, NULL, + 0, out, outSz); + \endcode + + \sa wc_PKCS7_EncodeSignedFPD +*/ +int wc_PKCS7_EncodeSignedCompressedFPD(wc_PKCS7* pkcs7, byte* privateKey, + word32 privateKeySz, int signOID, + int hashOID, byte* content, + word32 contentSz, + PKCS7Attrib* signedAttribs, + word32 signedAttribsSz, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Encodes signed encrypted compressed FirmwarePackageData. 
+ + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param encryptKey Encryption key + \param encryptKeySz Encryption key size + \param privateKey Private key + \param privateKeySz Private key size + \param encryptOID Encryption algorithm OID + \param signOID Signature algorithm OID + \param hashOID Hash algorithm OID + \param content Content data + \param contentSz Content size + \param unprotectedAttribs Unprotected attributes + \param unprotectedAttribsSz Unprotected attributes count + \param signedAttribs Signed attributes + \param signedAttribsSz Signed attributes count + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeSignedEncryptedCompressedFPD(&pkcs7, encKey, + encKeySz, key, + keySz, AES256CBCb, + RSAk, SHAh, data, + dataSz, NULL, 0, + NULL, 0, out, + outSz); + \endcode + + \sa wc_PKCS7_EncodeSignedCompressedFPD +*/ +int wc_PKCS7_EncodeSignedEncryptedCompressedFPD(wc_PKCS7* pkcs7, + byte* encryptKey, + word32 encryptKeySz, + byte* privateKey, + word32 privateKeySz, + int encryptOID, int signOID, + int hashOID, byte* content, + word32 contentSz, + PKCS7Attrib* unprotectedAttribs, + word32 unprotectedAttribsSz, + PKCS7Attrib* signedAttribs, + word32 signedAttribsSz, + byte* output, word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Adds KTRI recipient. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param cert Recipient certificate + \param certSz Certificate size + \param options Options flags + + _Example_ + \code + int ret = wc_PKCS7_AddRecipient_KTRI(&pkcs7, cert, certSz, 0); + \endcode + + \sa wc_PKCS7_AddRecipient_KARI +*/ +int wc_PKCS7_AddRecipient_KTRI(wc_PKCS7* pkcs7, const byte* cert, + word32 certSz, int options); + +/*! + \ingroup PKCS7 + \brief Adds KARI recipient. 
+ + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param cert Recipient certificate + \param certSz Certificate size + \param keyWrapOID Key wrap algorithm OID + \param keyAgreeOID Key agreement algorithm OID + \param ukm User keying material + \param ukmSz UKM size + \param options Options flags + + _Example_ + \code + int ret = wc_PKCS7_AddRecipient_KARI(&pkcs7, cert, certSz, AES256_WRAP, + dhSinglePass_stdDH_sha256kdf_scheme, + NULL, 0, 0); + \endcode + + \sa wc_PKCS7_AddRecipient_KTRI +*/ +int wc_PKCS7_AddRecipient_KARI(wc_PKCS7* pkcs7, const byte* cert, + word32 certSz, int keyWrapOID, + int keyAgreeOID, byte* ukm, word32 ukmSz, + int options); + +/*! + \ingroup PKCS7 + \brief Sets encryption key. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param key Encryption key + \param keySz Key size + + _Example_ + \code + int ret = wc_PKCS7_SetKey(&pkcs7, key, keySz); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_SetKey(wc_PKCS7* pkcs7, byte* key, word32 keySz); + +/*! + \ingroup PKCS7 + \brief Adds KEKRI recipient. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param keyWrapOID Key wrap algorithm OID + \param kek Key encryption key + \param kekSz KEK size + \param keyID Key identifier + \param keyIdSz Key ID size + \param timePtr Time pointer + \param otherOID Other OID + \param otherOIDSz Other OID size + \param other Other data + \param otherSz Other data size + \param options Options flags + + _Example_ + \code + int ret = wc_PKCS7_AddRecipient_KEKRI(&pkcs7, AES256_WRAP, kek, kekSz, + keyId, keyIdSz, NULL, NULL, 0, + NULL, 0, 0); + \endcode + + \sa wc_PKCS7_AddRecipient_KTRI +*/ +int wc_PKCS7_AddRecipient_KEKRI(wc_PKCS7* pkcs7, int keyWrapOID, byte* kek, + word32 kekSz, byte* keyID, word32 keyIdSz, + void* timePtr, byte* otherOID, + word32 otherOIDSz, byte* other, + word32 otherSz, int options); + +/*! 
+ \ingroup PKCS7 + \brief Sets password for PWRI. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param passwd Password + \param pLen Password length + + _Example_ + \code + int ret = wc_PKCS7_SetPassword(&pkcs7, password, passwordLen); + \endcode + + \sa wc_PKCS7_AddRecipient_PWRI +*/ +int wc_PKCS7_SetPassword(wc_PKCS7* pkcs7, byte* passwd, word32 pLen); + +/*! + \ingroup PKCS7 + \brief Adds PWRI recipient. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param passwd Password + \param pLen Password length + \param salt Salt + \param saltSz Salt size + \param kdfOID KDF algorithm OID + \param prfOID PRF algorithm OID + \param iterations Iteration count + \param kekEncryptOID KEK encryption algorithm OID + \param options Options flags + + _Example_ + \code + int ret = wc_PKCS7_AddRecipient_PWRI(&pkcs7, password, passwordLen, + salt, saltSz, PBKDF2_OID, HMACh, + 10000, AES256CBCb, 0); + \endcode + + \sa wc_PKCS7_SetPassword +*/ +int wc_PKCS7_AddRecipient_PWRI(wc_PKCS7* pkcs7, byte* passwd, word32 pLen, + byte* salt, word32 saltSz, int kdfOID, + int prfOID, int iterations, + int kekEncryptOID, int options); + +/*! + \ingroup PKCS7 + \brief Sets originator encryption context. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param ctx Context pointer + + _Example_ + \code + int ret = wc_PKCS7_SetOriEncryptCtx(&pkcs7, myContext); + \endcode + + \sa wc_PKCS7_SetOriDecryptCtx +*/ +int wc_PKCS7_SetOriEncryptCtx(wc_PKCS7* pkcs7, void* ctx); + +/*! + \ingroup PKCS7 + \brief Sets originator decryption context. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param ctx Context pointer + + _Example_ + \code + int ret = wc_PKCS7_SetOriDecryptCtx(&pkcs7, myContext); + \endcode + + \sa wc_PKCS7_SetOriEncryptCtx +*/ +int wc_PKCS7_SetOriDecryptCtx(wc_PKCS7* pkcs7, void* ctx); + +/*! 
+ \ingroup PKCS7 + \brief Sets originator decryption callback. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param cb Callback function + + _Example_ + \code + int ret = wc_PKCS7_SetOriDecryptCb(&pkcs7, myDecryptCallback); + \endcode + + \sa wc_PKCS7_SetOriDecryptCtx +*/ +int wc_PKCS7_SetOriDecryptCb(wc_PKCS7* pkcs7, CallbackOriDecrypt cb); + +/*! + \ingroup PKCS7 + \brief Adds ORI recipient. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param cb Originator encryption callback + \param options Options flags + + _Example_ + \code + int ret = wc_PKCS7_AddRecipient_ORI(&pkcs7, myEncryptCallback, 0); + \endcode + + \sa wc_PKCS7_SetOriDecryptCb +*/ +int wc_PKCS7_AddRecipient_ORI(wc_PKCS7* pkcs7, CallbackOriEncrypt cb, + int options); + +/*! + \ingroup PKCS7 + \brief Sets CEK wrap callback. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param wrapCEKCb Wrap CEK callback + + _Example_ + \code + int ret = wc_PKCS7_SetWrapCEKCb(&pkcs7, myWrapCEKCallback); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_SetWrapCEKCb(wc_PKCS7* pkcs7, CallbackWrapCEK wrapCEKCb); + +/*! + \ingroup PKCS7 + \brief Sets RSA sign raw digest callback. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param cb Callback function + + _Example_ + \code + int ret = wc_PKCS7_SetRsaSignRawDigestCb(&pkcs7, mySignCallback); + \endcode + + \sa wc_PKCS7_Init +*/ +int wc_PKCS7_SetRsaSignRawDigestCb(wc_PKCS7* pkcs7, + CallbackRsaSignRawDigest cb); + +/*! + \ingroup PKCS7 + \brief Encodes authenticated enveloped data. 
+ + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeAuthEnvelopedData(&pkcs7, out, outSz); + \endcode + + \sa wc_PKCS7_DecodeAuthEnvelopedData +*/ +int wc_PKCS7_EncodeAuthEnvelopedData(wc_PKCS7* pkcs7, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Decodes authenticated enveloped data. + + \return Size of decoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param pkiMsg Input message + \param pkiMsgSz Input message size + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_DecodeAuthEnvelopedData(&pkcs7, msg, msgSz, out, + outSz); + \endcode + + \sa wc_PKCS7_EncodeAuthEnvelopedData +*/ +int wc_PKCS7_DecodeAuthEnvelopedData(wc_PKCS7* pkcs7, byte* pkiMsg, + word32 pkiMsgSz, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Encodes encrypted data. + + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeEncryptedData(&pkcs7, out, outSz); + \endcode + + \sa wc_PKCS7_DecodeEncryptedData +*/ +int wc_PKCS7_EncodeEncryptedData(wc_PKCS7* pkcs7, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Sets decode encrypted callback. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param decryptionCb Decryption callback + + _Example_ + \code + int ret = wc_PKCS7_SetDecodeEncryptedCb(&pkcs7, myDecryptCallback); + \endcode + + \sa wc_PKCS7_SetDecodeEncryptedCtx +*/ +int wc_PKCS7_SetDecodeEncryptedCb(wc_PKCS7* pkcs7, + CallbackDecryptContent decryptionCb); + +/*! + \ingroup PKCS7 + \brief Sets decode encrypted context. 
+ + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param ctx Context pointer + + _Example_ + \code + int ret = wc_PKCS7_SetDecodeEncryptedCtx(&pkcs7, myContext); + \endcode + + \sa wc_PKCS7_SetDecodeEncryptedCb +*/ +int wc_PKCS7_SetDecodeEncryptedCtx(wc_PKCS7* pkcs7, void* ctx); + +/*! + \ingroup PKCS7 + \brief Sets stream mode for PKCS7. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param flag Stream mode flag + \param getContentCb Get content callback + \param streamOutCb Stream output callback + \param ctx Context pointer + + _Example_ + \code + int ret = wc_PKCS7_SetStreamMode(&pkcs7, 1, getContent, streamOut, + ctx); + \endcode + + \sa wc_PKCS7_GetStreamMode +*/ +int wc_PKCS7_SetStreamMode(wc_PKCS7* pkcs7, byte flag, + CallbackGetContent getContentCb, + CallbackStreamOut streamOutCb, void* ctx); + +/*! + \ingroup PKCS7 + \brief Gets stream mode setting. + + \return Stream mode flag + + \param pkcs7 PKCS7 structure + + _Example_ + \code + int mode = wc_PKCS7_GetStreamMode(&pkcs7); + \endcode + + \sa wc_PKCS7_SetStreamMode +*/ +int wc_PKCS7_GetStreamMode(wc_PKCS7* pkcs7); + +/*! + \ingroup PKCS7 + \brief Sets no certificates flag. + + \return 0 on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param flag No certificates flag + + _Example_ + \code + int ret = wc_PKCS7_SetNoCerts(&pkcs7, 1); + \endcode + + \sa wc_PKCS7_GetNoCerts +*/ +int wc_PKCS7_SetNoCerts(wc_PKCS7* pkcs7, byte flag); + +/*! + \ingroup PKCS7 + \brief Gets no certificates flag. + + \return No certificates flag + + \param pkcs7 PKCS7 structure + + _Example_ + \code + int noCerts = wc_PKCS7_GetNoCerts(&pkcs7); + \endcode + + \sa wc_PKCS7_SetNoCerts +*/ +int wc_PKCS7_GetNoCerts(wc_PKCS7* pkcs7); + +/*! + \ingroup PKCS7 + \brief Encodes compressed data. 
+ + \return Size of encoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_EncodeCompressedData(&pkcs7, out, outSz); + \endcode + + \sa wc_PKCS7_DecodeCompressedData +*/ +int wc_PKCS7_EncodeCompressedData(wc_PKCS7* pkcs7, byte* output, + word32 outputSz); + +/*! + \ingroup PKCS7 + \brief Decodes compressed data. + + \return Size of decoded data on success + \return negative on error + + \param pkcs7 PKCS7 structure + \param pkiMsg Input message + \param pkiMsgSz Input message size + \param output Output buffer + \param outputSz Output buffer size + + _Example_ + \code + int ret = wc_PKCS7_DecodeCompressedData(&pkcs7, msg, msgSz, out, + outSz); + \endcode + + \sa wc_PKCS7_EncodeCompressedData +*/ +int wc_PKCS7_DecodeCompressedData(wc_PKCS7* pkcs7, byte* pkiMsg, + word32 pkiMsgSz, byte* output, + word32 outputSz); diff --git a/doc/dox_comments/header_files/poly1305.h b/doc/dox_comments/header_files/poly1305.h index 725526e23..dcbce80d4 100644 --- a/doc/dox_comments/header_files/poly1305.h +++ b/doc/dox_comments/header_files/poly1305.h @@ -137,3 +137,71 @@ int wc_Poly1305Final(Poly1305* poly1305, byte* tag); */ int wc_Poly1305_MAC(Poly1305* ctx, const byte* additional, word32 addSz, const byte* input, word32 sz, byte* tag, word32 tagSz); + +/*! + \ingroup Poly1305 + \brief Adds padding to Poly1305 context. + + \return 0 on success + \return BAD_FUNC_ARG if ctx is NULL + + \param ctx Poly1305 context + \param lenToPad Length to pad + + _Example_ + \code + Poly1305 ctx; + byte key[32]; + wc_Poly1305SetKey(&ctx, key, sizeof(key)); + int ret = wc_Poly1305_Pad(&ctx, 10); + \endcode + + \sa wc_Poly1305_MAC +*/ +int wc_Poly1305_Pad(Poly1305* ctx, word32 lenToPad); + +/*! + \ingroup Poly1305 + \brief Encodes AAD and data sizes for Poly1305. 
+ + \return 0 on success + \return BAD_FUNC_ARG if ctx is NULL + + \param ctx Poly1305 context + \param aadSz Additional authenticated data size + \param dataSz Data size + + _Example_ + \code + Poly1305 ctx; + byte key[32]; + wc_Poly1305SetKey(&ctx, key, sizeof(key)); + int ret = wc_Poly1305_EncodeSizes(&ctx, 16, 100); + \endcode + + \sa wc_Poly1305_MAC +*/ +int wc_Poly1305_EncodeSizes(Poly1305* ctx, word32 aadSz, word32 dataSz); + +/*! + \ingroup Poly1305 + \brief Encodes AAD and data sizes for Poly1305 using 64-bit values. + + \return 0 on success + \return BAD_FUNC_ARG if ctx is NULL + + \param ctx Poly1305 context + \param aadSz Additional authenticated data size + \param dataSz Data size + + _Example_ + \code + Poly1305 ctx; + byte key[32]; + wc_Poly1305SetKey(&ctx, key, sizeof(key)); + int ret = wc_Poly1305_EncodeSizes64(&ctx, 16, 100); + \endcode + + \sa wc_Poly1305_EncodeSizes +*/ +int wc_Poly1305_EncodeSizes64(Poly1305* ctx, word64 aadSz, word64 dataSz); diff --git a/doc/dox_comments/header_files/psa.h b/doc/dox_comments/header_files/psa.h index 7c87f0bf4..3eb29c339 100644 --- a/doc/dox_comments/header_files/psa.h +++ b/doc/dox_comments/header_files/psa.h @@ -94,3 +94,65 @@ void wolfSSL_free_psa_ctx(struct psa_ssl_ctx *ctx); int wolfSSL_psa_set_private_key_id(struct psa_ssl_ctx *ctx, psa_key_id_t id); + +/*! + \ingroup PSA + \brief This function generates random bytes using the PSA crypto API. + This is a wrapper around the PSA random number generation functions. + + \return 0 On success + \return Negative value on error + + \param out pointer to buffer to store random bytes + \param sz number of random bytes to generate + + _Example_ + \code + byte random[32]; + + int ret = wc_psa_get_random(random, sizeof(random)); + if (ret != 0) { + // error generating random bytes + } + \endcode + + \sa wc_RNG_GenerateBlock +*/ +int wc_psa_get_random(unsigned char *out, word32 sz); + +/*! 
+ \ingroup PSA + \brief This function performs AES encryption or decryption using the + PSA crypto API. It supports various AES modes through the algorithm + parameter. + + \return 0 On success + \return Negative value on error + + \param aes pointer to initialized Aes structure + \param input pointer to input data buffer + \param output pointer to output data buffer + \param length length of data to process + \param alg PSA algorithm identifier specifying the AES mode + \param direction encryption (1) or decryption (0) + + _Example_ + \code + Aes aes; + byte key[16] = { }; // AES key + byte input[16] = { }; // plaintext + byte output[16]; + + wc_AesInit(&aes, NULL, INVALID_DEVID); + wc_AesSetKey(&aes, key, sizeof(key), NULL, AES_ENCRYPTION); + int ret = wc_psa_aes_encrypt_decrypt(&aes, input, output, + sizeof(input), + PSA_ALG_ECB_NO_PADDING, 1); + \endcode + + \sa wc_AesEncrypt + \sa wc_AesDecrypt +*/ +int wc_psa_aes_encrypt_decrypt(Aes *aes, const uint8_t *input, + uint8_t *output, size_t length, + psa_algorithm_t alg, int direction); diff --git a/doc/dox_comments/header_files/pwdbased.h b/doc/dox_comments/header_files/pwdbased.h index e5dd9c79b..25870b835 100644 --- a/doc/dox_comments/header_files/pwdbased.h +++ b/doc/dox_comments/header_files/pwdbased.h @@ -168,3 +168,173 @@ int wc_PBKDF2(byte* output, const byte* passwd, int pLen, int wc_PKCS12_PBKDF(byte* output, const byte* passwd, int passLen, const byte* salt, int saltLen, int iterations, int kLen, int hashType, int id); + +/*! + \ingroup Password + \brief Extended version of PBKDF1 with heap hint. 
+ + \return 0 on success + \return BAD_FUNC_ARG on invalid arguments + \return MEMORY_E on memory allocation error + + \param key Output key buffer + \param keyLen Key length + \param iv Output IV buffer + \param ivLen IV length + \param passwd Password buffer + \param passwdLen Password length + \param salt Salt buffer + \param saltLen Salt length + \param iterations Iteration count + \param hashType Hash algorithm type + \param heap Heap hint for memory allocation + + _Example_ + \code + byte key[16], iv[16]; + byte pass[] = "password"; + byte salt[] = "salt"; + int ret = wc_PBKDF1_ex(key, sizeof(key), iv, sizeof(iv), + pass, sizeof(pass), salt, sizeof(salt), 1000, WC_SHA, NULL); + \endcode + + \sa wc_PBKDF1 +*/ +int wc_PBKDF1_ex(byte* key, int keyLen, byte* iv, int ivLen, + const byte* passwd, int passwdLen, const byte* salt, int saltLen, + int iterations, int hashType, void* heap); + +/*! + \ingroup Password + \brief Extended version of PBKDF2 with heap hint and device ID. + + \return 0 on success + \return BAD_FUNC_ARG on invalid arguments + \return MEMORY_E on memory allocation error + + \param output Output key buffer + \param passwd Password buffer + \param pLen Password length + \param salt Salt buffer + \param sLen Salt length + \param iterations Iteration count + \param kLen Key length + \param hashType Hash algorithm type + \param heap Heap hint for memory allocation + \param devId Device ID for hardware acceleration + + _Example_ + \code + byte key[32]; + byte pass[] = "password"; + byte salt[] = "salt"; + int ret = wc_PBKDF2_ex(key, pass, sizeof(pass), salt, + sizeof(salt), 2048, sizeof(key), WC_SHA256, NULL, + INVALID_DEVID); + \endcode + + \sa wc_PBKDF2 +*/ +int wc_PBKDF2_ex(byte* output, const byte* passwd, int pLen, + const byte* salt, int sLen, int iterations, int kLen, + int hashType, void* heap, int devId); + +/*! + \ingroup Password + \brief Extended version of PKCS12_PBKDF with heap hint. 
+
+    \return 0 on success
+    \return BAD_FUNC_ARG on invalid arguments
+    \return MEMORY_E on memory allocation error
+
+    \param output Output key buffer
+    \param passwd Password buffer
+    \param passLen Password length
+    \param salt Salt buffer
+    \param saltLen Salt length
+    \param iterations Iteration count
+    \param kLen Key length
+    \param hashType Hash algorithm type
+    \param id Purpose identifier (1=key, 2=IV, 3=MAC)
+    \param heap Heap hint for memory allocation
+
+    _Example_
+    \code
+    byte key[32];
+    byte pass[] = "password";
+    byte salt[] = "salt";
+    int ret = wc_PKCS12_PBKDF_ex(key, pass, sizeof(pass), salt,
+        sizeof(salt), 2048, sizeof(key), WC_SHA256, 1, NULL);
+    \endcode
+
+    \sa wc_PKCS12_PBKDF
+*/
+int wc_PKCS12_PBKDF_ex(byte* output, const byte* passwd, int passLen,
+    const byte* salt, int saltLen, int iterations, int kLen,
+    int hashType, int id, void* heap);
+
+/*!
+    \ingroup Password
+    \brief Implements scrypt key derivation function.
+
+    \return 0 on success
+    \return BAD_FUNC_ARG on invalid arguments
+    \return MEMORY_E on memory allocation error
+
+    \param output Output key buffer
+    \param passwd Password buffer
+    \param passLen Password length
+    \param salt Salt buffer
+    \param saltLen Salt length
+    \param cost CPU/memory cost parameter, given as an exponent
+    (scrypt N = 2^cost; must satisfy cost >= 1 and
+    cost < 128 * blockSize / 8)
+    \param blockSize Block size parameter (r)
+    \param parallel Parallelization parameter (p)
+    \param dkLen Derived key length
+
+    _Example_
+    \code
+    byte key[32];
+    byte pass[] = "password";
+    byte salt[] = "salt";
+    // cost of 14 means N = 2^14 = 16384
+    int ret = wc_scrypt(key, pass, sizeof(pass), salt,
+        sizeof(salt), 14, 8, 1, sizeof(key));
+    \endcode
+
+    \sa wc_scrypt_ex
+*/
+int wc_scrypt(byte* output, const byte* passwd, int passLen,
+    const byte* salt, int saltLen, int cost, int blockSize,
+    int parallel, int dkLen);
+
+/*!
+    \ingroup Password
+    \brief Extended scrypt with iteration count instead of cost.
+ + \return 0 on success + \return BAD_FUNC_ARG on invalid arguments + \return MEMORY_E on memory allocation error + + \param output Output key buffer + \param passwd Password buffer + \param passLen Password length + \param salt Salt buffer + \param saltLen Salt length + \param iterations Iteration count + \param blockSize Block size parameter (r) + \param parallel Parallelization parameter (p) + \param dkLen Derived key length + + _Example_ + \code + byte key[32]; + byte pass[] = "password"; + byte salt[] = "salt"; + int ret = wc_scrypt_ex(key, pass, sizeof(pass), salt, + sizeof(salt), 16384, 8, 1, sizeof(key)); + \endcode + + \sa wc_scrypt +*/ +int wc_scrypt_ex(byte* output, const byte* passwd, int passLen, + const byte* salt, int saltLen, word32 iterations, int blockSize, + int parallel, int dkLen); diff --git a/doc/dox_comments/header_files/quic.h b/doc/dox_comments/header_files/quic.h index 66b19a9ec..f7c3a980e 100644 --- a/doc/dox_comments/header_files/quic.h +++ b/doc/dox_comments/header_files/quic.h @@ -342,6 +342,42 @@ WOLFSSL_API int wolfSSL_process_quic_post_handshake(WOLFSSL *ssl); */ int wolfSSL_quic_read_write(WOLFSSL *ssl); +/*! + \ingroup QUIC + + \brief Perform the QUIC handshake. This function processes CRYPTO + data that has been provided via wolfSSL_provide_quic_data() and + advances the handshake state. It should be called repeatedly until + the handshake is complete. 
+ + \return WOLFSSL_SUCCESS If handshake completed successfully + \return WOLFSSL_FATAL_ERROR If a fatal error occurred + \return Other values indicating handshake is in progress + + \param ssl pointer to a WOLFSSL structure created using + wolfSSL_new() + + _Example_ + \code + WOLFSSL* ssl; + // initialize ssl with QUIC method + + while (!wolfSSL_is_init_finished(ssl)) { + int ret = wolfSSL_quic_do_handshake(ssl); + if (ret == WOLFSSL_FATAL_ERROR) { + // handle error + break; + } + // provide more CRYPTO data if available + } + \endcode + + \sa wolfSSL_provide_quic_data + \sa wolfSSL_quic_read_write + \sa wolfSSL_is_init_finished +*/ +int wolfSSL_quic_do_handshake(WOLFSSL* ssl); + /*! \ingroup QUIC diff --git a/doc/dox_comments/header_files/random.h b/doc/dox_comments/header_files/random.h index 8d415db21..9594872a5 100644 --- a/doc/dox_comments/header_files/random.h +++ b/doc/dox_comments/header_files/random.h @@ -137,38 +137,6 @@ int wc_InitRng(WC_RNG* rng); */ int wc_RNG_GenerateBlock(WC_RNG* rng, byte* b, word32 sz); -/*! - \ingroup Random - - \brief Creates a new WC_RNG structure. - - - \return WC_RNG structure on success - \return NULL on error - - - \param heap pointer to a heap identifier - \param nonce pointer to the buffer containing the nonce - \param nonceSz length of the nonce - - _Example_ - \code - RNG rng; - byte nonce[] = { initialize nonce }; - word32 nonceSz = sizeof(nonce); - - wc_rng_new(&nonce, nonceSz, &heap); - - - \endcode - - \sa wc_InitRng - \sa wc_rng_free - \sa wc_FreeRng - \sa wc_RNG_HealthTest -*/ -WC_RNG* wc_rng_new(byte* nonce, word32 nonceSz, void* heap) - /*! \ingroup Random @@ -243,36 +211,6 @@ int wc_RNG_GenerateByte(WC_RNG* rng, byte* b); */ int wc_FreeRng(WC_RNG* rng); -/*! - \ingroup Random - - \brief Should be called when RNG no longer needed in order to securely - free rng. 
- - - \param rng random number generator initialized with wc_InitRng - - _Example_ - \code - RNG rng; - byte nonce[] = { initialize nonce }; - word32 nonceSz = sizeof(nonce); - - rng = wc_rng_new(&nonce, nonceSz, &heap); - - // use rng - - wc_rng_free(&rng); - - \endcode - - \sa wc_InitRng - \sa wc_rng_new - \sa wc_FreeRng - \sa wc_RNG_HealthTest -*/ -WC_RNG* wc_rng_free(WC_RNG* rng); - /*! \ingroup Random @@ -325,3 +263,323 @@ WC_RNG* wc_rng_free(WC_RNG* rng); int wc_RNG_HealthTest(int reseed, const byte* seedA, word32 seedASz, const byte* seedB, word32 seedBSz, byte* output, word32 outputSz); + +/*! + \ingroup Random + \brief Generates seed from OS entropy source. Lower-level function + used internally by wc_InitRng. + + \return 0 On success + \return WINCRYPT_E Failed to acquire context (Windows) + \return CRYPTGEN_E Failed to generate random (Windows) + \return RNG_FAILURE_E Failed to read entropy + + \param os Pointer to OS_Seed structure + \param output Buffer to store seed + \param sz Size of seed in bytes + + _Example_ + \code + OS_Seed os; + byte seed[32]; + int ret = wc_GenerateSeed(&os, seed, sizeof(seed)); + \endcode + + \sa wc_InitRng +*/ +int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz); + +/*! + \ingroup Random + \brief Allocates and initializes new WC_RNG with optional nonce. + + \return Pointer to WC_RNG on success + \return NULL on failure + + \param nonce Nonce buffer (can be NULL) + \param nonceSz Nonce size + \param heap Heap hint (can be NULL) + + _Example_ + \code + WC_RNG* rng = wc_rng_new(NULL, 0, NULL); + wc_rng_free(rng); + \endcode + + \sa wc_rng_free +*/ +WC_RNG* wc_rng_new(byte* nonce, word32 nonceSz, void* heap); + +/*! + \ingroup Random + \brief Allocates and initializes WC_RNG with extended parameters. 
+ + \return 0 On success + \return BAD_FUNC_ARG If rng is NULL + \return MEMORY_E Memory allocation failed + + \param rng Pointer to store WC_RNG pointer + \param nonce Nonce buffer (can be NULL) + \param nonceSz Nonce size + \param heap Heap hint (can be NULL) + \param devId Device ID (INVALID_DEVID for software) + + _Example_ + \code + WC_RNG* rng; + int ret = wc_rng_new_ex(&rng, NULL, 0, NULL, INVALID_DEVID); + wc_rng_free(rng); + \endcode + + \sa wc_rng_new +*/ +int wc_rng_new_ex(WC_RNG **rng, byte* nonce, word32 nonceSz, void* heap, + int devId); + +/*! + \ingroup Random + \brief Frees WC_RNG allocated with wc_rng_new. + + \param rng WC_RNG to free + + _Example_ + \code + WC_RNG* rng = wc_rng_new(NULL, 0, NULL); + wc_rng_free(rng); + \endcode + + \sa wc_rng_new +*/ +void wc_rng_free(WC_RNG* rng); + +/*! + \ingroup Random + \brief Initializes WC_RNG with extended parameters. + + \return 0 On success + \return BAD_FUNC_ARG If rng is NULL + \return RNG_FAILURE_E Initialization failed + + \param rng WC_RNG to initialize + \param heap Heap hint (can be NULL) + \param devId Device ID (INVALID_DEVID for software) + + _Example_ + \code + WC_RNG rng; + int ret = wc_InitRng_ex(&rng, NULL, INVALID_DEVID); + wc_FreeRng(&rng); + \endcode + + \sa wc_InitRng +*/ +int wc_InitRng_ex(WC_RNG* rng, void* heap, int devId); + +/*! + \ingroup Random + \brief Initializes WC_RNG with nonce. + + \return 0 On success + \return BAD_FUNC_ARG If rng is NULL + \return RNG_FAILURE_E Initialization failed + + \param rng WC_RNG to initialize + \param nonce Nonce buffer + \param nonceSz Nonce size + + _Example_ + \code + WC_RNG rng; + byte nonce[16]; + int ret = wc_InitRngNonce(&rng, nonce, sizeof(nonce)); + wc_FreeRng(&rng); + \endcode + + \sa wc_InitRng +*/ +int wc_InitRngNonce(WC_RNG* rng, byte* nonce, word32 nonceSz); + +/*! + \ingroup Random + \brief Initializes WC_RNG with nonce and extended parameters. 
+ + \return 0 On success + \return BAD_FUNC_ARG If rng is NULL + \return RNG_FAILURE_E Initialization failed + + \param rng WC_RNG to initialize + \param nonce Nonce buffer + \param nonceSz Nonce size + \param heap Heap hint (can be NULL) + \param devId Device ID (INVALID_DEVID for software) + + _Example_ + \code + WC_RNG rng; + byte nonce[16]; + int ret = wc_InitRngNonce_ex(&rng, nonce, sizeof(nonce), NULL, + INVALID_DEVID); + wc_FreeRng(&rng); + \endcode + + \sa wc_InitRngNonce +*/ +int wc_InitRngNonce_ex(WC_RNG* rng, byte* nonce, word32 nonceSz, + void* heap, int devId); + +/*! + \ingroup Random + \brief Sets callback for custom seed generation. + + \return 0 On success + \return BAD_FUNC_ARG If cb is NULL + + \param cb Seed callback function + + _Example_ + \code + int my_cb(OS_Seed* os, byte* out, word32 sz) { return 0; } + wc_SetSeed_Cb(my_cb); + \endcode + + \sa wc_GenerateSeed +*/ +int wc_SetSeed_Cb(wc_RngSeed_Cb cb); + +/*! + \ingroup Random + \brief Reseeds DRBG with new entropy. + + \return 0 On success + \return BAD_FUNC_ARG If rng or seed is NULL + \return RNG_FAILURE_E Reseed failed + + \param rng WC_RNG to reseed + \param seed Seed buffer + \param seedSz Seed size + + _Example_ + \code + WC_RNG rng; + byte seed[32]; + wc_InitRng(&rng); + int ret = wc_RNG_DRBG_Reseed(&rng, seed, sizeof(seed)); + \endcode + + \sa wc_InitRng +*/ +int wc_RNG_DRBG_Reseed(WC_RNG* rng, const byte* seed, word32 seedSz); + +/*! + \ingroup Random + \brief Tests seed validity for DRBG. + + \return 0 If valid + \return BAD_FUNC_ARG If seed is NULL + \return RNG_FAILURE_E Validation failed + + \param seed Seed to test + \param seedSz Seed size + + _Example_ + \code + byte seed[32]; + int ret = wc_RNG_TestSeed(seed, sizeof(seed)); + \endcode + + \sa wc_InitRng +*/ +int wc_RNG_TestSeed(const byte* seed, word32 seedSz); + +/*! + \ingroup Random + \brief RNG health test with extended parameters. 
+ + \return 0 On success + \return BAD_FUNC_ARG If required params NULL + \return -1 Test failed + + \param reseed Non-zero to test reseeding + \param nonce Nonce buffer (can be NULL) + \param nonceSz Nonce size + \param seedA Initial seed + \param seedASz Initial seed size + \param seedB Reseed buffer (required if reseed set) + \param seedBSz Reseed size + \param output Output buffer + \param outputSz Output size + \param heap Heap hint (can be NULL) + \param devId Device ID (INVALID_DEVID for software) + + _Example_ + \code + byte seedA[32], seedB[32], out[64]; + int ret = wc_RNG_HealthTest_ex(1, NULL, 0, seedA, 32, seedB, 32, + out, 64, NULL, INVALID_DEVID); + \endcode + + \sa wc_RNG_HealthTest +*/ +int wc_RNG_HealthTest_ex(int reseed, const byte* nonce, word32 nonceSz, + const byte* seedA, word32 seedASz, + const byte* seedB, word32 seedBSz, byte* output, + word32 outputSz, void* heap, int devId); + +/*! + \ingroup Random + \brief Gets raw entropy without DRBG processing. + + \return 0 On success + \return BAD_FUNC_ARG If raw is NULL + \return RNG_FAILURE_E Failed + + \param raw Buffer for entropy + \param cnt Bytes to retrieve + + _Example_ + \code + byte raw[32]; + int ret = wc_Entropy_GetRawEntropy(raw, sizeof(raw)); + \endcode + + \sa wc_Entropy_Get +*/ +int wc_Entropy_GetRawEntropy(unsigned char* raw, int cnt); + +/*! + \ingroup Random + \brief Gets processed entropy with specified bits. + + \return 0 On success + \return BAD_FUNC_ARG If entropy is NULL + \return RNG_FAILURE_E Failed + + \param bits Entropy bits required + \param entropy Buffer for entropy + \param len Buffer size + + _Example_ + \code + byte entropy[32]; + int ret = wc_Entropy_Get(256, entropy, sizeof(entropy)); + \endcode + + \sa wc_Entropy_GetRawEntropy +*/ +int wc_Entropy_Get(int bits, unsigned char* entropy, word32 len); + +/*! + \ingroup Random + \brief Tests entropy source on demand. 
+ + \return 0 On success + \return RNG_FAILURE_E Test failed + + _Example_ + \code + int ret = wc_Entropy_OnDemandTest(); + \endcode + + \sa wc_Entropy_Get +*/ +int wc_Entropy_OnDemandTest(void); diff --git a/doc/dox_comments/header_files/rsa.h b/doc/dox_comments/header_files/rsa.h index 692821828..895ea6c6c 100644 --- a/doc/dox_comments/header_files/rsa.h +++ b/doc/dox_comments/header_files/rsa.h @@ -43,6 +43,9 @@ int wc_InitRsaKey(RsaKey* key, void* heap); The key has to be associated with RNG by wc_RsaSetRNG when WC_RSA_BLINDING is enabled. + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. + \return 0 Returned upon successfully initializing the RSA structure for use with encryption and decryption \return BAD_FUNC_ARGS Returned if the RSA key pointer evaluates to NULL @@ -1612,3 +1615,591 @@ int wc_RsaSetNonBlock(RsaKey* key, RsaNb* nb); */ int wc_RsaSetNonBlockTime(RsaKey* key, word32 maxBlockUs, word32 cpuMHz); +/*! + \ingroup RSA + \brief Initializes RSA key with heap and device ID. + + \return 0 on success + \return negative on error + + \param key RSA key structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + RsaKey key; + int ret = wc_InitRsaKey_ex(&key, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitRsaKey +*/ +int wc_InitRsaKey_ex(RsaKey* key, void* heap, int devId); + +/*! + \ingroup RSA + \brief Allocates and initializes new RSA key. These New/Delete functions + are exposed to support allocation of the structure using dynamic memory + to provide better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. 
+ + \return RsaKey pointer on success + \return NULL on failure + + \param heap Heap hint + \param devId Device ID + \param result_code Result code pointer + + _Example_ + \code + int result; + RsaKey* key = wc_NewRsaKey(NULL, INVALID_DEVID, &result); + \endcode + + \sa wc_DeleteRsaKey +*/ +RsaKey* wc_NewRsaKey(void* heap, int devId, int *result_code); + +/*! + \ingroup RSA + \brief Deletes and frees RSA key. These New/Delete functions are exposed + to support allocation of the structure using dynamic memory to provide + better ABI compatibility. + + \note This API is only available when WC_NO_CONSTRUCTORS is not defined. + WC_NO_CONSTRUCTORS is automatically defined when WOLFSSL_NO_MALLOC is + defined. + + \return 0 on success + \return negative on error + + \param key RSA key to delete + \param key_p Pointer to key pointer + + _Example_ + \code + RsaKey* key; + int ret = wc_DeleteRsaKey(key, &key); + \endcode + + \sa wc_NewRsaKey +*/ +int wc_DeleteRsaKey(RsaKey* key, RsaKey** key_p); + +/*! + \ingroup RSA + \brief Initializes RSA key with label. + + \note This API is only available when WOLF_PRIVATE_KEY_ID is defined, + which is set for PKCS11 support. + + \return 0 on success + \return negative on error + + \param key RSA key structure + \param label Label string + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + RsaKey key; + int ret = wc_InitRsaKey_Label(&key, "mykey", NULL, + INVALID_DEVID); + \endcode + + \sa wc_InitRsaKey_ex +*/ +int wc_InitRsaKey_Label(RsaKey* key, const char* label, void* heap, + int devId); + +/*! + \ingroup RSA + \brief Checks RSA key validity. + + \return 0 on success + \return negative on error + + \param key RSA key to check + + _Example_ + \code + RsaKey key; + int ret = wc_CheckRsaKey(&key); + \endcode + + \sa wc_MakeRsaKey +*/ +int wc_CheckRsaKey(RsaKey* key); + +/*! + \ingroup RSA + \brief Uses key ID for hardware RSA. 
+ + \return 0 on success + \return negative on error + + \param key RSA key + \param keyId Key identifier + \param flags Flags + + _Example_ + \code + RsaKey key; + int ret = wc_RsaUseKeyId(&key, 1, 0); + \endcode + + \sa wc_RsaGetKeyId +*/ +int wc_RsaUseKeyId(RsaKey* key, word32 keyId, word32 flags); + +/*! + \ingroup RSA + \brief Gets key ID from hardware RSA key. + + \return 0 on success + \return negative on error + + \param key RSA key + \param keyId Key identifier pointer + + _Example_ + \code + RsaKey key; + word32 keyId; + int ret = wc_RsaGetKeyId(&key, &keyId); + \endcode + + \sa wc_RsaUseKeyId +*/ +int wc_RsaGetKeyId(RsaKey* key, word32* keyId); + +/*! + \ingroup RSA + \brief Performs RSA operation. + + \return 0 on success + \return negative on error + + \param in Input buffer + \param inLen Input length + \param out Output buffer + \param outLen Output length pointer + \param type Operation type + \param key RSA key + \param rng Random number generator + + _Example_ + \code + RsaKey key; + WC_RNG rng; + byte in[256], out[256]; + word32 outLen = sizeof(out); + int ret = wc_RsaFunction(in, 256, out, &outLen, + RSA_PUBLIC_ENCRYPT, &key, &rng); + \endcode + + \sa wc_RsaPublicEncrypt +*/ +int wc_RsaFunction(const byte* in, word32 inLen, byte* out, + word32* outLen, int type, RsaKey* key, WC_RNG* rng); + +/*! + \ingroup RSA + \brief Signs with RSA-PSS extended options. 
+
+    \return Size of signature on success
+    \return negative on error
+
+    \param in Input buffer
+    \param inLen Input length
+    \param out Output buffer
+    \param outLen Output buffer size
+    \param hash Hash type
+    \param mgf MGF type
+    \param saltLen Salt length
+    \param key RSA key
+    \param rng Random number generator
+
+    _Example_
+    \code
+    RsaKey key;
+    WC_RNG rng;
+    byte in[32], sig[256];
+    int ret = wc_RsaPSS_Sign_ex(in, 32, sig, sizeof(sig),
+                                WC_HASH_TYPE_SHA256,
+                                WC_MGF1SHA256, 32, &key, &rng);
+    \endcode
+
+    \sa wc_RsaPSS_Sign
+*/
+int wc_RsaPSS_Sign_ex(const byte* in, word32 inLen, byte* out,
+    word32 outLen, enum wc_HashType hash, int mgf, int saltLen,
+    RsaKey* key, WC_RNG* rng);
+
+/*!
+    \ingroup RSA
+    \brief Verifies RSA signature with padding type.
+
+    \return Size of decrypted data on success
+    \return negative on error
+
+    \param in Input signature
+    \param inLen Signature length
+    \param out Output buffer
+    \param outLen Output buffer size
+    \param key RSA key
+    \param pad_type Padding type (e.g. WC_RSA_PKCSV15_PAD)
+
+    _Example_
+    \code
+    RsaKey key;
+    byte sig[256], out[256];
+    int ret = wc_RsaSSL_Verify_ex(sig, 256, out, sizeof(out),
+                                  &key, WC_RSA_PKCSV15_PAD);
+    \endcode
+
+    \sa wc_RsaSSL_Verify
+*/
+int wc_RsaSSL_Verify_ex(const byte* in, word32 inLen, byte* out,
+    word32 outLen, RsaKey* key, int pad_type);
+
+/*!
+    \ingroup RSA
+    \brief Verifies RSA signature with hash type.
+
+    \return Size of decrypted data on success
+    \return negative on error
+
+    \param in Input signature
+    \param inLen Signature length
+    \param out Output buffer
+    \param outLen Output buffer size
+    \param key RSA key
+    \param pad_type Padding type (e.g. WC_RSA_PKCSV15_PAD)
+    \param hash Hash type
+
+    _Example_
+    \code
+    RsaKey key;
+    byte sig[256], out[256];
+    int ret = wc_RsaSSL_Verify_ex2(sig, 256, out, sizeof(out),
+                                   &key, WC_RSA_PKCSV15_PAD,
+                                   WC_HASH_TYPE_SHA256);
+    \endcode
+
+    \sa wc_RsaSSL_Verify_ex
+*/
+int wc_RsaSSL_Verify_ex2(const byte* in, word32 inLen, byte* out,
+    word32 outLen, RsaKey* key, int pad_type,
+    enum wc_HashType hash);
+
+/*!
+    \ingroup RSA
+    \brief Verifies RSA-PSS inline with extended options.
+
+    \return Size of verified data on success
+    \return negative on error
+
+    \param in Input/output buffer
+    \param inLen Input length
+    \param out Output pointer
+    \param hash Hash type
+    \param mgf MGF type
+    \param saltLen Salt length
+    \param key RSA key
+
+    _Example_
+    \code
+    RsaKey key;
+    byte sig[256];
+    byte* out;
+    int ret = wc_RsaPSS_VerifyInline_ex(sig, 256, &out,
+                                        WC_HASH_TYPE_SHA256,
+                                        WC_MGF1SHA256, 32, &key);
+    \endcode
+
+    \sa wc_RsaPSS_VerifyInline
+*/
+int wc_RsaPSS_VerifyInline_ex(byte* in, word32 inLen, byte** out,
+    enum wc_HashType hash, int mgf, int saltLen, RsaKey* key);
+
+/*!
+    \ingroup RSA
+    \brief Verifies RSA-PSS with extended options.
+ + \return Size of verified data on success + \return negative on error + + \param in Input signature + \param inLen Signature length + \param out Output buffer + \param outLen Output buffer size + \param hash Hash type + \param mgf MGF type + \param saltLen Salt length + \param key RSA key + + _Example_ + \code + RsaKey key; + byte sig[256], out[256]; + int ret = wc_RsaPSS_Verify_ex(sig, 256, out, sizeof(out), + WC_HASH_TYPE_SHA256, + WC_MGF1SHA256, 32, &key); + \endcode + + \sa wc_RsaPSS_Verify +*/ +int wc_RsaPSS_Verify_ex(const byte* in, word32 inLen, byte* out, + word32 outLen, enum wc_HashType hash, int mgf, int saltLen, + RsaKey* key); + +/*! + \ingroup RSA + \brief Checks RSA-PSS padding with extended options. + + \return 0 on success + \return negative on error + + \param in Padded data + \param inLen Padded data length + \param sig Signature + \param sigSz Signature size + \param hashType Hash type + \param saltLen Salt length + \param bits Key size in bits + \param heap Heap hint + + _Example_ + \code + byte padded[256], sig[256]; + int ret = wc_RsaPSS_CheckPadding_ex2(padded, 256, sig, 256, + WC_HASH_TYPE_SHA256, 32, + 2048, NULL); + \endcode + + \sa wc_RsaPSS_CheckPadding_ex +*/ +int wc_RsaPSS_CheckPadding_ex2(const byte* in, word32 inLen, + const byte* sig, word32 sigSz, enum wc_HashType hashType, + int saltLen, int bits, void* heap); + +/*! + \ingroup RSA + \brief Exports RSA key components. 
+ + \return 0 on success + \return negative on error + + \param key RSA key + \param e Public exponent buffer + \param eSz Public exponent size pointer + \param n Modulus buffer + \param nSz Modulus size pointer + \param d Private exponent buffer + \param dSz Private exponent size pointer + \param p Prime p buffer + \param pSz Prime p size pointer + \param q Prime q buffer + \param qSz Prime q size pointer + + _Example_ + \code + RsaKey key; + byte e[3], n[256], d[256], p[128], q[128]; + word32 eSz = 3, nSz = 256, dSz = 256, pSz = 128, qSz = 128; + int ret = wc_RsaExportKey(&key, e, &eSz, n, &nSz, d, &dSz, + p, &pSz, q, &qSz); + \endcode + + \sa wc_RsaFlattenPublicKey +*/ +int wc_RsaExportKey(const RsaKey* key, byte* e, word32* eSz, + byte* n, word32* nSz, byte* d, word32* dSz, byte* p, + word32* pSz, byte* q, word32* qSz); + +/*! + \ingroup RSA + \brief Checks probable prime with extended options. + + \return 0 on success + \return negative on error + + \param p Prime p buffer + \param pSz Prime p size + \param q Prime q buffer + \param qSz Prime q size + \param e Public exponent buffer + \param eSz Public exponent size + \param nlen Modulus length + \param isPrime Prime result pointer + \param rng Random number generator + + _Example_ + \code + byte p[128], q[128], e[3]; + int isPrime; + WC_RNG rng; + int ret = wc_CheckProbablePrime_ex(p, 128, q, 128, e, 3, + 2048, &isPrime, &rng); + \endcode + + \sa wc_CheckProbablePrime +*/ +int wc_CheckProbablePrime_ex(const byte* p, word32 pSz, + const byte* q, word32 qSz, const byte* e, word32 eSz, + int nlen, int* isPrime, WC_RNG* rng); + +/*! + \ingroup RSA + \brief Checks probable prime. 
+
+    \return 0 on success
+    \return negative on error
+
+    \param p Prime p buffer
+    \param pSz Prime p size
+    \param q Prime q buffer
+    \param qSz Prime q size
+    \param e Public exponent buffer
+    \param eSz Public exponent size
+    \param nlen Modulus length
+    \param isPrime Prime result pointer
+
+    _Example_
+    \code
+    byte p[128], q[128], e[3];
+    int isPrime;
+    int ret = wc_CheckProbablePrime(p, 128, q, 128, e, 3, 2048,
+                                    &isPrime);
+    \endcode
+
+    \sa wc_CheckProbablePrime_ex
+*/
+int wc_CheckProbablePrime(const byte* p, word32 pSz,
+    const byte* q, word32 qSz, const byte* e, word32 eSz,
+    int nlen, int* isPrime);
+
+/*!
+    \ingroup RSA
+    \brief Pads data with extended options.
+
+    \return 0 on success
+    \return negative on error
+
+    \param input Input data
+    \param inputLen Input length
+    \param pkcsBlock Output padded block
+    \param pkcsBlockLen Padded block size
+    \param padValue Padding block type (RSA_BLOCK_TYPE_1 or RSA_BLOCK_TYPE_2)
+    \param rng Random number generator
+    \param padType Padding scheme (e.g. WC_RSA_PKCSV15_PAD)
+    \param hType Hash type
+    \param mgf MGF type
+    \param optLabel Optional label
+    \param labelLen Label length
+    \param saltLen Salt length
+    \param bits Key size in bits
+    \param heap Heap hint
+
+    _Example_
+    \code
+    byte in[32], padded[256];
+    WC_RNG rng;
+    int ret = wc_RsaPad_ex(in, 32, padded, 256, RSA_BLOCK_TYPE_1,
+                           &rng, WC_RSA_PKCSV15_PAD,
+                           WC_HASH_TYPE_SHA256, WC_MGF1SHA256,
+                           NULL, 0, 32, 2048, NULL);
+    \endcode
+
+    \sa wc_RsaUnPad_ex
+*/
+int wc_RsaPad_ex(const byte* input, word32 inputLen,
+    byte* pkcsBlock, word32 pkcsBlockLen, byte padValue,
+    WC_RNG* rng, int padType, enum wc_HashType hType, int mgf,
+    byte* optLabel, word32 labelLen, int saltLen, int bits,
+    void* heap);
+
+/*!
+    \ingroup RSA
+    \brief Unpads data with extended options.
+
+    \return Size of unpadded data on success
+    \return negative on error
+
+    \param pkcsBlock Padded block
+    \param pkcsBlockLen Padded block length
+    \param out Output pointer
+    \param padValue Padding block type (RSA_BLOCK_TYPE_1 or RSA_BLOCK_TYPE_2)
+    \param padType Padding scheme (e.g. WC_RSA_PKCSV15_PAD)
+    \param hType Hash type
+    \param mgf MGF type
+    \param optLabel Optional label
+    \param labelLen Label length
+    \param saltLen Salt length
+    \param bits Key size in bits
+    \param heap Heap hint
+
+    _Example_
+    \code
+    byte padded[256];
+    byte* out;
+    int ret = wc_RsaUnPad_ex(padded, 256, &out, RSA_BLOCK_TYPE_1,
+                             WC_RSA_PKCSV15_PAD,
+                             WC_HASH_TYPE_SHA256, WC_MGF1SHA256,
+                             NULL, 0, 32, 2048, NULL);
+    \endcode
+
+    \sa wc_RsaPad_ex
+*/
+int wc_RsaUnPad_ex(byte* pkcsBlock, word32 pkcsBlockLen,
+    byte** out, byte padValue, int padType,
+    enum wc_HashType hType, int mgf, byte* optLabel,
+    word32 labelLen, int saltLen, int bits, void* heap);
+
+/*!
+    \ingroup RSA
+    \brief Decodes raw RSA private key.
+
+    \return 0 on success
+    \return negative on error
+
+    \param n Modulus buffer
+    \param nSz Modulus size
+    \param e Public exponent buffer
+    \param eSz Public exponent size
+    \param d Private exponent buffer
+    \param dSz Private exponent size
+    \param u Coefficient buffer
+    \param uSz Coefficient size
+    \param p Prime p buffer
+    \param pSz Prime p size
+    \param q Prime q buffer
+    \param qSz Prime q size
+    \param dP dP buffer
+    \param dPSz dP size
+    \param dQ dQ buffer
+    \param dQSz dQ size
+    \param key RSA key
+
+    _Example_
+    \code
+    RsaKey key;
+    byte n[256], e[3], d[256], u[256], p[128], q[128];
+    byte dP[128], dQ[128];
+    int ret = wc_RsaPrivateKeyDecodeRaw(n, 256, e, 3, d, 256,
+                                        u, 256, p, 128, q, 128,
+                                        dP, 128, dQ, 128, &key);
+    \endcode
+
+    \sa wc_RsaPrivateKeyDecode
+*/
+int wc_RsaPrivateKeyDecodeRaw(const byte* n, word32 nSz,
+    const byte* e, word32 eSz, const byte* d, word32 dSz,
+    const byte* u, word32 uSz, const byte* p, word32 pSz,
+    const byte* q, word32 qSz, const byte* dP, word32 dPSz,
+    const byte* dQ, word32 dQSz, RsaKey* key);
diff --git
a/doc/dox_comments/header_files/sha.h b/doc/dox_comments/header_files/sha.h index 56a9382d4..e6368fa4a 100644 --- a/doc/dox_comments/header_files/sha.h +++ b/doc/dox_comments/header_files/sha.h @@ -142,3 +142,124 @@ void wc_ShaFree(wc_Sha* sha); \sa wc_InitSha */ int wc_ShaGetHash(wc_Sha* sha, byte* hash); +/*! + \ingroup SHA + \brief Initializes SHA with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha sha; + int ret = wc_InitSha_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha +*/ +int wc_InitSha_ex(wc_Sha* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Gets raw hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha SHA structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha sha; + byte hash[WC_SHA_DIGEST_SIZE]; + int ret = wc_ShaFinalRaw(&sha, hash); + \endcode + + \sa wc_ShaFinal +*/ +int wc_ShaFinalRaw(wc_Sha* sha, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA context. + + \return 0 on success + \return negative on error + + \param src Source SHA structure + \param dst Destination SHA structure + + _Example_ + \code + wc_Sha src, dst; + int ret = wc_ShaCopy(&src, &dst); + \endcode + + \sa wc_InitSha +*/ +int wc_ShaCopy(wc_Sha* src, wc_Sha* dst); + +/*! + \ingroup SHA + \brief Transforms SHA block. + + \return 0 on success + \return negative on error + + \param sha SHA structure + \param data Block data + + _Example_ + \code + wc_Sha sha; + unsigned char block[WC_SHA_BLOCK_SIZE]; + int ret = wc_ShaTransform(&sha, block); + \endcode + + \sa wc_ShaUpdate +*/ +int wc_ShaTransform(wc_Sha* sha, const unsigned char* data); + +/*! + \ingroup SHA + \brief Sets SHA size. 
+ + \return none No returns + + \param sha SHA structure + \param len Size to set + + _Example_ + \code + wc_Sha sha; + wc_ShaSizeSet(&sha, 1000); + \endcode + + \sa wc_ShaUpdate +*/ +void wc_ShaSizeSet(wc_Sha* sha, word32 len); + +/*! + \ingroup SHA + \brief Sets SHA flags. + + \return 0 on success + \return negative on error + + \param sha SHA structure + \param flags Flags to set + + _Example_ + \code + wc_Sha sha; + int ret = wc_ShaSetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha +*/ +int wc_ShaSetFlags(wc_Sha* sha, word32 flags); diff --git a/doc/dox_comments/header_files/sha256.h b/doc/dox_comments/header_files/sha256.h index 93bfd836f..3a94b797b 100644 --- a/doc/dox_comments/header_files/sha256.h +++ b/doc/dox_comments/header_files/sha256.h @@ -194,7 +194,7 @@ int wc_InitSha224(wc_Sha224* sha224); _Example_ \code Sha224 sha224; - byte data[] = { /* Data to be hashed }; + byte data[]; // Data to be hashed word32 len = sizeof(data); if ((ret = wc_InitSha224(&sha224)) != 0) { @@ -227,7 +227,7 @@ int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len); _Example_ \code Sha224 sha224; - byte data[] = { /* Data to be hashed }; + byte data[]; // Data to be hashed word32 len = sizeof(data); if ((ret = wc_InitSha224(&sha224)) != 0) { @@ -244,3 +244,344 @@ int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len); \sa wc_Sha224Update */ int wc_Sha224Final(wc_Sha224* sha224, byte* hash); + +/*! + \ingroup SHA + \brief Initializes SHA256 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA256 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha256 sha; + int ret = wc_InitSha256_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha256 +*/ +int wc_InitSha256_ex(wc_Sha256* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Gets raw hash without finalizing. 
+ + \return 0 on success + \return negative on error + + \param sha256 SHA256 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha256 sha; + byte hash[WC_SHA256_DIGEST_SIZE]; + int ret = wc_Sha256FinalRaw(&sha, hash); + \endcode + + \sa wc_Sha256Final +*/ +int wc_Sha256FinalRaw(wc_Sha256* sha256, byte* hash); + +/*! + \ingroup SHA + \brief Transforms SHA256 block. + + \return 0 on success + \return negative on error + + \param sha SHA256 structure + \param data Block data + + _Example_ + \code + wc_Sha256 sha; + unsigned char block[WC_SHA256_BLOCK_SIZE]; + int ret = wc_Sha256Transform(&sha, block); + \endcode + + \sa wc_Sha256Update +*/ +int wc_Sha256Transform(wc_Sha256* sha, const unsigned char* data); + +/*! + \ingroup SHA + \brief Hashes single block and outputs result. + + \return 0 on success + \return negative on error + + \param sha SHA256 structure + \param data Block data + \param hash Output hash buffer + + _Example_ + \code + wc_Sha256 sha; + unsigned char block[WC_SHA256_BLOCK_SIZE]; + unsigned char hash[WC_SHA256_DIGEST_SIZE]; + int ret = wc_Sha256HashBlock(&sha, block, hash); + \endcode + + \sa wc_Sha256Transform +*/ +int wc_Sha256HashBlock(wc_Sha256* sha, const unsigned char* data, + unsigned char* hash); + +/*! + \ingroup SHA + \brief Grows SHA256 buffer with input data. This function is only + available when WOLFSSL_HASH_KEEP is defined. It is used for keeping an + internal buffer to hold all data to be hashed rather than iterating + over update, which is necessary for some hardware acceleration + platforms that have restrictions on streaming hash operations. + + \return 0 on success + \return negative on error + + \param sha256 SHA256 structure + \param in Input data + \param inSz Input size + + _Example_ + \code + wc_Sha256 sha; + byte data[100]; + int ret = wc_Sha256_Grow(&sha, data, sizeof(data)); + \endcode + + \sa wc_Sha256Update +*/ +int wc_Sha256_Grow(wc_Sha256* sha256, const byte* in, int inSz); + +/*! 
+ \ingroup SHA + \brief Copies SHA256 context. + + \return 0 on success + \return negative on error + + \param src Source SHA256 structure + \param dst Destination SHA256 structure + + _Example_ + \code + wc_Sha256 src, dst; + int ret = wc_Sha256Copy(&src, &dst); + \endcode + + \sa wc_InitSha256 +*/ +int wc_Sha256Copy(wc_Sha256* src, wc_Sha256* dst); + +/*! + \ingroup SHA + \brief Sets SHA256 size. + + \return none No returns + + \param sha256 SHA256 structure + \param len Size to set + + _Example_ + \code + wc_Sha256 sha; + wc_Sha256SizeSet(&sha, 1000); + \endcode + + \sa wc_Sha256Update +*/ +void wc_Sha256SizeSet(wc_Sha256* sha256, word32 len); + +/*! + \ingroup SHA + \brief Sets SHA256 flags. + + \return 0 on success + \return negative on error + + \param sha256 SHA256 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha256 sha; + int ret = wc_Sha256SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha256 +*/ +int wc_Sha256SetFlags(wc_Sha256* sha256, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA256 flags. + + \return 0 on success + \return negative on error + + \param sha256 SHA256 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha256 sha; + word32 flags; + int ret = wc_Sha256GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha256SetFlags +*/ +int wc_Sha256GetFlags(wc_Sha256* sha256, word32* flags); + +/*! + \ingroup SHA + \brief Initializes SHA224 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha224 SHA224 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha224 sha; + int ret = wc_InitSha224_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha224 +*/ +int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId); + +/*! + \ingroup SHA + \brief Frees SHA224 resources. 
+ + \return none No returns + + \param sha224 SHA224 structure + + _Example_ + \code + wc_Sha224 sha; + wc_InitSha224(&sha); + wc_Sha224Free(&sha); + \endcode + + \sa wc_InitSha224 +*/ +void wc_Sha224Free(wc_Sha224* sha224); + +/*! + \ingroup SHA + \brief Grows SHA224 buffer with input data. This function is only + available when WOLFSSL_HASH_KEEP is defined. It is used for keeping an + internal buffer to hold all data to be hashed rather than iterating + over update, which is necessary for some hardware acceleration + platforms that have restrictions on streaming hash operations. + + \return 0 on success + \return negative on error + + \param sha224 SHA224 structure + \param in Input data + \param inSz Input size + + _Example_ + \code + wc_Sha224 sha; + byte data[100]; + int ret = wc_Sha224_Grow(&sha, data, sizeof(data)); + \endcode + + \sa wc_Sha224Update +*/ +int wc_Sha224_Grow(wc_Sha224* sha224, const byte* in, int inSz); + +/*! + \ingroup SHA + \brief Gets SHA224 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha224 SHA224 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha224 sha; + byte hash[WC_SHA224_DIGEST_SIZE]; + int ret = wc_Sha224GetHash(&sha, hash); + \endcode + + \sa wc_Sha224Final +*/ +int wc_Sha224GetHash(wc_Sha224* sha224, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA224 context. + + \return 0 on success + \return negative on error + + \param src Source SHA224 structure + \param dst Destination SHA224 structure + + _Example_ + \code + wc_Sha224 src, dst; + int ret = wc_Sha224Copy(&src, &dst); + \endcode + + \sa wc_InitSha224 +*/ +int wc_Sha224Copy(wc_Sha224* src, wc_Sha224* dst); + +/*! + \ingroup SHA + \brief Sets SHA224 flags. 
+ + \return 0 on success + \return negative on error + + \param sha224 SHA224 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha224 sha; + int ret = wc_Sha224SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha224 +*/ +int wc_Sha224SetFlags(wc_Sha224* sha224, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA224 flags. + + \return 0 on success + \return negative on error + + \param sha224 SHA224 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha224 sha; + word32 flags; + int ret = wc_Sha224GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha224SetFlags +*/ +int wc_Sha224GetFlags(wc_Sha224* sha224, word32* flags); diff --git a/doc/dox_comments/header_files/sha512.h b/doc/dox_comments/header_files/sha512.h index 915ae77a1..0f9c129a4 100644 --- a/doc/dox_comments/header_files/sha512.h +++ b/doc/dox_comments/header_files/sha512.h @@ -181,3 +181,827 @@ int wc_Sha384Update(wc_Sha384* sha, const byte* data, word32 len); \sa wc_InitSha384 */ int wc_Sha384Final(wc_Sha384* sha384, byte* hash); + +/*! + \ingroup SHA + \brief Initializes SHA512 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_InitSha512_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha512 +*/ +int wc_InitSha512_ex(wc_Sha512* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Gets raw hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_DIGEST_SIZE]; + int ret = wc_Sha512FinalRaw(&sha, hash); + \endcode + + \sa wc_Sha512Final +*/ +int wc_Sha512FinalRaw(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Frees SHA512 resources. 
+ + \return none No returns + + \param sha SHA512 structure + + _Example_ + \code + wc_Sha512 sha; + wc_InitSha512(&sha); + wc_Sha512Free(&sha); + \endcode + + \sa wc_InitSha512 +*/ +void wc_Sha512Free(wc_Sha512* sha); + +/*! + \ingroup SHA + \brief Gets SHA512 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_DIGEST_SIZE]; + int ret = wc_Sha512GetHash(&sha, hash); + \endcode + + \sa wc_Sha512Final +*/ +int wc_Sha512GetHash(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA512 context. + + \return 0 on success + \return negative on error + + \param src Source SHA512 structure + \param dst Destination SHA512 structure + + _Example_ + \code + wc_Sha512 src, dst; + int ret = wc_Sha512Copy(&src, &dst); + \endcode + + \sa wc_InitSha512 +*/ +int wc_Sha512Copy(wc_Sha512* src, wc_Sha512* dst); + +/*! + \ingroup SHA + \brief Grows SHA512 buffer with input data. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param in Input data + \param inSz Input size + + _Example_ + \code + wc_Sha512 sha; + byte data[100]; + int ret = wc_Sha512_Grow(&sha, data, sizeof(data)); + \endcode + + \sa wc_Sha512Update +*/ +int wc_Sha512_Grow(wc_Sha512* sha512, const byte* in, int inSz); + +/*! + \ingroup SHA + \brief Sets SHA512 flags. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_Sha512SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha512 +*/ +int wc_Sha512SetFlags(wc_Sha512* sha512, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA512 flags. 
+ + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha512 sha; + word32 flags; + int ret = wc_Sha512GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha512SetFlags +*/ +int wc_Sha512GetFlags(wc_Sha512* sha512, word32* flags); + +/*! + \ingroup SHA + \brief Transforms SHA512 block. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param data Block data + + _Example_ + \code + wc_Sha512 sha; + unsigned char block[WC_SHA512_BLOCK_SIZE]; + int ret = wc_Sha512Transform(&sha, block); + \endcode + + \sa wc_Sha512Update +*/ +int wc_Sha512Transform(wc_Sha512* sha, const unsigned char* data); + +/*! + \ingroup SHA + \brief Initializes SHA512/224. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_InitSha512_224(&sha); + \endcode + + \sa wc_Sha512_224Update +*/ +int wc_InitSha512_224(wc_Sha512* sha); + +/*! + \ingroup SHA + \brief Initializes SHA512/224 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_InitSha512_224_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha512_224 +*/ +int wc_InitSha512_224_ex(wc_Sha512* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Updates SHA512/224 hash with data. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param data Input data + \param len Input size + + _Example_ + \code + wc_Sha512 sha; + byte data[100]; + int ret = wc_Sha512_224Update(&sha, data, sizeof(data)); + \endcode + + \sa wc_InitSha512_224 +*/ +int wc_Sha512_224Update(wc_Sha512* sha, const byte* data, word32 len); + +/*! + \ingroup SHA + \brief Gets raw SHA512/224 hash without finalizing. 
+ + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_224_DIGEST_SIZE]; + int ret = wc_Sha512_224FinalRaw(&sha, hash); + \endcode + + \sa wc_Sha512_224Final +*/ +int wc_Sha512_224FinalRaw(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Finalizes SHA512/224 hash. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_224_DIGEST_SIZE]; + int ret = wc_Sha512_224Final(&sha, hash); + \endcode + + \sa wc_Sha512_224Update +*/ +int wc_Sha512_224Final(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Frees SHA512/224 resources. + + \return none No returns + + \param sha SHA512 structure + + _Example_ + \code + wc_Sha512 sha; + wc_InitSha512_224(&sha); + wc_Sha512_224Free(&sha); + \endcode + + \sa wc_InitSha512_224 +*/ +void wc_Sha512_224Free(wc_Sha512* sha); + +/*! + \ingroup SHA + \brief Gets SHA512/224 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_224_DIGEST_SIZE]; + int ret = wc_Sha512_224GetHash(&sha, hash); + \endcode + + \sa wc_Sha512_224Final +*/ +int wc_Sha512_224GetHash(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA512/224 context. + + \return 0 on success + \return negative on error + + \param src Source SHA512 structure + \param dst Destination SHA512 structure + + _Example_ + \code + wc_Sha512 src, dst; + int ret = wc_Sha512_224Copy(&src, &dst); + \endcode + + \sa wc_InitSha512_224 +*/ +int wc_Sha512_224Copy(wc_Sha512* src, wc_Sha512* dst); + +/*! + \ingroup SHA + \brief Sets SHA512/224 flags. 
+ + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_Sha512_224SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha512_224 +*/ +int wc_Sha512_224SetFlags(wc_Sha512* sha512, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA512/224 flags. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha512 sha; + word32 flags; + int ret = wc_Sha512_224GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha512_224SetFlags +*/ +int wc_Sha512_224GetFlags(wc_Sha512* sha512, word32* flags); + +/*! + \ingroup SHA + \brief Transforms SHA512/224 block. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param data Block data + + _Example_ + \code + wc_Sha512 sha; + unsigned char block[WC_SHA512_BLOCK_SIZE]; + int ret = wc_Sha512_224Transform(&sha, block); + \endcode + + \sa wc_Sha512_224Update +*/ +int wc_Sha512_224Transform(wc_Sha512* sha, const unsigned char* data); + +/*! + \ingroup SHA + \brief Initializes SHA512/256. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_InitSha512_256(&sha); + \endcode + + \sa wc_Sha512_256Update +*/ +int wc_InitSha512_256(wc_Sha512* sha); + +/*! + \ingroup SHA + \brief Initializes SHA512/256 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_InitSha512_256_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha512_256 +*/ +int wc_InitSha512_256_ex(wc_Sha512* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Updates SHA512/256 hash with data. 
+ + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param data Input data + \param len Input size + + _Example_ + \code + wc_Sha512 sha; + byte data[100]; + int ret = wc_Sha512_256Update(&sha, data, sizeof(data)); + \endcode + + \sa wc_InitSha512_256 +*/ +int wc_Sha512_256Update(wc_Sha512* sha, const byte* data, word32 len); + +/*! + \ingroup SHA + \brief Gets raw SHA512/256 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_256_DIGEST_SIZE]; + int ret = wc_Sha512_256FinalRaw(&sha, hash); + \endcode + + \sa wc_Sha512_256Final +*/ +int wc_Sha512_256FinalRaw(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Finalizes SHA512/256 hash. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_256_DIGEST_SIZE]; + int ret = wc_Sha512_256Final(&sha, hash); + \endcode + + \sa wc_Sha512_256Update +*/ +int wc_Sha512_256Final(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Frees SHA512/256 resources. + + \return none No returns + + \param sha SHA512 structure + + _Example_ + \code + wc_Sha512 sha; + wc_InitSha512_256(&sha); + wc_Sha512_256Free(&sha); + \endcode + + \sa wc_InitSha512_256 +*/ +void wc_Sha512_256Free(wc_Sha512* sha); + +/*! + \ingroup SHA + \brief Gets SHA512/256 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha512 sha; + byte hash[WC_SHA512_256_DIGEST_SIZE]; + int ret = wc_Sha512_256GetHash(&sha, hash); + \endcode + + \sa wc_Sha512_256Final +*/ +int wc_Sha512_256GetHash(wc_Sha512* sha512, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA512/256 context. 
+ + \return 0 on success + \return negative on error + + \param src Source SHA512 structure + \param dst Destination SHA512 structure + + _Example_ + \code + wc_Sha512 src, dst; + int ret = wc_Sha512_256Copy(&src, &dst); + \endcode + + \sa wc_InitSha512_256 +*/ +int wc_Sha512_256Copy(wc_Sha512* src, wc_Sha512* dst); + +/*! + \ingroup SHA + \brief Sets SHA512/256 flags. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha512 sha; + int ret = wc_Sha512_256SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha512_256 +*/ +int wc_Sha512_256SetFlags(wc_Sha512* sha512, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA512/256 flags. + + \return 0 on success + \return negative on error + + \param sha512 SHA512 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha512 sha; + word32 flags; + int ret = wc_Sha512_256GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha512_256SetFlags +*/ +int wc_Sha512_256GetFlags(wc_Sha512* sha512, word32* flags); + +/*! + \ingroup SHA + \brief Transforms SHA512/256 block. + + \return 0 on success + \return negative on error + + \param sha SHA512 structure + \param data Block data + + _Example_ + \code + wc_Sha512 sha; + unsigned char block[WC_SHA512_BLOCK_SIZE]; + int ret = wc_Sha512_256Transform(&sha, block); + \endcode + + \sa wc_Sha512_256Update +*/ +int wc_Sha512_256Transform(wc_Sha512* sha, const unsigned char* data); + +/*! + \ingroup SHA + \brief Initializes SHA384 with heap and device ID. + + \return 0 on success + \return negative on error + + \param sha SHA384 structure + \param heap Heap hint + \param devId Device ID + + _Example_ + \code + wc_Sha384 sha; + int ret = wc_InitSha384_ex(&sha, NULL, INVALID_DEVID); + \endcode + + \sa wc_InitSha384 +*/ +int wc_InitSha384_ex(wc_Sha384* sha, void* heap, int devId); + +/*! + \ingroup SHA + \brief Gets raw SHA384 hash without finalizing. 
+ + \return 0 on success + \return negative on error + + \param sha384 SHA384 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha384 sha; + byte hash[WC_SHA384_DIGEST_SIZE]; + int ret = wc_Sha384FinalRaw(&sha, hash); + \endcode + + \sa wc_Sha384Final +*/ +int wc_Sha384FinalRaw(wc_Sha384* sha384, byte* hash); + +/*! + \ingroup SHA + \brief Frees SHA384 resources. + + \return none No returns + + \param sha SHA384 structure + + _Example_ + \code + wc_Sha384 sha; + wc_InitSha384(&sha); + wc_Sha384Free(&sha); + \endcode + + \sa wc_InitSha384 +*/ +void wc_Sha384Free(wc_Sha384* sha); + +/*! + \ingroup SHA + \brief Gets SHA384 hash without finalizing. + + \return 0 on success + \return negative on error + + \param sha384 SHA384 structure + \param hash Output hash buffer + + _Example_ + \code + wc_Sha384 sha; + byte hash[WC_SHA384_DIGEST_SIZE]; + int ret = wc_Sha384GetHash(&sha, hash); + \endcode + + \sa wc_Sha384Final +*/ +int wc_Sha384GetHash(wc_Sha384* sha384, byte* hash); + +/*! + \ingroup SHA + \brief Copies SHA384 context. + + \return 0 on success + \return negative on error + + \param src Source SHA384 structure + \param dst Destination SHA384 structure + + _Example_ + \code + wc_Sha384 src, dst; + int ret = wc_Sha384Copy(&src, &dst); + \endcode + + \sa wc_InitSha384 +*/ +int wc_Sha384Copy(wc_Sha384* src, wc_Sha384* dst); + +/*! + \ingroup SHA + \brief Grows SHA384 buffer with input data. + + \return 0 on success + \return negative on error + + \param sha384 SHA384 structure + \param in Input data + \param inSz Input size + + _Example_ + \code + wc_Sha384 sha; + byte data[100]; + int ret = wc_Sha384_Grow(&sha, data, sizeof(data)); + \endcode + + \sa wc_Sha384Update +*/ +int wc_Sha384_Grow(wc_Sha384* sha384, const byte* in, int inSz); + +/*! + \ingroup SHA + \brief Sets SHA384 flags. 
+ + \return 0 on success + \return negative on error + + \param sha384 SHA384 structure + \param flags Flags to set + + _Example_ + \code + wc_Sha384 sha; + int ret = wc_Sha384SetFlags(&sha, WC_HASH_FLAG_WILLCOPY); + \endcode + + \sa wc_InitSha384 +*/ +int wc_Sha384SetFlags(wc_Sha384* sha384, word32 flags); + +/*! + \ingroup SHA + \brief Gets SHA384 flags. + + \return 0 on success + \return negative on error + + \param sha384 SHA384 structure + \param flags Pointer to store flags + + _Example_ + \code + wc_Sha384 sha; + word32 flags; + int ret = wc_Sha384GetFlags(&sha, &flags); + \endcode + + \sa wc_Sha384SetFlags +*/ +int wc_Sha384GetFlags(wc_Sha384* sha384, word32* flags); + +/*! + \ingroup SHA + \brief Transforms SHA384 block. + + \return 0 on success + \return negative on error + + \param sha SHA384 structure + \param data Block data + + _Example_ + \code + wc_Sha384 sha; + unsigned char block[WC_SHA384_BLOCK_SIZE]; + int ret = wc_Sha384Transform(&sha, block); + \endcode + + \sa wc_Sha384Update +*/ +int wc_Sha384Transform(wc_Sha384* sha, const unsigned char* data); diff --git a/doc/dox_comments/header_files/signature.h b/doc/dox_comments/header_files/signature.h index ab1468a26..611388783 100644 --- a/doc/dox_comments/header_files/signature.h +++ b/doc/dox_comments/header_files/signature.h @@ -145,3 +145,206 @@ int wc_SignatureGenerate( byte* sig, word32 *sig_len, const void* key, word32 key_len, WC_RNG* rng); + +/*! + \ingroup Signature + \brief This function verifies a signature using a pre-computed hash. + Unlike wc_SignatureVerify which hashes the data first, this function + takes the hash directly and verifies the signature against it. + If sig_type is WC_SIGNATURE_TYPE_RSA_W_ENC, hash data must be encoded + with wc_EncodeSignature prior to calling. 
+ + \return 0 Success + \return SIG_TYPE_E Signature type not enabled/available + \return BAD_FUNC_ARG Bad function argument provided + \return BUFFER_E Output buffer too small or input too large + + \param hash_type A hash type from enum wc_HashType + \param sig_type A signature type such as WC_SIGNATURE_TYPE_ECC or + WC_SIGNATURE_TYPE_RSA + \param hash_data Pointer to buffer containing the hash to verify + \param hash_len Length of the hash buffer + \param sig Pointer to buffer containing the signature + \param sig_len Length of the signature buffer + \param key Pointer to a key structure such as ecc_key or RsaKey + \param key_len Size of the key structure + + _Example_ + \code + ecc_key eccKey; + byte hash[WC_SHA256_DIGEST_SIZE]; + byte sig[ECC_MAX_SIG_SIZE]; + word32 sigLen = sizeof(sig); + + wc_ecc_init(&eccKey); + // import public key, signature, and pre-computed hash ... + int ret = wc_SignatureVerifyHash(WC_HASH_TYPE_SHA256, + WC_SIGNATURE_TYPE_ECC, hash, + sizeof(hash), sig, sigLen, + &eccKey, sizeof(eccKey)); + if (ret == 0) { + // signature verified + } + \endcode + + \sa wc_SignatureVerify + \sa wc_SignatureGenerateHash +*/ +int wc_SignatureVerifyHash(enum wc_HashType hash_type, + enum wc_SignatureType sig_type, + const byte* hash_data, word32 hash_len, + const byte* sig, word32 sig_len, + const void* key, word32 key_len); + +/*! + \ingroup Signature + \brief This function generates a signature from a pre-computed hash. + Unlike wc_SignatureGenerate which hashes the data first, this + function takes the hash directly and signs it. + If sig_type is WC_SIGNATURE_TYPE_RSA_W_ENC, hash data must be encoded + with wc_EncodeSignature prior to calling. 
+ + \return 0 Success + \return SIG_TYPE_E Signature type not enabled/available + \return BAD_FUNC_ARG Bad function argument provided + \return BUFFER_E Output buffer too small or input too large + + \param hash_type A hash type from enum wc_HashType + \param sig_type A signature type such as WC_SIGNATURE_TYPE_ECC or + WC_SIGNATURE_TYPE_RSA + \param hash_data Pointer to buffer containing the hash to sign + \param hash_len Length of the hash buffer + \param sig Pointer to buffer to output signature + \param sig_len Pointer to length of signature output buffer + \param key Pointer to a key structure such as ecc_key or RsaKey + \param key_len Size of the key structure + \param rng Pointer to an initialized RNG structure + + _Example_ + \code + WC_RNG rng; + ecc_key eccKey; + byte hash[WC_SHA256_DIGEST_SIZE]; + byte sig[ECC_MAX_SIG_SIZE]; + word32 sigLen = sizeof(sig); + + wc_InitRng(&rng); + wc_ecc_init(&eccKey); + wc_ecc_make_key(&rng, 32, &eccKey); + // generate signature from pre-computed hash + int ret = wc_SignatureGenerateHash(WC_HASH_TYPE_SHA256, + WC_SIGNATURE_TYPE_ECC, hash, + sizeof(hash), sig, &sigLen, + &eccKey, sizeof(eccKey), &rng); + \endcode + + \sa wc_SignatureGenerate + \sa wc_SignatureVerifyHash +*/ +int wc_SignatureGenerateHash(enum wc_HashType hash_type, + enum wc_SignatureType sig_type, + const byte* hash_data, word32 hash_len, + byte* sig, word32 *sig_len, + const void* key, word32 key_len, + WC_RNG* rng); + +/*! + \ingroup Signature + \brief This function generates a signature from a pre-computed hash + with extended options. This is similar to wc_SignatureGenerateHash + but allows optional verification of the signature after generation. 
+ + \return 0 Success + \return SIG_TYPE_E Signature type not enabled/available + \return BAD_FUNC_ARG Bad function argument provided + \return BUFFER_E Output buffer too small or input too large + + \param hash_type A hash type from enum wc_HashType + \param sig_type A signature type such as WC_SIGNATURE_TYPE_ECC or + WC_SIGNATURE_TYPE_RSA + \param hash_data Pointer to buffer containing the hash to sign + \param hash_len Length of the hash buffer + \param sig Pointer to buffer to output signature + \param sig_len Pointer to length of signature output buffer + \param key Pointer to a key structure such as ecc_key or RsaKey + \param key_len Size of the key structure + \param rng Pointer to an initialized RNG structure + \param verify If non-zero, verify the signature after generation + + _Example_ + \code + WC_RNG rng; + ecc_key eccKey; + byte hash[WC_SHA256_DIGEST_SIZE]; + byte sig[ECC_MAX_SIG_SIZE]; + word32 sigLen = sizeof(sig); + + wc_InitRng(&rng); + wc_ecc_init(&eccKey); + wc_ecc_make_key(&rng, 32, &eccKey); + int ret = wc_SignatureGenerateHash_ex(WC_HASH_TYPE_SHA256, + WC_SIGNATURE_TYPE_ECC, hash, + sizeof(hash), sig, &sigLen, + &eccKey, sizeof(eccKey), + &rng, 1); + \endcode + + \sa wc_SignatureGenerateHash + \sa wc_SignatureGenerate_ex +*/ +int wc_SignatureGenerateHash_ex(enum wc_HashType hash_type, + enum wc_SignatureType sig_type, + const byte* hash_data, word32 hash_len, + byte* sig, word32 *sig_len, + const void* key, word32 key_len, + WC_RNG* rng, int verify); + +/*! + \ingroup Signature + \brief This function generates a signature from data with extended + options. This is similar to wc_SignatureGenerate but allows optional + verification of the signature after generation. 
+ + \return 0 Success + \return SIG_TYPE_E Signature type not enabled/available + \return BAD_FUNC_ARG Bad function argument provided + \return BUFFER_E Output buffer too small or input too large + + \param hash_type A hash type from enum wc_HashType + \param sig_type A signature type such as WC_SIGNATURE_TYPE_ECC or + WC_SIGNATURE_TYPE_RSA + \param data Pointer to buffer containing the data to hash and sign + \param data_len Length of the data buffer + \param sig Pointer to buffer to output signature + \param sig_len Pointer to length of signature output buffer + \param key Pointer to a key structure such as ecc_key or RsaKey + \param key_len Size of the key structure + \param rng Pointer to an initialized RNG structure + \param verify If non-zero, verify the signature after generation + + _Example_ + \code + WC_RNG rng; + ecc_key eccKey; + byte data[32]; // data to sign + byte sig[ECC_MAX_SIG_SIZE]; + word32 sigLen = sizeof(sig); + + wc_InitRng(&rng); + wc_ecc_init(&eccKey); + wc_ecc_make_key(&rng, 32, &eccKey); + int ret = wc_SignatureGenerate_ex(WC_HASH_TYPE_SHA256, + WC_SIGNATURE_TYPE_ECC, data, + sizeof(data), sig, &sigLen, + &eccKey, sizeof(eccKey), &rng, 1); + \endcode + + \sa wc_SignatureGenerate + \sa wc_SignatureGenerateHash_ex +*/ +int wc_SignatureGenerate_ex(enum wc_HashType hash_type, + enum wc_SignatureType sig_type, + const byte* data, word32 data_len, + byte* sig, word32 *sig_len, + const void* key, word32 key_len, + WC_RNG* rng, int verify); diff --git a/doc/dox_comments/header_files/srp.h b/doc/dox_comments/header_files/srp.h index 14f69190b..5a058d229 100644 --- a/doc/dox_comments/header_files/srp.h +++ b/doc/dox_comments/header_files/srp.h @@ -32,6 +32,49 @@ */ int wc_SrpInit(Srp* srp, SrpType type, SrpSide side); +/*! + \ingroup SRP + \brief Initializes the Srp struct for usage with extended parameters. + This function is similar to wc_SrpInit but allows specification of a + custom heap hint and device ID for hardware acceleration.
+ + \return 0 on success. + \return BAD_FUNC_ARG Returns when there's an issue with the arguments + such as srp being null or SrpSide not being SRP_CLIENT_SIDE or + SRP_SERVER_SIDE. + \return NOT_COMPILED_IN Returns when a type is passed as an argument + but hasn't been configured in the wolfCrypt build. + \return <0 on error. + + \param srp the Srp structure to be initialized. + \param type the hash type to be used. + \param side the side of the communication. + \param heap pointer to heap hint for memory allocation (can be NULL). + \param devId device ID for hardware acceleration (use INVALID_DEVID + for software only). + + _Example_ + \code + Srp srp; + void* heap = NULL; + int devId = INVALID_DEVID; + + if (wc_SrpInit_ex(&srp, SRP_TYPE_SHA, SRP_CLIENT_SIDE, heap, + devId) != 0) { + // Initialization error + } + else { + wc_SrpTerm(&srp); + } + \endcode + + \sa wc_SrpInit + \sa wc_SrpTerm + \sa wc_SrpSetUsername +*/ +int wc_SrpInit_ex(Srp* srp, SrpType type, SrpSide side, void* heap, + int devId); + /*! \ingroup SRP diff --git a/doc/dox_comments/header_files/ssl.h b/doc/dox_comments/header_files/ssl.h index 955b04093..c880e4eba 100644 --- a/doc/dox_comments/header_files/ssl.h +++ b/doc/dox_comments/header_files/ssl.h @@ -14430,6 +14430,8 @@ int wolfSSL_set_max_early_data(WOLFSSL* ssl, unsigned int sz); \return BAD_FUNC_ARG if a pointer parameter is NULL, sz is less than 0 or not using TLSv1.3. \return SIDE_ERROR if called with a server. + \return BAD_STATE_E if invoked without a valid session or without a valid + PSK cb \return WOLFSSL_FATAL_ERROR if the connection is not made. \return the amount of early data written in bytes if successful. 
diff --git a/doc/dox_comments/header_files/types.h b/doc/dox_comments/header_files/types.h index 6f1ecee26..e55748d6b 100644 --- a/doc/dox_comments/header_files/types.h +++ b/doc/dox_comments/header_files/types.h @@ -170,3 +170,301 @@ void XFREE(void *p, void* heap, int type); \sa CheckRunTimeFastMath */ word32 CheckRunTimeSettings(void); + +/*! + \ingroup String + \brief Thread-safe string tokenization. + + \return Pointer to next token or NULL + + \param str String to tokenize (NULL for continuation) + \param delim Delimiter characters + \param nextp Pointer to save position + + _Example_ + \code + char str[] = "one,two,three"; + char* saveptr; + char* token = wc_strtok(str, ",", &saveptr); + \endcode + + \sa wc_strsep +*/ +char* wc_strtok(char *str, const char *delim, char **nextp); + +/*! + \ingroup String + \brief Separates string by delimiter. + + \return Pointer to token or NULL + + \param stringp Pointer to string pointer + \param delim Delimiter characters + + _Example_ + \code + char str[] = "one,two,three"; + char* ptr = str; + char* token = wc_strsep(&ptr, ","); + \endcode + + \sa wc_strtok +*/ +char* wc_strsep(char **stringp, const char *delim); + +/*! + \ingroup String + \brief Safely copies string with size limit. + + \return Length of source string + + \param dst Destination buffer + \param src Source string + \param dstSize Destination buffer size + + _Example_ + \code + char dst[10]; + size_t len = wc_strlcpy(dst, "hello", sizeof(dst)); + \endcode + + \sa wc_strlcat +*/ +size_t wc_strlcpy(char *dst, const char *src, size_t dstSize); + +/*! + \ingroup String + \brief Safely concatenates strings with size limit. 
+ + \return Total length attempted + + \param dst Destination buffer + \param src Source string + \param dstSize Destination buffer size + + _Example_ + \code + char dst[20] = "hello"; + size_t len = wc_strlcat(dst, " world", sizeof(dst)); + \endcode + + \sa wc_strlcpy +*/ +size_t wc_strlcat(char *dst, const char *src, size_t dstSize); + +/*! + \ingroup String + \brief Case-insensitive string comparison. + + \return 0 if equal, non-zero otherwise + + \param s1 First string + \param s2 Second string + + _Example_ + \code + if (wc_strcasecmp("Hello", "hello") == 0) { + // strings are equal + } + \endcode + + \sa wc_strncasecmp +*/ +int wc_strcasecmp(const char *s1, const char *s2); + +/*! + \ingroup String + \brief Case-insensitive string comparison with length limit. + + \return 0 if equal, non-zero otherwise + + \param s1 First string + \param s2 Second string + \param n Maximum characters to compare + + _Example_ + \code + if (wc_strncasecmp("Hello", "hello", 5) == 0) { + // strings are equal + } + \endcode + + \sa wc_strcasecmp +*/ +int wc_strncasecmp(const char *s1, const char *s2, size_t n); + +/*! + \ingroup Threading + \brief Creates a new thread. + + \return 0 on success + \return negative on error + + \param thread Thread handle pointer + \param cb Thread callback function + \param arg Argument to pass to callback + + _Example_ + \code + THREAD_TYPE thread; + int ret = wolfSSL_NewThread(&thread, myCallback, NULL); + \endcode + + \sa wolfSSL_JoinThread +*/ +int wolfSSL_NewThread(THREAD_TYPE* thread, THREAD_CB cb, void* arg); + +/*! + \ingroup Threading + \brief Creates a detached thread. + + \return 0 on success + \return negative on error + + \param cb Thread callback function + \param arg Argument to pass to callback + + _Example_ + \code + int ret = wolfSSL_NewThreadNoJoin(myCallback, NULL); + \endcode + + \sa wolfSSL_NewThread +*/ +int wolfSSL_NewThreadNoJoin(THREAD_CB_NOJOIN cb, void* arg); + +/*! 
+ \ingroup Threading + \brief Waits for thread to complete. + + \return 0 on success + \return negative on error + + \param thread Thread handle + + _Example_ + \code + THREAD_TYPE thread; + wolfSSL_NewThread(&thread, myCallback, NULL); + int ret = wolfSSL_JoinThread(thread); + \endcode + + \sa wolfSSL_NewThread +*/ +int wolfSSL_JoinThread(THREAD_TYPE thread); + +/*! + \ingroup Threading + \brief Initializes condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + int ret = wolfSSL_CondInit(&cond); + \endcode + + \sa wolfSSL_CondFree +*/ +int wolfSSL_CondInit(COND_TYPE* cond); + +/*! + \ingroup Threading + \brief Frees condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + wolfSSL_CondInit(&cond); + int ret = wolfSSL_CondFree(&cond); + \endcode + + \sa wolfSSL_CondInit +*/ +int wolfSSL_CondFree(COND_TYPE* cond); + +/*! + \ingroup Threading + \brief Signals condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + int ret = wolfSSL_CondSignal(&cond); + \endcode + + \sa wolfSSL_CondWait +*/ +int wolfSSL_CondSignal(COND_TYPE* cond); + +/*! + \ingroup Threading + \brief Waits on condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + int ret = wolfSSL_CondWait(&cond); + \endcode + + \sa wolfSSL_CondSignal +*/ +int wolfSSL_CondWait(COND_TYPE* cond); + +/*! + \ingroup Threading + \brief Starts condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + int ret = wolfSSL_CondStart(&cond); + \endcode + + \sa wolfSSL_CondEnd +*/ +int wolfSSL_CondStart(COND_TYPE* cond); + +/*! 
+ \ingroup Threading + \brief Ends condition variable. + + \return 0 on success + \return negative on error + + \param cond Condition variable pointer + + _Example_ + \code + COND_TYPE cond; + wolfSSL_CondStart(&cond); + int ret = wolfSSL_CondEnd(&cond); + \endcode + + \sa wolfSSL_CondStart +*/ +int wolfSSL_CondEnd(COND_TYPE* cond); diff --git a/doc/dox_comments/header_files/wc_encrypt.h b/doc/dox_comments/header_files/wc_encrypt.h index 54859e7ff..4f7e90117 100644 --- a/doc/dox_comments/header_files/wc_encrypt.h +++ b/doc/dox_comments/header_files/wc_encrypt.h @@ -210,3 +210,124 @@ int wc_Des3_CbcEncryptWithKey(byte* out, int wc_Des3_CbcDecryptWithKey(byte* out, const byte* in, word32 sz, const byte* key, const byte* iv); + +/*! + \ingroup AES + \brief This function encrypts a plaintext message and stores the + result in the output buffer. It uses AES encryption with cipher + block chaining (CBC) mode. This function does not require an AES + structure to be initialized. Instead, it takes in a key and an iv + and uses these to encrypt the message. 
+ + \return 0 On successfully encrypting the message + \return BAD_ALIGN_E Returned on block align error + \return BAD_FUNC_ARG Returned if key length is invalid + \return MEMORY_E Returned if WOLFSSL_SMALL_STACK is enabled and + XMALLOC fails to instantiate an AES object + + \param out pointer to the output buffer in which to store the + ciphertext of the encrypted message + \param in pointer to the input buffer containing plaintext to + encrypt + \param inSz size of input message + \param key 16, 24, or 32 byte secret key for encryption + \param keySz size of key used for encryption + \param iv pointer to the 16 byte initialization vector to use + + _Example_ + \code + byte key[16]; // 16, 24, or 32 byte key + byte iv[16]; // 16 byte iv + byte plain[32]; // plaintext to encrypt + byte cipher[sizeof(plain)]; + + int ret = wc_AesCbcEncryptWithKey(cipher, plain, sizeof(plain), + key, sizeof(key), iv); + if (ret != 0) { + // encryption error + } + \endcode + + \sa wc_AesCbcDecryptWithKey + \sa wc_AesSetKey + \sa wc_AesCbcEncrypt +*/ +int wc_AesCbcEncryptWithKey(byte* out, const byte* in, word32 inSz, + const byte* key, word32 keySz, + const byte* iv); + +/*! + \ingroup Crypto + \brief This function decrypts an encrypted key buffer using the + provided password. It supports various encryption algorithms + including DES, 3DES, and AES. The encryption information is + provided in the EncryptedInfo structure.
+ + \return Length of decrypted key on success + \return Negative value on error + + \param info pointer to EncryptedInfo structure containing encryption + algorithm and parameters + \param der pointer to the encrypted key buffer + \param derSz size of the encrypted key buffer + \param password pointer to the password buffer + \param passwordSz size of the password + \param hashType hash algorithm to use for key derivation + + _Example_ + \code + EncryptedInfo info; + byte encryptedKey[128]; // encrypted key data + byte password[] = "mypassword"; + + int ret = wc_BufferKeyDecrypt(&info, encryptedKey, + sizeof(encryptedKey), password, + sizeof(password)-1, WC_SHA256); + if (ret < 0) { + // decryption error + } + \endcode + + \sa wc_BufferKeyEncrypt +*/ +int wc_BufferKeyDecrypt(struct EncryptedInfo* info, byte* der, + word32 derSz, const byte* password, + int passwordSz, int hashType); + +/*! + \ingroup Crypto + \brief This function encrypts a key buffer using the provided + password. It supports various encryption algorithms including DES, + 3DES, and AES. The encryption information is provided in the + EncryptedInfo structure.
+ + \return Length of encrypted key on success + \return Negative value on error + + \param info pointer to EncryptedInfo structure containing encryption + algorithm and parameters + \param der pointer to the key buffer to encrypt + \param derSz size of the key buffer + \param password pointer to the password buffer + \param passwordSz size of the password + \param hashType hash algorithm to use for key derivation + + _Example_ + \code + EncryptedInfo info; + byte key[128]; // key data to encrypt + byte password[] = "mypassword"; + + info.algo = AES256CBCb; + int ret = wc_BufferKeyEncrypt(&info, key, sizeof(key), password, + sizeof(password)-1, WC_SHA256); + if (ret < 0) { + // encryption error + } + \endcode + + \sa wc_BufferKeyDecrypt +*/ +int wc_BufferKeyEncrypt(struct EncryptedInfo* info, byte* der, + word32 derSz, const byte* password, + int passwordSz, int hashType); diff --git a/doc/dox_comments/header_files/wc_port.h b/doc/dox_comments/header_files/wc_port.h index 9a517ff64..74db950f9 100644 --- a/doc/dox_comments/header_files/wc_port.h +++ b/doc/dox_comments/header_files/wc_port.h @@ -41,3 +41,696 @@ int wolfCrypt_Init(void); \sa wolfCrypt_Init */ int wolfCrypt_Cleanup(void); + +/*! + \ingroup Atomic + \brief Initializes atomic integer. + + \return none No returns + + \param c Atomic integer pointer + \param i Initial value + + _Example_ + \code + wolfSSL_Atomic_Int counter; + wolfSSL_Atomic_Int_Init(&counter, 0); + \endcode + + \sa wolfSSL_Atomic_Int_FetchAdd +*/ +void wolfSSL_Atomic_Int_Init(wolfSSL_Atomic_Int* c, int i); + +/*! + \ingroup Atomic + \brief Initializes atomic unsigned integer. + + \return none No returns + + \param c Atomic unsigned integer pointer + \param i Initial value + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + wolfSSL_Atomic_Uint_Init(&counter, 0); + \endcode + + \sa wolfSSL_Atomic_Uint_FetchAdd +*/ +void wolfSSL_Atomic_Uint_Init(wolfSSL_Atomic_Uint* c, unsigned int i); + +/*!
+ \ingroup Atomic + \brief Atomically adds to integer and returns old value. + + \return Old value before addition + + \param c Atomic integer pointer + \param i Value to add + + _Example_ + \code + wolfSSL_Atomic_Int counter; + int old = wolfSSL_Atomic_Int_FetchAdd(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Int_AddFetch +*/ +int wolfSSL_Atomic_Int_FetchAdd(wolfSSL_Atomic_Int* c, int i); + +/*! + \ingroup Atomic + \brief Atomically subtracts from integer and returns old value. + + \return Old value before subtraction + + \param c Atomic integer pointer + \param i Value to subtract + + _Example_ + \code + wolfSSL_Atomic_Int counter; + int old = wolfSSL_Atomic_Int_FetchSub(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Int_SubFetch +*/ +int wolfSSL_Atomic_Int_FetchSub(wolfSSL_Atomic_Int* c, int i); + +/*! + \ingroup Atomic + \brief Atomically adds to integer and returns new value. + + \return New value after addition + + \param c Atomic integer pointer + \param i Value to add + + _Example_ + \code + wolfSSL_Atomic_Int counter; + int new_val = wolfSSL_Atomic_Int_AddFetch(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Int_FetchAdd +*/ +int wolfSSL_Atomic_Int_AddFetch(wolfSSL_Atomic_Int* c, int i); + +/*! + \ingroup Atomic + \brief Atomically subtracts from integer and returns new value. + + \return New value after subtraction + + \param c Atomic integer pointer + \param i Value to subtract + + _Example_ + \code + wolfSSL_Atomic_Int counter; + int new_val = wolfSSL_Atomic_Int_SubFetch(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Int_FetchSub +*/ +int wolfSSL_Atomic_Int_SubFetch(wolfSSL_Atomic_Int* c, int i); + +/*! + \ingroup Atomic + \brief Atomically compares and exchanges integer. 
+ + \return 1 if exchange occurred, 0 otherwise + + \param c Atomic integer pointer + \param expected_i Pointer to expected value + \param new_i New value to set + + _Example_ + \code + wolfSSL_Atomic_Int counter; + int expected = 0; + int ret = wolfSSL_Atomic_Int_CompareExchange(&counter, &expected, 1); + \endcode + + \sa wolfSSL_Atomic_Int_FetchAdd +*/ +int wolfSSL_Atomic_Int_CompareExchange(wolfSSL_Atomic_Int* c, + int *expected_i, int new_i); + +/*! + \ingroup Atomic + \brief Atomically adds to unsigned integer and returns old value. + + \return Old value before addition + + \param c Atomic unsigned integer pointer + \param i Value to add + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + unsigned int old = wolfSSL_Atomic_Uint_FetchAdd(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Uint_AddFetch +*/ +unsigned int wolfSSL_Atomic_Uint_FetchAdd(wolfSSL_Atomic_Uint* c, + unsigned int i); + +/*! + \ingroup Atomic + \brief Atomically subtracts from unsigned integer, returns old value. + + \return Old value before subtraction + + \param c Atomic unsigned integer pointer + \param i Value to subtract + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + unsigned int old = wolfSSL_Atomic_Uint_FetchSub(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Uint_SubFetch +*/ +unsigned int wolfSSL_Atomic_Uint_FetchSub(wolfSSL_Atomic_Uint* c, + unsigned int i); + +/*! + \ingroup Atomic + \brief Atomically adds to unsigned integer, returns new value. + + \return New value after addition + + \param c Atomic unsigned integer pointer + \param i Value to add + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + unsigned int new_val = wolfSSL_Atomic_Uint_AddFetch(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Uint_FetchAdd +*/ +unsigned int wolfSSL_Atomic_Uint_AddFetch(wolfSSL_Atomic_Uint* c, + unsigned int i); + +/*! + \ingroup Atomic + \brief Atomically subtracts from unsigned integer, returns new value. 
+ + \return New value after subtraction + + \param c Atomic unsigned integer pointer + \param i Value to subtract + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + unsigned int new_val = wolfSSL_Atomic_Uint_SubFetch(&counter, 1); + \endcode + + \sa wolfSSL_Atomic_Uint_FetchSub +*/ +unsigned int wolfSSL_Atomic_Uint_SubFetch(wolfSSL_Atomic_Uint* c, + unsigned int i); + +/*! + \ingroup Atomic + \brief Atomically compares and exchanges unsigned integer. + + \return 1 if exchange occurred, 0 otherwise + + \param c Atomic unsigned integer pointer + \param expected_i Pointer to expected value + \param new_i New value to set + + _Example_ + \code + wolfSSL_Atomic_Uint counter; + unsigned int expected = 0; + int ret = wolfSSL_Atomic_Uint_CompareExchange(&counter, &expected, 1); + \endcode + + \sa wolfSSL_Atomic_Uint_FetchAdd +*/ +int wolfSSL_Atomic_Uint_CompareExchange(wolfSSL_Atomic_Uint* c, + unsigned int *expected_i, + unsigned int new_i); + +/*! + \ingroup Atomic + \brief Atomically compares and exchanges pointer. + + \return 1 if exchange occurred, 0 otherwise + + \param c Pointer to pointer + \param expected_ptr Pointer to expected pointer value + \param new_ptr New pointer value + + _Example_ + \code + void* ptr = NULL; + void* expected = NULL; + void* new_val = malloc(100); + int ret = wolfSSL_Atomic_Ptr_CompareExchange(&ptr, &expected, new_val); + \endcode + + \sa wolfSSL_Atomic_Int_CompareExchange +*/ +int wolfSSL_Atomic_Ptr_CompareExchange(void** c, void **expected_ptr, + void *new_ptr); + +/*! + \ingroup Mutex + \brief Initializes mutex. + + \return 0 on success + \return negative on error + + \param m Mutex pointer + + _Example_ + \code + wolfSSL_Mutex mutex; + int ret = wc_InitMutex(&mutex); + \endcode + + \sa wc_FreeMutex +*/ +int wc_InitMutex(wolfSSL_Mutex* m); + +/*! + \ingroup Mutex + \brief Frees mutex resources. 
+ + \return 0 on success + \return negative on error + + \param m Mutex pointer + + _Example_ + \code + wolfSSL_Mutex mutex; + wc_InitMutex(&mutex); + int ret = wc_FreeMutex(&mutex); + \endcode + + \sa wc_InitMutex +*/ +int wc_FreeMutex(wolfSSL_Mutex* m); + +/*! + \ingroup Mutex + \brief Locks mutex. + + \return 0 on success + \return negative on error + + \param m Mutex pointer + + _Example_ + \code + wolfSSL_Mutex mutex; + int ret = wc_LockMutex(&mutex); + \endcode + + \sa wc_UnLockMutex +*/ +int wc_LockMutex(wolfSSL_Mutex* m); + +/*! + \ingroup Mutex + \brief Unlocks mutex. + + \return 0 on success + \return negative on error + + \param m Mutex pointer + + _Example_ + \code + wolfSSL_Mutex mutex; + wc_LockMutex(&mutex); + int ret = wc_UnLockMutex(&mutex); + \endcode + + \sa wc_LockMutex +*/ +int wc_UnLockMutex(wolfSSL_Mutex* m); + +/*! + \ingroup Mutex + \brief Initializes and allocates mutex. + + \return Pointer to mutex on success + \return NULL on error + + \param none No parameters + + _Example_ + \code + wolfSSL_Mutex* mutex = wc_InitAndAllocMutex(); + if (mutex != NULL) { + wc_LockMutex(mutex); + } + \endcode + + \sa wc_InitMutex +*/ +wolfSSL_Mutex* wc_InitAndAllocMutex(void); + +/*! + \ingroup RwLock + \brief Initializes read-write lock. + + \return 0 on success + \return negative on error + + \param m Read-write lock pointer + + _Example_ + \code + wolfSSL_RwLock lock; + int ret = wc_InitRwLock(&lock); + \endcode + + \sa wc_FreeRwLock +*/ +int wc_InitRwLock(wolfSSL_RwLock* m); + +/*! + \ingroup RwLock + \brief Frees read-write lock resources. + + \return 0 on success + \return negative on error + + \param m Read-write lock pointer + + _Example_ + \code + wolfSSL_RwLock lock; + wc_InitRwLock(&lock); + int ret = wc_FreeRwLock(&lock); + \endcode + + \sa wc_InitRwLock +*/ +int wc_FreeRwLock(wolfSSL_RwLock* m); + +/*! + \ingroup RwLock + \brief Locks read-write lock for writing. 
+ + \return 0 on success + \return negative on error + + \param m Read-write lock pointer + + _Example_ + \code + wolfSSL_RwLock lock; + int ret = wc_LockRwLock_Wr(&lock); + \endcode + + \sa wc_UnLockRwLock +*/ +int wc_LockRwLock_Wr(wolfSSL_RwLock* m); + +/*! + \ingroup RwLock + \brief Locks read-write lock for reading. + + \return 0 on success + \return negative on error + + \param m Read-write lock pointer + + _Example_ + \code + wolfSSL_RwLock lock; + int ret = wc_LockRwLock_Rd(&lock); + \endcode + + \sa wc_UnLockRwLock +*/ +int wc_LockRwLock_Rd(wolfSSL_RwLock* m); + +/*! + \ingroup RwLock + \brief Unlocks read-write lock. + + \return 0 on success + \return negative on error + + \param m Read-write lock pointer + + _Example_ + \code + wolfSSL_RwLock lock; + wc_LockRwLock_Rd(&lock); + int ret = wc_UnLockRwLock(&lock); + \endcode + + \sa wc_LockRwLock_Rd +*/ +int wc_UnLockRwLock(wolfSSL_RwLock* m); + +/*! + \ingroup Mutex + \brief Locks mutex with debug info. + + \return 0 on success + \return negative on error + + \param flag Lock flag + \param type Lock type + \param file Source file name + \param line Source line number + + _Example_ + \code + int ret = wc_LockMutex_ex(0, 0, __FILE__, __LINE__); + \endcode + + \sa wc_LockMutex +*/ +int wc_LockMutex_ex(int flag, int type, const char* file, int line); + +/*! + \ingroup Mutex + \brief Sets mutex callback. + + \return 0 on success + \return negative on error + + \param cb Mutex callback pointer + + _Example_ + \code + mutex_cb cb; + int ret = wc_SetMutexCb(&cb); + \endcode + + \sa wc_GetMutexCb +*/ +int wc_SetMutexCb(mutex_cb* cb); + +/*! + \ingroup Mutex + \brief Gets mutex callback. + + \return Pointer to mutex callback + + \param none No parameters + + _Example_ + \code + mutex_cb* cb = wc_GetMutexCb(); + \endcode + + \sa wc_SetMutexCb +*/ +mutex_cb* wc_GetMutexCb(void); + +/*! + \ingroup Memory + \brief Checkpoints peak heap allocations. 
+ + \return Peak allocation count + + \param none No parameters + + _Example_ + \code + long peak = wolfCrypt_heap_peakAllocs_checkpoint(); + \endcode + + \sa wolfCrypt_heap_peakBytes_checkpoint +*/ +long wolfCrypt_heap_peakAllocs_checkpoint(void); + +/*! + \ingroup Memory + \brief Checkpoints peak heap bytes. + + \return Peak bytes allocated + + \param none No parameters + + _Example_ + \code + long peak = wolfCrypt_heap_peakBytes_checkpoint(); + \endcode + + \sa wolfCrypt_heap_peakAllocs_checkpoint +*/ +long wolfCrypt_heap_peakBytes_checkpoint(void); + +/*! + \ingroup File + \brief Loads file into buffer. + + \return 0 on success + \return negative on error + + \param fname File name + \param buf Buffer pointer + \param bufLen Buffer length pointer + \param heap Heap hint + + _Example_ + \code + unsigned char* buf = NULL; + size_t len = 0; + int ret = wc_FileLoad("file.txt", &buf, &len, NULL); + \endcode + + \sa wc_FileExists +*/ +int wc_FileLoad(const char* fname, unsigned char** buf, size_t* bufLen, + void* heap); + +/*! + \ingroup File + \brief Reads first entry in directory. + + \return 0 on success + \return negative on error + + \param ctx Directory context + \param path Directory path + \param name Pointer to store entry name + + _Example_ + \code + ReadDirCtx ctx; + char* name; + int ret = wc_ReadDirFirst(&ctx, "/path", &name); + \endcode + + \sa wc_ReadDirNext +*/ +int wc_ReadDirFirst(ReadDirCtx* ctx, const char* path, char** name); + +/*! + \ingroup File + \brief Reads next entry in directory. + + \return 0 on success + \return negative on error + + \param ctx Directory context + \param path Directory path + \param name Pointer to store entry name + + _Example_ + \code + ReadDirCtx ctx; + char* name; + int ret = wc_ReadDirNext(&ctx, "/path", &name); + \endcode + + \sa wc_ReadDirFirst +*/ +int wc_ReadDirNext(ReadDirCtx* ctx, const char* path, char** name); + +/*! + \ingroup File + \brief Closes directory reading. 
+ + \return none No returns + + \param ctx Directory context + + _Example_ + \code + ReadDirCtx ctx; + wc_ReadDirClose(&ctx); + \endcode + + \sa wc_ReadDirFirst +*/ +void wc_ReadDirClose(ReadDirCtx* ctx); + +/*! + \ingroup File + \brief Checks if file exists. + + \return 1 if file exists + \return 0 if file does not exist + + \param fname File name + + _Example_ + \code + if (wc_FileExists("file.txt")) { + // file exists + } + \endcode + + \sa wc_FileLoad +*/ +int wc_FileExists(const char* fname); + +/*! + \ingroup Callback + \brief Checks if handle callback is set. + + \return 1 if set + \return 0 if not set + + \param none No parameters + + _Example_ + \code + if (wolfSSL_GetHandleCbSet()) { + // callback is set + } + \endcode + + \sa wolfSSL_SetHandleCb +*/ +int wolfSSL_GetHandleCbSet(void); + +/*! + \ingroup Callback + \brief Sets handle callback. + + \return 0 on success + \return negative on error + + \param in Handle callback + + _Example_ + \code + int ret = wolfSSL_SetHandleCb(myHandleCallback); + \endcode + + \sa wolfSSL_GetHandleCbSet +*/ +int wolfSSL_SetHandleCb(wolfSSL_DSP_Handle_cb in); diff --git a/doc/dox_comments/header_files/wolfio.h b/doc/dox_comments/header_files/wolfio.h index 2197dbcc0..2ec951563 100644 --- a/doc/dox_comments/header_files/wolfio.h +++ b/doc/dox_comments/header_files/wolfio.h @@ -666,3 +666,801 @@ WOLFSSL_API void wolfSSL_SetRecvFrom(WOLFSSL* ssl, WolfSSLRecvFrom recvFrom); \sa wolfSSL_SSLSetIOSend */ WOLFSSL_API void wolfSSL_SetSendTo(WOLFSSL* ssl, WolfSSLSento sendTo); + +/*! + \ingroup IO + \brief Waits for socket to be ready for I/O with timeout. + + \return 0 on success + \return negative on error + + \param sockfd Socket file descriptor + \param to_sec Timeout in seconds + + _Example_ + \code + SOCKET_T sockfd; + int ret = wolfIO_Select(sockfd, 5); + \endcode + + \sa wolfIO_TcpConnect +*/ +int wolfIO_Select(SOCKET_T sockfd, int to_sec); + +/*! + \ingroup IO + \brief Connects to TCP server with timeout. 
+ + \return 0 on success + \return negative on error + + \param sockfd Pointer to socket file descriptor + \param ip IP address string + \param port Port number + \param to_sec Timeout in seconds + + _Example_ + \code + SOCKET_T sockfd; + int ret = wolfIO_TcpConnect(&sockfd, "127.0.0.1", 443, 5); + \endcode + + \sa wolfIO_TcpBind +*/ +int wolfIO_TcpConnect(SOCKET_T* sockfd, const char* ip, + unsigned short port, int to_sec); + +/*! + \ingroup IO + \brief Accepts TCP connection. + + \return Socket descriptor on success + \return negative on error + + \param sockfd Socket file descriptor + \param peer_addr Peer address structure + \param peer_len Peer address length + + _Example_ + \code + SOCKET_T sockfd; + SOCKADDR peer; + XSOCKLENT len = sizeof(peer); + int ret = wolfIO_TcpAccept(sockfd, &peer, &len); + \endcode + + \sa wolfIO_TcpBind +*/ +int wolfIO_TcpAccept(SOCKET_T sockfd, SOCKADDR* peer_addr, + XSOCKLENT* peer_len); + +/*! + \ingroup IO + \brief Binds TCP socket to port. + + \return 0 on success + \return negative on error + + \param sockfd Pointer to socket file descriptor + \param port Port number + + _Example_ + \code + SOCKET_T sockfd; + int ret = wolfIO_TcpBind(&sockfd, 443); + \endcode + + \sa wolfIO_TcpAccept +*/ +int wolfIO_TcpBind(SOCKET_T* sockfd, word16 port); + +/*! + \ingroup IO + \brief Sends data on socket. + + \return Number of bytes sent on success + \return negative on error + + \param sd Socket descriptor + \param buf Buffer to send + \param sz Buffer size + \param wrFlags Write flags + + _Example_ + \code + SOCKET_T sd; + char buf[100]; + int ret = wolfIO_Send(sd, buf, sizeof(buf), 0); + \endcode + + \sa wolfIO_Recv +*/ +int wolfIO_Send(SOCKET_T sd, char *buf, int sz, int wrFlags); + +/*! + \ingroup IO + \brief Receives data from socket. 
+ + \return Number of bytes received on success + \return negative on error + + \param sd Socket descriptor + \param buf Buffer to receive into + \param sz Buffer size + \param rdFlags Read flags + + _Example_ + \code + SOCKET_T sd; + char buf[100]; + int ret = wolfIO_Recv(sd, buf, sizeof(buf), 0); + \endcode + + \sa wolfIO_Send +*/ +int wolfIO_Recv(SOCKET_T sd, char *buf, int sz, int rdFlags); + +/*! + \ingroup IO + \brief Sends datagram to address. + + \return Number of bytes sent on success + \return negative on error + + \param sd Socket descriptor + \param addr Destination address + \param buf Buffer to send + \param sz Buffer size + \param wrFlags Write flags + + _Example_ + \code + SOCKET_T sd; + WOLFSSL_BIO_ADDR addr; + char buf[100]; + int ret = wolfIO_SendTo(sd, &addr, buf, sizeof(buf), 0); + \endcode + + \sa wolfIO_RecvFrom +*/ +int wolfIO_SendTo(SOCKET_T sd, WOLFSSL_BIO_ADDR *addr, char *buf, int sz, + int wrFlags); + +/*! + \ingroup IO + \brief Receives datagram from address. + + \return Number of bytes received on success + \return negative on error + + \param sd Socket descriptor + \param addr Source address + \param buf Buffer to receive into + \param sz Buffer size + \param rdFlags Read flags + + _Example_ + \code + SOCKET_T sd; + WOLFSSL_BIO_ADDR addr; + char buf[100]; + int ret = wolfIO_RecvFrom(sd, &addr, buf, sizeof(buf), 0); + \endcode + + \sa wolfIO_SendTo +*/ +int wolfIO_RecvFrom(SOCKET_T sd, WOLFSSL_BIO_ADDR *addr, char *buf, int sz, + int rdFlags); + +/*! + \ingroup IO + \brief BIO send callback. + + \return Number of bytes sent on success + \return negative on error + + \param ssl SSL object + \param buf Buffer to send + \param sz Buffer size + \param ctx Context pointer + + _Example_ + \code + WOLFSSL* ssl; + char buf[100]; + int ret = wolfSSL_BioSend(ssl, buf, sizeof(buf), NULL); + \endcode + + \sa wolfSSL_BioReceive +*/ +int wolfSSL_BioSend(WOLFSSL* ssl, char *buf, int sz, void *ctx); + +/*! + \ingroup IO + \brief BIO receive callback. 
+ + \return Number of bytes received on success + \return negative on error + + \param ssl SSL object + \param buf Buffer to receive into + \param sz Buffer size + \param ctx Context pointer + + _Example_ + \code + WOLFSSL* ssl; + char buf[100]; + int ret = wolfSSL_BioReceive(ssl, buf, sizeof(buf), NULL); + \endcode + + \sa wolfSSL_BioSend +*/ +int wolfSSL_BioReceive(WOLFSSL* ssl, char* buf, int sz, void* ctx); + +/*! + \ingroup IO + \brief Receives multicast datagram. + + \return Number of bytes received on success + \return negative on error + + \param ssl SSL object + \param buf Buffer to receive into + \param sz Buffer size + \param ctx Context pointer + + _Example_ + \code + WOLFSSL* ssl; + char buf[100]; + int ret = EmbedReceiveFromMcast(ssl, buf, sizeof(buf), NULL); + \endcode + + \sa EmbedReceiveFrom +*/ +int EmbedReceiveFromMcast(WOLFSSL *ssl, char *buf, int sz, void *ctx); + +/*! + \ingroup IO + \brief Builds HTTP OCSP request. + + \return Request size on success + \return negative on error + + \param domainName Domain name + \param path URL path + \param ocspReqSz OCSP request size + \param buf Output buffer + \param bufSize Buffer size + + _Example_ + \code + char buf[1024]; + int ret = wolfIO_HttpBuildRequestOcsp("example.com", "/ocsp", 100, + (unsigned char*)buf, sizeof(buf)); + \endcode + + \sa wolfIO_HttpProcessResponseOcsp +*/ +int wolfIO_HttpBuildRequestOcsp(const char* domainName, const char* path, + int ocspReqSz, unsigned char* buf, + int bufSize); + +/*! + \ingroup IO + \brief Processes HTTP OCSP response with generic I/O. 
+
+ \return 0 on success
+ \return negative on error
+
+ \param ioCb I/O callback
+ \param ioCbCtx I/O callback context
+ \param respBuf Response buffer pointer
+ \param httpBuf HTTP buffer
+ \param httpBufSz HTTP buffer size
+ \param heap Heap hint
+
+ _Example_
+ \code
+ unsigned char* resp = NULL;
+ unsigned char httpBuf[1024];
+ int ret = wolfIO_HttpProcessResponseOcspGenericIO(myIoCb, ctx, &resp,
+ httpBuf,
+ sizeof(httpBuf), NULL);
+ \endcode
+
+ \sa wolfIO_HttpProcessResponseOcsp
+*/
+int wolfIO_HttpProcessResponseOcspGenericIO(WolfSSLGenericIORecvCb ioCb,
+ void* ioCbCtx,
+ unsigned char** respBuf,
+ unsigned char* httpBuf,
+ int httpBufSz, void* heap);
+
+/*!
+ \ingroup IO
+ \brief Processes HTTP OCSP response.
+
+ \return 0 on success
+ \return negative on error
+
+ \param sfd Socket file descriptor
+ \param respBuf Response buffer pointer
+ \param httpBuf HTTP buffer
+ \param httpBufSz HTTP buffer size
+ \param heap Heap hint
+
+ _Example_
+ \code
+ int sfd;
+ unsigned char* resp = NULL;
+ unsigned char httpBuf[1024];
+ int ret = wolfIO_HttpProcessResponseOcsp(sfd, &resp, httpBuf,
+ sizeof(httpBuf), NULL);
+ \endcode
+
+ \sa wolfIO_HttpBuildRequestOcsp
+*/
+int wolfIO_HttpProcessResponseOcsp(int sfd, unsigned char** respBuf,
+ unsigned char* httpBuf, int httpBufSz,
+ void* heap);
+
+/*!
+ \ingroup IO
+ \brief OCSP lookup callback.
+
+ \return 0 on success
+ \return negative on error
+
+ \param ctx Context pointer
+ \param url URL string
+ \param urlSz URL size
+ \param ocspReqBuf OCSP request buffer
+ \param ocspReqSz OCSP request size
+ \param ocspRespBuf OCSP response buffer pointer
+
+ _Example_
+ \code
+ byte* resp = NULL;
+ byte req[100];
+ int ret = EmbedOcspLookup(NULL, "http://example.com/ocsp", 23, req,
+ sizeof(req), &resp);
+ \endcode
+
+ \sa EmbedOcspRespFree
+*/
+int EmbedOcspLookup(void* ctx, const char* url, int urlSz,
+ byte* ocspReqBuf, int ocspReqSz, byte** ocspRespBuf);
+
+/*!
+ \ingroup IO
+ \brief Builds HTTP CRL request. 
+ + \return Request size on success + \return negative on error + + \param url URL string + \param urlSz URL size + \param domainName Domain name + \param buf Output buffer + \param bufSize Buffer size + + _Example_ + \code + char buf[1024]; + int ret = wolfIO_HttpBuildRequestCrl("http://example.com/crl", 22, + "example.com", + (unsigned char*)buf, sizeof(buf)); + \endcode + + \sa wolfIO_HttpProcessResponseCrl +*/ +int wolfIO_HttpBuildRequestCrl(const char* url, int urlSz, + const char* domainName, unsigned char* buf, + int bufSize); + +/*! + \ingroup IO + \brief Processes HTTP CRL response. + + \return 0 on success + \return negative on error + + \param crl CRL object + \param sfd Socket file descriptor + \param httpBuf HTTP buffer + \param httpBufSz HTTP buffer size + + _Example_ + \code + WOLFSSL_CRL crl; + int sfd; + unsigned char httpBuf[1024]; + int ret = wolfIO_HttpProcessResponseCrl(&crl, sfd, httpBuf, + sizeof(httpBuf)); + \endcode + + \sa wolfIO_HttpBuildRequestCrl +*/ +int wolfIO_HttpProcessResponseCrl(WOLFSSL_CRL* crl, int sfd, + unsigned char* httpBuf, int httpBufSz); + +/*! + \ingroup IO + \brief CRL lookup callback. + + \return 0 on success + \return negative on error + + \param crl CRL object + \param url URL string + \param urlSz URL size + + _Example_ + \code + WOLFSSL_CRL crl; + int ret = EmbedCrlLookup(&crl, "http://example.com/crl", 22); + \endcode + + \sa wolfIO_HttpBuildRequestCrl +*/ +int EmbedCrlLookup(WOLFSSL_CRL* crl, const char* url, int urlSz); + +/*! + \ingroup IO + \brief Decodes URL into components. 
+
+ \return 0 on success
+ \return negative on error
+
+ \param url URL string
+ \param urlSz URL size
+ \param outName Output domain name
+ \param outPath Output path
+ \param outPort Output port
+
+ _Example_
+ \code
+ char name[256], path[256];
+ unsigned short port;
+ int ret = wolfIO_DecodeUrl("http://example.com:443/path", 27, name,
+ path, &port);
+ \endcode
+
+ \sa wolfIO_HttpBuildRequest
+*/
+int wolfIO_DecodeUrl(const char* url, int urlSz, char* outName,
+ char* outPath, unsigned short* outPort);
+
+/*!
+ \ingroup IO
+ \brief Builds generic HTTP request.
+
+ \return Request size on success
+ \return negative on error
+
+ \param reqType Request type (GET, POST, etc.)
+ \param domainName Domain name
+ \param path URL path
+ \param pathLen Path length
+ \param reqSz Request body size
+ \param contentType Content type
+ \param buf Output buffer
+ \param bufSize Buffer size
+
+ _Example_
+ \code
+ char buf[1024];
+ int ret = wolfIO_HttpBuildRequest("POST", "example.com", "/api", 4,
+ 100, "application/json",
+ (unsigned char*)buf, sizeof(buf));
+ \endcode
+
+ \sa wolfIO_HttpProcessResponse
+*/
+int wolfIO_HttpBuildRequest(const char* reqType, const char* domainName,
+ const char* path, int pathLen, int reqSz,
+ const char* contentType, unsigned char* buf,
+ int bufSize);
+
+/*!
+ \ingroup IO
+ \brief Processes HTTP response with generic I/O. 
+ + \return 0 on success + \return negative on error + + \param ioCb I/O callback + \param ioCbCtx I/O callback context + \param appStrList Application string list + \param respBuf Response buffer pointer + \param httpBuf HTTP buffer + \param httpBufSz HTTP buffer size + \param dynType Dynamic type + \param heap Heap hint + + _Example_ + \code + unsigned char* resp = NULL; + unsigned char httpBuf[1024]; + const char* appStrs[] = {"200 OK", NULL}; + int ret = wolfIO_HttpProcessResponseGenericIO(myIoCb, ctx, appStrs, + &resp, httpBuf, + sizeof(httpBuf), 0, NULL); + \endcode + + \sa wolfIO_HttpProcessResponse +*/ +int wolfIO_HttpProcessResponseGenericIO(WolfSSLGenericIORecvCb ioCb, + void* ioCbCtx, + const char** appStrList, + unsigned char** respBuf, + unsigned char* httpBuf, + int httpBufSz, int dynType, + void* heap); + +/*! + \ingroup IO + \brief Processes HTTP response. + + \return 0 on success + \return negative on error + + \param sfd Socket file descriptor + \param appStrList Application string list + \param respBuf Response buffer pointer + \param httpBuf HTTP buffer + \param httpBufSz HTTP buffer size + \param dynType Dynamic type + \param heap Heap hint + + _Example_ + \code + int sfd; + unsigned char* resp = NULL; + unsigned char httpBuf[1024]; + const char* appStrs[] = {"200 OK", NULL}; + int ret = wolfIO_HttpProcessResponse(sfd, appStrs, &resp, httpBuf, + sizeof(httpBuf), 0, NULL); + \endcode + + \sa wolfIO_HttpBuildRequest +*/ +int wolfIO_HttpProcessResponse(int sfd, const char** appStrList, + unsigned char** respBuf, + unsigned char* httpBuf, int httpBufSz, + int dynType, void* heap); + +/*! + \ingroup IO + \brief Sets I/O send callback for context. + + \return none No returns + + \param ctx SSL context + \param CBIOSend Send callback + + _Example_ + \code + WOLFSSL_CTX* ctx; + wolfSSL_CTX_SetIOSend(ctx, mySendCallback); + \endcode + + \sa wolfSSL_SSLSetIOSend +*/ +void wolfSSL_CTX_SetIOSend(WOLFSSL_CTX *ctx, CallbackIOSend CBIOSend); + +/*! 
+ \ingroup IO + \brief Sets I/O receive callback for SSL object. + + \return none No returns + + \param ssl SSL object + \param CBIORecv Receive callback + + _Example_ + \code + WOLFSSL* ssl; + wolfSSL_SSLSetIORecv(ssl, myRecvCallback); + \endcode + + \sa wolfSSL_CTX_SetIORecv +*/ +void wolfSSL_SSLSetIORecv(WOLFSSL *ssl, CallbackIORecv CBIORecv); + +/*! + \ingroup IO + \brief Sets I/O send callback for SSL object. + + \return none No returns + + \param ssl SSL object + \param CBIOSend Send callback + + _Example_ + \code + WOLFSSL* ssl; + wolfSSL_SSLSetIOSend(ssl, mySendCallback); + \endcode + + \sa wolfSSL_CTX_SetIOSend +*/ +void wolfSSL_SSLSetIOSend(WOLFSSL *ssl, CallbackIOSend CBIOSend); + +/*! + \ingroup IO + \brief Sets I/O for Mynewt platform. + + \return none No returns + + \param ssl SSL object + \param mnSocket Mynewt socket + \param mnSockAddrIn Mynewt socket address + + _Example_ + \code + WOLFSSL* ssl; + struct mn_socket sock; + struct mn_sockaddr_in addr; + wolfSSL_SetIO_Mynewt(ssl, &sock, &addr); + \endcode + + \sa wolfSSL_SetIO_LwIP +*/ +void wolfSSL_SetIO_Mynewt(WOLFSSL* ssl, struct mn_socket* mnSocket, + struct mn_sockaddr_in* mnSockAddrIn); + +/*! + \ingroup IO + \brief Sets I/O for LwIP platform. + + \return 0 on success + \return negative on error + + \param ssl SSL object + \param pcb Protocol control block + \param recv Receive callback + \param sent Sent callback + \param arg Argument pointer + + _Example_ + \code + WOLFSSL* ssl; + struct tcp_pcb* pcb; + int ret = wolfSSL_SetIO_LwIP(ssl, pcb, myRecv, mySent, NULL); + \endcode + + \sa wolfSSL_SetIO_Mynewt +*/ +int wolfSSL_SetIO_LwIP(WOLFSSL* ssl, void *pcb, tcp_recv_fn recv, + tcp_sent_fn sent, void *arg); + +/*! + \ingroup IO + \brief Sets cookie context for DTLS. 
+ + \return none No returns + + \param ssl SSL object + \param ctx Cookie context + + _Example_ + \code + WOLFSSL* ssl; + void* ctx; + wolfSSL_SetCookieCtx(ssl, ctx); + \endcode + + \sa wolfSSL_GetCookieCtx +*/ +void wolfSSL_SetCookieCtx(WOLFSSL* ssl, void *ctx); + +/*! + \ingroup IO + \brief Gets cookie context for DTLS. + + \return Cookie context pointer + + \param ssl SSL object + + _Example_ + \code + WOLFSSL* ssl; + void* ctx = wolfSSL_GetCookieCtx(ssl); + \endcode + + \sa wolfSSL_SetCookieCtx +*/ +void* wolfSSL_GetCookieCtx(WOLFSSL* ssl); + +/*! + \ingroup IO + \brief Sets get peer callback for context. + + \return none No returns + + \param ctx SSL context + \param cb Get peer callback + + _Example_ + \code + WOLFSSL_CTX* ctx; + wolfSSL_CTX_SetIOGetPeer(ctx, myGetPeerCallback); + \endcode + + \sa wolfSSL_CTX_SetIOSetPeer +*/ +void wolfSSL_CTX_SetIOGetPeer(WOLFSSL_CTX* ctx, CallbackGetPeer cb); + +/*! + \ingroup IO + \brief Sets set peer callback for context. + + \return none No returns + + \param ctx SSL context + \param cb Set peer callback + + _Example_ + \code + WOLFSSL_CTX* ctx; + wolfSSL_CTX_SetIOSetPeer(ctx, mySetPeerCallback); + \endcode + + \sa wolfSSL_CTX_SetIOGetPeer +*/ +void wolfSSL_CTX_SetIOSetPeer(WOLFSSL_CTX* ctx, CallbackSetPeer cb); + +/*! + \ingroup IO + \brief Gets peer information. + + \return 0 on success + \return negative on error + + \param ssl SSL object + \param ip IP address buffer + \param ipSz IP address buffer size pointer + \param port Port number pointer + \param fam Address family pointer + + _Example_ + \code + WOLFSSL* ssl; + char ip[46]; + int ipSz = sizeof(ip); + unsigned short port; + int fam; + int ret = EmbedGetPeer(ssl, ip, &ipSz, &port, &fam); + \endcode + + \sa EmbedSetPeer +*/ +int EmbedGetPeer(WOLFSSL* ssl, char* ip, int* ipSz, unsigned short* port, + int* fam); + +/*! + \ingroup IO + \brief Sets peer information. 
+ + \return 0 on success + \return negative on error + + \param ssl SSL object + \param ip IP address string + \param ipSz IP address string size + \param port Port number + \param fam Address family + + _Example_ + \code + WOLFSSL* ssl; + int ret = EmbedSetPeer(ssl, "127.0.0.1", 9, 443, AF_INET); + \endcode + + \sa EmbedGetPeer +*/ +int EmbedSetPeer(WOLFSSL* ssl, char* ip, int ipSz, unsigned short port, + int fam); diff --git a/linuxkm/Makefile b/linuxkm/Makefile index 4e48291de..f8be02bde 100644 --- a/linuxkm/Makefile +++ b/linuxkm/Makefile @@ -290,7 +290,7 @@ ifeq "$(ENABLED_LINUXKM_PIE)" "yes" # if the above make didn't build a fresh libwolfssl.ko, then the module is already up to date and we leave it untouched, assuring stability for purposes of module-update-fips-hash. @if [[ ! "$@" -nt "$$RELOC_TMP" ]]; then echo ' Module already up-to-date.'; exit 0; fi @SECTION_MAP=$$(mktemp) - @trap 'rm "$$SECTION_MAP"' EXIT + @trap 'rm "$$RELOC_TMP" "$$SECTION_MAP"' EXIT @export SECTION_MAP @$(READELF) --wide --sections --symbols "$@" | $(GENERATE_SECTION_MAP) @$(READELF) --wide --relocs "$@" | $(GENERATE_RELOC_TAB) >| '$(MODULE_TOP)/linuxkm/wc_linuxkm_pie_reloc_tab.c' diff --git a/linuxkm/include.am b/linuxkm/include.am index 63ffc5a58..87cfae72e 100644 --- a/linuxkm/include.am +++ b/linuxkm/include.am @@ -23,6 +23,7 @@ EXTRA_DIST += m4/ax_linuxkm.m4 \ linuxkm/wolfcrypt.lds \ linuxkm/patches/5.10.17/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v10v17.patch \ linuxkm/patches/5.10.236/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v10v236.patch \ + linuxkm/patches/5.14.0-570.58.1.el9_6/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v14-570v58v1-el9_6.patch \ linuxkm/patches/5.15/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v15.patch \ linuxkm/patches/5.17/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v17.patch \ linuxkm/patches/5.17-ubuntu-jammy-tegra/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v17-ubuntu-jammy-tegra.patch \ diff --git a/linuxkm/linuxkm_wc_port.h 
b/linuxkm/linuxkm_wc_port.h index 51bc3f26a..bb9dd03a7 100644 --- a/linuxkm/linuxkm_wc_port.h +++ b/linuxkm/linuxkm_wc_port.h @@ -937,7 +937,9 @@ typeof(kfree) *kfree; typeof(ksize) *ksize; +#ifndef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT typeof(get_random_bytes) *get_random_bytes; +#endif #if LINUX_VERSION_CODE < KERNEL_VERSION(4, 0, 0) typeof(getnstimeofday) *getnstimeofday; #elif LINUX_VERSION_CODE < KERNEL_VERSION(5, 0, 0) @@ -1072,9 +1074,7 @@ #endif /* !WOLFCRYPT_ONLY && !NO_CERTS */ - #ifdef WOLFSSL_DEBUG_BACKTRACE_ERROR_CODES typeof(dump_stack) *dump_stack; - #endif #ifdef CONFIG_ARM64 #ifndef CONFIG_ARCH_TEGRA @@ -1269,7 +1269,9 @@ #endif #define ksize WC_PIE_INDIRECT_SYM(ksize) +#ifndef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT #define get_random_bytes WC_PIE_INDIRECT_SYM(get_random_bytes) +#endif #if LINUX_VERSION_CODE < KERNEL_VERSION(4, 0, 0) #define getnstimeofday WC_PIE_INDIRECT_SYM(getnstimeofday) #elif LINUX_VERSION_CODE < KERNEL_VERSION(5, 0, 0) @@ -1345,9 +1347,7 @@ #endif /* !WOLFCRYPT_ONLY && !NO_CERTS */ - #ifdef WOLFSSL_DEBUG_BACKTRACE_ERROR_CODES #define dump_stack WC_PIE_INDIRECT_SYM(dump_stack) - #endif #undef preempt_count /* just in case -- not a macro on x86. */ #define preempt_count WC_PIE_INDIRECT_SYM(preempt_count) @@ -1729,4 +1729,13 @@ #error unexpected BITS_PER_LONG value. #endif +/* WC_DUMP_BACKTRACE_NONDEBUG is intended to dump a backtrace only if it hasn't + * already been dumped by the called function. 
+ */ +#if defined(WOLFSSL_DEBUG_TRACE_ERROR_CODES) && defined(WOLFSSL_DEBUG_BACKTRACE_ERROR_CODES) + #define WC_DUMP_BACKTRACE_NONDEBUG WC_DO_NOTHING +#else + #define WC_DUMP_BACKTRACE_NONDEBUG dump_stack() +#endif + #endif /* LINUXKM_WC_PORT_H */ diff --git a/linuxkm/lkcapi_dh_glue.c b/linuxkm/lkcapi_dh_glue.c index dc419865e..a598307f5 100644 --- a/linuxkm/lkcapi_dh_glue.c +++ b/linuxkm/lkcapi_dh_glue.c @@ -749,10 +749,7 @@ static int km_ffdhe_init(struct crypto_kpp *tfm, int name, word32 nbits) ctx->name = name; ctx->nbits = nbits; - if (WOLFSSL_ATOMIC_LOAD(linuxkm_lkcapi_registering_now)) - err = LKCAPI_INITRNG_FOR_SELFTEST(&ctx->rng); - else - err = wc_InitRng(&ctx->rng); + err = LKCAPI_INITRNG(&ctx->rng); if (err) { #ifdef WOLFKM_DEBUG_DH pr_err("%s: init rng returned: %d\n", WOLFKM_DH_DRIVER, err); diff --git a/linuxkm/lkcapi_ecdh_glue.c b/linuxkm/lkcapi_ecdh_glue.c index 5a57bc0ea..1086bbf98 100644 --- a/linuxkm/lkcapi_ecdh_glue.c +++ b/linuxkm/lkcapi_ecdh_glue.c @@ -387,10 +387,7 @@ static int km_ecdh_init(struct crypto_kpp *tfm, int curve_id) ctx->curve_len = (word32) ret; } - if (WOLFSSL_ATOMIC_LOAD(linuxkm_lkcapi_registering_now)) - ret = LKCAPI_INITRNG_FOR_SELFTEST(&ctx->rng); - else - ret = wc_InitRng(&ctx->rng); + ret = LKCAPI_INITRNG(&ctx->rng); if (ret) { #ifdef WOLFKM_DEBUG_ECDH pr_err("%s: init rng returned: %d\n", WOLFKM_ECDH_DRIVER, ret); diff --git a/linuxkm/lkcapi_rsa_glue.c b/linuxkm/lkcapi_rsa_glue.c index c95e1eb75..3872ecd89 100644 --- a/linuxkm/lkcapi_rsa_glue.c +++ b/linuxkm/lkcapi_rsa_glue.c @@ -27,6 +27,10 @@ #error lkcapi_rsa_glue.c included in non-LINUXKM_LKCAPI_REGISTER project. 
#endif +#ifndef RHEL_RELEASE_VERSION + #define RHEL_RELEASE_VERSION(a, b) (((a) << 8) + (b)) +#endif + #if !defined(NO_RSA) #if (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \ (defined(LINUXKM_LKCAPI_REGISTER_ALL_KCONFIG) && defined(CONFIG_CRYPTO_RSA))) && \ @@ -630,14 +634,13 @@ out: static inline int km_rsa_ctx_init_rng(struct km_rsa_ctx * ctx) { switch (ctx->rng.status) { case WC_DRBG_OK: +#ifdef WC_RNG_BANK_SUPPORT + case WC_DRBG_BANKREF: +#endif return 0; case WC_DRBG_NOT_INIT: { - int err; - if (WOLFSSL_ATOMIC_LOAD(linuxkm_lkcapi_registering_now)) - err = LKCAPI_INITRNG_FOR_SELFTEST(&ctx->rng); - else - err = wc_InitRng(&ctx->rng); + int err = LKCAPI_INITRNG(&ctx->rng); if (err) { pr_err("%s: init rng returned: %d\n", WOLFKM_RSA_DRIVER, err); if (err == WC_NO_ERR_TRACE(MEMORY_E)) @@ -2101,7 +2104,7 @@ static int linuxkm_test_rsa_driver(const char * driver, int nbits) memset(&rng, 0, sizeof(rng)); memset(key, 0, sizeof(RsaKey)); - ret = LKCAPI_INITRNG_FOR_SELFTEST(&rng); + ret = LKCAPI_INITRNG(&rng); if (ret) { pr_err("error: init rng returned: %d\n", ret); @@ -2347,6 +2350,14 @@ static int linuxkm_test_rsa_driver(const char * driver, int nbits) memset(dec, 0, key_len); ret = crypto_akcipher_decrypt(req); + #if defined(RHEL_RELEASE_CODE) && \ + (RHEL_RELEASE_CODE >= RHEL_RELEASE_VERSION(9, 6)) + if (ret == -ENOSYS) { + pr_info("info: ignoring failure from crypto_akcipher_decrypt (disabled by RHEL policy)\n"); + test_rc = 0; + goto test_rsa_end; + } + #endif if (ret) { pr_err("error: crypto_akcipher_decrypt returned: %d\n", ret); goto test_rsa_end; @@ -2471,7 +2482,7 @@ static int linuxkm_test_pkcs1pad_driver(const char * driver, int nbits, memset(&rng, 0, sizeof(rng)); memset(key, 0, sizeof(RsaKey)); - ret = LKCAPI_INITRNG_FOR_SELFTEST(&rng); + ret = LKCAPI_INITRNG(&rng); if (ret) { pr_err("error: init rng returned: %d\n", ret); goto test_pkcs1_end; @@ -2721,6 +2732,14 @@ static int linuxkm_test_pkcs1pad_driver(const char * driver, int nbits, 
akcipher_request_set_crypt(req, &src, &dst, hash_len, key_len); ret = crypto_akcipher_sign(req); + #if defined(RHEL_RELEASE_CODE) && \ + (RHEL_RELEASE_CODE >= RHEL_RELEASE_VERSION(9, 6)) + if (ret == -ENOSYS) { + pr_info("info: ignoring failure from crypto_akcipher_sign (disabled by RHEL policy)\n"); + test_rc = 0; + goto test_pkcs1_end; + } + #endif if (ret) { pr_err("error: crypto_akcipher_sign returned: %d\n", ret); test_rc = BAD_FUNC_ARG; @@ -2847,6 +2866,14 @@ static int linuxkm_test_pkcs1pad_driver(const char * driver, int nbits, } ret = crypto_akcipher_decrypt(req); + #if defined(RHEL_RELEASE_CODE) && \ + (RHEL_RELEASE_CODE >= RHEL_RELEASE_VERSION(9, 6)) + if (ret == -ENOSYS) { + pr_info("info: ignoring failure from crypto_akcipher_decrypt (disabled by RHEL policy)\n"); + test_rc = 0; + goto test_pkcs1_end; + } + #endif if (ret) { pr_err("error: crypto_akcipher_decrypt returned: %d\n", ret); test_rc = BAD_FUNC_ARG; @@ -2979,7 +3006,7 @@ static int linuxkm_test_pkcs1_driver(const char * driver, int nbits, memset(&rng, 0, sizeof(rng)); memset(key, 0, sizeof(RsaKey)); - ret = LKCAPI_INITRNG_FOR_SELFTEST(&rng); + ret = LKCAPI_INITRNG(&rng); if (ret) { pr_err("error: init rng returned: %d\n", ret); goto test_pkcs1_end; diff --git a/linuxkm/lkcapi_sha_glue.c b/linuxkm/lkcapi_sha_glue.c index fb3dc222f..1bdc56a8a 100644 --- a/linuxkm/lkcapi_sha_glue.c +++ b/linuxkm/lkcapi_sha_glue.c @@ -955,75 +955,79 @@ struct wc_swallow_the_semicolon #include #endif #include - -struct wc_linuxkm_drbg_ctx { - size_t n_rngs; - struct wc_rng_inst { - wolfSSL_Atomic_Int lock; - WC_RNG rng; - } *rngs; /* one per CPU ID */ -}; - -static inline void wc_linuxkm_drbg_ctx_clear(struct wc_linuxkm_drbg_ctx * ctx) -{ - unsigned int i; - - if (ctx->rngs) { - for (i = 0; i < ctx->n_rngs; ++i) { - if (ctx->rngs[i].lock != 0) { - /* better to leak than to crash. 
*/ - pr_err("BUG: wc_linuxkm_drbg_ctx_clear called with DRBG #%d still locked.", i); - ctx->rngs = NULL; - ctx->n_rngs = 0; - return; - } - else - wc_FreeRng(&ctx->rngs[i].rng); - } - free(ctx->rngs); - ctx->rngs = NULL; - ctx->n_rngs = 0; - } - - return; -} +#include static volatile int wc_linuxkm_drbg_init_tfm_disable_vector_registers = 0; +#ifndef WC_LINUXKM_INITRNG_TIMEOUT_SEC + #define WC_LINUXKM_INITRNG_TIMEOUT_SEC 30 +#endif + +static int linuxkm_affinity_lock(void *arg) { + (void)arg; + if (preempt_count() != 0) + return ALREADY_E; +#if defined(CONFIG_SMP) && (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 7, 0)) + migrate_disable(); /* this actually makes irq_count() nonzero, so that + * DISABLE_VECTOR_REGISTERS() is superfluous, but + * don't depend on that. + */ +#endif + local_bh_disable(); + return 0; +} + +static int linuxkm_affinity_get_id(void *arg, int *id) { + (void)arg; + *id = raw_smp_processor_id(); + return 0; +} + +static int linuxkm_affinity_unlock(void *arg) { + (void)arg; + local_bh_enable(); +#if defined(CONFIG_SMP) && (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 7, 0)) + migrate_enable(); +#endif + return 0; +} + static int wc_linuxkm_drbg_init_tfm(struct crypto_tfm *tfm) { - struct wc_linuxkm_drbg_ctx *ctx = (struct wc_linuxkm_drbg_ctx *)crypto_tfm_ctx(tfm); - unsigned int i; + struct wc_rng_bank *ctx = (struct wc_rng_bank *)crypto_tfm_ctx(tfm); int ret; - int need_reenable_vec = 0; - int can_sleep = (preempt_count() == 0); + word32 flags = WC_RNG_BANK_FLAG_CAN_WAIT; - ctx->n_rngs = nr_cpu_ids + 4; - ctx->rngs = (struct wc_rng_inst *)malloc(sizeof(*ctx->rngs) * ctx->n_rngs); - if (! 
ctx->rngs) { - ctx->n_rngs = 0; - return -ENOMEM; - } - XMEMSET(ctx->rngs, 0, sizeof(*ctx->rngs) * ctx->n_rngs); + if (wc_linuxkm_drbg_init_tfm_disable_vector_registers) + flags |= WC_RNG_BANK_FLAG_NO_VECTOR_OPS; - for (i = 0; i < ctx->n_rngs; ++i) { - ctx->rngs[i].lock = 0; - if (wc_linuxkm_drbg_init_tfm_disable_vector_registers) - need_reenable_vec = (DISABLE_VECTOR_REGISTERS() == 0); - ret = wc_InitRng(&ctx->rngs[i].rng); - if (need_reenable_vec) - REENABLE_VECTOR_REGISTERS(); + ret = wc_rng_bank_init( + ctx, nr_cpu_ids + 4, flags, WC_LINUXKM_INITRNG_TIMEOUT_SEC, + NULL /* heap */, INVALID_DEVID); + + if (ret == 0) { + ret = wc_rng_bank_set_affinity_handlers( + ctx, + linuxkm_affinity_lock, + linuxkm_affinity_get_id, + linuxkm_affinity_unlock, + NULL); if (ret != 0) { - pr_warn_once("WARNING: wc_InitRng returned %d\n",ret); - ret = -EINVAL; - break; + (void)wc_rng_bank_fini(ctx); + pr_err("ERROR: wc_rng_bank_set_affinity_handlers() in wc_linuxkm_drbg_init_tfm() returned err %d\n", ret); + WC_DUMP_BACKTRACE_NONDEBUG; } - if (can_sleep) - cond_resched(); } - - if (ret != 0) { - wc_linuxkm_drbg_ctx_clear(ctx); + else { + pr_err("ERROR: wc_rng_bank_init() in wc_linuxkm_drbg_init_tfm() returned err %d\n", ret); + if (ret == WC_NO_ERR_TRACE(MEMORY_E)) + ret = -ENOMEM; + else if (ret == WC_NO_ERR_TRACE(WC_TIMEOUT_E)) + ret = -ETIMEDOUT; + else if (ret == WC_NO_ERR_TRACE(INTERRUPTED_E)) + ret = -EINTR; + else + ret = -EINVAL; } return ret; @@ -1031,101 +1035,54 @@ static int wc_linuxkm_drbg_init_tfm(struct crypto_tfm *tfm) static void wc_linuxkm_drbg_exit_tfm(struct crypto_tfm *tfm) { - struct wc_linuxkm_drbg_ctx *ctx = (struct wc_linuxkm_drbg_ctx *)crypto_tfm_ctx(tfm); + struct wc_rng_bank *ctx = (struct wc_rng_bank *)crypto_tfm_ctx(tfm); + int ret = wc_rng_bank_fini(ctx); - wc_linuxkm_drbg_ctx_clear(ctx); + if (ret != 0) + pr_err("ERROR: wc_rng_bank_fini() in wc_linuxkm_drbg_exit_tfm() returned err %d\n", ret); return; } static int 
wc_linuxkm_drbg_default_instance_registered = 0; -/* get_drbg() uses atomic operations to get exclusive ownership of a DRBG - * without delay. It expects to be called in uninterruptible context, though - * works fine in any context. It starts by trying the DRBG matching the current - * CPU ID, and if that doesn't immediately succeed, it iterates upward until one - * succeeds. The first attempt will always succeed, even under intense load, - * unless there is or has recently been a reseed or mix-in operation competing - * with generators. - * - * Note that wc_linuxkm_drbg_init_tfm() allocates at least 4 DRBGs, regardless - * of nominal core count, to avoid stalling generators on unicore targets. - */ +static struct wc_rng_bank_inst *linuxkm_get_drbg(struct crypto_rng *tfm) { + struct wc_rng_bank *ctx = (struct wc_rng_bank *)crypto_rng_ctx(tfm); + int err; + struct wc_rng_bank_inst *ret; + word32 flags = + WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST | + WC_RNG_BANK_FLAG_CAN_WAIT | + WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST; -static inline struct wc_rng_inst *get_drbg(struct crypto_rng *tfm) { - struct wc_linuxkm_drbg_ctx *ctx = (struct wc_linuxkm_drbg_ctx *)crypto_rng_ctx(tfm); - int n, new_lock_value; - - /* check for mismatched handler or missing instance array. */ - if ((tfm->base.__crt_alg->cra_init != wc_linuxkm_drbg_init_tfm) || - (ctx->rngs == NULL)) - { + /* check for mismatched handler. */ + if (tfm->base.__crt_alg->cra_init != wc_linuxkm_drbg_init_tfm) { + pr_err("BUG: linuxkm_get_drbg() called on foreign tfm.\n"); return NULL; } - if ((tfm == crypto_default_rng) && (preempt_count() == 0)) { - #if defined(CONFIG_SMP) && (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 7, 0)) - migrate_disable(); /* this actually makes irq_count() nonzero, so that - * DISABLE_VECTOR_REGISTERS() is superfluous, but - * don't depend on that. 
- */ - #endif - local_bh_disable(); - new_lock_value = 2; - } + if (preempt_count() == 0) + flags |= WC_RNG_BANK_FLAG_AFFINITY_LOCK; else - { - new_lock_value = 1; + flags |= WC_RNG_BANK_FLAG_NO_VECTOR_OPS; + + err = wc_rng_bank_checkout(ctx, &ret, 0, WC_LINUXKM_INITRNG_TIMEOUT_SEC, flags); + + if (err != 0) { + pr_err("ERROR: wc_rng_bank_checkout() in linuxkm_get_drbg() returned err %d.\n", err); + WC_DUMP_BACKTRACE_NONDEBUG; + return NULL; } - n = raw_smp_processor_id(); - - for (;;) { - int expected = 0; - if (likely(__atomic_compare_exchange_n(&ctx->rngs[n].lock, &expected, new_lock_value, 0, __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE))) - return &ctx->rngs[n]; - ++n; - if (n >= (int)ctx->n_rngs) - n = 0; - cpu_relax(); - } - - __builtin_unreachable(); + return ret; } -/* get_drbg_n() is used by bulk seed, mix-in, and reseed operations. It expects - * the caller to be able to wait until the requested DRBG is available. If the - * caller can't sleep and the requested DRBG is busy, it returns immediately -- - * this avoids priority inversions and deadlocks. 
- */ -static inline struct wc_rng_inst *get_drbg_n(struct wc_linuxkm_drbg_ctx *ctx, int n, int can_spin) { - int can_sleep = (preempt_count() == 0); - - for (;;) { - int expected = 0; - if (likely(__atomic_compare_exchange_n(&ctx->rngs[n].lock, &expected, 1, 0, __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE))) - return &ctx->rngs[n]; - if (can_sleep && can_spin) { - if (signal_pending(current)) - return NULL; - cond_resched(); - } - else - return NULL; - } - - __builtin_unreachable(); -} - -static inline void put_drbg(struct wc_rng_inst *drbg) { - int migration_disabled = (drbg->lock == 2); - __atomic_store_n(&(drbg->lock),0,__ATOMIC_RELEASE); - - if (migration_disabled) { - local_bh_enable(); - #if defined(CONFIG_SMP) && (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 7, 0)) - migrate_enable(); - #endif +static void linuxkm_put_drbg(struct crypto_rng *tfm, struct wc_rng_bank_inst **drbg) { + struct wc_rng_bank *ctx = (struct wc_rng_bank *)crypto_rng_ctx(tfm); + int ret = wc_rng_bank_checkin(ctx, drbg); + if (ret != 0) { + pr_err("ERROR: wc_rng_bank_checkin() in linuxkm_put_drbg() returned err %d.\n", ret); + WC_DUMP_BACKTRACE_NONDEBUG; } } @@ -1159,196 +1116,43 @@ static inline struct crypto_rng *get_crypto_default_rng(void) { return current_crypto_default_rng; } -static int drbg_init_from(WC_RNG *source_rng, struct DRBG_internal* dest_drbg) { - int ret; - int need_vec_reenable; - - XMEMSET(dest_drbg, 0, sizeof(struct DRBG_internal)); - - need_vec_reenable = (DISABLE_VECTOR_REGISTERS() == 0); - - /* Don't copy out the low level DRBG itself -- it contains sensitive secret - * state. Instead, use it to generate fresh V and C values in a - * non-intrusive way. 
- */ - ret = wc_RNG_GenerateBlock(source_rng, dest_drbg->V, sizeof dest_drbg->V); - if (ret != 0) { - pr_err("drbg_init_from: wc_RNG_GenerateBlock for V returned %d\n", ret); - goto out; - } - ret = wc_RNG_GenerateBlock(source_rng, dest_drbg->C, sizeof dest_drbg->C); - if (ret != 0) { - pr_err("drbg_init_from: wc_RNG_GenerateBlock for C returned %d\n", ret); - goto out; - } - - dest_drbg->heap = source_rng->heap; -#if defined(WOLFSSL_ASYNC_CRYPT) || defined(WOLF_CRYPTO_CB) - dest_drbg->devId = source_rng->devId; +#ifndef WC_DRBG_BANKREF + #error LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT requires WC_DRBG_BANKREF support. #endif - ret = wc_InitSha256_ex(&dest_drbg->sha256, dest_drbg->heap, -#if defined(WOLFSSL_ASYNC_CRYPT) || defined(WOLF_CRYPTO_CB) - source_rng->dev_id -#else - INVALID_DEVID -#endif - ); - if (ret != 0) - goto out; - - dest_drbg->reseedCtr = 1; - - ret = 0; - -out: - - if (need_vec_reenable) - REENABLE_VECTOR_REGISTERS(); - - return ret; -} - -/* fork_default_rng() is a non-FIPS-compliant helper function to initialize an - * RNG for glue layer POSTs. Direct replacement for wc_InitRng(), and secure in - * principle, but not permissible to use as such in FIPS runtimes. 
- */ -static WC_MAYBE_UNUSED int fork_default_rng(WC_RNG *forked_rng) { - struct crypto_rng *current_crypto_default_rng; - struct wc_rng_inst *rng = NULL; - struct DRBG_internal *drbg = NULL; - struct DRBG_internal *drbg_scratch = NULL; - byte *health_check_scratch = NULL; - byte *newSeed_buf = NULL; +WC_MAYBE_UNUSED static int linuxkm_InitRng_DefaultRef(WC_RNG* rng) { int ret; - - if (forked_rng == NULL) - return BAD_FUNC_ARG; - - XMEMSET(forked_rng, 0, sizeof *forked_rng); - - health_check_scratch = - (byte *)XMALLOC(RNG_HEALTH_TEST_CHECK_SIZE, NULL, - DYNAMIC_TYPE_TMP_BUFFER); - if (health_check_scratch == NULL) { - ret = MEMORY_E; - goto out; - } - - newSeed_buf = (byte*)XMALLOC(WC_DRBG_SEED_SZ + - WC_DRBG_SEED_BLOCK_SZ, - NULL, - DYNAMIC_TYPE_SEED); - if (newSeed_buf == NULL) { - ret = MEMORY_E; - goto out; - } - - drbg = (struct DRBG_internal *)XMALLOC(sizeof *drbg, NULL, - DYNAMIC_TYPE_RNG); - if (drbg == NULL) { - ret = MEMORY_E; - goto out; - } - - drbg_scratch = - (struct DRBG_internal *)XMALLOC(sizeof *drbg_scratch, NULL, - DYNAMIC_TYPE_RNG); - if (drbg_scratch == NULL) { - ret = MEMORY_E; - goto out; - } - - current_crypto_default_rng = get_crypto_default_rng(); + struct crypto_rng *current_crypto_default_rng = get_crypto_default_rng(); if (current_crypto_default_rng == NULL) { - ret = BAD_STATE_E; - goto out; + pr_warn_once("WARNING: get_crypto_default_rng() failed in linuxkm_InitRng_DefaultRef(); falling through to wc_InitRng().\n"); + return wc_InitRng(rng); } - - rng = get_drbg(current_crypto_default_rng); - if (rng == NULL) { - ret = BAD_STATE_E; - goto out; - } - - if (rng->rng.status != WC_DRBG_OK) { - pr_err("fork_default_rng: rng->rng.status = %d\n", rng->rng.status); - ret = RNG_FAILURE_E; - goto out; - } - - XMEMCPY(forked_rng, &rng->rng, sizeof *forked_rng); - forked_rng->drbg = (struct DRBG *)drbg; - forked_rng->drbg_scratch = drbg_scratch; - forked_rng->health_check_scratch = health_check_scratch; - forked_rng->newSeed_buf = newSeed_buf; - 
- ret = drbg_init_from(&rng->rng, (struct DRBG_internal*)forked_rng->drbg); - if (ret != 0) - goto out; - - ret = drbg_init_from(&rng->rng, (struct DRBG_internal*)forked_rng->drbg_scratch); - if (ret != 0) - goto out; - - put_drbg(rng); - rng = NULL; - - { - byte scratch[4]; - ret = wc_RNG_GenerateBlock(forked_rng, scratch, sizeof scratch); - if (ret != 0) - goto out; - } - - ret = 0; - -out: - - if (ret == 0) - return ret; else { - if (rng) - put_drbg(rng); - XFREE(drbg, rng->rng.heap, DYNAMIC_TYPE_RNG); - XFREE(drbg_scratch, rng->rng.heap, DYNAMIC_TYPE_RNG); - XFREE(health_check_scratch, rng->rng.heap, DYNAMIC_TYPE_RNG); - XFREE(newSeed_buf, rng->rng.heap, DYNAMIC_TYPE_RNG); - pr_warn("WARNING: fork_default_rng: ret=%d; falling through to wc_InitRng()\n", ret); - return wc_InitRng(forked_rng); + struct wc_rng_bank *default_bank = (struct wc_rng_bank *)crypto_rng_ctx(current_crypto_default_rng); + ret = wc_InitRng_BankRef(default_bank, rng); + return ret; } + + __builtin_unreachable(); } +#define LKCAPI_INITRNG(rng) linuxkm_InitRng_DefaultRef(rng) -#define LKCAPI_INITRNG_FOR_SELFTEST(rng) fork_default_rng(rng) - -#else /* !LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT || !HAVE_HASHDRBG */ - -#define LKCAPI_INITRNG_FOR_SELFTEST(rng) wc_InitRng(rng) - -#endif /* !LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT || !HAVE_HASHDRBG */ +#endif /* LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT && HAVE_HASHDRBG */ static int wc_linuxkm_drbg_generate(struct crypto_rng *tfm, const u8 *src, unsigned int slen, u8 *dst, unsigned int dlen) { int ret, retried = 0; - int need_fpu_restore; - struct wc_rng_inst *drbg = get_drbg(tfm); + struct wc_rng_bank_inst *drbg = linuxkm_get_drbg(tfm); if (! drbg) { - pr_err_once("BUG: get_drbg() failed."); + pr_err_once("BUG: linuxkm_get_drbg() failed."); return -EFAULT; } - /* for the default RNG, make sure we don't cache an underlying SHA256 - * method that uses vector insns (forbidden from irq handlers). 
- */ - need_fpu_restore = (tfm == crypto_default_rng) ? (DISABLE_VECTOR_REGISTERS() == 0) : 0; - -retry: - if (slen > 0) { - ret = wc_RNG_DRBG_Reseed(&drbg->rng, src, slen); + ret = wc_RNG_DRBG_Reseed(WC_RNG_BANK_INST_TO_RNG(drbg), src, slen); if (ret != 0) { pr_warn_once("WARNING: wc_RNG_DRBG_Reseed returned %d\n",ret); ret = -EINVAL; @@ -1359,7 +1163,7 @@ retry: for (;;) { #define RNG_MAX_BLOCK_LEN_ROUNDED (RNG_MAX_BLOCK_LEN & ~0xfU) if (dlen > RNG_MAX_BLOCK_LEN_ROUNDED) { - ret = wc_RNG_GenerateBlock(&drbg->rng, dst, RNG_MAX_BLOCK_LEN_ROUNDED); + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(drbg), dst, RNG_MAX_BLOCK_LEN_ROUNDED); if (ret == 0) { dlen -= RNG_MAX_BLOCK_LEN_ROUNDED; dst += RNG_MAX_BLOCK_LEN_ROUNDED; @@ -1367,38 +1171,48 @@ retry: } #undef RNG_MAX_BLOCK_LEN_ROUNDED else { - ret = wc_RNG_GenerateBlock(&drbg->rng, dst, dlen); - dlen -= dlen; + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(drbg), dst, dlen); + if (ret == 0) + dlen = 0; } + if (dlen == 0) + break; + + if (ret == 0) + continue; + if (unlikely(ret == WC_NO_ERR_TRACE(RNG_FAILURE_E)) && (! retried)) { + if (slen > 0) + break; + retried = 1; - wc_FreeRng(&drbg->rng); - ret = wc_InitRng(&drbg->rng); + + ret = wc_rng_bank_inst_reinit((struct wc_rng_bank *)crypto_rng_ctx(tfm), + drbg, + WC_LINUXKM_INITRNG_TIMEOUT_SEC, + WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret == 0) { - pr_warn("WARNING: reinitialized DRBG #%d after RNG_FAILURE_E.", raw_smp_processor_id()); - goto retry; + pr_warn("WARNING: reinitialized DRBG #%d after RNG_FAILURE_E from wc_RNG_GenerateBlock().", raw_smp_processor_id()); + continue; } else { pr_warn_once("ERROR: reinitialization of DRBG #%d after RNG_FAILURE_E failed with ret %d.", raw_smp_processor_id(), ret); ret = -EINVAL; + break; } } - else if (ret != 0) { - pr_warn_once("WARNING: wc_RNG_GenerateBlock returned %d\n",ret); + else { + pr_warn_once("ERROR: wc_linuxkm_drbg_generate() wc_RNG_GenerateBlock returned %d.\n",ret); ret = -EINVAL; break; } - - if (! 
dlen) - break; } out: - if (need_fpu_restore) - REENABLE_VECTOR_REGISTERS(); - put_drbg(drbg); + linuxkm_put_drbg(tfm, &drbg); return ret; } @@ -1406,13 +1220,10 @@ out: static int wc_linuxkm_drbg_seed(struct crypto_rng *tfm, const u8 *seed, unsigned int slen) { - struct wc_linuxkm_drbg_ctx *ctx = (struct wc_linuxkm_drbg_ctx *)crypto_rng_ctx(tfm); - u8 *seed_copy = NULL; - int ret = 0; - int n; + struct wc_rng_bank *ctx = (struct wc_rng_bank *)crypto_rng_ctx(tfm); + int ret; - if ((tfm->base.__crt_alg->cra_init != wc_linuxkm_drbg_init_tfm) || - (ctx->rngs == NULL)) + if (tfm->base.__crt_alg->cra_init != wc_linuxkm_drbg_init_tfm) { pr_err_once("BUG: mismatched tfm."); return -EFAULT; @@ -1421,51 +1232,12 @@ static int wc_linuxkm_drbg_seed(struct crypto_rng *tfm, if (slen == 0) return 0; - seed_copy = (u8 *)malloc(slen + 2); - if (! seed_copy) - return -ENOMEM; - XMEMCPY(seed_copy + 2, seed, slen); - - /* this iteration counts down, whereas the iteration in get_drbg() counts - * up, to assure they can't possibly phase-lock to each other. - */ - for (n = ctx->n_rngs - 1; n >= 0; --n) { - struct wc_rng_inst *drbg = get_drbg_n(ctx, n, 1); - - if (! drbg) { - ret = -EINTR; - break; - } - - /* perturb the seed with the CPU ID, so that no DRBG has the exact same - * seed. - */ - seed_copy[0] = (u8)(n >> 8); - seed_copy[1] = (u8)n; - - { - /* for the default RNG, make sure we don't cache an underlying SHA256 - * method that uses vector insns (forbidden from irq handlers). - */ - int need_fpu_restore = (tfm == crypto_default_rng) ? 
(DISABLE_VECTOR_REGISTERS() == 0) : 0; - ret = wc_RNG_DRBG_Reseed(&drbg->rng, seed_copy, slen + 2); - if (need_fpu_restore) - REENABLE_VECTOR_REGISTERS(); - } - - if (ret != 0) { - pr_warn_once("WARNING: wc_RNG_DRBG_Reseed returned %d\n",ret); - ret = -EINVAL; - } - - put_drbg(drbg); - - if (ret != 0) - break; + ret = wc_rng_bank_seed(ctx, seed, slen, WC_LINUXKM_INITRNG_TIMEOUT_SEC, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) { + pr_err("wc_rng_bank_seed() in wc_linuxkm_drbg_seed() returned err %d.\n", ret); + ret = -EINVAL; } - free(seed_copy); - return ret; } @@ -1477,7 +1249,7 @@ static struct rng_alg wc_linuxkm_drbg = { .cra_name = WOLFKM_STDRNG_NAME, .cra_driver_name = WOLFKM_STDRNG_DRIVER, .cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY, - .cra_ctxsize = sizeof(struct wc_linuxkm_drbg_ctx), + .cra_ctxsize = sizeof(struct wc_rng_bank), .cra_init = wc_linuxkm_drbg_init_tfm, .cra_exit = wc_linuxkm_drbg_exit_tfm, .cra_module = THIS_MODULE @@ -1520,9 +1292,9 @@ static int wc_linuxkm_drbg_loaded = 0; #ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS -static inline struct wc_linuxkm_drbg_ctx *get_default_drbg_ctx(void) { +static inline struct wc_rng_bank *get_default_drbg_ctx(void) { struct crypto_rng *current_crypto_default_rng = get_crypto_default_rng(); - struct wc_linuxkm_drbg_ctx *ctx = (current_crypto_default_rng ? (struct wc_linuxkm_drbg_ctx *)crypto_rng_ctx(current_crypto_default_rng) : NULL); + struct wc_rng_bank *ctx = (current_crypto_default_rng ? (struct wc_rng_bank *)crypto_rng_ctx(current_crypto_default_rng) : NULL); if (ctx && (! 
ctx->rngs)) { pr_err_once("BUG: get_default_drbg_ctx() found null ctx->rngs."); return NULL; @@ -1652,7 +1424,7 @@ static ssize_t wc_extract_crng_user(void __user *buf, size_t nbytes) { } static int wc_mix_pool_bytes(const void *buf, size_t len) { - struct wc_linuxkm_drbg_ctx *ctx; + struct wc_rng_bank *ctx; size_t i; int n; int can_sleep = (preempt_count() == 0); @@ -1664,19 +1436,20 @@ static int wc_mix_pool_bytes(const void *buf, size_t len) { return -EFAULT; for (n = ctx->n_rngs - 1; n >= 0; --n) { - struct wc_rng_inst *drbg = get_drbg_n(ctx, n, 0); + struct wc_rng_bank_inst *drbg; + int V_offset; - if (! drbg) + if (wc_rng_bank_checkout(ctx, &drbg, n, 0, WC_RNG_BANK_FLAG_NONE) != 0) continue; for (i = 0, V_offset = 0; i < len; ++i) { - ((struct DRBG_internal *)drbg->rng.drbg)->V[V_offset++] += ((byte *)buf)[i]; - if (V_offset == (int)sizeof ((struct DRBG_internal *)drbg->rng.drbg)->V) + ((struct DRBG_internal *)WC_RNG_BANK_INST_TO_RNG(drbg)->drbg)->V[V_offset++] += ((byte *)buf)[i]; + if (V_offset == (int)sizeof ((struct DRBG_internal *)WC_RNG_BANK_INST_TO_RNG(drbg)->drbg)->V) V_offset = 0; } - put_drbg(drbg); + wc_rng_bank_checkin(ctx, &drbg); if (can_sleep) { if (signal_pending(current)) return -EINTR; @@ -1688,40 +1461,23 @@ static int wc_mix_pool_bytes(const void *buf, size_t len) { } static int wc_crng_reseed(void) { - struct wc_linuxkm_drbg_ctx *ctx = get_default_drbg_ctx(); - int n; + struct wc_rng_bank *ctx = get_default_drbg_ctx(); int can_sleep = (preempt_count() == 0); + int ret; - if (! ctx) - return -EFAULT; - - for (n = ctx->n_rngs - 1; n >= 0; --n) { - struct wc_rng_inst *drbg = get_drbg_n(ctx, n, 1); - - if (! 
drbg) - return -EINTR; - - ((struct DRBG_internal *)drbg->rng.drbg)->reseedCtr = WC_RESEED_INTERVAL; - - if (can_sleep) { - byte scratch[4]; - int need_reenable_vec = (DISABLE_VECTOR_REGISTERS() == 0); - int ret = wc_RNG_GenerateBlock(&drbg->rng, scratch, (word32)sizeof(scratch)); - if (need_reenable_vec) - REENABLE_VECTOR_REGISTERS(); - if (ret != 0) - pr_err("ERROR: wc_crng_reseed() wc_RNG_GenerateBlock() for DRBG #%d returned %d.", n, ret); - put_drbg(drbg); - if (signal_pending(current)) - return -EINTR; - cond_resched(); - } - else { - put_drbg(drbg); - } + ret = wc_rng_bank_reseed(ctx, WC_LINUXKM_INITRNG_TIMEOUT_SEC, + can_sleep + ? + WC_RNG_BANK_FLAG_CAN_WAIT + : + WC_RNG_BANK_FLAG_NONE); + if (ret != 0) { + pr_err("ERROR: wc_rng_bank_reseed() returned err %d.\n", ret); + return -EINVAL; + } + else { + return 0; } - - return 0; } struct wolfssl_linuxkm_random_bytes_handlers random_bytes_handlers = { @@ -2150,25 +1906,18 @@ static int wc_linuxkm_drbg_startup(void) } static int wc_linuxkm_drbg_cleanup(void) { - int cur_refcnt = WC_LKM_REFCOUNT_TO_INT(wc_linuxkm_drbg.base.cra_refcnt); + int cur_refcnt; if (! wc_linuxkm_drbg_loaded) { pr_err("ERROR: wc_linuxkm_drbg_cleanup called with ! wc_linuxkm_drbg_loaded"); return -EINVAL; } - if (cur_refcnt - wc_linuxkm_drbg_default_instance_registered != 1) { - pr_err("ERROR: wc_linuxkm_drbg_cleanup called with refcnt = %d, with wc_linuxkm_drbg %sset as default rng", - cur_refcnt, wc_linuxkm_drbg_default_instance_registered ? "" : "not "); - return -EBUSY; - } - - /* The below is racey, but the kernel doesn't provide any other way. It's - * written to be retryable. - */ - #ifdef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT if (wc_linuxkm_drbg_default_instance_registered) { + /* These deinstallations are racey, but the kernel doesn't provide any other + * way. It's written to be retryable. 
+ */ int ret; #ifdef LINUXKM_DRBG_GET_RANDOM_BYTES @@ -2193,16 +1942,16 @@ static int wc_linuxkm_drbg_cleanup(void) { #elif defined(WOLFSSL_LINUXKM_USE_GET_RANDOM_KPROBES) if (wc_get_random_bytes_kprobe_installed) { - wc_get_random_bytes_kprobe_installed = 0; - barrier(); unregister_kprobe(&wc_get_random_bytes_kprobe); + barrier(); + wc_get_random_bytes_kprobe_installed = 0; pr_info("libwolfssl: wc_get_random_bytes_kprobe uninstalled\n"); } #ifdef WOLFSSL_LINUXKM_USE_GET_RANDOM_USER_KRETPROBE if (wc_get_random_bytes_user_kretprobe_installed) { - wc_get_random_bytes_user_kretprobe_installed = 0; - barrier(); unregister_kretprobe(&wc_get_random_bytes_user_kretprobe); + barrier(); + wc_get_random_bytes_user_kretprobe_installed = 0; pr_info("libwolfssl: wc_get_random_bytes_user_kretprobe uninstalled\n"); } #endif /* WOLFSSL_LINUXKM_USE_GET_RANDOM_USER_KRETPROBE */ @@ -2218,14 +1967,18 @@ static int wc_linuxkm_drbg_cleanup(void) { pr_err("ERROR: crypto_del_default_rng failed: %d", ret); return ret; } - cur_refcnt = WC_LKM_REFCOUNT_TO_INT(wc_linuxkm_drbg.base.cra_refcnt); - if (cur_refcnt != 1) { - pr_warn("WARNING: wc_linuxkm_drbg refcnt = %d after crypto_del_default_rng()", cur_refcnt); - return -EINVAL; - } + + wc_linuxkm_drbg_default_instance_registered = 0; } #endif /* LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT */ + cur_refcnt = WC_LKM_REFCOUNT_TO_INT(wc_linuxkm_drbg.base.cra_refcnt); + + if (cur_refcnt != 1) { + pr_err("ERROR: wc_linuxkm_drbg_cleanup called with refcnt = %d", cur_refcnt); + return -EBUSY; + } + crypto_unregister_rng(&wc_linuxkm_drbg); if (! 
(wc_linuxkm_drbg.base.cra_flags & CRYPTO_ALG_DEAD)) { @@ -2233,10 +1986,6 @@ static int wc_linuxkm_drbg_cleanup(void) { return -EBUSY; } -#ifdef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT - wc_linuxkm_drbg_default_instance_registered = 0; -#endif /* LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT */ - wc_linuxkm_drbg_loaded = 0; return 0; @@ -2244,4 +1993,8 @@ static int wc_linuxkm_drbg_cleanup(void) { #endif /* LINUXKM_LKCAPI_REGISTER_HASH_DRBG */ +#ifndef LKCAPI_INITRNG + #define LKCAPI_INITRNG(rng) wc_InitRng(rng) +#endif + #endif /* !WC_SKIP_INCLUDED_C_FILES */ diff --git a/linuxkm/module_exports.c.template b/linuxkm/module_exports.c.template index 2784041f8..6af571d94 100644 --- a/linuxkm/module_exports.c.template +++ b/linuxkm/module_exports.c.template @@ -66,6 +66,9 @@ #include #endif #include + #ifdef WC_RNG_BANK_SUPPORT + #include + #endif #endif #include #include diff --git a/linuxkm/module_hooks.c b/linuxkm/module_hooks.c index c92074b0a..df7dad320 100644 --- a/linuxkm/module_hooks.c +++ b/linuxkm/module_hooks.c @@ -297,8 +297,10 @@ void wc_linuxkm_relax_long_loop(void) { */ } #endif + return; } #endif + cpu_relax(); } #if defined(WC_LINUXKM_WOLFENTROPY_IN_GLUE_LAYER) @@ -1286,7 +1288,9 @@ static int set_up_wolfssl_linuxkm_pie_redirect_table(void) { wolfssl_linuxkm_pie_redirect_table.kvfree = kvfree; #endif +#ifndef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT wolfssl_linuxkm_pie_redirect_table.get_random_bytes = get_random_bytes; +#endif #if LINUX_VERSION_CODE < KERNEL_VERSION(4, 0, 0) wolfssl_linuxkm_pie_redirect_table.getnstimeofday = getnstimeofday; @@ -1475,9 +1479,7 @@ static int set_up_wolfssl_linuxkm_pie_redirect_table(void) { #endif /* OPENSSL_EXTRA || OPENSSL_EXTRA_X509_SMALL */ #endif /* !WOLFCRYPT_ONLY && !NO_CERTS */ -#ifdef WOLFSSL_DEBUG_BACKTRACE_ERROR_CODES wolfssl_linuxkm_pie_redirect_table.dump_stack = dump_stack; -#endif wolfssl_linuxkm_pie_redirect_table.preempt_count = my_preempt_count; #ifndef _raw_spin_lock_irqsave diff --git 
a/linuxkm/patches/5.14.0-570.58.1.el9_6/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v14-570v58v1-el9_6.patch b/linuxkm/patches/5.14.0-570.58.1.el9_6/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v14-570v58v1-el9_6.patch new file mode 100644 index 000000000..aa1d1b6c6 --- /dev/null +++ b/linuxkm/patches/5.14.0-570.58.1.el9_6/WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS-5v14-570v58v1-el9_6.patch @@ -0,0 +1,462 @@ +--- 5.14.0-570.58.1.el9_6/drivers/char/random.c.dist 2026-01-12 10:50:48.537243229 -0600 ++++ 5.14.0-570.58.1.el9_6/drivers/char/random.c 2026-01-12 11:17:45.002091787 -0600 +@@ -63,6 +63,260 @@ + #include + #include + ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ ++#include ++ ++static atomic_long_t random_bytes_cb_owner = ++ ATOMIC_INIT((long)NULL); ++static atomic_t random_bytes_cb_refcnt = ++ ATOMIC_INIT(0); /* 0 if unregistered, 1 if no calls in flight. */ ++static _get_random_bytes_cb_t _get_random_bytes_cb = NULL; ++static get_random_bytes_user_cb_t get_random_bytes_user_cb = NULL; ++static crng_ready_cb_t crng_ready_cb = NULL; ++static mix_pool_bytes_cb_t mix_pool_bytes_cb = NULL; ++static credit_init_bits_cb_t credit_init_bits_cb = NULL; ++static crng_reseed_cb_t crng_reseed_cb = NULL; ++ ++int wolfssl_linuxkm_register_random_bytes_handlers( ++ struct module *new_random_bytes_cb_owner, ++ const struct wolfssl_linuxkm_random_bytes_handlers *handlers) ++{ ++ if ((! new_random_bytes_cb_owner) || ++ (! handlers) || ++ (! handlers->_get_random_bytes) || ++ (! handlers->get_random_bytes_user)) ++ { ++ return -EINVAL; ++ } ++ ++ /* random_bytes_cb_owner is used to enforce serialization of ++ * wolfssl_register_random_bytes_handlers() and ++ * wolfssl_unregister_random_bytes_handlers(). 
++ */ ++ if (atomic_long_cmpxchg(&random_bytes_cb_owner, ++ (long)NULL, ++ (long)new_random_bytes_cb_owner) ++ != (long)NULL) ++ { ++ return -EBUSY; ++ } ++ ++ { ++ int current_random_bytes_cb_refcnt = atomic_read(&random_bytes_cb_refcnt); ++ if (current_random_bytes_cb_refcnt) { ++ pr_err("BUG: random_bytes_cb_refcnt == %d with null random_bytes_cb_owner", current_random_bytes_cb_refcnt); ++ atomic_long_set(&random_bytes_cb_owner, (long)NULL); ++ return -EFAULT; ++ } ++ } ++ ++ if (! try_module_get(new_random_bytes_cb_owner)) { ++ atomic_long_set(&random_bytes_cb_owner, (long)NULL); ++ return -ENODEV; ++ } ++ ++ _get_random_bytes_cb = handlers->_get_random_bytes; ++ get_random_bytes_user_cb = handlers->get_random_bytes_user; ++ crng_ready_cb = handlers->crng_ready; ++ mix_pool_bytes_cb = handlers->mix_pool_bytes; ++ credit_init_bits_cb = handlers->credit_init_bits; ++ crng_reseed_cb = handlers->crng_reseed; ++ ++ barrier(); ++ atomic_set_release(&random_bytes_cb_refcnt, 1); ++ ++ return 0; ++} ++EXPORT_SYMBOL_GPL(wolfssl_linuxkm_register_random_bytes_handlers); ++ ++int wolfssl_linuxkm_unregister_random_bytes_handlers(void) ++{ ++ int current_random_bytes_cb_refcnt; ++ int n_tries; ++ if (! atomic_long_read(&random_bytes_cb_owner)) ++ return -ENODEV; ++ ++ /* we're racing the kernel at large to try to catch random_bytes_cb_refcnt ++ * with no callers in flight -- retry and relax up to 100 times. 
++ */ ++ for (n_tries = 0; n_tries < 100; ++n_tries) { ++ current_random_bytes_cb_refcnt = atomic_cmpxchg(&random_bytes_cb_refcnt, 1, 0); ++ if (current_random_bytes_cb_refcnt == 1) ++ break; ++ if (current_random_bytes_cb_refcnt < 0) { ++ pr_err("BUG: random_bytes_cb_refcnt is %d in wolfssl_linuxkm_unregister_random_bytes_handlers.", current_random_bytes_cb_refcnt); ++ break; ++ } ++ if (msleep_interruptible(10) != 0) ++ return -EINTR; ++ } ++ if (current_random_bytes_cb_refcnt != 1) { ++ pr_warn("WARNING: wolfssl_unregister_random_bytes_handlers called with random_bytes_cb_refcnt == %d", current_random_bytes_cb_refcnt); ++ return -EBUSY; ++ } ++ ++ _get_random_bytes_cb = NULL; ++ get_random_bytes_user_cb = NULL; ++ crng_ready_cb = NULL; ++ mix_pool_bytes_cb = NULL; ++ credit_init_bits_cb = NULL; ++ crng_reseed_cb = NULL; ++ ++ module_put((struct module *)atomic_long_read(&random_bytes_cb_owner)); ++ barrier(); ++ atomic_long_set(&random_bytes_cb_owner, (long)NULL); ++ ++ return 0; ++} ++EXPORT_SYMBOL_GPL(wolfssl_linuxkm_unregister_random_bytes_handlers); ++ ++static __always_inline int reserve_random_bytes_cb(void) { ++ int current_random_bytes_cb_refcnt = ++ atomic_read_acquire(&random_bytes_cb_refcnt); ++ ++ if (! current_random_bytes_cb_refcnt) ++ return -ENODEV; ++ ++ if (current_random_bytes_cb_refcnt < 0) { ++ pr_err("BUG: random_bytes_cb_refcnt is %d in reserve_random_bytes_cb.", current_random_bytes_cb_refcnt); ++ return -EFAULT; ++ } ++ ++ for (;;) { ++ int orig_random_bytes_cb_refcnt = ++ atomic_cmpxchg( ++ &random_bytes_cb_refcnt, ++ current_random_bytes_cb_refcnt, ++ current_random_bytes_cb_refcnt + 1); ++ if (orig_random_bytes_cb_refcnt == current_random_bytes_cb_refcnt) ++ return 0; ++ else if (! 
orig_random_bytes_cb_refcnt) ++ return -ENODEV; ++ else ++ current_random_bytes_cb_refcnt = orig_random_bytes_cb_refcnt; ++ } ++ ++ __builtin_unreachable(); ++} ++ ++static __always_inline void release_random_bytes_cb(void) { ++ atomic_dec(&random_bytes_cb_refcnt); ++} ++ ++static inline int call__get_random_bytes_cb(void *buf, size_t len) ++{ ++ int ret; ++ ++ if (! _get_random_bytes_cb) ++ return -ENODEV; ++ ++ ret = reserve_random_bytes_cb(); ++ if (ret) ++ return ret; ++ ++ ret = _get_random_bytes_cb(buf, len); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++static inline ssize_t call_get_random_bytes_user_cb(struct iov_iter *iter) ++{ ++ ssize_t ret; ++ ++ if (! get_random_bytes_user_cb) ++ return -ECANCELED; ++ ++ ret = (ssize_t)reserve_random_bytes_cb(); ++ if (ret) ++ return ret; ++ ++ ret = get_random_bytes_user_cb(iter); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++static inline bool call_crng_ready_cb(void) ++{ ++ bool ret; ++ ++ /* Null crng_ready_cb signifies that the DRBG is always ready, i.e. that if ++ * called, it will always have or obtain sufficient entropy to fulfill the ++ * call. ++ */ ++ if (! crng_ready_cb) ++ return 1; ++ ++ if (reserve_random_bytes_cb() != 0) ++ return 0; ++ ++ ret = crng_ready_cb(); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++static inline int call_mix_pool_bytes_cb(const void *buf, size_t len) ++{ ++ int ret; ++ ++ if (! mix_pool_bytes_cb) ++ return -ENODEV; ++ ++ ret = reserve_random_bytes_cb(); ++ if (ret) ++ return ret; ++ ++ ret = mix_pool_bytes_cb(buf, len); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++static inline int call_credit_init_bits_cb(size_t bits) ++{ ++ int ret; ++ ++ if (! credit_init_bits_cb) ++ return -ENODEV; ++ ++ ret = reserve_random_bytes_cb(); ++ if (ret) ++ return ret; ++ ++ ret = credit_init_bits_cb(bits); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++static inline int call_crng_reseed_cb(void) ++{ ++ int ret; ++ ++ if (! 
crng_reseed_cb) ++ return -ENODEV; ++ ++ ret = reserve_random_bytes_cb(); ++ if (ret) ++ return ret; ++ ++ ret = crng_reseed_cb(); ++ ++ release_random_bytes_cb(); ++ ++ return ret; ++} ++ ++#endif /* WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS */ ++ + /********************************************************************* + * + * Initialization and readiness waiting. +@@ -83,7 +337,15 @@ static enum { + CRNG_READY = 2 /* Fully initialized with POOL_READY_BITS collected */ + } crng_init __read_mostly = CRNG_EMPTY; + static DEFINE_STATIC_KEY_FALSE(crng_is_ready); ++ + #define crng_ready() (static_branch_likely(&crng_is_ready) || crng_init >= CRNG_READY) ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ #define crng_ready_by_cb() (atomic_read(&random_bytes_cb_refcnt) && call_crng_ready_cb()) ++ #define crng_ready_maybe_cb() (atomic_read(&random_bytes_cb_refcnt) ? (call_crng_ready_cb() || crng_ready()) : crng_ready()) ++#else ++ #define crng_ready_maybe_cb() crng_ready() ++#endif ++ + /* Various types of waiters for crng_init->CRNG_READY transition. */ + static DECLARE_WAIT_QUEUE_HEAD(crng_init_wait); + static struct fasync_struct *fasync; +@@ -108,7 +370,7 @@ MODULE_PARM_DESC(ratelimit_disable, "Dis + */ + bool rng_is_initialized(void) + { +- return crng_ready(); ++ return crng_ready_maybe_cb(); + } + EXPORT_SYMBOL(rng_is_initialized); + +@@ -132,11 +394,11 @@ static void try_to_generate_entropy(void + */ + int wait_for_random_bytes(void) + { +- while (!crng_ready()) { ++ while (!crng_ready_maybe_cb()) { + int ret; + + try_to_generate_entropy(); +- ret = wait_event_interruptible_timeout(crng_init_wait, crng_ready(), HZ); ++ ret = wait_event_interruptible_timeout(crng_init_wait, crng_ready_maybe_cb(), HZ); + if (ret) + return ret > 0 ? 
0 : ret; + } +@@ -165,7 +427,7 @@ int __cold execute_with_initialized_rng( + } + + #define warn_unseeded_randomness() \ +- if (IS_ENABLED(CONFIG_WARN_ALL_UNSEEDED_RANDOM) && !crng_ready()) \ ++ if (IS_ENABLED(CONFIG_WARN_ALL_UNSEEDED_RANDOM) && !crng_ready_maybe_cb()) \ + printk_deferred(KERN_NOTICE "random: %s called from %pS with crng_init=%d\n", \ + __func__, (void *)_RET_IP_, crng_init) + +@@ -388,6 +650,14 @@ static void _get_random_bytes(void *buf, + if (!len) + return; + ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ /* If call__get_random_bytes_cb() doesn't succeed, flow falls through to ++ * the native implementation. _get_random_bytes() must succeed. ++ */ ++ if (call__get_random_bytes_cb(buf, len) == 0) ++ return; ++#endif ++ + first_block_len = min_t(size_t, 32, len); + crng_make_state(chacha_state, buf, first_block_len); + len -= first_block_len; +@@ -434,6 +704,18 @@ static ssize_t get_random_bytes_user(str + if (unlikely(!iov_iter_count(iter))) + return 0; + ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ { ++ ssize_t cb_ret = call_get_random_bytes_user_cb(iter); ++ /* If the callback returns -ECANCELED, that signals that iter is ++ * still intact, and flow can safely fall through to the native ++ * implementation. ++ */ ++ if (cb_ret != -ECANCELED) ++ return cb_ret; ++ } ++#endif ++ + /* + * Immediately overwrite the ChaCha key at index 4 with random + * bytes, in case userspace causes copy_to_iter() below to sleep +@@ -510,7 +792,7 @@ type get_random_ ##type(void) \ + \ + warn_unseeded_randomness(); \ + \ +- if (!crng_ready()) { \ ++ if (!crng_ready_maybe_cb()) { \ + _get_random_bytes(&ret, sizeof(ret)); \ + return ret; \ + } \ +@@ -649,6 +931,11 @@ static void mix_pool_bytes(const void *b + { + unsigned long flags; + ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ (void)call_mix_pool_bytes_cb(buf, len); ++ /* fall through to mix into native pool too. 
*/ ++#endif ++ + spin_lock_irqsave(&input_pool.lock, flags); + _mix_pool_bytes(buf, len); + spin_unlock_irqrestore(&input_pool.lock, flags); +@@ -708,7 +995,11 @@ static void extract_entropy(void *buf, s + memzero_explicit(&block, sizeof(block)); + } + ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++#define credit_init_bits(bits) do { (void)call_credit_init_bits_cb(bits); if (!crng_ready()) _credit_init_bits(bits); } while (0) ++#else + #define credit_init_bits(bits) if (!crng_ready()) _credit_init_bits(bits) ++#endif + + static void __cold _credit_init_bits(size_t bits) + { +@@ -1419,7 +1710,7 @@ SYSCALL_DEFINE3(getrandom, char __user * + return ret; + } + +- if (!crng_ready() && !(flags & GRND_INSECURE)) { ++ if (!crng_ready_maybe_cb() && !(flags & GRND_INSECURE)) { + if (flags & GRND_NONBLOCK) + return -EAGAIN; + ret = wait_for_random_bytes(); +@@ -1435,6 +1726,10 @@ SYSCALL_DEFINE3(getrandom, char __user * + + static __poll_t random_poll(struct file *file, poll_table *wait) + { ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ if (crng_ready_by_cb()) ++ return EPOLLIN | EPOLLRDNORM; ++#endif + poll_wait(file, &crng_init_wait, wait); + return crng_ready() ? EPOLLIN | EPOLLRDNORM : EPOLLOUT | EPOLLWRNORM; + } +@@ -1490,7 +1785,7 @@ static ssize_t urandom_read_iter(struct + if (!crng_ready()) + try_to_generate_entropy(); + +- if (!crng_ready()) { ++ if (!crng_ready_maybe_cb()) { + if (!ratelimit_disable && maxwarn <= 0) + ++urandom_warning.missed; + else if (ratelimit_disable || __ratelimit(&urandom_warning)) { +@@ -1573,6 +1868,14 @@ static long random_ioctl(struct file *f, + case RNDRESEEDCRNG: + if (!capable(CAP_SYS_ADMIN)) + return -EPERM; ++#ifdef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ /* fall through to reseed native crng too. 
*/ ++ if (call_crng_reseed_cb() == 0) { ++ if (crng_ready()) ++ crng_reseed(NULL); ++ return 0; ++ } ++#endif + if (!crng_ready()) + return -ENODATA; + crng_reseed(NULL); +--- 5.14.0-570.58.1.el9_6/include/linux/random.h.dist 2026-01-12 10:50:57.004413581 -0600 ++++ 5.14.0-570.58.1.el9_6/include/linux/random.h 2026-01-12 10:56:29.124034816 -0600 +@@ -175,4 +175,37 @@ int random_online_cpu(unsigned int cpu); + extern const struct file_operations random_fops, urandom_fops; + #endif + ++#ifndef WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS ++ #define WOLFSSL_LINUXKM_HAVE_GET_RANDOM_CALLBACKS 1 ++#endif ++ ++typedef int (*_get_random_bytes_cb_t)(void *buf, size_t len); ++struct iov_iter; ++/* kernels >= 5.17.0 use get_random_bytes_user() */ ++typedef ssize_t (*get_random_bytes_user_cb_t)(struct iov_iter *iter); ++/* kernels < 5.17.0 use extract_crng_user(), though some LTS kernels, ++ * e.g. 5.10.236, have the 5.17+ architecture backported. ++ */ ++typedef ssize_t (*extract_crng_user_cb_t)(void __user *buf, size_t nbytes); ++typedef bool (*crng_ready_cb_t)(void); ++typedef int (*mix_pool_bytes_cb_t)(const void *buf, size_t len); ++typedef int (*credit_init_bits_cb_t)(size_t bits); ++typedef int (*crng_reseed_cb_t)(void); ++ ++struct wolfssl_linuxkm_random_bytes_handlers { ++ _get_random_bytes_cb_t _get_random_bytes; ++ get_random_bytes_user_cb_t get_random_bytes_user; ++ extract_crng_user_cb_t extract_crng_user; ++ crng_ready_cb_t crng_ready; ++ mix_pool_bytes_cb_t mix_pool_bytes; ++ credit_init_bits_cb_t credit_init_bits; ++ crng_reseed_cb_t crng_reseed; ++}; ++ ++int wolfssl_linuxkm_register_random_bytes_handlers( ++ struct module *new_random_bytes_cb_owner, ++ const struct wolfssl_linuxkm_random_bytes_handlers *handlers); ++ ++int wolfssl_linuxkm_unregister_random_bytes_handlers(void); ++ + #endif /* _LINUX_RANDOM_H */ diff --git a/linuxkm/x86_vector_register_glue.c b/linuxkm/x86_vector_register_glue.c index 4e1e47ba2..c7bd83eae 100644 --- 
a/linuxkm/x86_vector_register_glue.c +++ b/linuxkm/x86_vector_register_glue.c @@ -332,7 +332,8 @@ WARN_UNUSED_RESULT int wc_save_vector_registers_x86(enum wc_svr_flags flags) * a second look at preempt_count(). */ if (((preempt_count() & (NMI_MASK | HARDIRQ_MASK)) != 0) || (task_pid_nr(current) == 0)) { - VRG_PR_WARN_X("WARNING: wc_save_vector_registers_x86 called with preempt_count 0x%x and pid %d on CPU %d.\n", preempt_count(), task_pid_nr(current), raw_smp_processor_id()); + if (! (flags & WC_SVR_FLAG_INHIBIT)) + VRG_PR_WARN_X("WARNING: wc_save_vector_registers_x86(0x%x) called with preempt_count 0x%x and pid %d on CPU %d.\n", (unsigned)flags, preempt_count(), task_pid_nr(current), raw_smp_processor_id()); return WC_ACCEL_INHIBIT_E; } diff --git a/scripts/ocsp-stapling.test b/scripts/ocsp-stapling.test index 2c5e80588..ff9d8bfc8 100755 --- a/scripts/ocsp-stapling.test +++ b/scripts/ocsp-stapling.test @@ -119,6 +119,29 @@ remove_single_rF(){ fi } +retry_with_backoff() { + local max_attempts=$1 + shift + local attempt=1 + local delay=1 + local status=0 + + while :; do + "$@" + status=$? + if [ $status -eq 0 ]; then + return 0 + fi + if [ $attempt -ge $max_attempts ]; then + return $status + fi + printf '%s\n' "Retry $attempt/$max_attempts failed, backing off ${delay}s..." + sleep $delay + attempt=$((attempt + 1)) + delay=$((delay * 2)) + done +} + #create a configure file for cert generation with the port 0 solution create_new_cnf() { printf '%s\n' "Random Port Selected: $1" @@ -304,7 +327,7 @@ server=login.live.com ca=./certs/external/ca_collection.pem if [[ "$V4V6" == "4" ]]; then - ./examples/client/client -C -h $server -p 443 -A $ca -g -W 1 + retry_with_backoff 3 ./examples/client/client -C -h $server -p 443 -A $ca -g -W 1 RESULT=$? 
[ $RESULT -ne 0 ] && echo -e "\n\nClient connection failed" && exit 1 else diff --git a/src/crl.c b/src/crl.c index 9056bd1c6..d3b93bf40 100644 --- a/src/crl.c +++ b/src/crl.c @@ -138,7 +138,7 @@ static int InitCRL_Entry(CRL_Entry* crle, DecodedCRL* dcrl, const byte* buff, crle->totalCerts = dcrl->totalCerts; crle->crlNumberSet = dcrl->crlNumberSet; if (crle->crlNumberSet) { - XMEMCPY(crle->crlNumber, dcrl->crlNumber, CRL_MAX_NUM_SZ); + XMEMCPY(crle->crlNumber, dcrl->crlNumber, sizeof(crle->crlNumber)); } crle->verified = verified; if (!verified) { @@ -446,7 +446,8 @@ static int CheckCertCRLList(WOLFSSL_CRL* crl, byte* issuerHash, byte* serial, #endif { #if !defined(NO_ASN_TIME) && !defined(WOLFSSL_NO_CRL_DATE_CHECK) - if (!XVALIDATE_DATE(crle->nextDate,crle->nextDateFormat, ASN_AFTER)) { + if (!XVALIDATE_DATE(crle->nextDate, crle->nextDateFormat, + ASN_AFTER, MAX_DATE_SIZE)) { WOLFSSL_MSG("CRL next date is no longer valid"); nextDateValid = 0; } @@ -596,7 +597,7 @@ static void SetCrlInfo(CRL_Entry* entry, CrlInfo *info) info->nextDateFormat = entry->nextDateFormat; info->crlNumberSet = entry->crlNumberSet; if (info->crlNumberSet) - XMEMCPY(info->crlNumber, entry->crlNumber, CRL_MAX_NUM_SZ); + XMEMCPY(info->crlNumber, entry->crlNumber, sizeof(entry->crlNumber)); } static void SetCrlInfoFromDecoded(DecodedCRL* entry, CrlInfo *info) @@ -611,7 +612,7 @@ static void SetCrlInfoFromDecoded(DecodedCRL* entry, CrlInfo *info) info->nextDateFormat = entry->nextDateFormat; info->crlNumberSet = entry->crlNumberSet; if (info->crlNumberSet) - XMEMCPY(info->crlNumber, entry->crlNumber, CRL_MAX_NUM_SZ); + XMEMCPY(info->crlNumber, entry->crlNumber, sizeof(entry->crlNumber)); } #endif @@ -621,14 +622,14 @@ static void SetCrlInfoFromDecoded(DecodedCRL* entry, CrlInfo *info) static int CompareCRLnumber(CRL_Entry* prev, CRL_Entry* curr) { int ret = 0; - DECL_MP_INT_SIZE_DYN(prev_num, CRL_MAX_NUM_SZ * CHAR_BIT, - CRL_MAX_NUM_SZ * CHAR_BIT); - DECL_MP_INT_SIZE_DYN(curr_num, 
CRL_MAX_NUM_SZ * CHAR_BIT, - CRL_MAX_NUM_SZ * CHAR_BIT); + DECL_MP_INT_SIZE_DYN(prev_num, CRL_MAX_NUM_SZ_BITS, + CRL_MAX_NUM_SZ_BITS); + DECL_MP_INT_SIZE_DYN(curr_num, CRL_MAX_NUM_SZ_BITS, + CRL_MAX_NUM_SZ_BITS); - NEW_MP_INT_SIZE(prev_num, CRL_MAX_NUM_SZ * CHAR_BIT, NULL, + NEW_MP_INT_SIZE(prev_num, CRL_MAX_NUM_SZ_BITS, NULL, DYNAMIC_TYPE_TMP_BUFFER); - NEW_MP_INT_SIZE(curr_num, CRL_MAX_NUM_SZ * CHAR_BIT, NULL, + NEW_MP_INT_SIZE(curr_num, CRL_MAX_NUM_SZ_BITS, NULL, DYNAMIC_TYPE_TMP_BUFFER); #ifdef MP_INT_SIZE_CHECK_NULL if ((prev_num == NULL) || (curr_num == NULL)) { @@ -636,9 +637,9 @@ static int CompareCRLnumber(CRL_Entry* prev, CRL_Entry* curr) } #endif - if (ret == 0 && ((INIT_MP_INT_SIZE(prev_num, CRL_MAX_NUM_SZ * CHAR_BIT) + if (ret == 0 && ((INIT_MP_INT_SIZE(prev_num, CRL_MAX_NUM_SZ_BITS) != MP_OKAY) || (INIT_MP_INT_SIZE(curr_num, - CRL_MAX_NUM_SZ * CHAR_BIT)) != MP_OKAY)) { + CRL_MAX_NUM_SZ_BITS)) != MP_OKAY)) { ret = MP_INIT_E; } @@ -658,11 +659,10 @@ static int CompareCRLnumber(CRL_Entry* prev, CRL_Entry* curr) return ret; } -/* Add Decoded CRL, 0 on success */ -static int AddCRL(WOLFSSL_CRL* crl, DecodedCRL* dcrl, const byte* buff, - int verified) +/* Add or replace a decoded CRL, 0 on success */ +static int AddCRL(WOLFSSL_CRL* crl, DecodedCRL* dcrl, CRL_Entry* crle, + const byte* buff, int verified) { - CRL_Entry* crle = NULL; CRL_Entry* curr = NULL; CRL_Entry* prev = NULL; #ifdef HAVE_CRL_UPDATE_CB @@ -676,25 +676,13 @@ static int AddCRL(WOLFSSL_CRL* crl, DecodedCRL* dcrl, const byte* buff, if (crl == NULL) return WOLFSSL_FATAL_ERROR; - crle = crl->currentEntry; - - if (crle == NULL) { - crle = CRL_Entry_new(crl->heap); - if (crle == NULL) { - WOLFSSL_MSG("alloc CRL Entry failed"); - return MEMORY_E; - } - } - if (InitCRL_Entry(crle, dcrl, buff, verified, crl->heap) < 0) { WOLFSSL_MSG("Init CRL Entry failed"); - CRL_Entry_free(crle, crl->heap); return WOLFSSL_FATAL_ERROR; } if (wc_LockRwLock_Wr(&crl->crlLock) != 0) { WOLFSSL_MSG("wc_LockRwLock_Wr 
failed"); - CRL_Entry_free(crle, crl->heap); return BAD_MUTEX_E; } @@ -705,15 +693,16 @@ static int AddCRL(WOLFSSL_CRL* crl, DecodedCRL* dcrl, const byte* buff, * authoritative than the existing entry */ if (ret == MP_LT || ret == MP_EQ) { WOLFSSL_MSG("Same or newer CRL entry already exists"); - CRL_Entry_free(crle, crl->heap); wc_UnLockRwLock(&crl->crlLock); return BAD_FUNC_ARG; } else if (ret < 0) { WOLFSSL_MSG("Error comparing CRL Numbers"); + wc_UnLockRwLock(&crl->crlLock); return ret; } + /* Insert the new entry after the current entry. */ crle->next = curr->next; if (prev != NULL) { prev->next = crle; @@ -730,22 +719,21 @@ static int AddCRL(WOLFSSL_CRL* crl, DecodedCRL* dcrl, const byte* buff, } #endif + /* Remove the current entry which was replaced */ + CRL_Entry_free(curr, crl->heap); + break; } prev = curr; } - if (curr != NULL) { - CRL_Entry_free(curr, crl->heap); - } - else { + if (curr == NULL) { + /* No replacement occurred, prepend the new entry. */ crle->next = crl->crlList; crl->crlList = crle; } - wc_UnLockRwLock(&crl->crlLock); - /* Avoid heap-use-after-free after crl->crlList is released */ - crl->currentEntry = NULL; + wc_UnLockRwLock(&crl->crlLock); return 0; } @@ -809,12 +797,14 @@ int BufferLoadCRL(WOLFSSL_CRL* crl, const byte* buff, long sz, int type, crl->currentEntry = NULL; } else { - ret = AddCRL(crl, dcrl, myBuffer, + ret = AddCRL(crl, dcrl, crl->currentEntry, myBuffer, ret != WC_NO_ERR_TRACE(ASN_CRL_NO_SIGNER_E)); if (ret != 0) { WOLFSSL_MSG_CERT_LOG("AddCRL error"); - crl->currentEntry = NULL; + CRL_Entry_free(crl->currentEntry, crl->heap); } + /* Entry now is in the list, or has been freed due to error */ + crl->currentEntry = NULL; } FreeDecodedCRL(dcrl); @@ -826,6 +816,7 @@ int BufferLoadCRL(WOLFSSL_CRL* crl, const byte* buff, long sz, int type, return ret ? 
ret : WOLFSSL_SUCCESS; /* convert 0 to WOLFSSL_SUCCESS */ } + #ifdef HAVE_CRL_UPDATE_CB /* Fill out CRL info structure, WOLFSSL_SUCCESS on ok */ int GetCRLInfo(WOLFSSL_CRL* crl, CrlInfo* info, const byte* buff, diff --git a/src/dtls13.c b/src/dtls13.c index a140b2d10..d3d5ee624 100644 --- a/src/dtls13.c +++ b/src/dtls13.c @@ -110,7 +110,9 @@ typedef struct Dtls13RecordPlaintextHeader { supported. */ #define DTLS13_UNIFIED_HEADER_SIZE 5 #define DTLS13_MIN_CIPHERTEXT 16 -#define DTLS13_MIN_RTX_INTERVAL 1 +#ifndef DTLS13_MIN_RTX_INTERVAL +#define DTLS13_MIN_RTX_INTERVAL (DTLS_TIMEOUT_INIT * 1000) +#endif #ifndef NO_WOLFSSL_CLIENT WOLFSSL_METHOD* wolfDTLSv1_3_client_method_ex(void* heap) @@ -978,7 +980,8 @@ static int Dtls13SendOneFragmentRtx(WOLFSSL* ssl, static int Dtls13SendFragmentedInternal(WOLFSSL* ssl) { int fragLength, rlHeaderLength; - int remainingSize, maxFragment; + word32 remainingSize; + int maxFragment; int recordLength, outputSz; byte isEncrypted; byte* output; @@ -988,16 +991,19 @@ static int Dtls13SendFragmentedInternal(WOLFSSL* ssl) (enum HandShakeType)ssl->dtls13FragHandshakeType); rlHeaderLength = Dtls13GetRlHeaderLength(ssl, isEncrypted); maxFragment = wolfssl_local_GetMaxPlaintextSize(ssl); - + if (maxFragment <= DTLS_HANDSHAKE_HEADER_SZ || + maxFragment > MAX_RECORD_SIZE || + ssl->dtls13FragOffset > ssl->dtls13MessageLength) { + Dtls13FreeFragmentsBuffer(ssl); + return BUFFER_E; + } remainingSize = ssl->dtls13MessageLength - ssl->dtls13FragOffset; while (remainingSize > 0) { fragLength = maxFragment - DTLS_HANDSHAKE_HEADER_SZ; - - if (fragLength > remainingSize) { - fragLength = remainingSize; - } + if (fragLength > (int)remainingSize) + fragLength = (int)remainingSize; recordLength = fragLength + rlHeaderLength + DTLS_HANDSHAKE_HEADER_SZ; outputSz = wolfssl_local_GetRecordSize(ssl, @@ -1041,7 +1047,7 @@ static int Dtls13SendFragmentedInternal(WOLFSSL* ssl) } ssl->dtls13FragOffset += fragLength; - remainingSize -= fragLength; + remainingSize -= 
(word32)fragLength; } /* we sent all fragments */ @@ -1566,21 +1572,24 @@ static int Dtls13RtxSendBuffered(WOLFSSL* ssl) int isLast; int sendSz; #ifndef NO_ASN_TIME +#ifdef WOLFSSL_32BIT_MILLI_TIME word32 now; +#else + sword64 now; +#endif #endif int ret; WOLFSSL_ENTER("Dtls13RtxSendBuffered"); #ifndef NO_ASN_TIME - now = LowResTimer(); + now = TimeNowInMilliseconds(); if (now - ssl->dtls13Rtx.lastRtx < DTLS13_MIN_RTX_INTERVAL) { #ifdef WOLFSSL_DEBUG_TLS WOLFSSL_MSG("Avoid too fast retransmission"); #endif /* WOLFSSL_DEBUG_TLS */ return 0; } - ssl->dtls13Rtx.lastRtx = now; #endif diff --git a/src/include.am b/src/include.am index 96736c2b9..fe9732063 100644 --- a/src/include.am +++ b/src/include.am @@ -185,6 +185,10 @@ if BUILD_MEMUSE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/wolfentropy.c endif +if BUILD_RNG_BANK +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/rng_bank.c +endif + src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/kdf.c if BUILD_RSA @@ -304,7 +308,11 @@ if BUILD_PPC32_ASM if BUILD_PPC32_ASM_INLINE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c else +if BUILD_PPC32_ASM_INLINE_REG +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c +else src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S +endif !BUILD_PPC32_ASM_INLINE_REG endif !BUILD_PPC32_ASM_INLINE endif BUILD_PPC32_ASM @@ -430,6 +438,10 @@ if BUILD_MEMUSE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/wolfentropy.c endif +if BUILD_RNG_BANK +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/rng_bank.c +endif + src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/kdf.c if BUILD_RSA @@ -539,7 +551,11 @@ if BUILD_PPC32_ASM if BUILD_PPC32_ASM_INLINE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c else +if BUILD_PPC32_ASM_INLINE_REG +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c +else 
src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S +endif !BUILD_PPC32_ASM_INLINE_REG endif !BUILD_PPC32_ASM_INLINE endif BUILD_PPC32_ASM @@ -781,6 +797,9 @@ src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/random.c if BUILD_MEMUSE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/wolfentropy.c endif +if BUILD_RNG_BANK +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/rng_bank.c +endif endif endif !BUILD_FIPS_V2_PLUS @@ -830,7 +849,11 @@ if BUILD_PPC32_ASM if BUILD_PPC32_ASM_INLINE src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c else +if BUILD_PPC32_ASM_INLINE_REG +src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c +else src_libwolfssl@LIBSUFFIX@_la_SOURCES += wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S +endif !BUILD_PPC32_ASM_INLINE_REG endif !BUILD_PPC32_ASM_INLINE endif BUILD_PPC32_ASM diff --git a/src/internal.c b/src/internal.c index 467274b51..5587863e4 100644 --- a/src/internal.c +++ b/src/internal.c @@ -7136,7 +7136,7 @@ int SetSSL_CTX(WOLFSSL* ssl, WOLFSSL_CTX* ctx, int writeDup) ssl->buffers.altKey = ctx->altPrivateKey; #else if (ctx->altPrivateKey != NULL) { - ret = AllocCopyDer(&ssl->buffers.altkey, ctx->altPrivateKey->buffer, + ret = AllocCopyDer(&ssl->buffers.altKey, ctx->altPrivateKey->buffer, ctx->altPrivateKey->length, ctx->altPrivateKey->type, ctx->altPrivateKey->heap); if (ret != 0) { @@ -8715,6 +8715,10 @@ void wolfSSL_ResourceFree(WOLFSSL* ssl) } #endif #endif +#if defined(HAVE_DILITHIUM) + FreeKey(ssl, DYNAMIC_TYPE_DILITHIUM, (void**)&ssl->peerDilithiumKey); + ssl->peerDilithiumKeyPresent = 0; +#endif #if defined(HAVE_FALCON) FreeKey(ssl, DYNAMIC_TYPE_FALCON, (void**)&ssl->peerFalconKey); ssl->peerFalconKeyPresent = 0; @@ -10770,6 +10774,8 @@ static int SendHandshakeMsg(WOLFSSL* ssl, byte* input, word32 inputSz, maxFrag -= DTLS_HANDSHAKE_HEADER_SZ; } #endif + if (maxFrag <= 0 || maxFrag > MAX_RECORD_SIZE) + return BUFFER_E; 
/* Make sure input is not the ssl output buffer as this * function doesn't handle that */ @@ -10805,6 +10811,8 @@ static int SendHandshakeMsg(WOLFSSL* ssl, byte* input, word32 inputSz, fragSz = inputSz - ssl->fragOffset; /* check for available size */ + if (fragSz > (word32)MAX_RECORD_SIZE) + return BUFFER_E; outputSz = headerSz + (int)fragSz; if (IsEncryptionOn(ssl, 1)) outputSz += cipherExtraData(ssl); @@ -10820,6 +10828,8 @@ static int SendHandshakeMsg(WOLFSSL* ssl, byte* input, word32 inputSz, int dataSz = (int)fragSz; #ifdef WOLFSSL_DTLS if (ssl->options.dtls) { + if (fragSz + DTLS_HANDSHAKE_HEADER_SZ > (word32)MAX_RECORD_SIZE) + return BUFFER_E; data -= DTLS_HANDSHAKE_HEADER_SZ; dataSz += DTLS_HANDSHAKE_HEADER_SZ; AddHandShakeHeader(data, inputSz, ssl->fragOffset, fragSz, @@ -15602,14 +15612,14 @@ int ProcessPeerCerts(WOLFSSL* ssl, byte* input, word32* inOutIdx, /* Empty certificate message. */ if ((ssl->options.side == WOLFSSL_SERVER_END) && (ssl->options.mutualAuth || (ssl->options.failNoCert && - IsAtLeastTLSv1_3(ssl->version)))) { + IsAtLeastTLSv1_2(ssl)))) { WOLFSSL_MSG("No peer cert from Client"); ret = NO_PEER_CERT; WOLFSSL_ERROR_VERBOSE(ret); DoCertFatalAlert(ssl, ret); } else if ((ssl->options.side == WOLFSSL_CLIENT_END) && - IsAtLeastTLSv1_3(ssl->version)) { + IsAtLeastTLSv1_2(ssl)) { WOLFSSL_MSG("No peer cert from Server"); ret = NO_PEER_CERT; WOLFSSL_ERROR_VERBOSE(ret); @@ -16213,23 +16223,31 @@ int ProcessPeerCerts(WOLFSSL* ssl, byte* input, word32* inOutIdx, } #endif + #if defined(__APPLE__) && defined(WOLFSSL_SYS_CA_CERTS) + /* If we can't validate the peer cert chain against the CAs + * loaded into wolfSSL, try to validate against the system + * certificates using Apple's native trust APIs BEFORE + * calling the verify callback so the callback sees the + * correct validation result */ + if ((ret == WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)) && + (ssl->ctx->doAppleNativeCertValidationFlag)) { + if (DoAppleNativeCertValidation(ssl, args->certs, + 
args->totalCerts)) { + WOLFSSL_MSG("Apple native cert chain validation " + "SUCCESS"); + ret = 0; + } + else { + WOLFSSL_MSG("Apple native cert chain validation " + "FAIL"); + } + } + #endif /* defined(__APPLE__) && defined(WOLFSSL_SYS_CA_CERTS) */ + /* Do verify callback. */ args->leafVerifyErr = ret = DoVerifyCallback(SSL_CM(ssl), ssl, ret, args); - #if defined(__APPLE__) && defined(WOLFSSL_SYS_CA_CERTS) - /* Disregard failure to verify peer cert, as we will verify - * the whole chain with the native API later */ - if (ssl->ctx->doAppleNativeCertValidationFlag) { - WOLFSSL_MSG("\tApple native CA validation override" - " available, will continue"); - /* check if fatal error */ - args->fatal = (args->verifyErr) ? 1 : 0; - if (args->fatal) - DoCertFatalAlert(ssl, ret); - } - else - #endif/*defined(__APPLE__)&& defined(WOLFSSL_SYS_CA_CERTS)*/ if (ret != 0) { WOLFSSL_MSG("\tfatal cert error"); args->fatal = 1; @@ -16998,23 +17016,6 @@ int ProcessPeerCerts(WOLFSSL* ssl, byte* input, word32* inOutIdx, } #endif - #if defined(__APPLE__) && defined(WOLFSSL_SYS_CA_CERTS) - /* If we can't validate the peer cert chain against the CAs loaded - * into wolfSSL, try to validate against the system certificates - * using Apple's native trust APIs */ - if ((ret == WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)) && - (ssl->ctx->doAppleNativeCertValidationFlag)) { - if (DoAppleNativeCertValidation(ssl, args->certs, - args->totalCerts)) { - WOLFSSL_MSG("Apple native cert chain validation SUCCESS"); - ret = 0; - } - else { - WOLFSSL_MSG("Apple native cert chain validation FAIL"); - } - } - #endif /* defined(__APPLE__) && defined(WOLFSSL_SYS_CA_CERTS) */ - /* Do leaf verify callback when it wasn't called yet */ if (ret == 0 || ret != args->leafVerifyErr) ret = DoVerifyCallback(SSL_CM(ssl), ssl, ret, args); @@ -17607,7 +17608,9 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) if (ssl->msgsReceived.got_certificate_status || ssl->msgsReceived.got_server_key_exchange || 
ssl->msgsReceived.got_certificate_request || - ssl->msgsReceived.got_server_hello_done) { + ssl->msgsReceived.got_server_hello_done || + ssl->msgsReceived.got_change_cipher || + ssl->msgsReceived.got_finished) { WOLFSSL_MSG("Cert received in wrong order"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17648,19 +17651,21 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) return DUPLICATE_MSG_E; } - if (ssl->msgsReceived.got_certificate == 0) { + if (!ssl->msgsReceived.got_certificate) { WOLFSSL_MSG("No Certificate before CertificateStatus"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } - if (ssl->msgsReceived.got_server_key_exchange != 0) { + if (ssl->msgsReceived.got_server_key_exchange) { WOLFSSL_MSG("CertificateStatus after ServerKeyExchange"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } if (ssl->msgsReceived.got_server_key_exchange || ssl->msgsReceived.got_certificate_request || - ssl->msgsReceived.got_server_hello_done) { + ssl->msgsReceived.got_server_hello_done || + ssl->msgsReceived.got_change_cipher || + ssl->msgsReceived.got_finished) { WOLFSSL_MSG("CertificateStatus received in wrong order"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17684,13 +17689,25 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) WOLFSSL_ERROR_VERBOSE(DUPLICATE_MSG_E); return DUPLICATE_MSG_E; } - if (ssl->msgsReceived.got_server_hello == 0) { + if (!ssl->msgsReceived.got_server_hello) { WOLFSSL_MSG("No ServerHello before ServerKeyExchange"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } + if (!ssl->msgsReceived.got_certificate) { + if (ssl->specs.kea != psk_kea && + ssl->specs.kea != dhe_psk_kea && + ssl->specs.kea != ecdhe_psk_kea && + !ssl->options.usingAnon_cipher) { + WOLFSSL_MSG("No Certificate before ServerKeyExchange"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } + } if (ssl->msgsReceived.got_certificate_request || - 
ssl->msgsReceived.got_server_hello_done) { + ssl->msgsReceived.got_server_hello_done || + ssl->msgsReceived.got_change_cipher || + ssl->msgsReceived.got_finished) { WOLFSSL_MSG("ServerKeyExchange received in wrong order"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17714,11 +17731,16 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) WOLFSSL_ERROR_VERBOSE(DUPLICATE_MSG_E); return DUPLICATE_MSG_E; } - if (ssl->msgsReceived.got_server_hello == 0) { + if (!ssl->msgsReceived.got_server_hello) { WOLFSSL_MSG("No ServerHello before CertificateRequest"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } + if (!ssl->msgsReceived.got_certificate) { + WOLFSSL_MSG("No Certificate before CertificateRequest"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } if (!ssl->options.resuming && ssl->specs.kea != rsa_kea && (ssl->specs.kea != ecc_diffie_hellman_kea || !ssl->specs.static_ecdh) && @@ -17728,12 +17750,9 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } - if (!ssl->msgsReceived.got_certificate) { - WOLFSSL_MSG("No Certificate before CertificateRequest"); - WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); - return OUT_OF_ORDER_E; - } - if (ssl->msgsReceived.got_server_hello_done) { + if (ssl->msgsReceived.got_server_hello_done || + ssl->msgsReceived.got_change_cipher || + ssl->msgsReceived.got_finished) { WOLFSSL_MSG("CertificateRequest received in wrong order"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17759,7 +17778,7 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) } ssl->msgsReceived.got_server_hello_done = 1; - if (ssl->msgsReceived.got_certificate == 0) { + if (!ssl->msgsReceived.got_certificate) { if (ssl->specs.kea == psk_kea || ssl->specs.kea == dhe_psk_kea || ssl->specs.kea == ecdhe_psk_kea || @@ -17772,7 +17791,7 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) return 
OUT_OF_ORDER_E; } } - if (ssl->msgsReceived.got_server_key_exchange == 0) { + if (!ssl->msgsReceived.got_server_key_exchange) { int pskNoServerHint = 0; /* not required in this case */ #ifndef NO_PSK @@ -17794,7 +17813,7 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) } #if defined(HAVE_CERTIFICATE_STATUS_REQUEST) || \ defined(HAVE_CERTIFICATE_STATUS_REQUEST_V2) - if (ssl->msgsReceived.got_certificate_status == 0) { + if (!ssl->msgsReceived.got_certificate_status) { int csrRet = 0; #ifdef HAVE_CERTIFICATE_STATUS_REQUEST if (csrRet == 0 && ssl->status_request) { @@ -17840,6 +17859,12 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) } } #endif + if (ssl->msgsReceived.got_change_cipher || + ssl->msgsReceived.got_finished) { + WOLFSSL_MSG("ServerHelloDone received in wrong order"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } break; #endif @@ -17857,7 +17882,12 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) WOLFSSL_ERROR_VERBOSE(DUPLICATE_MSG_E); return DUPLICATE_MSG_E; } - if ( ssl->msgsReceived.got_certificate == 0) { + if (!ssl->msgsReceived.got_client_key_exchange) { + WOLFSSL_MSG("No ClientKeyExchange before CertVerify"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } + if (!ssl->msgsReceived.got_certificate) { WOLFSSL_MSG("No Cert before CertVerify"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17886,7 +17916,7 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) WOLFSSL_ERROR_VERBOSE(DUPLICATE_MSG_E); return DUPLICATE_MSG_E; } - if (ssl->msgsReceived.got_client_hello == 0) { + if (!ssl->msgsReceived.got_client_hello) { WOLFSSL_MSG("No ClientHello before ClientKeyExchange"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; @@ -17917,7 +17947,7 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) } } #endif - if (ssl->msgsReceived.got_change_cipher == 0) { + if (!ssl->msgsReceived.got_change_cipher) { WOLFSSL_MSG("Finished 
received before ChangeCipher"); WOLFSSL_ERROR_VERBOSE(NO_CHANGE_CIPHER_E); return NO_CHANGE_CIPHER_E; @@ -17938,62 +17968,63 @@ static int SanityCheckMsgReceived(WOLFSSL* ssl, byte type) #ifndef NO_WOLFSSL_CLIENT if (ssl->options.side == WOLFSSL_CLIENT_END) { + if (!ssl->msgsReceived.got_server_hello) { + WOLFSSL_MSG("ChangeCipherSpec received in wrong order"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } if (!ssl->options.resuming) { - if (ssl->msgsReceived.got_server_hello_done == 0) { + if (!ssl->msgsReceived.got_server_hello_done) { WOLFSSL_MSG("No ServerHelloDone before ChangeCipher"); WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } } - else { - if (ssl->msgsReceived.got_server_hello == 0) { - WOLFSSL_MSG("No ServerHello before ChangeCipher on " - "Resume"); - WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); - return OUT_OF_ORDER_E; - } + #ifdef HAVE_SESSION_TICKET + if (ssl->expect_session_ticket) { + WOLFSSL_MSG("Expected session ticket missing"); + #ifdef WOLFSSL_DTLS + if (ssl->options.dtls) { + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } + #endif + WOLFSSL_ERROR_VERBOSE(SESSION_TICKET_EXPECT_E); + return SESSION_TICKET_EXPECT_E; } - #ifdef HAVE_SESSION_TICKET - if (ssl->expect_session_ticket) { - WOLFSSL_MSG("Expected session ticket missing"); + #endif + } +#endif +#ifndef NO_WOLFSSL_SERVER + if (ssl->options.side == WOLFSSL_SERVER_END) { + if (!ssl->msgsReceived.got_client_hello) { + WOLFSSL_MSG("ChangeCipherSpec received in wrong order"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } + if (!ssl->options.resuming && + !ssl->msgsReceived.got_client_key_exchange) { + WOLFSSL_MSG("No ClientKeyExchange before ChangeCipher"); + WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); + return OUT_OF_ORDER_E; + } + #ifndef NO_CERTS + if (ssl->options.verifyPeer && + ssl->options.havePeerCert) { + if (!ssl->options.havePeerVerify || + !ssl->msgsReceived.got_certificate_verify) { + WOLFSSL_MSG("client 
didn't send cert verify"); #ifdef WOLFSSL_DTLS if (ssl->options.dtls) { WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); return OUT_OF_ORDER_E; } #endif - WOLFSSL_ERROR_VERBOSE(SESSION_TICKET_EXPECT_E); - return SESSION_TICKET_EXPECT_E; + WOLFSSL_ERROR_VERBOSE(NO_PEER_VERIFY); + return NO_PEER_VERIFY; } - #endif - } -#endif -#ifndef NO_WOLFSSL_SERVER - if (ssl->options.side == WOLFSSL_SERVER_END) { - if (!ssl->options.resuming && - ssl->msgsReceived.got_client_key_exchange == 0) { - WOLFSSL_MSG("No ClientKeyExchange before ChangeCipher"); - WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); - return OUT_OF_ORDER_E; } - #ifndef NO_CERTS - if (ssl->options.verifyPeer && - ssl->options.havePeerCert) { - - if (!ssl->options.havePeerVerify || - !ssl->msgsReceived.got_certificate_verify) { - WOLFSSL_MSG("client didn't send cert verify"); - #ifdef WOLFSSL_DTLS - if (ssl->options.dtls) { - WOLFSSL_ERROR_VERBOSE(OUT_OF_ORDER_E); - return OUT_OF_ORDER_E; - } - #endif - WOLFSSL_ERROR_VERBOSE(NO_PEER_VERIFY); - return NO_PEER_VERIFY; - } - } - #endif + #endif } #endif /* !NO_WOLFSSL_SERVER */ if (ssl->options.dtls) @@ -21222,13 +21253,11 @@ static byte MaskMac(const byte* data, int sz, int macSz, byte* expMac) int i, j; int r = 0; unsigned char mac[WC_MAX_DIGEST_SIZE]; - volatile int scanStart = sz - 1 - TLS_MAX_PAD_SZ - macSz; + int scanStart = sz - 1 - TLS_MAX_PAD_SZ - macSz; volatile int macEnd = sz - 1 - data[sz - 1]; - volatile int macStart = macEnd - macSz; + int macStart = macEnd - macSz; volatile int maskScanStart; volatile int maskMacStart; - volatile unsigned char started; - volatile unsigned char notEnded; unsigned char good = 0; maskScanStart = ctMaskIntGTE(scanStart, 0); @@ -21238,22 +21267,31 @@ static byte MaskMac(const byte* data, int sz, int macSz, byte* expMac) /* Div on Intel has different speeds depending on value. * Use a bitwise AND or mod a specific value (converted to mul). 
*/ - if ((macSz & (macSz - 1)) == 0) - r = (macSz - (scanStart - macStart)) & (macSz - 1); + if ((macSz & (macSz - 1)) == 0) { + r = macSz - scanStart; + r += macStart; + r &= (macSz - 1); + } #ifndef NO_SHA - else if (macSz == WC_SHA_DIGEST_SIZE) - r = (macSz - (scanStart - macStart)) % WC_SHA_DIGEST_SIZE; + else if (macSz == WC_SHA_DIGEST_SIZE) { + r = macSz - scanStart; + r += macStart; + r %= WC_SHA_DIGEST_SIZE; + } #endif #ifdef WOLFSSL_SHA384 - else if (macSz == WC_SHA384_DIGEST_SIZE) - r = (macSz - (scanStart - macStart)) % WC_SHA384_DIGEST_SIZE; + else if (macSz == WC_SHA384_DIGEST_SIZE) { + r = macSz - scanStart; + r += macStart; + r %= WC_SHA384_DIGEST_SIZE; + } #endif XMEMSET(mac, 0, (size_t)(macSz)); for (i = scanStart; i < sz; i += macSz) { for (j = 0; j < macSz && j + i < sz; j++) { - started = ctMaskGTE(i + j, macStart); - notEnded = ctMaskLT(i + j, macEnd); + unsigned char started = ctMaskGTE(i + j, macStart); + unsigned char notEnded = ctMaskLT(i + j, macEnd); mac[j] |= started & notEnded & data[i + j]; } } @@ -26603,6 +26641,66 @@ int SendAlert(WOLFSSL* ssl, int severity, int type) #include #endif +#if !defined(NO_ERROR_STRINGS) && (defined(OPENSSL_EXTRA) || \ + defined(OPENSSL_EXTRA_X509_SMALL) || \ + defined(HAVE_WEBSERVER) || defined(HAVE_MEMCACHED)) +static const char* wolfSSL_ERR_reason_error_string_OpenSSL(unsigned long e) +{ + switch (e) { + /* TODO: -WOLFSSL_X509_V_ERR_CERT_SIGNATURE_FAILURE. Conflicts with + * -WOLFSSL_ERROR_WANT_CONNECT. 
+ */ + case WOLFSSL_X509_V_ERR_CRL_HAS_EXPIRED: + return "CRL has expired"; + + case WOLFSSL_X509_V_ERR_UNABLE_TO_GET_CRL: + return "unable to get CRL"; + + case WOLFSSL_X509_V_ERR_CERT_NOT_YET_VALID: + return "certificate not yet valid"; + + case WOLFSSL_X509_V_ERR_CERT_HAS_EXPIRED: + return "certificate has expired"; + + case WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD: + return "certificate signature failure"; + + case WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD: + return "format error in certificate's notAfter field"; + + case WOLFSSL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT: + return "self-signed certificate in certificate chain"; + + case WOLFSSL_X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY: + return "unable to get local issuer certificate"; + + case WOLFSSL_X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE: + return "unable to verify the first certificate"; + + case WOLFSSL_X509_V_ERR_CERT_CHAIN_TOO_LONG: + return "certificate chain too long"; + + case WOLFSSL_X509_V_ERR_CERT_REVOKED: + return "certificate revoked"; + + case WOLFSSL_X509_V_ERR_INVALID_CA: + return "invalid CA certificate"; + + case WOLFSSL_X509_V_ERR_PATH_LENGTH_EXCEEDED: + return "path length constraint exceeded"; + + case WOLFSSL_X509_V_ERR_CERT_REJECTED: + return "certificate rejected"; + + case WOLFSSL_X509_V_ERR_SUBJECT_ISSUER_MISMATCH: + return "subject issuer mismatch"; + + default: + return NULL; + } +} +#endif /* OPENSSL_EXTRA || OPENSSL_EXTRA_X509_SMALL || HAVE_WEBSERVER || HAVE_MEMCACHED */ + const char* wolfSSL_ERR_reason_error_string(unsigned long e) { #ifdef NO_ERROR_STRINGS @@ -26614,11 +26712,18 @@ const char* wolfSSL_ERR_reason_error_string(unsigned long e) int error = (int)e; - /* OpenSSL uses positive error codes */ if (error > 0) { +#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) || \ + defined(HAVE_WEBSERVER) || defined(HAVE_MEMCACHED) + /* Check the OpenSSL error strings first. 
*/ + const char* ossl_err = wolfSSL_ERR_reason_error_string_OpenSSL(e); + if (ossl_err != NULL) { + return ossl_err; + } + /* try to find error strings from wolfSSL */ +#endif error = -error; } - /* pass to wolfCrypt */ if ((error <= WC_SPAN1_FIRST_E && error >= WC_SPAN1_MIN_CODE_E) || (error <= WC_SPAN2_FIRST_E && error >= WC_SPAN2_MIN_CODE_E)) @@ -27177,55 +27282,6 @@ const char* wolfSSL_ERR_reason_error_string(unsigned long e) return "Private key decode error (EVP)"; } -#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) || \ - defined(HAVE_WEBSERVER) || defined(HAVE_MEMCACHED) - - switch (error) { - /* TODO: -WOLFSSL_X509_V_ERR_CERT_SIGNATURE_FAILURE. Conflicts with - * -WOLFSSL_ERROR_WANT_CONNECT. - */ - - case -WOLFSSL_X509_V_ERR_CERT_NOT_YET_VALID: - return "certificate not yet valid"; - - case -WOLFSSL_X509_V_ERR_CERT_HAS_EXPIRED: - return "certificate has expired"; - - case -WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD: - return "certificate signature failure"; - - case -WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD: - return "format error in certificate's notAfter field"; - - case -WOLFSSL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT: - return "self-signed certificate in certificate chain"; - - case -WOLFSSL_X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY: - return "unable to get local issuer certificate"; - - case -WOLFSSL_X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE: - return "unable to verify the first certificate"; - - case -WOLFSSL_X509_V_ERR_CERT_CHAIN_TOO_LONG: - return "certificate chain too long"; - - case -WOLFSSL_X509_V_ERR_CERT_REVOKED: - return "certificate revoked"; - - case -WOLFSSL_X509_V_ERR_INVALID_CA: - return "invalid CA certificate"; - - case -WOLFSSL_X509_V_ERR_PATH_LENGTH_EXCEEDED: - return "path length constraint exceeded"; - - case -WOLFSSL_X509_V_ERR_CERT_REJECTED: - return "certificate rejected"; - - case -WOLFSSL_X509_V_ERR_SUBJECT_ISSUER_MISMATCH: - return "subject issuer mismatch"; - } -#endif /* OPENSSL_EXTRA || 
OPENSSL_EXTRA_X509_SMALL || HAVE_WEBSERVER || HAVE_MEMCACHED */ - return "unknown error number"; #endif /* NO_ERROR_STRINGS */ @@ -29756,24 +29812,32 @@ int CreateDevPrivateKey(void** pkey, byte* data, word32 length, int hsType, } #endif /* WOLF_PRIVATE_KEY_ID && !NO_CHECK_PRIVATE_KEY */ -/* Decode the private key - RSA/ECC/Ed25519/Ed448/Falcon/Dilithium - and - * creates a key object. +/* Decode a private key - RSA/ECC/Ed25519/Ed448/Falcon/Dilithium - and + * create a key object. The signature type is set as well. * - * The signature type is set as well. - * The maximum length of a signature is returned. + * ssl The SSL/TLS object. + * keyType The type of the key to decode. + * key The key to decode. + * hsType The handshake related type of the decoded key. + * hsKey The decoded key. + * keyDevId The devId of the key. + * keyIdSet The key data contains a key id. + * keyLabelSet The key data contains a key label. + * keySz The size of the key. + * sigLen The length of a signature. * - * ssl The SSL/TLS object. - * length The length of a signature. * returns 0 on success, otherwise failure. 
*/ -int DecodePrivateKey(WOLFSSL *ssl, word32* length) +static int DecodePrivateKey_ex(WOLFSSL *ssl, byte keyType, const DerBuffer* key, + word32* hsType, void** hsKey, int keyDevId, byte keyIdSet, + byte keyLabelSet, int keySz, word32* sigLen) { int ret = WC_NO_ERR_TRACE(BAD_FUNC_ARG); - int keySz; + int keySzDecoded; word32 idx; /* make sure private key exists */ - if (ssl->buffers.key == NULL || ssl->buffers.key->buffer == NULL) { + if (key == NULL || key->buffer == NULL) { /* allow no private key if using external */ #ifdef WOLF_PRIVATE_KEY_ID if (ssl->devId != INVALID_DEVID @@ -29781,7 +29845,7 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) || wolfSSL_CTX_IsPrivatePkSet(ssl->ctx) #endif ) { - *length = (word32)GetPrivateKeySigSize(ssl); + *sigLen = (word32)GetPrivateKeySigSize(ssl); return 0; } else @@ -29792,148 +29856,102 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) } } -#ifdef WOLF_PRIVATE_KEY_ID - if (ssl->buffers.keyDevId != INVALID_DEVID && (ssl->buffers.keyId || - ssl->buffers.keyLabel)) { - if (ssl->buffers.keyType == rsa_sa_algo) - ssl->hsType = DYNAMIC_TYPE_RSA; - else if (ssl->buffers.keyType == ecc_dsa_sa_algo) - ssl->hsType = DYNAMIC_TYPE_ECC; - else if ((ssl->buffers.keyType == falcon_level1_sa_algo) || - (ssl->buffers.keyType == falcon_level5_sa_algo)) - ssl->hsType = DYNAMIC_TYPE_FALCON; - else if ((ssl->buffers.keyType == dilithium_level2_sa_algo) || - (ssl->buffers.keyType == dilithium_level3_sa_algo) || - (ssl->buffers.keyType == dilithium_level5_sa_algo)) - ssl->hsType = DYNAMIC_TYPE_DILITHIUM; - ret = AllocKey(ssl, (int)(ssl->hsType), &ssl->hsKey); +#if defined(WOLF_PRIVATE_KEY_ID) && !defined(NO_CHECK_PRIVATE_KEY) + if (keyDevId != INVALID_DEVID && (keyIdSet || keyLabelSet)) { + /* Set hsType */ + if (keyType == rsa_sa_algo) + *hsType = DYNAMIC_TYPE_RSA; + else if (keyType == ecc_dsa_sa_algo) + *hsType = DYNAMIC_TYPE_ECC; + else if ((keyType == falcon_level1_sa_algo) || + (keyType == falcon_level5_sa_algo)) + *hsType = 
DYNAMIC_TYPE_FALCON; + else if ((keyType == dilithium_level2_sa_algo) || + (keyType == dilithium_level3_sa_algo) || + (keyType == dilithium_level5_sa_algo)) + *hsType = DYNAMIC_TYPE_DILITHIUM; + + /* Create the private key */ + ret = CreateDevPrivateKey(hsKey, key->buffer, + key->length, *hsType, + keyLabelSet, keyIdSet, ssl->heap, + keyDevId); if (ret != 0) { goto exit_dpk; } - if (ssl->buffers.keyType == rsa_sa_algo) { + /* Check key size */ + if (*hsType == DYNAMIC_TYPE_RSA) { #ifndef NO_RSA - if (ssl->buffers.keyLabel) { - ret = wc_InitRsaKey_Label((RsaKey*)ssl->hsKey, - (char*)ssl->buffers.key->buffer, - ssl->heap, ssl->buffers.keyDevId); + if (keySz < ssl->options.minRsaKeySz) { + WOLFSSL_MSG("RSA key size too small"); + ERROR_OUT(RSA_KEY_SIZE_E, exit_dpk); } - else if (ssl->buffers.keyId) { - ret = wc_InitRsaKey_Id((RsaKey*)ssl->hsKey, - (ssl->buffers.key->buffer), - (int)(ssl->buffers.key->length), - ssl->heap, - ssl->buffers.keyDevId); - } - if (ret == 0) { - if (ssl->buffers.keySz < ssl->options.minRsaKeySz) { - WOLFSSL_MSG("RSA key size too small"); - ERROR_OUT(RSA_KEY_SIZE_E, exit_dpk); - } - /* Return the maximum signature length. */ - *length = (word32)ssl->buffers.keySz; - } + /* Return the maximum signature length. 
*/ + *sigLen = (word32)keySz; #else ret = NOT_COMPILED_IN; #endif } - else if (ssl->buffers.keyType == ecc_dsa_sa_algo) { + else if (*hsType == DYNAMIC_TYPE_ECC) { #ifdef HAVE_ECC - if (ssl->buffers.keyLabel) { - ret = wc_ecc_init_label((ecc_key*)ssl->hsKey, - (char*)ssl->buffers.key->buffer, - ssl->heap, ssl->buffers.keyDevId); + if (keySz < ssl->options.minEccKeySz) { + WOLFSSL_MSG("ECC key size too small"); + ERROR_OUT(ECC_KEY_SIZE_E, exit_dpk); } - else if (ssl->buffers.keyId) { - ret = wc_ecc_init_id((ecc_key*)ssl->hsKey, - (ssl->buffers.key->buffer), - ssl->buffers.key->length, ssl->heap, - ssl->buffers.keyDevId); - } - if (ret == 0) { - if (ssl->buffers.keySz < ssl->options.minEccKeySz) { - WOLFSSL_MSG("ECC key size too small"); - ERROR_OUT(ECC_KEY_SIZE_E, exit_dpk); - } - /* Return the maximum signature length. */ - *length = (word32)wc_ecc_sig_size_calc(ssl->buffers.keySz); - } + /* Return the maximum signature length. */ + *sigLen = (word32)wc_ecc_sig_size_calc(keySz); #else ret = NOT_COMPILED_IN; #endif } - else if ((ssl->buffers.keyType == falcon_level1_sa_algo) || - (ssl->buffers.keyType == falcon_level5_sa_algo)) { + else if (*hsType == DYNAMIC_TYPE_FALCON) { #if defined(HAVE_FALCON) - if (ssl->buffers.keyLabel) { - ret = wc_falcon_init_label((falcon_key*)ssl->hsKey, - (char*)ssl->buffers.key->buffer, - ssl->heap, ssl->buffers.keyDevId); + if (keyType == falcon_level1_sa_algo) { + ret = wc_falcon_set_level((falcon_key*)*hsKey, 1); } - else if (ssl->buffers.keyId) { - ret = wc_falcon_init_id((falcon_key*)ssl->hsKey, - ssl->buffers.key->buffer, - ssl->buffers.key->length, ssl->heap, - ssl->buffers.keyDevId); + else if (keyType == falcon_level5_sa_algo) { + ret = wc_falcon_set_level((falcon_key*)*hsKey, 5); } + if (ret == 0) { - if (ssl->buffers.keyType == falcon_level1_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsKey, 1); - } - else if (ssl->buffers.keyType == falcon_level5_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsKey, 
5); - } - } - if (ret == 0) { - if (ssl->buffers.keySz < ssl->options.minFalconKeySz) { + if (keySz < ssl->options.minFalconKeySz) { WOLFSSL_MSG("Falcon key size too small"); ERROR_OUT(FALCON_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. */ - *length = wc_falcon_sig_size((falcon_key*)ssl->hsKey); + *sigLen = wc_falcon_sig_size((falcon_key*)*hsKey); } #else ret = NOT_COMPILED_IN; #endif } - else if ((ssl->buffers.keyType == dilithium_level2_sa_algo) || - (ssl->buffers.keyType == dilithium_level3_sa_algo) || - (ssl->buffers.keyType == dilithium_level5_sa_algo)) { + else if (*hsType == DYNAMIC_TYPE_DILITHIUM) { #if defined(HAVE_DILITHIUM) && !defined(WOLFSSL_DILITHIUM_NO_SIGN) - if (ssl->buffers.keyLabel) { - ret = wc_dilithium_init_label((dilithium_key*)ssl->hsKey, - (char*)ssl->buffers.key->buffer, - ssl->heap, ssl->buffers.keyDevId); + if (keyType == dilithium_level2_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, + WC_ML_DSA_44); } - else if (ssl->buffers.keyId) { - ret = wc_dilithium_init_id((dilithium_key*)ssl->hsKey, - ssl->buffers.key->buffer, - ssl->buffers.key->length, ssl->heap, - ssl->buffers.keyDevId); + else if (keyType == dilithium_level3_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, + WC_ML_DSA_65); } + else if (keyType == dilithium_level5_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, + WC_ML_DSA_87); + } + if (ret == 0) { - if (ssl->buffers.keyType == dilithium_level2_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_44); - } - else if (ssl->buffers.keyType == dilithium_level3_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_65); - } - else if (ssl->buffers.keyType == dilithium_level5_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_87); - } - } - if (ret == 0) { - if (ssl->buffers.keySz < ssl->options.minDilithiumKeySz) { + if (keySz < ssl->options.minDilithiumKeySz) { 
WOLFSSL_MSG("Dilithium key size too small"); ERROR_OUT(DILITHIUM_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. */ - *length = wc_dilithium_sig_size( - (dilithium_key*)ssl->hsKey); + *sigLen = wc_dilithium_sig_size((dilithium_key*)*hsKey); } #else ret = NOT_COMPILED_IN; @@ -29944,9 +29962,9 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #endif /* WOLF_PRIVATE_KEY_ID */ #ifndef NO_RSA - if (ssl->buffers.keyType == rsa_sa_algo || ssl->buffers.keyType == 0) { - ssl->hsType = DYNAMIC_TYPE_RSA; - ret = AllocKey(ssl, (int)ssl->hsType, &ssl->hsKey); + if (keyType == rsa_sa_algo || keyType == 0) { + *hsType = DYNAMIC_TYPE_RSA; + ret = AllocKey(ssl, (int)*hsType, hsKey); if (ret != 0) { goto exit_dpk; } @@ -29956,8 +29974,8 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is an RSA private key. */ - ret = wc_RsaPrivateKeyDecode(ssl->buffers.key->buffer, &idx, - (RsaKey*)ssl->hsKey, ssl->buffers.key->length); + ret = wc_RsaPrivateKeyDecode(key->buffer, &idx, + (RsaKey*)*hsKey, key->length); #ifdef WOLF_PRIVATE_KEY_ID /* if using external key then allow using a public key */ if (ret != 0 && (ssl->devId != INVALID_DEVID @@ -29967,26 +29985,26 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) )) { WOLFSSL_MSG("Trying RSA public key with crypto callbacks"); idx = 0; - ret = wc_RsaPublicKeyDecode(ssl->buffers.key->buffer, &idx, - (RsaKey*)ssl->hsKey, ssl->buffers.key->length); + ret = wc_RsaPublicKeyDecode(key->buffer, &idx, + (RsaKey*)*hsKey, key->length); } #endif if (ret == 0) { WOLFSSL_MSG("Using RSA private key"); /* It worked so check it meets minimum key size requirements. 
*/ - keySz = wc_RsaEncryptSize((RsaKey*)ssl->hsKey); - if (keySz < 0) { /* check if keySz has error case */ - ERROR_OUT(keySz, exit_dpk); + keySzDecoded = wc_RsaEncryptSize((RsaKey*)*hsKey); + if (keySzDecoded < 0) { /* check if keySzDecoded has error case */ + ERROR_OUT(keySzDecoded, exit_dpk); } - if (keySz < ssl->options.minRsaKeySz) { + if (keySzDecoded < ssl->options.minRsaKeySz) { WOLFSSL_MSG("RSA key size too small"); ERROR_OUT(RSA_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. */ - *length = (word32)keySz; + *sigLen = (word32)keySzDecoded; goto exit_dpk; } @@ -29995,32 +30013,28 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #ifdef HAVE_ECC #ifndef NO_RSA - FreeKey(ssl, (int)ssl->hsType, (void**)&ssl->hsKey); + FreeKey(ssl, (int)*hsType, hsKey); #endif /* !NO_RSA */ - if (ssl->buffers.keyType == ecc_dsa_sa_algo || ssl->buffers.keyType == 0 + if (keyType == ecc_dsa_sa_algo || keyType == 0 #if defined(WOLFSSL_SM2) && defined(WOLFSSL_SM3) - || ssl->buffers.keyType == sm2_sa_algo + || keyType == sm2_sa_algo #endif ) { - ssl->hsType = DYNAMIC_TYPE_ECC; - ret = AllocKey(ssl, (int)ssl->hsType, &ssl->hsKey); + *hsType = DYNAMIC_TYPE_ECC; + ret = AllocKey(ssl, (int)*hsType, hsKey); if (ret != 0) { goto exit_dpk; } - #ifndef NO_RSA - WOLFSSL_MSG("Trying ECC private key, RSA didn't work"); - #else WOLFSSL_MSG("Trying ECC private key"); - #endif /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is an ECC private key. 
*/ - ret = wc_EccPrivateKeyDecode(ssl->buffers.key->buffer, &idx, - (ecc_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_EccPrivateKeyDecode(key->buffer, &idx, + (ecc_key*)*hsKey, + key->length); #ifdef WOLF_PRIVATE_KEY_ID /* if using external key then allow using a public key */ if (ret != 0 && (ssl->devId != INVALID_DEVID @@ -30030,14 +30044,14 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) )) { WOLFSSL_MSG("Trying ECC public key with crypto callbacks"); idx = 0; - ret = wc_EccPublicKeyDecode(ssl->buffers.key->buffer, &idx, - (ecc_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_EccPublicKeyDecode(key->buffer, &idx, + (ecc_key*)*hsKey, + key->length); } #endif #ifdef WOLFSSL_SM2 - if ((ret == 0) && (ssl->buffers.keyType == sm2_sa_algo)) { - ret = wc_ecc_set_curve((ecc_key*)ssl->hsKey, + if ((ret == 0) && (keyType == sm2_sa_algo)) { + ret = wc_ecc_set_curve((ecc_key*)*hsKey, WOLFSSL_SM2_KEY_BITS / 8, ECC_SM2P256V1); } #endif @@ -30045,14 +30059,14 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) WOLFSSL_MSG("Using ECC private key"); /* Check it meets the minimum ECC key size requirements. */ - keySz = wc_ecc_size((ecc_key*)ssl->hsKey); - if (keySz < ssl->options.minEccKeySz) { + keySzDecoded = wc_ecc_size((ecc_key*)*hsKey); + if (keySzDecoded < ssl->options.minEccKeySz) { WOLFSSL_MSG("ECC key size too small"); ERROR_OUT(ECC_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. 
*/ - *length = (word32)wc_ecc_sig_size((ecc_key*)ssl->hsKey); + *sigLen = (word32)wc_ecc_sig_size((ecc_key*)*hsKey); goto exit_dpk; } @@ -30060,30 +30074,24 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #endif #if defined(HAVE_ED25519) && defined(HAVE_ED25519_KEY_IMPORT) #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsType, (void**)&ssl->hsKey); + FreeKey(ssl, *hsType, hsKey); #endif - if (ssl->buffers.keyType == ed25519_sa_algo || ssl->buffers.keyType == 0) { - ssl->hsType = DYNAMIC_TYPE_ED25519; - ret = AllocKey(ssl, ssl->hsType, &ssl->hsKey); + if (keyType == ed25519_sa_algo || keyType == 0) { + *hsType = DYNAMIC_TYPE_ED25519; + ret = AllocKey(ssl, *hsType, hsKey); if (ret != 0) { goto exit_dpk; } - #ifdef HAVE_ECC - WOLFSSL_MSG("Trying ED25519 private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying ED25519 private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying ED25519 private key"); - #endif + WOLFSSL_MSG("Trying ED25519 private key"); /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is an ED25519 private key. */ - ret = wc_Ed25519PrivateKeyDecode(ssl->buffers.key->buffer, &idx, - (ed25519_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_Ed25519PrivateKeyDecode(key->buffer, &idx, + (ed25519_key*)*hsKey, + key->length); #ifdef WOLF_PRIVATE_KEY_ID /* if using external key then allow using a public key */ if (ret != 0 && (ssl->devId != INVALID_DEVID @@ -30093,9 +30101,9 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) )) { WOLFSSL_MSG("Trying ED25519 public key with crypto callbacks"); idx = 0; - ret = wc_Ed25519PublicKeyDecode(ssl->buffers.key->buffer, &idx, - (ed25519_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_Ed25519PublicKeyDecode(key->buffer, &idx, + (ed25519_key*)*hsKey, + key->length); } #endif if (ret == 0) { @@ -30108,7 +30116,7 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) } /* Return the maximum signature length. 
*/ - *length = ED25519_SIG_SIZE; + *sigLen = ED25519_SIG_SIZE; goto exit_dpk; } @@ -30116,32 +30124,24 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #endif /* HAVE_ED25519 && HAVE_ED25519_KEY_IMPORT */ #if defined(HAVE_ED448) && defined(HAVE_ED448_KEY_IMPORT) #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsType, (void**)&ssl->hsKey); + FreeKey(ssl, *hsType, hsKey); #endif - if (ssl->buffers.keyType == ed448_sa_algo || ssl->buffers.keyType == 0) { - ssl->hsType = DYNAMIC_TYPE_ED448; - ret = AllocKey(ssl, ssl->hsType, &ssl->hsKey); + if (keyType == ed448_sa_algo || keyType == 0) { + *hsType = DYNAMIC_TYPE_ED448; + ret = AllocKey(ssl, *hsType, hsKey); if (ret != 0) { goto exit_dpk; } - #ifdef HAVE_ED25519 - WOLFSSL_MSG("Trying ED448 private key, ED25519 didn't work"); - #elif defined(HAVE_ECC) - WOLFSSL_MSG("Trying ED448 private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying ED448 private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying ED448 private key"); - #endif + WOLFSSL_MSG("Trying ED448 private key"); /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is an ED448 private key. 
*/ - ret = wc_Ed448PrivateKeyDecode(ssl->buffers.key->buffer, &idx, - (ed448_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_Ed448PrivateKeyDecode(key->buffer, &idx, + (ed448_key*)*hsKey, + key->length); #ifdef WOLF_PRIVATE_KEY_ID /* if using external key then allow using a public key */ if (ret != 0 && (ssl->devId != INVALID_DEVID @@ -30151,9 +30151,9 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) )) { WOLFSSL_MSG("Trying ED25519 public key with crypto callbacks"); idx = 0; - ret = wc_Ed448PublicKeyDecode(ssl->buffers.key->buffer, &idx, - (ed448_key*)ssl->hsKey, - ssl->buffers.key->length); + ret = wc_Ed448PublicKeyDecode(key->buffer, &idx, + (ed448_key*)*hsKey, + key->length); } #endif if (ret == 0) { @@ -30166,7 +30166,7 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) } /* Return the maximum signature length. */ - *length = ED448_SIG_SIZE; + *sigLen = ED448_SIG_SIZE; goto exit_dpk; } @@ -30174,27 +30174,27 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #endif /* HAVE_ED448 && HAVE_ED448_KEY_IMPORT */ #if defined(HAVE_FALCON) #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsType, (void**)&ssl->hsKey); + FreeKey(ssl, *hsType, hsKey); #endif - if (ssl->buffers.keyType == falcon_level1_sa_algo || - ssl->buffers.keyType == falcon_level5_sa_algo || - ssl->buffers.keyType == 0) { + if (keyType == falcon_level1_sa_algo || + keyType == falcon_level5_sa_algo || + keyType == 0) { - ssl->hsType = DYNAMIC_TYPE_FALCON; - ret = AllocKey(ssl, ssl->hsType, &ssl->hsKey); + *hsType = DYNAMIC_TYPE_FALCON; + ret = AllocKey(ssl, *hsType, hsKey); if (ret != 0) { goto exit_dpk; } - if (ssl->buffers.keyType == falcon_level1_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsKey, 1); + if (keyType == falcon_level1_sa_algo) { + ret = wc_falcon_set_level((falcon_key*)*hsKey, 1); } - else if (ssl->buffers.keyType == falcon_level5_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsKey, 5); + else if (keyType == 
falcon_level5_sa_algo) { + ret = wc_falcon_set_level((falcon_key*)*hsKey, 5); } else { - /* What if ssl->buffers.keyType is 0? We might want to do something + /* What if keyType is 0? We might want to do something * more graceful here. */ ret = ALGO_ID_E; } @@ -30203,36 +30203,26 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) goto exit_dpk; } - #if defined(HAVE_ED448) - WOLFSSL_MSG("Trying Falcon private key, ED448 didn't work"); - #elif defined(HAVE_ED25519) - WOLFSSL_MSG("Trying Falcon private key, ED25519 didn't work"); - #elif defined(HAVE_ECC) - WOLFSSL_MSG("Trying Falcon private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying Falcon private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying Falcon private key"); - #endif + WOLFSSL_MSG("Trying Falcon private key"); /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is a Falcon private key. */ - ret = wc_falcon_import_private_only(ssl->buffers.key->buffer, - ssl->buffers.key->length, - (falcon_key*)ssl->hsKey); + ret = wc_falcon_import_private_only(key->buffer, + key->length, + (falcon_key*)*hsKey); if (ret == 0) { WOLFSSL_MSG("Using Falcon private key"); /* Check it meets the minimum Falcon key size requirements. */ - keySz = wc_falcon_size((falcon_key*)ssl->hsKey); - if (keySz < ssl->options.minFalconKeySz) { + keySzDecoded = wc_falcon_size((falcon_key*)*hsKey); + if (keySzDecoded < ssl->options.minFalconKeySz) { WOLFSSL_MSG("Falcon key size too small"); ERROR_OUT(FALCON_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. 
*/ - *length = wc_falcon_sig_size((falcon_key*)ssl->hsKey); + *sigLen = wc_falcon_sig_size((falcon_key*)*hsKey); goto exit_dpk; } @@ -30241,31 +30231,31 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #if defined(HAVE_DILITHIUM) && !defined(WOLFSSL_DILITHIUM_NO_SIGN) && \ !defined(WOLFSSL_DILITHIUM_NO_ASN1) #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsType, (void**)&ssl->hsKey); + FreeKey(ssl, *hsType, hsKey); #endif - if (ssl->buffers.keyType == dilithium_level2_sa_algo || - ssl->buffers.keyType == dilithium_level3_sa_algo || - ssl->buffers.keyType == dilithium_level5_sa_algo || - ssl->buffers.keyType == 0) { + if (keyType == dilithium_level2_sa_algo || + keyType == dilithium_level3_sa_algo || + keyType == dilithium_level5_sa_algo || + keyType == 0) { - ssl->hsType = DYNAMIC_TYPE_DILITHIUM; - ret = AllocKey(ssl, ssl->hsType, &ssl->hsKey); + *hsType = DYNAMIC_TYPE_DILITHIUM; + ret = AllocKey(ssl, *hsType, hsKey); if (ret != 0) { goto exit_dpk; } - if (ssl->buffers.keyType == dilithium_level2_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_44); + if (keyType == dilithium_level2_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, WC_ML_DSA_44); } - else if (ssl->buffers.keyType == dilithium_level3_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_65); + else if (keyType == dilithium_level3_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, WC_ML_DSA_65); } - else if (ssl->buffers.keyType == dilithium_level5_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsKey, WC_ML_DSA_87); + else if (keyType == dilithium_level5_sa_algo) { + ret = wc_dilithium_set_level((dilithium_key*)*hsKey, WC_ML_DSA_87); } else { - /* What if ssl->buffers.keyType is 0? We might want to do something + /* What if keyType is 0? We might want to do something * more graceful here. 
*/ ret = ALGO_ID_E; } @@ -30274,39 +30264,27 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) goto exit_dpk; } - #if defined(HAVE_ED448) - WOLFSSL_MSG("Trying Dilithium private key, ED448 didn't work"); - #elif defined(HAVE_ED25519) - WOLFSSL_MSG("Trying Dilithium private key, ED25519 didn't work"); - #elif defined(HAVE_ECC) - WOLFSSL_MSG("Trying Dilithium private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying Dilithium private key, RSA didn't work"); - #elif defined(HAVE_FALCON) - WOLFSSL_MSG("Trying Dilithium private key, Falcon didn't work"); - #else - WOLFSSL_MSG("Trying Dilithium private key"); - #endif + WOLFSSL_MSG("Trying Dilithium private key"); /* Set start of data to beginning of buffer. */ idx = 0; /* Decode the key assuming it is a Dilithium private key. */ - ret = wc_Dilithium_PrivateKeyDecode(ssl->buffers.key->buffer, + ret = wc_Dilithium_PrivateKeyDecode(key->buffer, &idx, - (dilithium_key*)ssl->hsKey, - ssl->buffers.key->length); + (dilithium_key*)*hsKey, + key->length); if (ret == 0) { WOLFSSL_MSG("Using Dilithium private key"); /* Check it meets the minimum Dilithium key size requirements. */ - keySz = wc_dilithium_size((dilithium_key*)ssl->hsKey); - if (keySz < ssl->options.minDilithiumKeySz) { + keySzDecoded = wc_dilithium_size((dilithium_key*)*hsKey); + if (keySzDecoded < ssl->options.minDilithiumKeySz) { WOLFSSL_MSG("Dilithium key size too small"); ERROR_OUT(DILITHIUM_KEY_SIZE_E, exit_dpk); } /* Return the maximum signature length. 
*/ - *length = wc_dilithium_sig_size((dilithium_key*)ssl->hsKey); + *sigLen = wc_dilithium_sig_size((dilithium_key*)*hsKey); goto exit_dpk; } @@ -30314,8 +30292,16 @@ int DecodePrivateKey(WOLFSSL *ssl, word32* length) #endif /* HAVE_DILITHIUM */ (void)idx; + (void)keySzDecoded; + (void)keyType; + (void)key; + (void)hsType; + (void)hsKey; + (void)keyDevId; + (void)keyIdSet; + (void)keyLabelSet; (void)keySz; - (void)length; + (void)sigLen; exit_dpk: if (ret != 0) { @@ -30325,440 +30311,40 @@ exit_dpk: return ret; } -#if defined(WOLFSSL_DUAL_ALG_CERTS) -/* This is just like the above, but only consider RSA, ECC, Falcon and - * Dilthium; Furthermore, use the alternative key, not the native key. +/* Decode the private key - RSA/ECC/Ed25519/Ed448/Falcon/Dilithium - and + * creates a key object. + * + * The signature type is set as well. + * The maximum length of a signature is returned. + * + * ssl The SSL/TLS object. + * sigLen The length of a signature. + * returns 0 on success, otherwise failure. 
*/ -int DecodeAltPrivateKey(WOLFSSL *ssl, word32* length) +int DecodePrivateKey(WOLFSSL *ssl, word32* sigLen) { - int ret = WC_NO_ERR_TRACE(BAD_FUNC_ARG); - int keySz; - word32 idx; + int ret = 0; - /* make sure alt private key exists */ - if (ssl->buffers.altKey == NULL || ssl->buffers.altKey->buffer == NULL) { - WOLFSSL_MSG("Alternative Private key missing!"); - ERROR_OUT(NO_PRIVATE_KEY, exit_dapk); - } + ret = DecodePrivateKey_ex(ssl, ssl->buffers.keyType, ssl->buffers.key, + &ssl->hsType, &ssl->hsKey, ssl->buffers.keyDevId, + ssl->buffers.keyId, ssl->buffers.keyLabel, ssl->buffers.keySz, + sigLen); -#ifdef WOLFSSL_BLIND_PRIVATE_KEY - wolfssl_priv_der_blind_toggle(ssl->buffers.altKey, ssl->buffers.altKeyMask); -#endif + return ret; +} -#ifdef WOLF_PRIVATE_KEY_ID - if (ssl->buffers.altKeyDevId != INVALID_DEVID && - (ssl->buffers.altKeyId || ssl->buffers.altKeyLabel)) { - if (ssl->buffers.altKeyType == rsa_sa_algo) - ssl->hsAltType = DYNAMIC_TYPE_RSA; - else if (ssl->buffers.altKeyType == ecc_dsa_sa_algo) - ssl->hsAltType = DYNAMIC_TYPE_ECC; - else if ((ssl->buffers.altKeyType == falcon_level1_sa_algo) || - (ssl->buffers.altKeyType == falcon_level5_sa_algo)) - ssl->hsAltType = DYNAMIC_TYPE_FALCON; - else if ((ssl->buffers.altKeyType == dilithium_level2_sa_algo) || - (ssl->buffers.altKeyType == dilithium_level3_sa_algo) || - (ssl->buffers.altKeyType == dilithium_level5_sa_algo)) - ssl->hsAltType = DYNAMIC_TYPE_DILITHIUM; - ret = AllocKey(ssl, ssl->hsAltType, &ssl->hsAltKey); - if (ret != 0) { - goto exit_dapk; - } +#if defined(WOLFSSL_DUAL_ALG_CERTS) +/* This is just like the above, but uses the alternative key of the ssl object, + * not the primary key. 
+ */ +int DecodeAltPrivateKey(WOLFSSL *ssl, word32* sigLen) +{ + int ret = 0; - if (ssl->buffers.altKeyType == rsa_sa_algo) { - #ifndef NO_RSA - if (ssl->buffers.altKeyLabel) { - ret = wc_InitRsaKey_Label((RsaKey*)ssl->hsAltKey, - (char*)ssl->buffers.altKey->buffer, - ssl->heap, ssl->buffers.altKeyDevId); - } - else if (ssl->buffers.altKeyId) { - ret = wc_InitRsaKey_Id((RsaKey*)ssl->hsAltKey, - ssl->buffers.altKey->buffer, - ssl->buffers.altKey->length, ssl->heap, - ssl->buffers.altKeyDevId); - } - if (ret == 0) { - if (ssl->buffers.altKeySz < ssl->options.minRsaKeySz) { - WOLFSSL_MSG("RSA key size too small"); - ERROR_OUT(RSA_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. */ - *length = ssl->buffers.altKeySz; - } - #else - ret = NOT_COMPILED_IN; - #endif - } - else if (ssl->buffers.altKeyType == ecc_dsa_sa_algo) { - #ifdef HAVE_ECC - if (ssl->buffers.altKeyLabel) { - ret = wc_ecc_init_label((ecc_key*)ssl->hsAltKey, - (char*)ssl->buffers.altKey->buffer, - ssl->heap, ssl->buffers.altKeyDevId); - } - else if (ssl->buffers.altKeyId) { - ret = wc_ecc_init_id((ecc_key*)ssl->hsAltKey, - ssl->buffers.altKey->buffer, - ssl->buffers.altKey->length, ssl->heap, - ssl->buffers.altKeyDevId); - } - if (ret == 0) { - if (ssl->buffers.altKeySz < ssl->options.minEccKeySz) { - WOLFSSL_MSG("ECC key size too small"); - ERROR_OUT(ECC_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. 
*/ - *length = wc_ecc_sig_size_calc(ssl->buffers.altKeySz); - } - #else - ret = NOT_COMPILED_IN; - #endif - } - else if ((ssl->buffers.altKeyType == falcon_level1_sa_algo) || - (ssl->buffers.altKeyType == falcon_level5_sa_algo)) { - #if defined(HAVE_FALCON) - if (ssl->buffers.altKeyLabel) { - ret = wc_falcon_init_label((falcon_key*)ssl->hsAltKey, - (char*)ssl->buffers.altKey->buffer, - ssl->heap, ssl->buffers.altKeyDevId); - } - else if (ssl->buffers.altKeyId) { - ret = wc_falcon_init_id((falcon_key*)ssl->hsAltKey, - ssl->buffers.altKey->buffer, - ssl->buffers.altKey->length, ssl->heap, - ssl->buffers.altKeyDevId); - } - if (ret == 0) { - if (ssl->buffers.altKeyType == falcon_level1_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsAltKey, 1); - } - else if (ssl->buffers.altKeyType == falcon_level5_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsAltKey, 5); - } - } - if (ret == 0) { - if (ssl->buffers.altKeySz < ssl->options.minFalconKeySz) { - WOLFSSL_MSG("Falcon key size too small"); - ERROR_OUT(FALCON_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. 
*/ - *length = wc_falcon_sig_size((falcon_key*)ssl->hsAltKey); - } - #else - ret = NOT_COMPILED_IN; - #endif - } - else if ((ssl->buffers.altKeyType == dilithium_level2_sa_algo) || - (ssl->buffers.altKeyType == dilithium_level3_sa_algo) || - (ssl->buffers.altKeyType == dilithium_level5_sa_algo)) { - #if defined(HAVE_DILITHIUM) - if (ssl->buffers.altKeyLabel) { - ret = wc_dilithium_init_label((dilithium_key*)ssl->hsAltKey, - (char*)ssl->buffers.altKey->buffer, - ssl->heap, ssl->buffers.altKeyDevId); - } - else if (ssl->buffers.altKeyId) { - ret = wc_dilithium_init_id((dilithium_key*)ssl->hsAltKey, - ssl->buffers.altKey->buffer, - ssl->buffers.altKey->length, ssl->heap, - ssl->buffers.altKeyDevId); - } - if (ret == 0) { - if (ssl->buffers.altKeyType == dilithium_level2_sa_algo) { - ret = wc_dilithium_set_level( - (dilithium_key*)ssl->hsAltKey, WC_ML_DSA_44); - } - else if (ssl->buffers.altKeyType == dilithium_level3_sa_algo) { - ret = wc_dilithium_set_level( - (dilithium_key*)ssl->hsAltKey, WC_ML_DSA_65); - } - else if (ssl->buffers.altKeyType == dilithium_level5_sa_algo) { - ret = wc_dilithium_set_level( - (dilithium_key*)ssl->hsAltKey, WC_ML_DSA_87); - } - } - if (ret == 0) { - if (ssl->buffers.altKeySz < ssl->options.minDilithiumKeySz) { - WOLFSSL_MSG("Dilithium key size too small"); - ERROR_OUT(DILITHIUM_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. */ - *length = wc_dilithium_sig_size( - (dilithium_key*)ssl->hsAltKey); - } - #else - ret = NOT_COMPILED_IN; - #endif - } - goto exit_dapk; - } -#endif /* WOLF_PRIVATE_KEY_ID */ - -#ifndef NO_RSA - if (ssl->buffers.altKeyType == rsa_sa_algo || - ssl->buffers.altKeyType == 0) { - ssl->hsAltType = DYNAMIC_TYPE_RSA; - ret = AllocKey(ssl, ssl->hsAltType, &ssl->hsAltKey); - if (ret != 0) { - goto exit_dapk; - } - - WOLFSSL_MSG("Trying RSA private key"); - - /* Set start of data to beginning of buffer. */ - idx = 0; - /* Decode the key assuming it is an RSA private key. 
*/ - ret = wc_RsaPrivateKeyDecode(ssl->buffers.altKey->buffer, &idx, - (RsaKey*)ssl->hsAltKey, ssl->buffers.altKey->length); - #ifdef WOLF_PRIVATE_KEY_ID - /* if using external key then allow using a public key */ - if (ret != 0 && (ssl->devId != INVALID_DEVID - #ifdef HAVE_PK_CALLBACKS - || wolfSSL_CTX_IsPrivatePkSet(ssl->ctx) - #endif - )) { - WOLFSSL_MSG("Trying RSA public key with crypto callbacks"); - idx = 0; - ret = wc_RsaPublicKeyDecode(ssl->buffers.altKey->buffer, &idx, - (RsaKey*)ssl->hsAltKey, ssl->buffers.altKey->length); - } - #endif - if (ret == 0) { - WOLFSSL_MSG("Using RSA private key"); - - /* It worked so check it meets minimum key size requirements. */ - keySz = wc_RsaEncryptSize((RsaKey*)ssl->hsAltKey); - if (keySz < 0) { /* check if keySz has error case */ - ERROR_OUT(keySz, exit_dapk); - } - - if (keySz < ssl->options.minRsaKeySz) { - WOLFSSL_MSG("RSA key size too small"); - ERROR_OUT(RSA_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. */ - *length = keySz; - - goto exit_dapk; - } - } -#endif /* !NO_RSA */ - -#ifdef HAVE_ECC -#ifndef NO_RSA - FreeKey(ssl, ssl->hsAltType, (void**)&ssl->hsAltKey); -#endif /* !NO_RSA */ - - if (ssl->buffers.altKeyType == ecc_dsa_sa_algo || - ssl->buffers.altKeyType == 0 - #if defined(WOLFSSL_SM2) && defined(WOLFSSL_SM3) - || ssl->buffers.altKeyType == sm2_sa_algo - #endif - ) { - ssl->hsAltType = DYNAMIC_TYPE_ECC; - ret = AllocKey(ssl, ssl->hsAltType, &ssl->hsAltKey); - if (ret != 0) { - goto exit_dapk; - } - - #ifndef NO_RSA - WOLFSSL_MSG("Trying ECC private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying ECC private key"); - #endif - - /* Set start of data to beginning of buffer. */ - idx = 0; - /* Decode the key assuming it is an ECC private key. 
*/ - ret = wc_EccPrivateKeyDecode(ssl->buffers.altKey->buffer, &idx, - (ecc_key*)ssl->hsAltKey, - ssl->buffers.altKey->length); - #ifdef WOLF_PRIVATE_KEY_ID - /* if using external key then allow using a public key */ - if (ret != 0 && (ssl->devId != INVALID_DEVID - #ifdef HAVE_PK_CALLBACKS - || wolfSSL_CTX_IsPrivatePkSet(ssl->ctx) - #endif - )) { - WOLFSSL_MSG("Trying ECC public key with crypto callbacks"); - idx = 0; - ret = wc_EccPublicKeyDecode(ssl->buffers.altKey->buffer, &idx, - (ecc_key*)ssl->hsAltKey, - ssl->buffers.altKey->length); - } - #endif - if (ret == 0) { - WOLFSSL_MSG("Using ECC private key"); - - /* Check it meets the minimum ECC key size requirements. */ - keySz = wc_ecc_size((ecc_key*)ssl->hsAltKey); - if (keySz < ssl->options.minEccKeySz) { - WOLFSSL_MSG("ECC key size too small"); - ERROR_OUT(ECC_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. */ - *length = wc_ecc_sig_size((ecc_key*)ssl->hsAltKey); - - goto exit_dapk; - } - } -#endif -#if defined(HAVE_FALCON) - #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsAltType, (void**)&ssl->hsAltKey); - #endif - - if (ssl->buffers.altKeyType == falcon_level1_sa_algo || - ssl->buffers.altKeyType == falcon_level5_sa_algo || - ssl->buffers.altKeyType == 0) { - - ssl->hsAltType = DYNAMIC_TYPE_FALCON; - ret = AllocKey(ssl, ssl->hsAltType, &ssl->hsAltKey); - if (ret != 0) { - goto exit_dapk; - } - - if (ssl->buffers.altKeyType == falcon_level1_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsAltKey, 1); - } - else if (ssl->buffers.altKeyType == falcon_level5_sa_algo) { - ret = wc_falcon_set_level((falcon_key*)ssl->hsAltKey, 5); - } - else { - /* What if ssl->buffers.keyType is 0? We might want to do something - * more graceful here. 
*/ - ret = ALGO_ID_E; - } - - if (ret != 0) { - goto exit_dapk; - } - - #if defined(HAVE_ECC) - WOLFSSL_MSG("Trying Falcon private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying Falcon private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying Falcon private key"); - #endif - - /* Set start of data to beginning of buffer. */ - idx = 0; - /* Decode the key assuming it is a Falcon private key. */ - ret = wc_falcon_import_private_only(ssl->buffers.altKey->buffer, - ssl->buffers.altKey->length, - (falcon_key*)ssl->hsAltKey); - if (ret == 0) { - WOLFSSL_MSG("Using Falcon private key"); - - /* Check it meets the minimum Falcon key size requirements. */ - keySz = wc_falcon_size((falcon_key*)ssl->hsAltKey); - if (keySz < ssl->options.minFalconKeySz) { - WOLFSSL_MSG("Falcon key size too small"); - ERROR_OUT(FALCON_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. */ - *length = wc_falcon_sig_size((falcon_key*)ssl->hsAltKey); - - goto exit_dapk; - } - } -#endif /* HAVE_FALCON */ -#if defined(HAVE_DILITHIUM) - #if !defined(NO_RSA) || defined(HAVE_ECC) - FreeKey(ssl, ssl->hsAltType, (void**)&ssl->hsAltKey); - #endif - - if (ssl->buffers.altKeyType == dilithium_level2_sa_algo || - ssl->buffers.altKeyType == dilithium_level3_sa_algo || - ssl->buffers.altKeyType == dilithium_level5_sa_algo || - ssl->buffers.altKeyType == 0) { - - ssl->hsAltType = DYNAMIC_TYPE_DILITHIUM; - ret = AllocKey(ssl, ssl->hsAltType, &ssl->hsAltKey); - if (ret != 0) { - goto exit_dapk; - } - - if (ssl->buffers.altKeyType == dilithium_level2_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsAltKey, WC_ML_DSA_44); - } - else if (ssl->buffers.altKeyType == dilithium_level3_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsAltKey, WC_ML_DSA_65); - } - else if (ssl->buffers.altKeyType == dilithium_level5_sa_algo) { - ret = wc_dilithium_set_level((dilithium_key*)ssl->hsAltKey, WC_ML_DSA_87); - } - else { - /* What if 
ssl->buffers.keyType is 0? We might want to do something - * more graceful here. */ - ret = ALGO_ID_E; - } - - if (ret != 0) { - goto exit_dapk; - } - - #if defined(HAVE_FALCON) - WOLFSSL_MSG("Trying Dilithium private key, Falcon didn't work"); - #elif defined(HAVE_ECC) - WOLFSSL_MSG("Trying Dilithium private key, ECC didn't work"); - #elif !defined(NO_RSA) - WOLFSSL_MSG("Trying Dilithium private key, RSA didn't work"); - #else - WOLFSSL_MSG("Trying Dilithium private key"); - #endif - - /* Set start of data to beginning of buffer. */ - idx = 0; - /* Decode the key assuming it is a Dilithium private key. */ - ret = wc_Dilithium_PrivateKeyDecode(ssl->buffers.altKey->buffer, - &idx, - (dilithium_key*)ssl->hsAltKey, - ssl->buffers.altKey->length); - if (ret == 0) { - WOLFSSL_MSG("Using Dilithium private key"); - - /* Check it meets the minimum Dilithium key size requirements. */ - keySz = wc_dilithium_size((dilithium_key*)ssl->hsAltKey); - if (keySz < ssl->options.minDilithiumKeySz) { - WOLFSSL_MSG("Dilithium key size too small"); - ERROR_OUT(DILITHIUM_KEY_SIZE_E, exit_dapk); - } - - /* Return the maximum signature length. 
*/ - *length = wc_dilithium_sig_size((dilithium_key*)ssl->hsAltKey); - - goto exit_dapk; - } - } -#endif /* HAVE_DILITHIUM */ - - (void)idx; - (void)keySz; - (void)length; - -exit_dapk: -#ifdef WOLFSSL_BLIND_PRIVATE_KEY - if (ret == 0) { - ret = wolfssl_priv_der_blind(ssl->rng, ssl->buffers.altKey, - &ssl->buffers.altKeyMask); - } - else { - wolfssl_priv_der_blind_toggle(ssl->buffers.key, ssl->buffers.keyMask); - } -#endif - - if (ret != 0) { - WOLFSSL_ERROR_VERBOSE(ret); - } + ret = DecodePrivateKey_ex(ssl, ssl->buffers.altKeyType, ssl->buffers.altKey, + &ssl->hsAltType, &ssl->hsAltKey, ssl->buffers.altKeyDevId, + ssl->buffers.altKeyId, ssl->buffers.altKeyLabel, + ssl->buffers.altKeySz, sigLen); return ret; } @@ -35276,6 +34862,8 @@ static int DoSessionTicket(WOLFSSL* ssl, const byte* input, word32* inOutIdx, return wolfssl_alert_protocol_version; case WC_NO_ERR_TRACE(BAD_CERTIFICATE_STATUS_ERROR): return bad_certificate_status_response; + case WC_NO_ERR_TRACE(OUT_OF_ORDER_E): + return unexpected_message; default: return invalid_alert; } @@ -42491,12 +42079,17 @@ static int DoAppleNativeCertValidation(WOLFSSL* ssl, kCFAllocatorDefault, (const char*)ssl->buffers.domainName.buffer, kCFStringEncodingUTF8); } - if (hostname != NULL) { - policy = SecPolicyCreateSSL(true, hostname); - } - else { - policy = SecPolicyCreateSSL(true, NULL); + + /* If we're the client, we're validating the server's cert - use server + * policy (true). If we're the server, we're validating the client's cert - + * use client policy (false). Hostname validation only applies to server + * certs. */ + { + int isServerCert = (ssl->options.side == WOLFSSL_CLIENT_END); + policy = SecPolicyCreateSSL(isServerCert, + isServerCert ? 
hostname : NULL); } + status = SecTrustCreateWithCertificates(certArray, policy, &trust); if (status != errSecSuccess) { WOLFSSL_MSG_EX("Error creating trust object, " diff --git a/src/ocsp.c b/src/ocsp.c index 2348af7df..77cce2b1a 100644 --- a/src/ocsp.c +++ b/src/ocsp.c @@ -296,10 +296,10 @@ static int GetOcspStatus(WOLFSSL_OCSP* ocsp, OcspRequest* request, else if (*status) { #ifndef NO_ASN_TIME if (XVALIDATE_DATE((*status)->thisDate, - (*status)->thisDateFormat, ASN_BEFORE) + (*status)->thisDateFormat, ASN_BEFORE, MAX_DATE_SIZE) && ((*status)->nextDate[0] != 0) && XVALIDATE_DATE((*status)->nextDate, - (*status)->nextDateFormat, ASN_AFTER)) + (*status)->nextDateFormat, ASN_AFTER, MAX_DATE_SIZE)) #endif { ret = xstat2err((*status)->status); diff --git a/src/sniffer.c b/src/sniffer.c index 128c0dc40..905e6abc9 100644 --- a/src/sniffer.c +++ b/src/sniffer.c @@ -2121,6 +2121,11 @@ static int CheckIp6Hdr(Ip6Hdr* iphdr, IpInfo* info, int length, char* error) int version = IP_V(iphdr); int exthdrsz = IP6_HDR_SZ; + if (length < IP6_HDR_SZ) { + SetError(BAD_IPVER_STR, error, NULL, 0); + return WOLFSSL_FATAL_ERROR; + } + TraceIP6(iphdr); Trace(IP_CHECK_STR); @@ -2175,6 +2180,11 @@ static int CheckIpHdr(IpHdr* iphdr, IpInfo* info, int length, char* error, if (version == IPV6) return CheckIp6Hdr((Ip6Hdr*)iphdr, info, length, error); + if (length < IP_HDR_SZ) { + SetError(PACKET_HDR_SHORT_STR, error, NULL, 0); + return WOLFSSL_FATAL_ERROR; + } + if (trace) { TraceIP(iphdr); Trace(IP_CHECK_STR); @@ -5026,6 +5036,10 @@ static const byte* DecryptMessage(WOLFSSL* ssl, const byte* input, word32 sz, #ifdef WOLFSSL_TLS13 if (IsAtLeastTLSv1_3(ssl->version)) { + if (sz < ssl->specs.aead_mac_size) { + *error = BUFFER_ERROR; + return NULL; + } ret = DecryptTls13(ssl, output, input, sz, (byte*)rh, RECORD_HEADER_SZ); } else @@ -5404,6 +5418,12 @@ static int CheckHeaders(IpInfo* ipInfo, TcpInfo* tcpInfo, const byte* packet, /* trim VLAN header and try again */ packet += 8; length -= 8; + 
if (length < IP_HDR_SZ) { + SetError(PACKET_HDR_SHORT_STR, error, NULL, 0); + return WOLFSSL_FATAL_ERROR; + } + iphdr = (IpHdr*)packet; + version = IP_V(iphdr); } } diff --git a/src/ssl.c b/src/ssl.c index a128d3b62..8fc5833b9 100644 --- a/src/ssl.c +++ b/src/ssl.c @@ -1119,7 +1119,12 @@ static int wolfSSL_parse_cipher_list(WOLFSSL_CTX* ctx, WOLFSSL* ssl, #endif /* prevent multiple mutex initializations */ + +/* note, initRefCount is not used for thread synchronization, only for + * bookkeeping while inits_count_mutex is held. + */ static volatile WC_THREADSHARED int initRefCount = 0; + /* init ref count mutex */ static WC_THREADSHARED wolfSSL_Mutex inits_count_mutex WOLFSSL_MUTEX_INITIALIZER_CLAUSE(inits_count_mutex); @@ -6551,7 +6556,7 @@ int wolfSSL_Init(void) #endif /* WOLFSSL_SYS_CRYPTO_POLICY */ if (ret == WOLFSSL_SUCCESS) { - initRefCount++; + initRefCount = initRefCount + 1; } else { initRefCount = 1; /* Force cleanup */ @@ -10003,7 +10008,7 @@ int wolfSSL_dtls_get_current_timeout(WOLFSSL* ssl) */ int wolfSSL_dtls13_use_quick_timeout(WOLFSSL* ssl) { - return ssl->dtls13FastTimeout; + return ssl != NULL && ssl->dtls13FastTimeout; } /* @@ -11425,7 +11430,7 @@ int wolfSSL_Cleanup(void) #endif if (initRefCount > 0) { - --initRefCount; + initRefCount = initRefCount - 1; if (initRefCount == 0) release = 1; } @@ -22047,7 +22052,8 @@ int wolfSSL_get_ocsp_producedDate_tm(WOLFSSL *ssl, struct tm *produced_tm) { return BAD_FUNC_ARG; if (ExtractDate(ssl->ocspProducedDate, - (unsigned char)ssl->ocspProducedDateFormat, produced_tm, &idx)) + (unsigned char)ssl->ocspProducedDateFormat, produced_tm, &idx, + MAX_DATE_SZ)) return 0; else return ASN_PARSE_E; diff --git a/src/ssl_asn1.c b/src/ssl_asn1.c index d501b6a68..2fe3cab47 100644 --- a/src/ssl_asn1.c +++ b/src/ssl_asn1.c @@ -4194,7 +4194,7 @@ char* wolfSSL_ASN1_TIME_to_string(WOLFSSL_ASN1_TIME* t, char* buf, int len) } /* Get time as human readable string. 
*/ - if ((buf != NULL) && !GetTimeString(t->data, t->type, buf, len)) { + if ((buf != NULL) && !GetTimeString(t->data, t->type, buf, len, t->length)) { buf = NULL; } diff --git a/src/tls.c b/src/tls.c index 743fbf3e9..8226ba478 100644 --- a/src/tls.c +++ b/src/tls.c @@ -937,6 +937,9 @@ static int Hmac_UpdateFinal_CT(Hmac* hmac, byte* digest, const byte* in, word32 realLen; byte extraBlock; + if (macLen <= 0 || macLen > (int)sizeof(hmac->innerHash)) + return BAD_FUNC_ARG; + switch (hmac->macType) { #ifndef NO_SHA case WC_SHA: @@ -6972,8 +6975,10 @@ int TLSX_SupportedVersions_Parse(const WOLFSSL* ssl, const byte* input, int set = 0; /* Must contain a length and at least one version. */ - if (length < OPAQUE8_LEN + OPAQUE16_LEN || (length & 1) != 1) + if (length < OPAQUE8_LEN + OPAQUE16_LEN || (length & 1) != 1 + || length > MAX_SV_EXT_LEN) { return BUFFER_ERROR; + } len = *input; @@ -9963,10 +9968,13 @@ int TLSX_KeyShare_Parse_ClientHello(const WOLFSSL* ssl, if (length < OPAQUE16_LEN) return BUFFER_ERROR; - /* ClientHello contains zero or more key share entries. */ + /* ClientHello contains zero or more key share entries. Limits extension + * length to 2^16-1 and subtracting 4 bytes for header size per RFC 8446 */ ato16(input, &len); - if (len != length - OPAQUE16_LEN) + if ((len != length - OPAQUE16_LEN) || + length > (MAX_EXT_DATA_LEN - HELLO_EXT_SZ)) { return BUFFER_ERROR; + } offset += OPAQUE16_LEN; while (offset < (int)length) { @@ -11607,7 +11615,7 @@ static int TLSX_PreSharedKey_Parse(WOLFSSL* ssl, const byte* input, /* Find the list of identities sent to server. */ extension = TLSX_Find(ssl->extensions, TLSX_PRE_SHARED_KEY); if (extension == NULL) - return PSK_KEY_ERROR; + return INCOMPLETE_DATA; list = (PreSharedKey*)extension->data; /* Mark the identity as chosen. 
*/ @@ -16538,11 +16546,51 @@ int TLSX_Parse(WOLFSSL* ssl, const byte* input, word16 length, byte msgType, #if defined(HAVE_RPK) case TLSX_CLIENT_CERTIFICATE_TYPE: WOLFSSL_MSG("Client Certificate Type extension received"); +#if defined(WOLFSSL_TLS13) + /* RFC 8446, Section 4.2 (Extensions), client_certificate_type + and server_certificate_type MUST be sent in ClientHello(CH) + or EncryptedExtensions(EE) */ + if (IsAtLeastTLSv1_3(ssl->version)) { + if (msgType != client_hello && + msgType != encrypted_extensions) { + WOLFSSL_ERROR_VERBOSE(EXT_NOT_ALLOWED); + return EXT_NOT_ALLOWED; + } + } + else +#endif + { + /* TLS 1.2: allowed in CH and SH (RFC 7250) */ + if (msgType != client_hello && + msgType != server_hello) { + WOLFSSL_ERROR_VERBOSE(EXT_NOT_ALLOWED); + return EXT_NOT_ALLOWED; + } + } ret = CCT_PARSE(ssl, input + offset, size, msgType); break; case TLSX_SERVER_CERTIFICATE_TYPE: WOLFSSL_MSG("Server Certificate Type extension received"); +#if defined(WOLFSSL_TLS13) + /* RFC 8446, Section 4.2 (Extensions) */ + if (IsAtLeastTLSv1_3(ssl->version)) { + if (msgType != client_hello && + msgType != encrypted_extensions) { + WOLFSSL_ERROR_VERBOSE(EXT_NOT_ALLOWED); + return EXT_NOT_ALLOWED; + } + } + else +#endif + { + /* TLS 1.2: allowed in CH and SH (RFC 7250) */ + if (msgType != client_hello && + msgType != server_hello) { + WOLFSSL_ERROR_VERBOSE(EXT_NOT_ALLOWED); + return EXT_NOT_ALLOWED; + } + } ret = SCT_PARSE(ssl, input + offset, size, msgType); break; #endif /* HAVE_RPK */ diff --git a/src/tls13.c b/src/tls13.c index 9f25f47a0..25b2b6621 100644 --- a/src/tls13.c +++ b/src/tls13.c @@ -1665,7 +1665,7 @@ end: return ret; } -#if (defined(HAVE_SESSION_TICKET) || !defined(NO_PSK)) +#if defined(HAVE_SESSION_TICKET) || !defined(NO_PSK) || defined(WOLFSSL_DTLS13) #ifdef WOLFSSL_32BIT_MILLI_TIME #ifndef NO_ASN_TIME #if defined(USER_TICKS) @@ -2264,7 +2264,7 @@ end: */ #endif /* !NO_ASN_TIME */ #endif /* WOLFSSL_32BIT_MILLI_TIME */ -#endif /* HAVE_SESSION_TICKET || !NO_PSK 
*/ +#endif /* HAVE_SESSION_TICKET || !NO_PSK || WOLFSSL_DTLS13 */ /* Add record layer header to message. * @@ -4322,6 +4322,26 @@ typedef struct Sch13Args { #endif } Sch13Args; +#ifdef WOLFSSL_EARLY_DATA +/* Check if early data can potentially be sent. + * Returns 1 if early data is possible, 0 otherwise. + */ +static int EarlyDataPossible(WOLFSSL* ssl) +{ + /* Need session resumption OR PSK callback configured */ + if (ssl->options.resuming) { + return 1; + } +#ifndef NO_PSK + if (ssl->options.client_psk_tls13_cb != NULL || + ssl->options.client_psk_cb != NULL) { + return 1; + } +#endif + return 0; +} +#endif /* WOLFSSL_EARLY_DATA */ + int SendTls13ClientHello(WOLFSSL* ssl) { int ret; @@ -4461,14 +4481,8 @@ int SendTls13ClientHello(WOLFSSL* ssl) case TLS_ASYNC_FINALIZE: { #ifdef WOLFSSL_EARLY_DATA - #ifndef NO_PSK - if (!ssl->options.resuming && - ssl->options.client_psk_tls13_cb == NULL && - ssl->options.client_psk_cb == NULL) - #else - if (!ssl->options.resuming) - #endif - ssl->earlyData = no_early_data; + if (!EarlyDataPossible(ssl)) + ssl->earlyData = no_early_data; if (ssl->options.serverState == SERVER_HELLO_RETRY_REQUEST_COMPLETE) ssl->earlyData = no_early_data; if (ssl->earlyData == no_early_data) @@ -5663,7 +5677,7 @@ int DoTls13ServerHello(WOLFSSL* ssl, const byte* input, word32* inOutIdx, #endif ) { SendAlert(ssl, alert_fatal, illegal_parameter); - return DUPLICATE_MSG_E; + return EXT_MISSING; } ssl->options.tls1_3 = 1; @@ -5744,15 +5758,13 @@ static int DoTls13EncryptedExtensions(WOLFSSL* ssl, const byte* input, if (ext == NULL || !ext->val) ssl->earlyData = no_early_data; } -#endif -#ifdef WOLFSSL_EARLY_DATA if (ssl->earlyData == no_early_data) { ret = SetKeysSide(ssl, ENCRYPT_SIDE_ONLY); if (ret != 0) return ret; } -#endif +#endif /* WOLFSSL_EARLY_DATA */ ssl->options.serverState = SERVER_ENCRYPTED_EXTENSIONS_COMPLETE; @@ -9285,10 +9297,13 @@ static int SendTls13CertificateVerify(WOLFSSL* ssl) /* Swap keys */ ssl->buffers.key = ssl->buffers.altKey; 
+ ssl->buffers.weOwnKey = ssl->buffers.weOwnAltKey; + #ifdef WOLFSSL_BLIND_PRIVATE_KEY ssl->buffers.keyMask = ssl->buffers.altKeyMask; + /* Unblind the alternative key before decoding */ + wolfssl_priv_der_blind_toggle(ssl->buffers.key, ssl->buffers.keyMask); #endif - ssl->buffers.weOwnKey = ssl->buffers.weOwnAltKey; } #endif /* WOLFSSL_DUAL_ALG_CERTS */ ret = DecodePrivateKey(ssl, &args->sigLen); @@ -9356,7 +9371,7 @@ static int SendTls13CertificateVerify(WOLFSSL* ssl) /* The native was already decoded. Now we need to do the * alternative. Note that no swap was done because this case is * both native and alternative, not just alternative. */ - if (ssl->ctx->altPrivateKey == NULL) { + if (ssl->buffers.altKey == NULL) { ERROR_OUT(NO_PRIVATE_KEY, exit_scv); } @@ -10295,8 +10310,7 @@ static int DoTls13CertificateVerify(WOLFSSL* ssl, byte* input, args->idx += OPAQUE16_LEN; /* Signature data. */ - if ((args->idx - args->begin) + args->sz > totalSz || - args->sz > ENCRYPT_LEN) { + if ((args->idx - args->begin) + args->sz > totalSz) { ERROR_OUT(BUFFER_ERROR, exit_dcv); } @@ -11384,7 +11398,7 @@ static int SendTls13Finished(WOLFSSL* ssl) if ((ret = SetKeysSide(ssl, ENCRYPT_SIDE_ONLY)) != 0) return ret; -#if defined(HAVE_SESSION_TICKET) || !defined(NO_PSK) +#if defined(HAVE_SESSION_TICKET) ret = DeriveResumptionSecret(ssl, ssl->session->masterSecret); if (ret != 0) return ret; @@ -13086,7 +13100,7 @@ int DoTls13HandShakeMsgType(WOLFSSL* ssl, byte* input, word32* inOutIdx, #endif /* NO_WOLFSSL_CLIENT */ #ifndef NO_WOLFSSL_SERVER - #if defined(HAVE_SESSION_TICKET) || !defined(NO_PSK) + #if defined(HAVE_SESSION_TICKET) if (ssl->options.side == WOLFSSL_SERVER_END && type == finished) { ret = DeriveResumptionSecret(ssl, ssl->session->masterSecret); if (ret != 0) @@ -14978,8 +14992,9 @@ int wolfSSL_get_max_early_data(WOLFSSL* ssl) * sz The size of the early data in bytes. * outSz The number of early data bytes written. 
* returns BAD_FUNC_ARG when: ssl, data or outSz is NULL; sz is negative; - * or not using TLS v1.3. SIDE ERROR when not a server. Otherwise the number of - * early data bytes written. + * or not using TLS v1.3. SIDE ERROR when not a server. BAD_STATE_E if invoked + * without a valid session or without a valid PSK CB. + * Otherwise the number of early data bytes written. */ int wolfSSL_write_early_data(WOLFSSL* ssl, const void* data, int sz, int* outSz) { @@ -14996,8 +15011,15 @@ int wolfSSL_write_early_data(WOLFSSL* ssl, const void* data, int sz, int* outSz) if (ssl->options.side == WOLFSSL_SERVER_END) return SIDE_ERROR; + /* Early data requires PSK or session resumption */ + if (!EarlyDataPossible(ssl)) { + return BAD_STATE_E; + } + if (ssl->options.handShakeState == NULL_STATE) { - if (ssl->error != WC_NO_ERR_TRACE(WC_PENDING_E)) + /* avoid re-setting ssl->earlyData if we re-enter the function because + * of WC_PENDING_E, WANT_WRITE or WANT_READ */ + if (ssl->error == 0) ssl->earlyData = expecting_early_data; ret = wolfSSL_connect_TLSv13(ssl); if (ret != WOLFSSL_SUCCESS) diff --git a/src/x509.c b/src/x509.c index 8fc94f5f1..7594b0dae 100644 --- a/src/x509.c +++ b/src/x509.c @@ -571,7 +571,6 @@ static int wolfssl_dns_entry_othername_to_gn(DNS_entry* dns, tag = WOLFSSL_V_ASN1_SEQUENCE; } - /* Create a WOLFSSL_ASN1_STRING from the DER. */ str = wolfSSL_ASN1_STRING_type_new(tag); if (str == NULL) { @@ -584,15 +583,23 @@ static int wolfssl_dns_entry_othername_to_gn(DNS_entry* dns, if (type == NULL) goto err; wolfSSL_ASN1_TYPE_set(type, tag, str); + str = NULL; /* type now owns str */ + + if (wolfSSL_GENERAL_NAME_set_type(gn, WOLFSSL_GEN_OTHERNAME) + != WOLFSSL_SUCCESS) { + goto err; + } /* Store the object and string in general name. 
*/ gn->d.otherName->type_id = obj; gn->d.otherName->value = type; + type = NULL; /* gn->d.otherName owns type */ ret = 1; err: if (ret != 1) { wolfSSL_ASN1_OBJECT_free(obj); + wolfSSL_ASN1_TYPE_free(type); wolfSSL_ASN1_STRING_free(str); } return ret; @@ -602,13 +609,13 @@ err: #if defined(OPENSSL_ALL) || defined(OPENSSL_EXTRA) static int DNS_to_GENERAL_NAME(WOLFSSL_GENERAL_NAME* gn, DNS_entry* dns) { - gn->type = dns->type; - switch (gn->type) { + switch (dns->type) { case WOLFSSL_GEN_OTHERNAME: - if (!wolfssl_dns_entry_othername_to_gn(dns, gn)) { - WOLFSSL_MSG("OTHERNAME set failed"); - return WOLFSSL_FAILURE; - } + /* Sets gn->type internally */ + if (!wolfssl_dns_entry_othername_to_gn(dns, gn)) { + WOLFSSL_MSG("OTHERNAME set failed"); + return WOLFSSL_FAILURE; + } break; case WOLFSSL_GEN_EMAIL: @@ -616,16 +623,18 @@ static int DNS_to_GENERAL_NAME(WOLFSSL_GENERAL_NAME* gn, DNS_entry* dns) case WOLFSSL_GEN_URI: case WOLFSSL_GEN_IPADD: case WOLFSSL_GEN_IA5: - gn->d.ia5->length = dns->len; - if (wolfSSL_ASN1_STRING_set(gn->d.ia5, dns->name, - gn->d.ia5->length) != WOLFSSL_SUCCESS) { - WOLFSSL_MSG("ASN1_STRING_set failed"); - return WOLFSSL_FAILURE; - } - break; + gn->type = dns->type; + gn->d.ia5->length = dns->len; + if (wolfSSL_ASN1_STRING_set(gn->d.ia5, dns->name, + gn->d.ia5->length) != WOLFSSL_SUCCESS) { + WOLFSSL_MSG("ASN1_STRING_set failed"); + return WOLFSSL_FAILURE; + } + break; case WOLFSSL_GEN_DIRNAME: + gn->type = dns->type; /* wolfSSL_GENERAL_NAME_new() mallocs this by default */ wolfSSL_ASN1_STRING_free(gn->d.ia5); gn->d.ia5 = NULL; @@ -636,6 +645,7 @@ static int DNS_to_GENERAL_NAME(WOLFSSL_GENERAL_NAME* gn, DNS_entry* dns) #ifdef WOLFSSL_RID_ALT_NAME case WOLFSSL_GEN_RID: + gn->type = dns->type; /* wolfSSL_GENERAL_NAME_new() mallocs this by default */ wolfSSL_ASN1_STRING_free(gn->d.ia5); gn->d.ia5 = NULL; @@ -2310,9 +2320,9 @@ void* wolfSSL_X509_get_ext_d2i(const WOLFSSL_X509* x509, int nid, int* c, goto err; } - gn->type = dns->type; - switch 
(gn->type) { + switch (dns->type) { case ASN_DIR_TYPE: + gn->type = dns->type; { int localIdx = 0; unsigned char* n = (unsigned char*)XMALLOC( @@ -2336,12 +2346,14 @@ void* wolfSSL_X509_get_ext_d2i(const WOLFSSL_X509* x509, int nid, int* c, break; case ASN_OTHER_TYPE: + /* gn->type set internally */ if (!wolfssl_dns_entry_othername_to_gn(dns, gn)) { goto err; } break; case ASN_IP_TYPE: + gn->type = dns->type; if (wolfSSL_ASN1_STRING_set(gn->d.iPAddress, dns->name, dns->len) != WOLFSSL_SUCCESS) { WOLFSSL_MSG("ASN1_STRING_set failed"); @@ -2350,7 +2362,35 @@ void* wolfSSL_X509_get_ext_d2i(const WOLFSSL_X509* x509, int nid, int* c, gn->d.iPAddress->type = WOLFSSL_V_ASN1_OCTET_STRING; break; + #ifdef WOLFSSL_RID_ALT_NAME + case ASN_RID_TYPE: + gn->type = dns->type; + /* Free ia5 before using union for registeredID */ + wolfSSL_ASN1_STRING_free(gn->d.ia5); + gn->d.ia5 = NULL; + + gn->d.registeredID = wolfSSL_ASN1_OBJECT_new(); + if (gn->d.registeredID == NULL) { + goto err; + } + gn->d.registeredID->obj = + (const unsigned char*)XMALLOC(dns->len, + gn->d.registeredID->heap, DYNAMIC_TYPE_ASN1); + if (gn->d.registeredID->obj == NULL) { + goto err; + } + gn->d.registeredID->dynamic |= + WOLFSSL_ASN1_DYNAMIC_DATA; + XMEMCPY((byte*)gn->d.registeredID->obj, + dns->ridString, dns->len); + gn->d.registeredID->objSz = dns->len; + gn->d.registeredID->grp = oidCertExtType; + gn->d.registeredID->nid = WC_NID_registeredAddress; + break; + #endif /* WOLFSSL_RID_ALT_NAME */ + default: + gn->type = dns->type; if (wolfSSL_ASN1_STRING_set(gn->d.dNSName, dns->name, dns->len) != WOLFSSL_SUCCESS) { WOLFSSL_MSG("ASN1_STRING_set failed"); @@ -3448,25 +3488,25 @@ int wolfSSL_X509_pubkey_digest(const WOLFSSL_X509 *x509, const char* wolfSSL_X509_get_default_cert_file_env(void) { WOLFSSL_STUB("X509_get_default_cert_file_env"); - return NULL; + return ""; } const char* wolfSSL_X509_get_default_cert_file(void) { WOLFSSL_STUB("X509_get_default_cert_file"); - return NULL; + return ""; } const char* 
wolfSSL_X509_get_default_cert_dir_env(void) { WOLFSSL_STUB("X509_get_default_cert_dir_env"); - return NULL; + return ""; } const char* wolfSSL_X509_get_default_cert_dir(void) { WOLFSSL_STUB("X509_get_default_cert_dir"); - return NULL; + return ""; } #endif @@ -4643,7 +4683,12 @@ int wolfSSL_GENERAL_NAME_set0_othername(WOLFSSL_GENERAL_NAME* gen, return WOLFSSL_FAILURE; } - gen->type = WOLFSSL_GEN_OTHERNAME; + if (wolfSSL_GENERAL_NAME_set_type(gen, WOLFSSL_GEN_OTHERNAME) + != WOLFSSL_SUCCESS) { + wolfSSL_ASN1_OBJECT_free(x); + return WOLFSSL_FAILURE; + } + gen->d.otherName->type_id = x; gen->d.otherName->value = value; return WOLFSSL_SUCCESS; @@ -4975,6 +5020,16 @@ int wolfSSL_GENERAL_NAME_set_type(WOLFSSL_GENERAL_NAME* name, int typ) if (name->d.uniformResourceIdentifier == NULL) ret = MEMORY_E; break; + case WOLFSSL_GEN_OTHERNAME: + name->d.otherName = (WOLFSSL_ASN1_OTHERNAME*)XMALLOC( + sizeof(WOLFSSL_ASN1_OTHERNAME), NULL, DYNAMIC_TYPE_ASN1); + if (name->d.otherName == NULL) { + ret = MEMORY_E; + } + else { + XMEMSET(name->d.otherName, 0, sizeof(WOLFSSL_ASN1_OTHERNAME)); + } + break; default: name->type = WOLFSSL_GEN_IA5; name->d.ia5 = wolfSSL_ASN1_STRING_new(); @@ -6440,9 +6495,9 @@ static int X509PrintValidity(WOLFSSL_BIO* bio, WOLFSSL_ASN1_TIME * notBefore, } if (notBefore->length > 0) { if (GetTimeString(notBefore->data, ASN_UTC_TIME, - tmp, sizeof(tmp)) != WOLFSSL_SUCCESS) { + tmp, sizeof(tmp), notBefore->length) != WOLFSSL_SUCCESS) { if (GetTimeString(notBefore->data, ASN_GENERALIZED_TIME, - tmp, sizeof(tmp)) != WOLFSSL_SUCCESS) { + tmp, sizeof(tmp), notBefore->length) != WOLFSSL_SUCCESS) { WOLFSSL_MSG("Error getting not before date"); return WOLFSSL_FAILURE; } @@ -6462,9 +6517,9 @@ static int X509PrintValidity(WOLFSSL_BIO* bio, WOLFSSL_ASN1_TIME * notBefore, } if (notAfter->length > 0) { if (GetTimeString(notAfter->data, ASN_UTC_TIME, - tmp, sizeof(tmp)) != WOLFSSL_SUCCESS) { + tmp, sizeof(tmp), notAfter->length) != WOLFSSL_SUCCESS) { if 
(GetTimeString(notAfter->data, ASN_GENERALIZED_TIME, - tmp, sizeof(tmp)) != WOLFSSL_SUCCESS) { + tmp, sizeof(tmp), notAfter->length) != WOLFSSL_SUCCESS) { WOLFSSL_MSG("Error getting not after date"); return WOLFSSL_FAILURE; } @@ -9018,9 +9073,9 @@ static int X509CRLPrintRevoked(WOLFSSL_BIO* bio, WOLFSSL_X509_CRL* crl, if (revoked->revDate[0] != 0) { if (GetTimeString(revoked->revDate, ASN_UTC_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { + tmp, MAX_WIDTH, MAX_DATE_SIZE) != WOLFSSL_SUCCESS) { if (GetTimeString(revoked->revDate, ASN_GENERALIZED_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { + tmp, MAX_WIDTH, MAX_DATE_SIZE) != WOLFSSL_SUCCESS) { WOLFSSL_MSG("Error getting revocation date"); return WOLFSSL_FAILURE; } @@ -9071,13 +9126,10 @@ static int X509CRLPrintDates(WOLFSSL_BIO* bio, WOLFSSL_X509_CRL* crl, } if (crl->crlList->lastDate[0] != 0) { - if (GetTimeString(crl->crlList->lastDate, ASN_UTC_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { - if (GetTimeString(crl->crlList->lastDate, ASN_GENERALIZED_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { - WOLFSSL_MSG("Error getting last update date"); - return WOLFSSL_FAILURE; - } + if (GetTimeString(crl->crlList->lastDate, crl->crlList->lastDateFormat, + tmp, MAX_WIDTH, MAX_DATE_SIZE) != WOLFSSL_SUCCESS) { + WOLFSSL_MSG("Error getting last update date"); + return WOLFSSL_FAILURE; } } else { @@ -9102,13 +9154,10 @@ static int X509CRLPrintDates(WOLFSSL_BIO* bio, WOLFSSL_X509_CRL* crl, } if (crl->crlList->nextDate[0] != 0) { - if (GetTimeString(crl->crlList->nextDate, ASN_UTC_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { - if (GetTimeString(crl->crlList->nextDate, ASN_GENERALIZED_TIME, - tmp, MAX_WIDTH) != WOLFSSL_SUCCESS) { - WOLFSSL_MSG("Error getting next update date"); - return WOLFSSL_FAILURE; - } + if (GetTimeString(crl->crlList->nextDate, crl->crlList->nextDateFormat, + tmp, MAX_WIDTH, MAX_DATE_SIZE) != WOLFSSL_SUCCESS) { + WOLFSSL_MSG("Error getting next update date"); + return WOLFSSL_FAILURE; } } else { diff --git 
a/src/x509_str.c b/src/x509_str.c index 432a48897..99f195b4e 100644 --- a/src/x509_str.c +++ b/src/x509_str.c @@ -375,23 +375,41 @@ static int X509StoreVerifyCertDate(WOLFSSL_X509_STORE_CTX* ctx, int ret) WOLFSSL_MSG("Override date validation, WOLFSSL_USE_CHECK_TIME"); if (wc_ValidateDateWithTime(afterDate, (byte)ctx->current_cert->notAfter.type, ASN_AFTER, - checkTime) < 1) { + checkTime, ctx->current_cert->notAfter.length) < 1) { ret = ASN_AFTER_DATE_E; } else if (wc_ValidateDateWithTime(beforeDate, (byte)ctx->current_cert->notBefore.type, ASN_BEFORE, - checkTime) < 1) { + checkTime, ctx->current_cert->notBefore.length) < 1) { ret = ASN_BEFORE_DATE_E; } } + #if defined(OPENSSL_ALL) + else { + WOLFSSL_MSG("Using system time for date validation"); + /* use system time for date validation */ + if (wc_ValidateDate(afterDate, + (byte)ctx->current_cert->notAfter.type, ASN_AFTER, + ctx->current_cert->notAfter.length) < 1) { + ret = ASN_AFTER_DATE_E; + } + else if (wc_ValidateDate(beforeDate, + (byte)ctx->current_cert->notBefore.type, ASN_BEFORE, + ctx->current_cert->notBefore.length) < 1) { + ret = ASN_BEFORE_DATE_E; + } + } + #endif } #else if (XVALIDATE_DATE(afterDate, - (byte)ctx->current_cert->notAfter.type, ASN_AFTER) < 1) { + (byte)ctx->current_cert->notAfter.type, ASN_AFTER, + ctx->current_cert->notAfter.length) < 1) { ret = ASN_AFTER_DATE_E; } else if (XVALIDATE_DATE(beforeDate, - (byte)ctx->current_cert->notBefore.type, ASN_BEFORE) < 1) { + (byte)ctx->current_cert->notBefore.type, ASN_BEFORE, + ctx->current_cert->notBefore.length) < 1) { ret = ASN_BEFORE_DATE_E; } #endif /* USE_WOLF_VALIDDATE */ @@ -422,7 +440,26 @@ static int X509StoreVerifyCert(WOLFSSL_X509_STORE_CTX* ctx) WOLFSSL_SUCCESS : ret; #endif } - +#if !defined(NO_ASN_TIME) && defined(OPENSSL_ALL) + if (ret != WC_NO_ERR_TRACE(ASN_BEFORE_DATE_E) && + ret != WC_NO_ERR_TRACE(ASN_AFTER_DATE_E)) { + /* With OpenSSL, we need to check the certificate's date + * after certificate manager verification, + * as 
it skips date validation when other errors are present. + */ + ret = X509StoreVerifyCertDate(ctx, ret); + SetupStoreCtxError(ctx, ret); + ret = ret == WOLFSSL_SUCCESS ? 1 : 0; + if (ctx->store->verify_cb) { + if (ctx->store->verify_cb(ret, ctx) == 1) { + ret = WOLFSSL_SUCCESS; + } + else { + ret = -1; + } + } + } +#endif return ret; } diff --git a/tests/api.c b/tests/api.c index e8c59d97a..4058a8803 100644 --- a/tests/api.c +++ b/tests/api.c @@ -52,7 +52,7 @@ #include -#ifdef __linux__ +#if defined(__linux__) || defined(__FreeBSD__) #include #include #endif @@ -240,6 +240,13 @@ #include #include #include +#include +#include +#include +#include +#include +#include +#include #include #if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) && \ @@ -249,12 +256,6 @@ #define HAVE_SSL_MEMIO_TESTS_DEPENDENCIES #endif -#if !defined(NO_RSA) && !defined(NO_SHA) && !defined(NO_FILESYSTEM) && \ - !defined(NO_CERTS) && \ - (!defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH)) - #define HAVE_CERT_CHAIN_VALIDATION -#endif - #if defined(WOLFSSL_STATIC_MEMORY) && !defined(WOLFCRYPT_ONLY) #if (defined(HAVE_ECC) && !defined(ALT_ECC_SIZE)) || defined(SESSION_CERTS) #ifdef OPENSSL_EXTRA @@ -2783,2053 +2784,6 @@ static int test_wolfSSL_CTX_load_system_CA_certs(void) return res; } -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) -static int test_cm_load_ca_buffer(const byte* cert_buf, size_t cert_sz, - int file_type) -{ - int ret; - WOLFSSL_CERT_MANAGER* cm; - - cm = wolfSSL_CertManagerNew(); - if (cm == NULL) { - fprintf(stderr, "test_cm_load_ca failed\n"); - return -1; - } - - ret = wolfSSL_CertManagerLoadCABuffer(cm, cert_buf, (sword32)cert_sz, file_type); - - wolfSSL_CertManagerFree(cm); - - return ret; -} - -static int test_cm_load_ca_file(const char* ca_cert_file) -{ - int ret = 0; - byte* cert_buf = NULL; - size_t cert_sz = 0; -#if defined(WOLFSSL_PEM_TO_DER) - DerBuffer* pDer = NULL; -#endif - - ret = load_file(ca_cert_file, 
&cert_buf, &cert_sz); - if (ret == 0) { - /* normal test */ - ret = test_cm_load_ca_buffer(cert_buf, cert_sz, CERT_FILETYPE); - - if (ret == WOLFSSL_SUCCESS) { - /* test including null terminator in length */ - byte* tmp = (byte*)realloc(cert_buf, cert_sz+1); - if (tmp == NULL) { - ret = MEMORY_E; - } - else { - cert_buf = tmp; - cert_buf[cert_sz] = '\0'; - ret = test_cm_load_ca_buffer(cert_buf, cert_sz+1, - CERT_FILETYPE); - } - - } - - #if defined(WOLFSSL_PEM_TO_DER) - if (ret == WOLFSSL_SUCCESS) { - /* test loading DER */ - ret = wc_PemToDer(cert_buf, (sword32)cert_sz, CA_TYPE, &pDer, - NULL, NULL, NULL); - if (ret == 0 && pDer != NULL) { - ret = test_cm_load_ca_buffer(pDer->buffer, pDer->length, - WOLFSSL_FILETYPE_ASN1); - - wc_FreeDer(&pDer); - } - } - #endif - - } - free(cert_buf); - - return ret; -} - -static int test_cm_load_ca_buffer_ex(const byte* cert_buf, size_t cert_sz, - int file_type, word32 flags) -{ - int ret; - WOLFSSL_CERT_MANAGER* cm; - - cm = wolfSSL_CertManagerNew(); - if (cm == NULL) { - fprintf(stderr, "test_cm_load_ca failed\n"); - return -1; - } - - ret = wolfSSL_CertManagerLoadCABuffer_ex(cm, cert_buf, (sword32)cert_sz, file_type, - 0, flags); - - wolfSSL_CertManagerFree(cm); - - return ret; -} - -static int test_cm_load_ca_file_ex(const char* ca_cert_file, word32 flags) -{ - int ret = 0; - byte* cert_buf = NULL; - size_t cert_sz = 0; -#if defined(WOLFSSL_PEM_TO_DER) - DerBuffer* pDer = NULL; -#endif - - ret = load_file(ca_cert_file, &cert_buf, &cert_sz); - if (ret == 0) { - /* normal test */ - ret = test_cm_load_ca_buffer_ex(cert_buf, cert_sz, - CERT_FILETYPE, flags); - - if (ret == WOLFSSL_SUCCESS) { - /* test including null terminator in length */ - byte* tmp = (byte*)realloc(cert_buf, cert_sz+1); - if (tmp == NULL) { - ret = MEMORY_E; - } - else { - cert_buf = tmp; - cert_buf[cert_sz] = '\0'; - ret = test_cm_load_ca_buffer_ex(cert_buf, cert_sz+1, - CERT_FILETYPE, flags); - } - - } - - #if defined(WOLFSSL_PEM_TO_DER) - if (ret == 
WOLFSSL_SUCCESS) { - /* test loading DER */ - ret = wc_PemToDer(cert_buf, (sword32)cert_sz, CA_TYPE, &pDer, - NULL, NULL, NULL); - if (ret == 0 && pDer != NULL) { - ret = test_cm_load_ca_buffer_ex(pDer->buffer, pDer->length, - WOLFSSL_FILETYPE_ASN1, flags); - - wc_FreeDer(&pDer); - } - } - #endif - - } - free(cert_buf); - - return ret; -} - -#endif /* !NO_FILESYSTEM && !NO_CERTS */ - -static int test_wolfSSL_CertManagerAPI(void) -{ - EXPECT_DECLS; -#ifndef NO_CERTS - WOLFSSL_CERT_MANAGER* cm = NULL; - unsigned char c = 0; - - ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); - - wolfSSL_CertManagerFree(NULL); - ExpectIntEQ(wolfSSL_CertManager_up_ref(NULL), 0); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); -#ifdef WOLFSSL_TRUST_PEER_CERT - ExpectIntEQ(wolfSSL_CertManagerUnload_trust_peers(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); -#endif - - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer_ex(NULL, &c, 1, - WOLFSSL_FILETYPE_ASN1, 0, 0), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - -#if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, NULL, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, NULL, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, &c, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, NULL, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, &c, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, NULL, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, &c, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, &c, 1, -1), - 
WC_NO_ERR_TRACE(WOLFSSL_BAD_FILETYPE)); -#endif - -#if !defined(NO_FILESYSTEM) - { - #ifdef WOLFSSL_PEM_TO_DER - const char* ca_cert = "./certs/ca-cert.pem"; - #if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) - const char* ca_cert_der = "./certs/ca-cert.der"; - #endif - #else - const char* ca_cert = "./certs/ca-cert.der"; - #endif - const char* ca_path = "./certs"; - - #if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) - ExpectIntEQ(wolfSSL_CertManagerVerify(NULL, NULL, -1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, NULL, WOLFSSL_FILETYPE_ASN1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerify(NULL, ca_cert, - WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, ca_cert, -1), - WC_NO_ERR_TRACE(WOLFSSL_BAD_FILETYPE)); -#ifdef WOLFSSL_PEM_TO_DER - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, ca_cert_der, - WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(ASN_NO_PEM_HEADER)); -#endif - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, "no-file", - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(WOLFSSL_BAD_FILE)); - #endif - - ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, NULL, NULL), - WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, ca_cert, NULL), - WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, NULL, ca_path), - WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, ca_cert, ca_path), - WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - } -#endif - -#ifdef OPENSSL_COMPATIBLE_DEFAULTS - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 0), 1); -#elif !defined(HAVE_CRL) - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 0), WC_NO_ERR_TRACE(NOT_COMPILED_IN)); -#endif - - ExpectIntEQ(wolfSSL_CertManagerDisableCRL(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerDisableCRL(cm), 1); -#ifdef HAVE_CRL - /* Test APIs when CRL is disabled. 
*/ -#ifdef HAVE_CRL_IO - ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(cm, NULL), 1); -#endif - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, - sizeof_server_cert_der_2048), 1); - ExpectIntEQ(wolfSSL_CertManagerFreeCRL(cm), 1); -#endif - - /* OCSP */ - ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(NULL, 0), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSP(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); -#if !defined(HAVE_CERTIFICATE_STATUS_REQUEST) && \ - !defined(HAVE_CERTIFICATE_STATUS_REQUEST_V2) - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(cm), WC_NO_ERR_TRACE(NOT_COMPILED_IN)); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(cm), WC_NO_ERR_TRACE(NOT_COMPILED_IN)); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(cm), WC_NO_ERR_TRACE(NOT_COMPILED_IN)); -#endif - -#ifdef HAVE_OCSP - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, NULL, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, NULL, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, &c, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, NULL, 1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, &c, 1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, NULL, 1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, &c, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - - ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(NULL, NULL, 0, - NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - 
ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, NULL, 1, - NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(NULL, &c, 1, - NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - - ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(NULL, NULL), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(NULL, ""), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, NULL), 1); - - ExpectIntEQ(wolfSSL_CertManagerSetOCSP_Cb(NULL, NULL, NULL, NULL), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerSetOCSP_Cb(cm, NULL, NULL, NULL), 1); - - ExpectIntEQ(wolfSSL_CertManagerDisableOCSP(cm), 1); - /* Test APIs when OCSP is disabled. */ - ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, &c, 1, - NULL, NULL, NULL, NULL), 1); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, &c, 1), 1); - -#endif - - ExpectIntEQ(wolfSSL_CertManager_up_ref(cm), 1); - if (EXPECT_SUCCESS()) { - wolfSSL_CertManagerFree(cm); - } - wolfSSL_CertManagerFree(cm); - cm = NULL; - - ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); - -#ifdef HAVE_OCSP - ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(cm, WOLFSSL_OCSP_URL_OVERRIDE | - WOLFSSL_OCSP_CHECKALL), 1); -#if defined(HAVE_CERTIFICATE_STATUS_REQUEST) || \ - defined(HAVE_CERTIFICATE_STATUS_REQUEST_V2) - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(cm), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(cm), 1); - ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(cm), 1); -#endif - - ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, ""), 1); - ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, ""), 1); -#endif - -#ifdef WOLFSSL_TRUST_PEER_CERT - ExpectIntEQ(wolfSSL_CertManagerUnload_trust_peers(cm), 1); 
-#endif - wolfSSL_CertManagerFree(cm); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerLoadCABuffer(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) -#if defined(WOLFSSL_PEM_TO_DER) - const char* ca_cert = "./certs/ca-cert.pem"; - const char* ca_expired_cert = "./certs/test/expired/expired-ca.pem"; -#else - const char* ca_cert = "./certs/ca-cert.der"; - const char* ca_expired_cert = "./certs/test/expired/expired-ca.der"; -#endif - int ret; - - ExpectIntLE(ret = test_cm_load_ca_file(ca_cert), 1); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); -#elif defined(NO_RSA) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); -#else - ExpectIntEQ(ret, WOLFSSL_SUCCESS); -#endif - - ExpectIntLE(ret = test_cm_load_ca_file(ca_expired_cert), 1); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); -#elif defined(NO_RSA) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); -#elif !(WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS & WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY) && \ - !defined(NO_ASN_TIME) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_AFTER_DATE_E)); -#else - ExpectIntEQ(ret, WOLFSSL_SUCCESS); -#endif -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerLoadCABuffer_ex(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) -#if defined(WOLFSSL_PEM_TO_DER) - const char* ca_cert = "./certs/ca-cert.pem"; - const char* ca_expired_cert = "./certs/test/expired/expired-ca.pem"; -#else - const char* ca_cert = "./certs/ca-cert.der"; - const char* ca_expired_cert = "./certs/test/expired/expired-ca.der"; -#endif - int ret; - - ExpectIntLE(ret = test_cm_load_ca_file_ex(ca_cert, WOLFSSL_LOAD_FLAG_NONE), - 1); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); -#elif 
defined(NO_RSA) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); -#else - ExpectIntEQ(ret, WOLFSSL_SUCCESS); -#endif - - ExpectIntLE(ret = test_cm_load_ca_file_ex(ca_expired_cert, - WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY), 1); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); -#elif defined(NO_RSA) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); -#elif !(WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS & WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY) && \ - !defined(NO_ASN_TIME) && defined(WOLFSSL_TRUST_PEER_CERT) && \ - defined(OPENSSL_COMPATIBLE_DEFAULTS) - ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_AFTER_DATE_E)); -#else - ExpectIntEQ(ret, WOLFSSL_SUCCESS); -#endif - -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerLoadCABufferType(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) && \ - !defined(NO_RSA) && !defined(NO_SHA256) && \ - !defined(WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION) -#if defined(WOLFSSL_PEM_TO_DER) - const char* ca_cert = "./certs/ca-cert.pem"; - const char* int1_cert = "./certs/intermediate/ca-int-cert.pem"; - const char* int2_cert = "./certs/intermediate/ca-int2-cert.pem"; - const char* client_cert = "./certs/intermediate/client-int-cert.pem"; -#else - const char* ca_cert = "./certs/ca-cert.der"; - const char* int1_cert = "./certs/intermediate/ca-int-cert.der"; - const char* int2_cert = "./certs/intermediate/ca-int2-cert.der"; - const char* client_cert = "./certs/intermediate/client-int-cert.der"; -#endif - byte* ca_cert_buf = NULL; - byte* int1_cert_buf = NULL; - byte* int2_cert_buf = NULL; - byte* client_cert_buf = NULL; - size_t ca_cert_sz = 0; - size_t int1_cert_sz = 0; - size_t int2_cert_sz = 0; - size_t client_cert_sz = 0; - WOLFSSL_CERT_MANAGER* cm = NULL; - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntEQ(load_file(ca_cert, &ca_cert_buf, &ca_cert_sz), 0); - ExpectIntEQ(load_file(int1_cert, &int1_cert_buf, 
&int1_cert_sz), 0); - ExpectIntEQ(load_file(int2_cert, &int2_cert_buf, &int2_cert_sz), 0); - ExpectIntEQ(load_file(client_cert, &client_cert_buf, &client_cert_sz), 0); - - ExpectIntNE(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, - (sword32)ca_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, 0), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, - (sword32)ca_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, 5), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, - (sword32)ca_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_CA), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int1_cert_buf, - (sword32)int1_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int2_cert_buf, - (sword32)int2_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, client_cert_buf, - (sword32)client_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - - /* Intermediate certs have been unloaded, but CA cert is still - loaded. Expect first level intermediate to verify, rest to fail. 
*/ - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int1_cert_buf, - (sword32)int1_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_TEMP_CA), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int2_cert_buf, - (sword32)int2_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_CHAIN_CA), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, client_cert_buf, - (sword32)client_cert_sz, CERT_FILETYPE, 0, - WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_INTER), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_CHAIN_CA), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - 
client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_TEMP_CA), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_CA), - WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, - int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, - int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, - client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); - - if (cm) - wolfSSL_CertManagerFree(cm); - if (ca_cert_buf) - free(ca_cert_buf); - if (int1_cert_buf) - free(int1_cert_buf); - if (int2_cert_buf) - free(int2_cert_buf); - if (client_cert_buf) - free(client_cert_buf); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerGetCerts(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ - !defined(NO_FILESYSTEM) && !defined(NO_RSA) && \ - defined(WOLFSSL_SIGNER_DER_CERT) - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_STACK* sk = NULL; - X509* x509 = NULL; - X509* cert1 = NULL; - FILE* file1 = NULL; -#ifdef DEBUG_WOLFSSL_VERBOSE - WOLFSSL_BIO* bio = NULL; -#endif - int i = 0; - int ret = 0; - const byte* der = NULL; - int derSz = 0; - - ExpectNotNull(file1 = fopen("./certs/ca-cert.pem", "rb")); - - ExpectNotNull(cert1 = wolfSSL_PEM_read_X509(file1, NULL, NULL, NULL)); - if (file1 != NULL) { - fclose(file1); - } - - ExpectNull(sk = wolfSSL_CertManagerGetCerts(NULL)); - ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); - ExpectNull(sk = 
wolfSSL_CertManagerGetCerts(cm)); - - ExpectNotNull(der = wolfSSL_X509_get_der(cert1, &derSz)); -#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) - /* Check that ASN_SELF_SIGNED_E is returned for a self-signed cert for QT - * and full OpenSSL compatibility */ - ExpectIntEQ(ret = wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_SELF_SIGNED_E)); -#else - ExpectIntEQ(ret = wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); -#endif - - ExpectIntEQ(WOLFSSL_SUCCESS, wolfSSL_CertManagerLoadCA(cm, - "./certs/ca-cert.pem", NULL)); - - ExpectNotNull(sk = wolfSSL_CertManagerGetCerts(cm)); - - for (i = 0; EXPECT_SUCCESS() && i < sk_X509_num(sk); i++) { - ExpectNotNull(x509 = sk_X509_value(sk, i)); - ExpectIntEQ(0, wolfSSL_X509_cmp(x509, cert1)); - -#ifdef DEBUG_WOLFSSL_VERBOSE - bio = BIO_new(wolfSSL_BIO_s_file()); - if (bio != NULL) { - BIO_set_fp(bio, stderr, BIO_NOCLOSE); - X509_print(bio, x509); - BIO_free(bio); - } -#endif /* DEBUG_WOLFSSL_VERBOSE */ - } - wolfSSL_X509_free(cert1); - sk_X509_pop_free(sk, NULL); - wolfSSL_CertManagerFree(cm); -#endif /* defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ - !defined(NO_FILESYSTEM) && !defined(NO_RSA) && \ - defined(WOLFSSL_SIGNER_DER_CERT) */ - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerSetVerify(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - (!defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH)) - WOLFSSL_CERT_MANAGER* cm = NULL; - int tmp = myVerifyAction; -#ifdef WOLFSSL_PEM_TO_DER - const char* ca_cert = "./certs/ca-cert.pem"; - const char* expiredCert = "./certs/test/expired/expired-cert.pem"; -#else - const char* ca_cert = "./certs/ca-cert.der"; - const char* expiredCert = "./certs/test/expired/expired-cert.der"; -#endif - - wolfSSL_CertManagerSetVerify(NULL, NULL); - 
wolfSSL_CertManagerSetVerify(NULL, myVerify); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - - wolfSSL_CertManagerSetVerify(cm, myVerify); - -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL), -1); -#else - ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL), - WOLFSSL_SUCCESS); -#endif - /* Use the test CB that always accepts certs */ - myVerifyAction = VERIFY_OVERRIDE_ERROR; - - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, expiredCert, - CERT_FILETYPE), WOLFSSL_SUCCESS); - -#ifdef WOLFSSL_ALWAYS_VERIFY_CB - { - const char* verifyCert = "./certs/server-cert.der"; - /* Use the test CB that always fails certs */ - myVerifyAction = VERIFY_FORCE_FAIL; - - ExpectIntEQ(wolfSSL_CertManagerVerify(cm, verifyCert, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(VERIFY_CERT_ERROR)); - } -#endif - - wolfSSL_CertManagerFree(cm); - myVerifyAction = tmp; -#endif - - return EXPECT_RESULT(); -} - - -static int test_wolfSSL_CertManagerNameConstraint(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ - !defined(NO_SHA256) - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_EVP_PKEY *priv = NULL; - WOLFSSL_X509_NAME* name = NULL; - const char* ca_cert = "./certs/test/cert-ext-nc.der"; - const char* server_cert = "./certs/test/server-goodcn.pem"; - int i = 0; - static const byte extNameConsOid[] = {85, 29, 30}; - - RsaKey key; - WC_RNG rng; - byte *der = NULL; - int derSz = 0; - word32 idx = 0; - byte *pt; - WOLFSSL_X509 *x509 = NULL; - WOLFSSL_X509 *ca = NULL; - - wc_InitRng(&rng); - - /* load in CA private key for signing */ - ExpectIntEQ(wc_InitRsaKey_ex(&key, HEAP_HINT, testDevId), 0); - ExpectIntEQ(wc_RsaPrivateKeyDecode(server_key_der_2048, &idx, &key, - sizeof_server_key_der_2048), 0); - - /* get ca 
certificate then alter it */ - ExpectNotNull(der = - (byte*)XMALLOC(FOURK_BUF, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull(pt = (byte*)wolfSSL_X509_get_tbs(x509, &derSz)); - if (EXPECT_SUCCESS() && (der != NULL)) { - XMEMCPY(der, pt, (size_t)derSz); - - /* find the name constraint extension and alter it */ - pt = der; - for (i = 0; i < derSz - 3; i++) { - if (XMEMCMP(pt, extNameConsOid, 3) == 0) { - pt += 3; - break; - } - pt++; - } - ExpectIntNE(i, derSz - 3); /* did not find OID if this case is hit */ - - /* go to the length value and set it to 0 */ - while (i < derSz && *pt != 0x81) { - pt++; - i++; - } - ExpectIntNE(i, derSz); /* did not place to alter */ - pt++; - *pt = 0x00; - } - - /* resign the altered certificate */ - ExpectIntGT((derSz = wc_SignCert(derSz, CTC_SHA256wRSA, der, - FOURK_BUF, &key, NULL, &rng)), 0); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_PARSE_E)); - wolfSSL_CertManagerFree(cm); - - XFREE(der, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - wolfSSL_X509_free(x509); - wc_FreeRsaKey(&key); - wc_FreeRng(&rng); - - /* add email alt name to satisfy constraint */ - pt = (byte*)server_key_der_2048; - ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - (const unsigned char**)&pt, sizeof_server_key_der_2048)); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); - DEBUG_WRITE_DER(der, derSz, "ca.der"); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - /* Good cert test with proper alt email name */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - 
ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, - (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); - - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - - /* Cert with bad alt name list */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, - (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - - wolfSSL_X509_add_altname(x509, "wolfssl@info.com", ASN_RFC822_TYPE); - 
wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); - - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - - wolfSSL_CertManagerFree(cm); - wolfSSL_X509_free(x509); - wolfSSL_X509_free(ca); - wolfSSL_EVP_PKEY_free(priv); -#endif - - return EXPECT_RESULT(); -} - - -static int test_wolfSSL_CertManagerNameConstraint2(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) - const char* ca_cert = "./certs/test/cert-ext-ndir.der"; - const char* ca_cert2 = "./certs/test/cert-ext-ndir-exc.der"; - const char* server_cert = "./certs/server-cert.pem"; - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_X509 *x509 = NULL; - WOLFSSL_X509 *ca = NULL; - - const unsigned char *der = NULL; - const unsigned char *pt; - WOLFSSL_EVP_PKEY *priv = NULL; - WOLFSSL_X509_NAME* name = NULL; - int derSz = 0; - - /* C=US*/ - char altName[] = { - 0x30, 0x0D, 0x31, 0x0B, 0x30, 0x09, - 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53 - }; - - /* C=ID */ - char altNameFail[] = { - 0x30, 0x0D, 0x31, 0x0B, 0x30, 0x09, - 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x49, 0x44 - }; - - /* C=US ST=California*/ - char altNameExc[] = { - 0x30, 0x22, - 0x31, 0x0B, - 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53, - 0x31, 0x13, - 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x0A, - 0x43, 0x61, 0x6c, 0x69, 0x66, 0x6f, 0x72, 0x6e, 0x69, 0x61 - }; - /* load in CA private key for signing */ - pt = ca_key_der_2048; - ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, &pt, - sizeof_ca_key_der_2048)); - - 
ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull((der = wolfSSL_X509_get_der(ca, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - /* Test no name case. */ - ExpectIntEQ(wolfSSL_X509_add_altname_ex(x509, NULL, 0, ASN_DIR_TYPE), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_add_altname(x509, "", ASN_DIR_TYPE), - WOLFSSL_SUCCESS); - /* IP not supported. 
*/ - ExpectIntEQ(wolfSSL_X509_add_altname(x509, "127.0.0.1", ASN_IP_TYPE), - WOLFSSL_FAILURE); - - /* add in matching DIR alt name and resign */ - wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check verify fail */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - - /* add in miss matching DIR alt name and resign */ - wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), - ASN_DIR_TYPE); - -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); -#ifndef WOLFSSL_NO_ASN_STRICT - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); -#else - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); -#endif - - /* check that it still fails if one bad altname and one good altname is in - * the certificate */ - wolfSSL_X509_free(x509); - x509 = NULL; - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); - 
wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), - ASN_DIR_TYPE); - -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); -#ifndef WOLFSSL_NO_ASN_STRICT - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); -#else - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); -#endif - - /* check it fails with switching position of bad altname */ - wolfSSL_X509_free(x509); - x509 = NULL; - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), - ASN_DIR_TYPE); - wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); - -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); -#ifndef WOLFSSL_NO_ASN_STRICT - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); -#else - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); -#endif - wolfSSL_CertManagerFree(cm); - - wolfSSL_X509_free(x509); - x509 = NULL; - wolfSSL_X509_free(ca); - ca = NULL; - - /* now test with excluded name constraint */ - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert2, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull((der = wolfSSL_X509_get_der(ca, &derSz))); - 
ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - wolfSSL_X509_add_altname_ex(x509, altNameExc, sizeof(altNameExc), - ASN_DIR_TYPE); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - -#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) - wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); -#else - wolfSSL_X509_sign(x509, priv, EVP_sha256()); -#endif - ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); -#ifndef WOLFSSL_NO_ASN_STRICT - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); -#else - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); -#endif - wolfSSL_CertManagerFree(cm); - wolfSSL_X509_free(x509); - wolfSSL_X509_free(ca); - wolfSSL_EVP_PKEY_free(priv); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerNameConstraint3(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ - !defined(NO_SHA256) - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_EVP_PKEY *priv = NULL; - WOLFSSL_X509_NAME* name = NULL; - const char* ca_cert = "./certs/test/cert-ext-mnc.der"; - const char* server_cert = "./certs/test/server-goodcn.pem"; - - byte *der = NULL; - int derSz = 0; - byte *pt; - WOLFSSL_X509 *x509 = NULL; - WOLFSSL_X509 *ca = NULL; - - pt = (byte*)server_key_der_2048; - ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - (const unsigned char**)&pt, sizeof_server_key_der_2048)); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = 
wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); - DEBUG_WRITE_DER(der, derSz, "ca.der"); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - /* check satisfying .wolfssl.com constraint passes */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, - (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); - - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-1st-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check satisfying .random.com constraint passes */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, 
-1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, - (byte*)"support@info.example.com", 24, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "wolfssl@info.example.com", ASN_RFC822_TYPE); - - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-2nd-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check fail case when neither constraint is matched */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, - (byte*)"support@info.com", 16, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - - wolfSSL_X509_add_altname(x509, "wolfssl@info.com", ASN_RFC822_TYPE); - - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), 
WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - - wolfSSL_CertManagerFree(cm); - wolfSSL_X509_free(x509); - wolfSSL_X509_free(ca); - wolfSSL_EVP_PKEY_free(priv); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerNameConstraint4(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ - !defined(NO_SHA256) - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_EVP_PKEY *priv = NULL; - WOLFSSL_X509_NAME* name = NULL; - const char* ca_cert = "./certs/test/cert-ext-ncdns.der"; - const char* server_cert = "./certs/test/server-goodcn.pem"; - - byte *der = NULL; - int derSz; - byte *pt; - WOLFSSL_X509 *x509 = NULL; - WOLFSSL_X509 *ca = NULL; - - pt = (byte*)server_key_der_2048; - ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - (const unsigned char**)&pt, sizeof_server_key_der_2048)); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); - DEBUG_WRITE_DER(der, derSz, "ca.der"); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - /* check satisfying wolfssl.com constraint passes */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - 
ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-1st-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check satisfying example.com constraint passes */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"example.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "www.example.com", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-2nd-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check satisfying wolfssl.com constraint passes with list of DNS's */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = 
X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "www.info.wolfssl.com", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "extra.wolfssl.com", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-multiple-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check fail when one DNS in the list is bad */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "www.nomatch.com", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "www.info.wolfssl.com", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-multiple-constraint-cert.pem"); - - ExpectNotNull((der = 
(byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* check fail case when neither constraint is matched */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"common", 6, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - - wolfSSL_X509_add_altname(x509, "www.random.com", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - - wolfSSL_CertManagerFree(cm); - wolfSSL_X509_free(x509); - wolfSSL_X509_free(ca); - wolfSSL_EVP_PKEY_free(priv); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerNameConstraint5(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ - defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ - !defined(NO_SHA256) - WOLFSSL_CERT_MANAGER* cm = NULL; - WOLFSSL_EVP_PKEY *priv = NULL; - WOLFSSL_X509_NAME* name = NULL; - const char* ca_cert = "./certs/test/cert-ext-ncmixed.der"; - const char* server_cert = "./certs/test/server-goodcn.pem"; - - byte 
*der = NULL; - int derSz; - byte *pt; - WOLFSSL_X509 *x509 = NULL; - WOLFSSL_X509 *ca = NULL; - - pt = (byte*)server_key_der_2048; - ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - (const unsigned char**)&pt, sizeof_server_key_der_2048)); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, - WOLFSSL_FILETYPE_ASN1)); - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); - DEBUG_WRITE_DER(der, derSz, "ca.der"); - - ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - - /* check satisfying wolfssl.com constraint passes */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"example", 7, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "good.example", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "facts@into.wolfssl.com", ASN_RFC822_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* fail with DNS check because of common name */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - 
ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, - (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "facts@wolfssl.com", ASN_RFC822_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-cn-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* fail on permitted DNS name constraint */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "www.example", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "www.wolfssl", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "info@wolfssl.com", ASN_RFC822_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-1st-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - 
WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* fail on permitted email name constraint */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - name = NULL; - - wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); - wolfSSL_X509_add_altname(x509, "info@wolfssl.com", ASN_RFC822_TYPE); - wolfSSL_X509_add_altname(x509, "info@example.com", ASN_RFC822_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "bad-2nd-constraint-cert.pem"); - - ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); - wolfSSL_X509_free(x509); - x509 = NULL; - - /* success with empty email name */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, - WOLFSSL_FILETYPE_PEM)); - ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); - ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); - name = NULL; - - ExpectNotNull(name = X509_NAME_new()); - ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, - (byte*)"US", 2, -1, 0), SSL_SUCCESS); - ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); - X509_NAME_free(name); - - wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); - ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); - DEBUG_WRITE_CERT_X509(x509, "good-missing-constraint-cert.pem"); - - ExpectNotNull((der = 
(byte*)wolfSSL_X509_get_der(x509, &derSz))); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, - WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); - wolfSSL_X509_free(x509); - - wolfSSL_CertManagerFree(cm); - wolfSSL_X509_free(ca); - wolfSSL_EVP_PKEY_free(priv); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CRL_duplicate_extensions(void) -{ - EXPECT_DECLS; -#if defined(WOLFSSL_ASN_TEMPLATE) && !defined(NO_CERTS) && \ - defined(HAVE_CRL) && !defined(NO_RSA) && !defined(WOLFSSL_NO_ASN_STRICT) && \ - (defined(WC_ASN_RUNTIME_DATE_CHECK_CONTROL) || defined(NO_ASN_TIME_CHECK)) - const unsigned char crl_duplicate_akd[] = - "-----BEGIN X509 CRL-----\n" - "MIICCDCB8QIBATANBgkqhkiG9w0BAQsFADB5MQswCQYDVQQGEwJVUzETMBEGA1UE\n" - "CAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzETMBEGA1UECgwK\n" - "TXkgQ29tcGFueTETMBEGA1UEAwwKTXkgUm9vdCBDQTETMBEGA1UECwwKTXkgUm9v\n" - "dCBDQRcNMjQwOTAxMDAwMDAwWhcNMjUxMjAxMDAwMDAwWqBEMEIwHwYDVR0jBBgw\n" - "FoAU72ng99Ud5pns3G3Q9+K5XGRxgzUwHwYDVR0jBBgwFoAU72ng99Ud5pns3G3Q\n" - "9+K5XGRxgzUwDQYJKoZIhvcNAQELBQADggEBAIFVw4jrS4taSXR/9gPzqGrqFeHr\n" - "IXCnFtHJTLxqa8vUOAqSwqysvNpepVKioMVoGrLjFMjANjWQqTEiMROAnLfJ/+L8\n" - "FHZkV/mZwOKAXMhIC9MrJzifxBICwmvD028qnwQm09EP8z4ICZptD6wPdRTDzduc\n" - "KBuAX+zn8pNrJgyrheRKpPgno9KsbCzK4D/RIt1sTK2M3vVOtY+vpsN70QYUXvQ4\n" - "r2RZac3omlT43x5lddPxIlcouQpwWcVvr/K+Va770MRrjn88PBrJmvsEw/QYVBXp\n" - "Gxv2b78HFDacba80sMIm8ltRdqUCa5qIc6OATsz7izCQXEbkTEeESrcK1MA=\n" - "-----END X509 CRL-----\n"; - - WOLFSSL_CERT_MANAGER* cm = NULL; - int ret; - - (void)wc_AsnSetSkipDateCheck(1); - - cm = wolfSSL_CertManagerNew(); - ExpectNotNull(cm); - - /* Test loading CRL with duplicate extensions */ - WOLFSSL_MSG("Testing CRL with duplicate Authority Key Identifier extensions"); - ret = wolfSSL_CertManagerLoadCRLBuffer(cm, crl_duplicate_akd, - sizeof(crl_duplicate_akd), - WOLFSSL_FILETYPE_PEM); - ExpectIntEQ(ret, ASN_PARSE_E); - - wolfSSL_CertManagerFree(cm); - - (void)wc_AsnSetSkipDateCheck(0); -#endif - return 
EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerCRL(void) -{ - EXPECT_DECLS; -#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && defined(HAVE_CRL) && \ - !defined(NO_RSA) - const char* ca_cert = "./certs/ca-cert.pem"; - const char* crl1 = "./certs/crl/crl.pem"; - const char* crl2 = "./certs/crl/crl2.pem"; -#ifdef WC_RSA_PSS - const char* crl_rsapss = "./certs/crl/crl_rsapss.pem"; - const char* ca_rsapss = "./certs/rsapss/ca-rsapss.pem"; -#endif - /* ./certs/crl/crl.der */ - const unsigned char crl_buff[] = { - 0x30, 0x82, 0x02, 0x04, 0x30, 0x81, 0xED, 0x02, - 0x01, 0x01, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, - 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, - 0x00, 0x30, 0x81, 0x94, 0x31, 0x0B, 0x30, 0x09, - 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, - 0x53, 0x31, 0x10, 0x30, 0x0E, 0x06, 0x03, 0x55, - 0x04, 0x08, 0x0C, 0x07, 0x4D, 0x6F, 0x6E, 0x74, - 0x61, 0x6E, 0x61, 0x31, 0x10, 0x30, 0x0E, 0x06, - 0x03, 0x55, 0x04, 0x07, 0x0C, 0x07, 0x42, 0x6F, - 0x7A, 0x65, 0x6D, 0x61, 0x6E, 0x31, 0x11, 0x30, - 0x0F, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x08, - 0x53, 0x61, 0x77, 0x74, 0x6F, 0x6F, 0x74, 0x68, - 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, - 0x0B, 0x0C, 0x0A, 0x43, 0x6F, 0x6E, 0x73, 0x75, - 0x6C, 0x74, 0x69, 0x6E, 0x67, 0x31, 0x18, 0x30, - 0x16, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x0F, - 0x77, 0x77, 0x77, 0x2E, 0x77, 0x6F, 0x6C, 0x66, - 0x73, 0x73, 0x6C, 0x2E, 0x63, 0x6F, 0x6D, 0x31, - 0x1F, 0x30, 0x1D, 0x06, 0x09, 0x2A, 0x86, 0x48, - 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x10, - 0x69, 0x6E, 0x66, 0x6F, 0x40, 0x77, 0x6F, 0x6C, - 0x66, 0x73, 0x73, 0x6C, 0x2E, 0x63, 0x6F, 0x6D, - 0x17, 0x0D, 0x32, 0x34, 0x30, 0x31, 0x30, 0x39, - 0x30, 0x30, 0x33, 0x34, 0x33, 0x30, 0x5A, 0x17, - 0x0D, 0x32, 0x36, 0x31, 0x30, 0x30, 0x35, 0x30, - 0x30, 0x33, 0x34, 0x33, 0x30, 0x5A, 0x30, 0x14, - 0x30, 0x12, 0x02, 0x01, 0x02, 0x17, 0x0D, 0x32, - 0x34, 0x30, 0x31, 0x30, 0x39, 0x30, 0x30, 0x33, - 0x34, 0x33, 0x30, 0x5A, 0xA0, 0x0E, 0x30, 0x0C, - 0x30, 0x0A, 0x06, 
0x03, 0x55, 0x1D, 0x14, 0x04, - 0x03, 0x02, 0x01, 0x02, 0x30, 0x0D, 0x06, 0x09, - 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, - 0x0B, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, - 0xB3, 0x6F, 0xED, 0x72, 0xD2, 0x73, 0x6A, 0x77, - 0xBF, 0x3A, 0x55, 0xBC, 0x54, 0x18, 0x6A, 0x71, - 0xBC, 0x6A, 0xCC, 0xCD, 0x5D, 0x90, 0xF5, 0x64, - 0x8D, 0x1B, 0xF0, 0xE0, 0x48, 0x7B, 0xF2, 0x7B, - 0x06, 0x86, 0x53, 0x63, 0x9B, 0xD8, 0x24, 0x15, - 0x10, 0xB1, 0x19, 0x96, 0x9B, 0xD2, 0x75, 0xA8, - 0x25, 0xA2, 0x35, 0xA9, 0x14, 0xD6, 0xD5, 0x5E, - 0x53, 0xE3, 0x34, 0x9D, 0xF2, 0x8B, 0x07, 0x19, - 0x9B, 0x1F, 0xF1, 0x02, 0x0F, 0x04, 0x46, 0xE8, - 0xB8, 0xB6, 0xF2, 0x8D, 0xC7, 0xC0, 0x15, 0x3E, - 0x3E, 0x8E, 0x96, 0x73, 0x15, 0x1E, 0x62, 0xF6, - 0x4E, 0x2A, 0xF7, 0xAA, 0xA0, 0x91, 0x80, 0x12, - 0x7F, 0x81, 0x0C, 0x65, 0xCC, 0x38, 0xBE, 0x58, - 0x6C, 0x14, 0xA5, 0x21, 0xA1, 0x8D, 0xF7, 0x8A, - 0xB9, 0x24, 0xF4, 0x2D, 0xCA, 0xC0, 0x67, 0x43, - 0x0B, 0xC8, 0x1C, 0xB4, 0x7D, 0x12, 0x7F, 0xA2, - 0x1B, 0x19, 0x0E, 0x94, 0xCF, 0x7B, 0x9F, 0x75, - 0xA0, 0x08, 0x9A, 0x67, 0x3F, 0x87, 0x89, 0x3E, - 0xF8, 0x58, 0xA5, 0x8A, 0x1B, 0x2D, 0xDA, 0x9B, - 0xD0, 0x1B, 0x18, 0x92, 0xC3, 0xD2, 0x6A, 0xD7, - 0x1C, 0xFC, 0x45, 0x69, 0x77, 0xC3, 0x57, 0x65, - 0x75, 0x99, 0x9E, 0x47, 0x2A, 0x20, 0x25, 0xEF, - 0x90, 0xF2, 0x5F, 0x3B, 0x7D, 0x9C, 0x7D, 0x00, - 0xEA, 0x92, 0x54, 0xEB, 0x0B, 0xE7, 0x17, 0xAF, - 0x24, 0x1A, 0xF9, 0x7C, 0x83, 0x50, 0x68, 0x1D, - 0xDC, 0x5B, 0x60, 0x12, 0xA7, 0x52, 0x78, 0xD9, - 0xA9, 0xB0, 0x1F, 0x59, 0x48, 0x36, 0xC7, 0xA6, - 0x97, 0x34, 0xC7, 0x87, 0x3F, 0xAE, 0xFD, 0xA9, - 0x56, 0x5D, 0x48, 0xCC, 0x89, 0x7A, 0x79, 0x60, - 0x8F, 0x9B, 0x2B, 0x63, 0x3C, 0xB3, 0x04, 0x1D, - 0x5F, 0xF7, 0x20, 0xD2, 0xFD, 0xF2, 0x51, 0xB1, - 0x96, 0x93, 0x13, 0x5B, 0xAB, 0x74, 0x82, 0x8B - }; - - WOLFSSL_CERT_MANAGER* cm = NULL; - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(NULL, 0), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - 
ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECKALL), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECK), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, - WOLFSSL_CRL_CHECK | WOLFSSL_CRL_CHECKALL), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 16), 1); - ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECKALL), 1); - - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, NULL, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, NULL, -1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, server_cert_der_2048, -1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, NULL, 1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, server_cert_der_2048, 1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, NULL, 1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, -1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, - sizeof_server_cert_der_2048), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); - - ExpectIntEQ(wolfSSL_CertManagerSetCRL_Cb(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerSetCRL_Cb(cm, NULL), 1); -#ifdef HAVE_CRL_IO - ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(cm, NULL), 1); -#endif - -#ifndef NO_FILESYSTEM - ExpectIntEQ(wolfSSL_CertManagerLoadCRL(NULL, NULL, WOLFSSL_FILETYPE_ASN1, - 0), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRL(cm, NULL, WOLFSSL_FILETYPE_ASN1, - 0), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - /* -1 seen as !WOLFSSL_FILETYPE_PEM */ - ExpectIntEQ(wolfSSL_CertManagerLoadCRL(cm, "./certs/crl", -1, 0), 1); - - ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(NULL, NULL, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - 
ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, NULL, WOLFSSL_FILETYPE_ASN1), - WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - /* -1 seen as !WOLFSSL_FILETYPE_PEM */ - ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, "./certs/crl/crl.pem", -1), - WC_NO_ERR_TRACE(ASN_PARSE_E)); -#endif - - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, NULL, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, NULL, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, crl_buff, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, NULL, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, crl_buff, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, NULL, 1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, crl_buff, -1, - WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - - ExpectIntEQ(wolfSSL_CertManagerFreeCRL(NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - DoExpectIntEQ(wolfSSL_CertManagerFreeCRL(cm), 1); - - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCRL(cm, crl1, WOLFSSL_FILETYPE_PEM, 0)); - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCRL(cm, crl2, WOLFSSL_FILETYPE_PEM, 0)); - wolfSSL_CertManagerFreeCRL(cm); - -#ifndef WOLFSSL_CRL_ALLOW_MISSING_CDP - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCRL(cm, crl1, WOLFSSL_FILETYPE_PEM, 0)); - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); - ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, - sizeof_server_cert_der_2048), WC_NO_ERR_TRACE(CRL_MISSING)); - ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, server_cert_der_2048, - 
sizeof_server_cert_der_2048, WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(CRL_MISSING)); -#endif /* !WOLFSSL_CRL_ALLOW_MISSING_CDP */ - - ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, crl_buff, sizeof(crl_buff), - WOLFSSL_FILETYPE_ASN1), 1); - -#if !defined(NO_FILESYSTEM) && defined(WC_RSA_PSS) - /* loading should fail without the CA set */ - ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_rsapss, - WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(ASN_CRL_NO_SIGNER_E)); - - /* now successfully load the RSA-PSS crl once loading in it's CA */ - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCA(cm, ca_rsapss, NULL)); - ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_rsapss, - WOLFSSL_FILETYPE_PEM), WOLFSSL_SUCCESS); -#endif - - wolfSSL_CertManagerFree(cm); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_CertManagerCheckOCSPResponse(void) -{ - EXPECT_DECLS; -#if defined(HAVE_OCSP) && !defined(NO_RSA) && !defined(NO_SHA) -/* Need one of these for wolfSSL_OCSP_REQUEST_new. */ -#if defined(OPENSSL_ALL) || defined(WOLFSSL_NGINX) || \ - defined(WOLFSSL_HAPROXY) || defined(WOLFSSL_APACHE_HTTPD) || \ - defined(HAVE_LIGHTY) - WOLFSSL_CERT_MANAGER* cm = NULL; - /* Raw OCSP response bytes captured using the following setup: - * - Run responder with - * openssl ocsp -port 9999 -ndays 9999 - * -index certs/ocsp/index-intermediate1-ca-issued-certs.txt - * -rsigner certs/ocsp/ocsp-responder-cert.pem - * -rkey certs/ocsp/ocsp-responder-key.pem - * -CA certs/ocsp/intermediate1-ca-cert.pem - * - Run client with - * openssl ocsp -host 127.0.0.1:9999 -respout resp.out - * -issuer certs/ocsp/intermediate1-ca-cert.pem - * -cert certs/ocsp/server1-cert.pem - * -CAfile certs/ocsp/root-ca-cert.pem -noverify - * - Select the response packet in Wireshark, and export it using - * "File->Export Packet Dissection->As "C" Arrays". Select "Selected - * packets only". 
After importing into the editor, remove the initial - * ~148 bytes of header, ending with the Content-Length and the \r\n\r\n. - */ - static const byte response[] = { - 0x30, 0x82, 0x07, 0x40, /* ....0..@ */ - 0x0a, 0x01, 0x00, 0xa0, 0x82, 0x07, 0x39, 0x30, /* ......90 */ - 0x82, 0x07, 0x35, 0x06, 0x09, 0x2b, 0x06, 0x01, /* ..5..+.. */ - 0x05, 0x05, 0x07, 0x30, 0x01, 0x01, 0x04, 0x82, /* ...0.... */ - 0x07, 0x26, 0x30, 0x82, 0x07, 0x22, 0x30, 0x82, /* .&0.."0. */ - 0x01, 0x40, 0xa1, 0x81, 0xa1, 0x30, 0x81, 0x9e, /* .@...0.. */ - 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. */ - 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, /* ...US1.0 */ - 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, 0x0a, /* ...U.... */ - 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, /* Washingt */ - 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, /* on1.0... */ - 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, 0x65, 0x61, /* U....Sea */ - 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, 0x30, 0x0e, /* ttle1.0. */ - 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, 0x07, 0x77, /* ..U....w */ - 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x31, 0x14, /* olfSSL1. */ - 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x0c, /* 0...U... */ - 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x65, /* .Enginee */ - 0x72, 0x69, 0x6e, 0x67, 0x31, 0x1f, 0x30, 0x1d, /* ring1.0. */ - 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x16, 0x77, /* ..U....w */ - 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x20, 0x4f, /* olfSSL O */ - 0x43, 0x53, 0x50, 0x20, 0x52, 0x65, 0x73, 0x70, /* CSP Resp */ - 0x6f, 0x6e, 0x64, 0x65, 0x72, 0x31, 0x1f, 0x30, /* onder1.0 */ - 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, /* ...*.H.. */ - 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, 0x69, 0x6e, /* ......in */ - 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, 0x66, 0x73, /* fo@wolfs */ - 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, 0x18, 0x0f, /* sl.com.. 
*/ - 0x32, 0x30, 0x32, 0x34, 0x31, 0x32, 0x32, 0x30, /* 20241220 */ - 0x31, 0x37, 0x30, 0x37, 0x30, 0x34, 0x5a, 0x30, /* 170704Z0 */ - 0x64, 0x30, 0x62, 0x30, 0x3a, 0x30, 0x09, 0x06, /* d0b0:0.. */ - 0x05, 0x2b, 0x0e, 0x03, 0x02, 0x1a, 0x05, 0x00, /* .+...... */ - 0x04, 0x14, 0x71, 0x4d, 0x82, 0x23, 0x40, 0x59, /* ..qM.#@Y */ - 0xc0, 0x96, 0xa1, 0x37, 0x43, 0xfa, 0x31, 0xdb, /* ...7C.1. */ - 0xba, 0xb1, 0x43, 0x18, 0xda, 0x04, 0x04, 0x14, /* ..C..... */ - 0x83, 0xc6, 0x3a, 0x89, 0x2c, 0x81, 0xf4, 0x02, /* ..:.,... */ - 0xd7, 0x9d, 0x4c, 0xe2, 0x2a, 0xc0, 0x71, 0x82, /* ..L.*.q. */ - 0x64, 0x44, 0xda, 0x0e, 0x02, 0x01, 0x05, 0x80, /* dD...... */ - 0x00, 0x18, 0x0f, 0x32, 0x30, 0x32, 0x34, 0x31, /* ...20241 */ - 0x32, 0x32, 0x30, 0x31, 0x37, 0x30, 0x37, 0x30, /* 22017070 */ - 0x34, 0x5a, 0xa0, 0x11, 0x18, 0x0f, 0x32, 0x30, /* 4Z....20 */ - 0x35, 0x32, 0x30, 0x35, 0x30, 0x36, 0x31, 0x37, /* 52050617 */ - 0x30, 0x37, 0x30, 0x34, 0x5a, 0xa1, 0x23, 0x30, /* 0704Z.#0 */ - 0x21, 0x30, 0x1f, 0x06, 0x09, 0x2b, 0x06, 0x01, /* !0...+.. */ - 0x05, 0x05, 0x07, 0x30, 0x01, 0x02, 0x04, 0x12, /* ...0.... */ - 0x04, 0x10, 0x12, 0x7c, 0x27, 0xbd, 0x22, 0x28, /* ...|'."( */ - 0x5e, 0x62, 0x81, 0xed, 0x6d, 0x2c, 0x2d, 0x59, /* ^b..m,-Y */ - 0x42, 0xd7, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, /* B.0...*. */ - 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, /* H....... */ - 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, 0x6c, 0xce, /* ......l. */ - 0xa8, 0xe8, 0xfe, 0xaf, 0x33, 0xe2, 0xce, 0x4e, /* ....3..N */ - 0x63, 0x8d, 0x61, 0x16, 0x0f, 0x70, 0xb2, 0x0c, /* c.a..p.. */ - 0x9a, 0xe3, 0x01, 0xd5, 0xca, 0xe5, 0x9b, 0x70, /* .......p */ - 0x81, 0x6f, 0x94, 0x09, 0xe8, 0x88, 0x98, 0x1a, /* .o...... */ - 0x67, 0xa0, 0xc2, 0xe7, 0x8f, 0x9b, 0x5f, 0x13, /* g....._. */ - 0x17, 0x8d, 0x93, 0x8c, 0x31, 0x61, 0x7d, 0x72, /* ....1a}r */ - 0x34, 0xbd, 0x21, 0x48, 0xca, 0xb2, 0xc9, 0xae, /* 4.!H.... */ - 0x28, 0x5f, 0x97, 0x19, 0xcb, 0xdf, 0xed, 0xd4, /* (_...... 
*/ - 0x6e, 0x89, 0x30, 0x89, 0x11, 0xd1, 0x05, 0x08, /* n.0..... */ - 0x81, 0xe9, 0xa7, 0xba, 0xf7, 0x16, 0x0c, 0xbe, /* ........ */ - 0x48, 0x2e, 0xc0, 0x05, 0xac, 0x90, 0xc2, 0x35, /* H......5 */ - 0xce, 0x6c, 0x94, 0x5d, 0x2b, 0xad, 0x4f, 0x19, /* .l.]+.O. */ - 0xea, 0x7b, 0xd9, 0x4f, 0x49, 0x20, 0x8d, 0x98, /* .{.OI .. */ - 0xa9, 0xe4, 0x53, 0x6d, 0xca, 0x34, 0xdb, 0x4a, /* ..Sm.4.J */ - 0x28, 0xb3, 0x33, 0xfb, 0xfd, 0xcc, 0x4b, 0xfa, /* (.3...K. */ - 0xdb, 0x70, 0xe1, 0x96, 0xc8, 0xd4, 0xf1, 0x85, /* .p...... */ - 0x99, 0xaf, 0x06, 0xeb, 0xfd, 0x96, 0x21, 0x86, /* ......!. */ - 0x81, 0xee, 0xcf, 0xd2, 0xf4, 0x83, 0xc9, 0x1d, /* ........ */ - 0x8f, 0x42, 0xd1, 0xc1, 0xbc, 0x50, 0x0a, 0xfb, /* .B...P.. */ - 0x95, 0x39, 0x4c, 0x36, 0xa8, 0xfe, 0x2b, 0x8e, /* .9L6..+. */ - 0xc5, 0xb5, 0xe0, 0xab, 0xdb, 0xc0, 0xbf, 0x1d, /* ........ */ - 0x35, 0x4d, 0xc0, 0x52, 0xfb, 0x08, 0x04, 0x4c, /* 5M.R...L */ - 0x98, 0xf0, 0xb5, 0x5b, 0xff, 0x99, 0x74, 0xce, /* ...[..t. */ - 0xb7, 0xc9, 0xe3, 0xe5, 0x70, 0x2e, 0xd3, 0x1d, /* ....p... */ - 0x46, 0x38, 0xf9, 0x51, 0x17, 0x73, 0xd1, 0x08, /* F8.Q.s.. */ - 0x8d, 0x3d, 0x12, 0x47, 0xd0, 0x66, 0x77, 0xaf, /* .=.G.fw. */ - 0xfd, 0x4c, 0x75, 0x1f, 0xe9, 0x6c, 0xf4, 0x5a, /* .Lu..l.Z */ - 0xde, 0xec, 0x37, 0xc7, 0xc4, 0x0a, 0xbe, 0x91, /* ..7..... */ - 0xbc, 0x05, 0x08, 0x86, 0x47, 0x30, 0x2a, 0xc6, /* ....G0*. */ - 0x85, 0x4b, 0x55, 0x6c, 0xef, 0xdf, 0x2d, 0x5a, /* .KUl..-Z */ - 0xf7, 0x5b, 0xb5, 0xba, 0xed, 0x38, 0xb0, 0xcb, /* .[...8.. */ - 0xeb, 0x7e, 0x84, 0x3a, 0x69, 0x2c, 0xa0, 0x82, /* .~.:i,.. */ - 0x04, 0xc6, 0x30, 0x82, 0x04, 0xc2, 0x30, 0x82, /* ..0...0. */ - 0x04, 0xbe, 0x30, 0x82, 0x03, 0xa6, 0xa0, 0x03, /* ..0..... */ - 0x02, 0x01, 0x02, 0x02, 0x01, 0x04, 0x30, 0x0d, /* ......0. */ - 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, /* ..*.H... */ - 0x01, 0x01, 0x0b, 0x05, 0x00, 0x30, 0x81, 0x97, /* .....0.. */ - 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. 
*/ - 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, /* ...US1.0 */ - 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, 0x0a, /* ...U.... */ - 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, /* Washingt */ - 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, /* on1.0... */ - 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, 0x65, 0x61, /* U....Sea */ - 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, 0x30, 0x0e, /* ttle1.0. */ - 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, 0x07, 0x77, /* ..U....w */ - 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x31, 0x14, /* olfSSL1. */ - 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x0c, /* 0...U... */ - 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x65, /* .Enginee */ - 0x72, 0x69, 0x6e, 0x67, 0x31, 0x18, 0x30, 0x16, /* ring1.0. */ - 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x0f, 0x77, /* ..U....w */ - 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x20, 0x72, /* olfSSL r */ - 0x6f, 0x6f, 0x74, 0x20, 0x43, 0x41, 0x31, 0x1f, /* oot CA1. */ - 0x30, 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, /* 0...*.H. */ - 0xf7, 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, 0x69, /* .......i */ - 0x6e, 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, 0x66, /* nfo@wolf */ - 0x73, 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, 0x30, /* ssl.com0 */ - 0x1e, 0x17, 0x0d, 0x32, 0x34, 0x31, 0x32, 0x31, /* ...24121 */ - 0x38, 0x32, 0x31, 0x32, 0x35, 0x33, 0x31, 0x5a, /* 8212531Z */ - 0x17, 0x0d, 0x32, 0x37, 0x30, 0x39, 0x31, 0x34, /* ..270914 */ - 0x32, 0x31, 0x32, 0x35, 0x33, 0x31, 0x5a, 0x30, /* 212531Z0 */ - 0x81, 0x9e, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, /* ..1.0... */ - 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, /* U....US1 */ - 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, /* .0...U.. */ - 0x0c, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, /* ..Washin */ - 0x67, 0x74, 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, /* gton1.0. */ - 0x06, 0x03, 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, /* ..U....S */ - 0x65, 0x61, 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, /* eattle1. */ - 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, /* 0...U... 
*/ - 0x07, 0x77, 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, /* .wolfSSL */ - 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. */ - 0x0b, 0x0c, 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, /* ...Engin */ - 0x65, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x31, 0x1f, /* eering1. */ - 0x30, 0x1d, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, /* 0...U... */ - 0x16, 0x77, 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, /* .wolfSSL */ - 0x20, 0x4f, 0x43, 0x53, 0x50, 0x20, 0x52, 0x65, /* OCSP Re */ - 0x73, 0x70, 0x6f, 0x6e, 0x64, 0x65, 0x72, 0x31, /* sponder1 */ - 0x1f, 0x30, 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, /* .0...*.H */ - 0x86, 0xf7, 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, /* ........ */ - 0x69, 0x6e, 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, /* info@wol */ - 0x66, 0x73, 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, /* fssl.com */ - 0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, /* 0.."0... */ - 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, /* *.H..... */ - 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00, /* ........ */ - 0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, /* 0....... */ - 0x00, 0xb8, 0xba, 0x23, 0xb4, 0xf6, 0xc3, 0x7b, /* ...#...{ */ - 0x14, 0xc3, 0xa4, 0xf5, 0x1d, 0x61, 0xa1, 0xf5, /* .....a.. */ - 0x1e, 0x63, 0xb9, 0x85, 0x23, 0x34, 0x50, 0x6d, /* .c..#4Pm */ - 0xf8, 0x7c, 0xa2, 0x8a, 0x04, 0x8b, 0xd5, 0x75, /* .|.....u */ - 0x5c, 0x2d, 0xf7, 0x63, 0x88, 0xd1, 0x07, 0x7a, /* \-.c...z */ - 0xea, 0x0b, 0x45, 0x35, 0x2b, 0xeb, 0x1f, 0xb1, /* ..E5+... */ - 0x22, 0xb4, 0x94, 0x41, 0x38, 0xe2, 0x9d, 0x74, /* "..A8..t */ - 0xd6, 0x8b, 0x30, 0x22, 0x10, 0x51, 0xc5, 0xdb, /* ..0".Q.. */ - 0xca, 0x3f, 0x46, 0x2b, 0xfe, 0xe5, 0x5a, 0x3f, /* .?F+..Z? */ - 0x41, 0x74, 0x67, 0x75, 0x95, 0xa9, 0x94, 0xd5, /* Atgu.... */ - 0xc3, 0xee, 0x42, 0xf8, 0x8d, 0xeb, 0x92, 0x95, /* ..B..... */ - 0xe1, 0xd9, 0x65, 0xb7, 0x43, 0xc4, 0x18, 0xde, /* ..e.C... */ - 0x16, 0x80, 0x90, 0xce, 0x24, 0x35, 0x21, 0xc4, /* ....$5!. */ - 0x55, 0xac, 0x5a, 0x51, 0xe0, 0x2e, 0x2d, 0xb3, /* U.ZQ..-. 
*/ - 0x0a, 0x5a, 0x4f, 0x4a, 0x73, 0x31, 0x50, 0xee, /* .ZOJs1P. */ - 0x4a, 0x16, 0xbd, 0x39, 0x8b, 0xad, 0x05, 0x48, /* J..9...H */ - 0x87, 0xb1, 0x99, 0xe2, 0x10, 0xa7, 0x06, 0x72, /* .......r */ - 0x67, 0xca, 0x5c, 0xd1, 0x97, 0xbd, 0xc8, 0xf1, /* g.\..... */ - 0x76, 0xf8, 0xe0, 0x4a, 0xec, 0xbc, 0x93, 0xf4, /* v..J.... */ - 0x66, 0x4c, 0x28, 0x71, 0xd1, 0xd8, 0x66, 0x03, /* fL(q..f. */ - 0xb4, 0x90, 0x30, 0xbb, 0x17, 0xb0, 0xfe, 0x97, /* ..0..... */ - 0xf5, 0x1e, 0xe8, 0xc7, 0x5d, 0x9b, 0x8b, 0x11, /* ....]... */ - 0x19, 0x12, 0x3c, 0xab, 0x82, 0x71, 0x78, 0xff, /* ..<..qx. */ - 0xae, 0x3f, 0x32, 0xb2, 0x08, 0x71, 0xb2, 0x1b, /* .?2..q.. */ - 0x8c, 0x27, 0xac, 0x11, 0xb8, 0xd8, 0x43, 0x49, /* .'....CI */ - 0xcf, 0xb0, 0x70, 0xb1, 0xf0, 0x8c, 0xae, 0xda, /* ..p..... */ - 0x24, 0x87, 0x17, 0x3b, 0xd8, 0x04, 0x65, 0x6c, /* $..;..el */ - 0x00, 0x76, 0x50, 0xef, 0x15, 0x08, 0xd7, 0xb4, /* .vP..... */ - 0x73, 0x68, 0x26, 0x14, 0x87, 0x95, 0xc3, 0x5f, /* sh&...._ */ - 0x6e, 0x61, 0xb8, 0x87, 0x84, 0xfa, 0x80, 0x1a, /* na...... */ - 0x0a, 0x8b, 0x98, 0xf3, 0xe3, 0xff, 0x4e, 0x44, /* ......ND */ - 0x1c, 0x65, 0x74, 0x7c, 0x71, 0x54, 0x65, 0xe5, /* .et|qTe. */ - 0x39, 0x02, 0x03, 0x01, 0x00, 0x01, 0xa3, 0x82, /* 9....... */ - 0x01, 0x0a, 0x30, 0x82, 0x01, 0x06, 0x30, 0x09, /* ..0...0. */ - 0x06, 0x03, 0x55, 0x1d, 0x13, 0x04, 0x02, 0x30, /* ..U....0 */ - 0x00, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x1d, 0x0e, /* .0...U.. */ - 0x04, 0x16, 0x04, 0x14, 0x32, 0x67, 0xe1, 0xb1, /* ....2g.. */ - 0x79, 0xd2, 0x81, 0xfc, 0x9f, 0x23, 0x0c, 0x70, /* y....#.p */ - 0x40, 0x50, 0xb5, 0x46, 0x56, 0xb8, 0x30, 0x36, /* @P.FV.06 */ - 0x30, 0x81, 0xc4, 0x06, 0x03, 0x55, 0x1d, 0x23, /* 0....U.# */ - 0x04, 0x81, 0xbc, 0x30, 0x81, 0xb9, 0x80, 0x14, /* ...0.... */ - 0x73, 0xb0, 0x1c, 0xa4, 0x2f, 0x82, 0xcb, 0xcf, /* s.../... */ - 0x47, 0xa5, 0x38, 0xd7, 0xb0, 0x04, 0x82, 0x3a, /* G.8....: */ - 0x7e, 0x72, 0x15, 0x21, 0xa1, 0x81, 0x9d, 0xa4, /* ~r.!.... 
*/ - 0x81, 0x9a, 0x30, 0x81, 0x97, 0x31, 0x0b, 0x30, /* ..0..1.0 */ - 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, /* ...U.... */ - 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, /* US1.0... */ - 0x55, 0x04, 0x08, 0x0c, 0x0a, 0x57, 0x61, 0x73, /* U....Was */ - 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, /* hington1 */ - 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, /* .0...U.. */ - 0x0c, 0x07, 0x53, 0x65, 0x61, 0x74, 0x74, 0x6c, /* ..Seattl */ - 0x65, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, /* e1.0...U */ - 0x04, 0x0a, 0x0c, 0x07, 0x77, 0x6f, 0x6c, 0x66, /* ....wolf */ - 0x53, 0x53, 0x4c, 0x31, 0x14, 0x30, 0x12, 0x06, /* SSL1.0.. */ - 0x03, 0x55, 0x04, 0x0b, 0x0c, 0x0b, 0x45, 0x6e, /* .U....En */ - 0x67, 0x69, 0x6e, 0x65, 0x65, 0x72, 0x69, 0x6e, /* gineerin */ - 0x67, 0x31, 0x18, 0x30, 0x16, 0x06, 0x03, 0x55, /* g1.0...U */ - 0x04, 0x03, 0x0c, 0x0f, 0x77, 0x6f, 0x6c, 0x66, /* ....wolf */ - 0x53, 0x53, 0x4c, 0x20, 0x72, 0x6f, 0x6f, 0x74, /* SSL root */ - 0x20, 0x43, 0x41, 0x31, 0x1f, 0x30, 0x1d, 0x06, /* CA1.0.. */ - 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, /* .*.H.... */ - 0x09, 0x01, 0x16, 0x10, 0x69, 0x6e, 0x66, 0x6f, /* ....info */ - 0x40, 0x77, 0x6f, 0x6c, 0x66, 0x73, 0x73, 0x6c, /* @wolfssl */ - 0x2e, 0x63, 0x6f, 0x6d, 0x82, 0x01, 0x63, 0x30, /* .com..c0 */ - 0x13, 0x06, 0x03, 0x55, 0x1d, 0x25, 0x04, 0x0c, /* ...U.%.. */ - 0x30, 0x0a, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, /* 0...+... */ - 0x05, 0x07, 0x03, 0x09, 0x30, 0x0d, 0x06, 0x09, /* ....0... */ - 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, /* *.H..... */ - 0x0b, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, /* ........ */ - 0x4d, 0xa2, 0xd8, 0x55, 0xe0, 0x2b, 0xf4, 0xad, /* M..U.+.. */ - 0x65, 0xe2, 0x92, 0x35, 0xcb, 0x60, 0xa0, 0xa2, /* e..5.`.. */ - 0x6b, 0xa6, 0x88, 0xc1, 0x86, 0x58, 0x57, 0x37, /* k....XW7 */ - 0xbd, 0x2e, 0x28, 0x6e, 0x1c, 0x56, 0x2a, 0x35, /* ..(n.V*5 */ - 0xde, 0xff, 0x3e, 0x8e, 0x3d, 0x47, 0x21, 0x1a, /* ..>.=G!. 
*/ - 0xe9, 0xd3, 0xc6, 0xb4, 0xe2, 0xcb, 0x3e, 0xc6, /* ......>. */ - 0xaf, 0x9b, 0xef, 0x23, 0x88, 0x56, 0x95, 0x73, /* ...#.V.s */ - 0x2e, 0xb3, 0xed, 0xc5, 0x11, 0x4b, 0x69, 0xf7, /* .....Ki. */ - 0x13, 0x3a, 0x05, 0xe1, 0xaf, 0xba, 0xc9, 0x59, /* .:.....Y */ - 0xfd, 0xe2, 0xa0, 0x81, 0xa0, 0x4c, 0x0c, 0x2c, /* .....L., */ - 0xcb, 0x57, 0xad, 0x96, 0x3a, 0x8c, 0x32, 0xa6, /* .W..:.2. */ - 0x4a, 0xf8, 0x72, 0xb8, 0xec, 0xb3, 0x26, 0x69, /* J.r...&i */ - 0xd6, 0x6a, 0x4c, 0x4c, 0x78, 0x18, 0x3c, 0xca, /* .jLLx.<. */ - 0x19, 0xf1, 0xb5, 0x8e, 0x23, 0x81, 0x5b, 0x27, /* ....#.[' */ - 0x90, 0xe0, 0x5c, 0x2b, 0x17, 0x4d, 0x78, 0x99, /* ..\+.Mx. */ - 0x6b, 0x25, 0xbd, 0x2f, 0xae, 0x1b, 0xaa, 0xce, /* k%./.... */ - 0x84, 0xb9, 0x44, 0x21, 0x46, 0xc0, 0x34, 0x6b, /* ..D!F.4k */ - 0x5b, 0xb9, 0x1b, 0xca, 0x5c, 0x60, 0xf1, 0xef, /* [...\`.. */ - 0xe6, 0x66, 0xbc, 0x84, 0x63, 0x56, 0x50, 0x7d, /* .f..cVP} */ - 0xbb, 0x2c, 0x2f, 0x7b, 0x47, 0xb4, 0xfd, 0x58, /* .,/{G..X */ - 0x77, 0x87, 0xee, 0x27, 0x20, 0x96, 0x72, 0x8e, /* w..' .r. */ - 0x4c, 0x7e, 0x4f, 0x93, 0xeb, 0x5f, 0x8f, 0x9c, /* L~O.._.. */ - 0x1e, 0x59, 0x7a, 0x96, 0xaa, 0x53, 0x77, 0x22, /* .Yz..Sw" */ - 0x41, 0xd8, 0xd3, 0xf9, 0x89, 0x8f, 0xe8, 0x9d, /* A....... */ - 0x65, 0xbd, 0x0c, 0x71, 0x3c, 0xbb, 0xa3, 0x07, /* e..q<... */ - 0xbf, 0xfb, 0xa8, 0xd1, 0x18, 0x0a, 0xb4, 0xc4, /* ........ */ - 0xf7, 0x83, 0xb3, 0x86, 0x2b, 0xf0, 0x5b, 0x05, /* ....+.[. */ - 0x28, 0xc1, 0x01, 0x31, 0x73, 0x5c, 0x2b, 0xbd, /* (..1s\+. */ - 0x60, 0x97, 0xa3, 0x36, 0x82, 0x96, 0xd7, 0x83, /* `..6.... */ - 0xdf, 0x75, 0xee, 0x29, 0x42, 0x97, 0x86, 0x41, /* .u.)B..A */ - 0x55, 0xb9, 0x70, 0x87, 0xd5, 0x02, 0x85, 0x13, /* U.p..... 
*/ - 0x41, 0xf8, 0x25, 0x05, 0xab, 0x6a, 0xaa, 0x57 /* A.%..j.W */ - }; - OcspEntry entry[1]; - CertStatus status[1]; - OcspRequest* request = NULL; -#ifndef NO_FILESYSTEM - const char* ca_cert = "./certs/ca-cert.pem"; -#endif - - byte serial[] = {0x05}; - byte issuerHash[] = {0x71, 0x4d, 0x82, 0x23, 0x40, 0x59, 0xc0, 0x96, 0xa1, 0x37, 0x43, 0xfa, 0x31, 0xdb, 0xba, 0xb1, 0x43, 0x18, 0xda, 0x04}; - byte issuerKeyHash[] = {0x83, 0xc6, 0x3a, 0x89, 0x2c, 0x81, 0xf4, 0x02, 0xd7, 0x9d, 0x4c, 0xe2, 0x2a, 0xc0, 0x71, 0x82, 0x64, 0x44, 0xda, 0x0e}; - - - XMEMSET(entry, 0, sizeof(OcspEntry)); - XMEMSET(status, 0, sizeof(CertStatus)); - - ExpectNotNull(request = wolfSSL_OCSP_REQUEST_new()); - ExpectNotNull(request->serial = (byte*)XMALLOC(sizeof(serial), NULL, - DYNAMIC_TYPE_OCSP_REQUEST)); - - if ((request != NULL) && (request->serial != NULL)) { - request->serialSz = sizeof(serial); - XMEMCPY(request->serial, serial, sizeof(serial)); - XMEMCPY(request->issuerHash, issuerHash, sizeof(issuerHash)); - XMEMCPY(request->issuerKeyHash, issuerKeyHash, sizeof(issuerKeyHash)); - } - - ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); - ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(cm, 0), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, - "./certs/ocsp/intermediate1-ca-cert.pem", NULL), WOLFSSL_SUCCESS); - - /* Response should be valid. */ - ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, (byte *)response, - sizeof(response), NULL, status, entry, request), WOLFSSL_SUCCESS); - - /* Flip a byte in the request serial number, response should be invalid - * now. 
*/ - if ((request != NULL) && (request->serial != NULL)) - request->serial[0] ^= request->serial[0]; - ExpectIntNE(wolfSSL_CertManagerCheckOCSPResponse(cm, (byte *)response, - sizeof(response), NULL, status, entry, request), WOLFSSL_SUCCESS); - -#ifndef NO_FILESYSTEM - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, server_cert_der_2048, - sizeof(server_cert_der_2048)), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); - ExpectIntEQ(WOLFSSL_SUCCESS, - wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); - ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, server_cert_der_2048, - sizeof(server_cert_der_2048)), 1); -#endif - - wolfSSL_OCSP_REQUEST_free(request); - wolfSSL_CertManagerFree(cm); -#endif /* OPENSSL_ALL || WOLFSSL_NGINX || WOLFSSL_HAPROXY || - * WOLFSSL_APACHE_HTTPD || HAVE_LIGHTY */ -#endif /* HAVE_OCSP */ - return EXPECT_RESULT(); -} - static int test_wolfSSL_CheckOCSPResponse(void) { EXPECT_DECLS; @@ -5183,103 +3137,6 @@ static int test_wolfSSL_CertRsaPss(void) #endif /* HAVE_CERT_CHAIN_VALIDATION */ -/* Test RSA-PSS digital signature creation and verification */ -static int test_wc_RsaPSS_DigitalSignVerify(void) -{ - EXPECT_DECLS; - - /* Early FIPS did not support PSS. 
*/ -#if (!defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && \ - (HAVE_FIPS_VERSION > 2))) && \ - (!defined(HAVE_SELFTEST) || (defined(HAVE_SELFTEST_VERSION) && \ - (HAVE_SELFTEST_VERSION > 2))) && \ - !defined(NO_RSA) && defined(WC_RSA_PSS) && defined(OPENSSL_EXTRA) && \ - defined(WOLFSSL_KEY_GEN) && defined(WC_RSA_NO_PADDING) && \ - !defined(NO_SHA256) - - /* Test digest */ - const unsigned char test_digest[32] = { - 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, - 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, - 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, - 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09 - }; - const unsigned int digest_len = sizeof(test_digest); - - /* Variables for RSA key generation and signature operations */ - EVP_PKEY_CTX *pkctx = NULL; - EVP_PKEY *pkey = NULL; - EVP_PKEY_CTX *sign_ctx = NULL; - EVP_PKEY_CTX *verify_ctx = NULL; - unsigned char signature[256+MAX_DER_DIGEST_ASN_SZ] = {0}; - size_t signature_len = sizeof(signature); - int modulus_bits = 2048; - - /* Generate RSA key pair to avoid file dependencies */ - ExpectNotNull(pkctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)); - ExpectIntEQ(EVP_PKEY_keygen_init(pkctx), 1); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_keygen_bits(pkctx, modulus_bits), 1); - ExpectIntEQ(EVP_PKEY_keygen(pkctx, &pkey), 1); - - /* Create signing context */ - ExpectNotNull(sign_ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_sign_init(sign_ctx), 1); - - /* Configure RSA-PSS parameters for signing. 
*/ - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(sign_ctx, RSA_PKCS1_PSS_PADDING), - 1); - /* Default salt length matched hash so use 32 for SHA256 */ - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_pss_saltlen(sign_ctx, 32), 1); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_mgf1_md(sign_ctx, EVP_sha256()), 1); - ExpectIntEQ(EVP_PKEY_CTX_set_signature_md(sign_ctx, EVP_sha256()), 1); - - /* Create the digital signature */ - ExpectIntEQ(EVP_PKEY_sign(sign_ctx, signature, &signature_len, test_digest, - digest_len), 1); - ExpectIntGT((int)signature_len, 0); - - /* Create verification context */ - ExpectNotNull(verify_ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_verify_init(verify_ctx), 1); - - /* Configure RSA-PSS parameters for verification */ - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(verify_ctx, RSA_PKCS1_PSS_PADDING), - 1); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_pss_saltlen(verify_ctx, 32), 1); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_mgf1_md(verify_ctx, EVP_sha256()), 1); - ExpectIntEQ(EVP_PKEY_CTX_set_signature_md(verify_ctx, EVP_sha256()), 1); - - /* Verify the digital signature */ - ExpectIntEQ(EVP_PKEY_verify(verify_ctx, signature, signature_len, - test_digest, digest_len), 1); - - /* Test with wrong digest to ensure verification fails (negative test) */ - { - const unsigned char wrong_digest[32] = { - 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, - 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, - 0x07, 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, - 0x05, 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, 0x02 - }; - ExpectIntNE(EVP_PKEY_verify(verify_ctx, signature, signature_len, - wrong_digest, digest_len), 1); - } - - /* Clean up */ - if (verify_ctx) - EVP_PKEY_CTX_free(verify_ctx); - if (sign_ctx) - EVP_PKEY_CTX_free(sign_ctx); - if (pkey) - EVP_PKEY_free(pkey); - if (pkctx) - EVP_PKEY_CTX_free(pkctx); - -#endif - - return EXPECT_RESULT(); -} - static int test_wolfSSL_CTX_load_verify_locations_ex(void) { EXPECT_DECLS; @@ -6346,938 +4203,6 @@ static int test_wolfSSL_SetMinVersion(void) 
#include -/*----------------------------------------------------------------------------* - | EVP - *----------------------------------------------------------------------------*/ - -static int test_wolfSSL_EVP_PKEY_print_public(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_BIO) - WOLFSSL_BIO* rbio = NULL; - WOLFSSL_BIO* wbio = NULL; - WOLFSSL_EVP_PKEY* pkey = NULL; - char line[256] = { 0 }; - char line1[256] = { 0 }; - int i = 0; - - /* test error cases */ - ExpectIntEQ( EVP_PKEY_print_public(NULL,NULL,0,NULL),0L); - - /* - * test RSA public key print - * in this test, pass '3' for indent - */ -#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_1024) - - ExpectNotNull(rbio = BIO_new_mem_buf( client_keypub_der_1024, - sizeof_client_keypub_der_1024)); - - ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); - - ExpectNotNull(wbio = BIO_new(BIO_s_mem())); - - ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,3,NULL),1); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, " RSA Public-Key: (1024 bit)\n"); - ExpectIntEQ(XSTRNCMP(line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, " Modulus:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, " 00:bc:73:0e:a8:49:f3:74:a2:a9:ef:18:a5:da:55:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - - /* skip to the end of modulus element*/ - for (i = 0; i < 8 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, " Exponent: 65537 (0x010001)\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - - /* should reach EOF */ - ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); - - EVP_PKEY_free(pkey); - pkey = NULL; - BIO_free(rbio); - BIO_free(wbio); - rbio = NULL; - wbio = NULL; - -#endif /* !NO_RSA && USE_CERT_BUFFERS_1024*/ - - /* - * test DSA public 
key print - */ -#if !defined(NO_DSA) && defined(USE_CERT_BUFFERS_2048) - ExpectNotNull(rbio = BIO_new_mem_buf( dsa_pub_key_der_2048, - sizeof_dsa_pub_key_der_2048)); - - ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); - - ExpectNotNull(wbio = BIO_new(BIO_s_mem())); - - ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL),1); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "DSA Public-Key: (2048 bit)\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "pub:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, - " 00:C2:35:2D:EC:83:83:6C:73:13:9E:52:7C:74:C8:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of pub element*/ - for (i = 0; i < 17 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "P:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of P element*/ - for (i = 0; i < 18 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "Q:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of Q element*/ - for (i = 0; i < 3 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "G:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of G element*/ - for (i = 0; i < 18 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - /* should reach EOF */ - ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); - - EVP_PKEY_free(pkey); - pkey = NULL; - BIO_free(rbio); - BIO_free(wbio); - rbio = NULL; - wbio = NULL; - -#endif /* !NO_DSA && USE_CERT_BUFFERS_2048 */ - - /* - * test ECC public 
key print - */ -#if defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) - - ExpectNotNull(rbio = BIO_new_mem_buf( ecc_clikeypub_der_256, - sizeof_ecc_clikeypub_der_256)); - - ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); - - ExpectNotNull(wbio = BIO_new(BIO_s_mem())); - - ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL),1); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - ExpectStrEQ(line, "Public-Key: (256 bit)\n"); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "pub:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, - " 04:55:BF:F4:0F:44:50:9A:3D:CE:9B:B7:F0:C5:4D:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of pub element*/ - for (i = 0; i < 4 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "ASN1 OID: prime256v1\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "NIST CURVE: P-256\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - - /* should reach EOF */ - ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); - - EVP_PKEY_free(pkey); - pkey = NULL; - BIO_free(rbio); - BIO_free(wbio); - rbio = NULL; - wbio = NULL; - -#endif /* HAVE_ECC && USE_CERT_BUFFERS_256 */ - - /* - * test DH public key print - */ -#if defined(WOLFSSL_DH_EXTRA) && defined(USE_CERT_BUFFERS_2048) - - ExpectNotNull(rbio = BIO_new_mem_buf( dh_pub_key_der_2048, - sizeof_dh_pub_key_der_2048)); - - ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); - - ExpectNotNull(wbio = BIO_new(BIO_s_mem())); - - ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL), 1); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "DH Public-Key: (2048 bit)\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, 
line, sizeof(line)), 0); - strcpy(line1, "public-key:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, - " 34:41:BF:E9:F2:11:BF:05:DB:B2:72:A8:29:CC:BD:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of public-key element*/ - for (i = 0; i < 17 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "prime:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, - " 00:D3:B2:99:84:5C:0A:4C:E7:37:CC:FC:18:37:01:\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* skip to the end of prime element*/ - for (i = 0; i < 17 ;i++) { - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - } - - ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); - strcpy(line1, "generator: 2 (0x02)\n"); - ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); - - /* should reach EOF */ - ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); - - EVP_PKEY_free(pkey); - pkey = NULL; - BIO_free(rbio); - BIO_free(wbio); - rbio = NULL; - wbio = NULL; - -#endif /* WOLFSSL_DH_EXTRA && USE_CERT_BUFFERS_2048 */ - - /* to prevent "unused variable" warning */ - (void)pkey; - (void)wbio; - (void)rbio; - (void)line; - (void)line1; - (void)i; -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} -/* Test functions for base64 encode/decode */ -static int test_wolfSSL_EVP_ENCODE_CTX_new(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && \ -( defined(WOLFSSL_BASE64_ENCODE) || defined(WOLFSSL_BASE64_DECODE)) - EVP_ENCODE_CTX* ctx = NULL; - - ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); - ExpectIntEQ(ctx->remaining,0); - ExpectIntEQ(ctx->data[0],0); - ExpectIntEQ(ctx->data[sizeof(ctx->data) -1],0); - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL_EXTRA && (WOLFSSL_BASE64_ENCODE || WOLFSSL_BASE64_DECODE) */ - 
return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_ENCODE_CTX_free(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && \ -( defined(WOLFSSL_BASE64_ENCODE) || defined(WOLFSSL_BASE64_DECODE)) - EVP_ENCODE_CTX* ctx = NULL; - - ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL_EXTRA && (WOLFSSL_BASE64_ENCODE || WOLFSSL_BASE64_DECODE) */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_EncodeInit(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) - EVP_ENCODE_CTX* ctx = NULL; - - ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); - ExpectIntEQ(ctx->remaining, 0); - ExpectIntEQ(ctx->data[0], 0); - ExpectIntEQ(ctx->data[sizeof(ctx->data) -1], 0); - - if (ctx != NULL) { - /* make ctx dirty */ - ctx->remaining = 10; - XMEMSET(ctx->data, 0x77, sizeof(ctx->data)); - } - - EVP_EncodeInit(ctx); - - ExpectIntEQ(ctx->remaining, 0); - ExpectIntEQ(ctx->data[0], 0); - ExpectIntEQ(ctx->data[sizeof(ctx->data) -1], 0); - - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_EncodeUpdate(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) - int outl; - int total; - - const unsigned char plain0[] = {"Th"}; - const unsigned char plain1[] = {"This is a base64 encodeing test."}; - const unsigned char plain2[] = {"This is additional data."}; - - const unsigned char encBlock0[] = {"VGg="}; - const unsigned char enc0[] = {"VGg=\n"}; - /* expected encoded result for the first output 64 chars plus trailing LF*/ - const unsigned char enc1[] = {"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVpbmcgdGVzdC5UaGlzIGlzIGFkZGl0aW9u\n"}; - - const unsigned char enc2[] = - {"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVpbmcgdGVzdC5UaGlzIGlzIGFkZGl0aW9u\nYWwgZGF0YS4=\n"}; - - unsigned char encOutBuff[300]; - - EVP_ENCODE_CTX* ctx = NULL; - - ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); - - EVP_EncodeInit(ctx); - - /* illegal parameter 
test */ - ExpectIntEQ( - EVP_EncodeUpdate( - NULL, /* pass NULL as ctx */ - encOutBuff, - &outl, - plain1, - sizeof(plain1)-1), - 0 /* expected result code 0: fail */ - ); - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - NULL, /* pass NULL as out buff */ - &outl, - plain1, - sizeof(plain1)-1), - 0 /* expected result code 0: fail */ - ); - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff, - NULL, /* pass NULL as outl */ - plain1, - sizeof(plain1)-1), - 0 /* expected result code 0: fail */ - ); - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff, - &outl, - NULL, /* pass NULL as in */ - sizeof(plain1)-1), - 0 /* expected result code 0: fail */ - ); - - ExpectIntEQ(EVP_EncodeBlock(NULL, NULL, 0), -1); - - /* meaningless parameter test */ - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff, - &outl, - plain1, - 0), /* pass zero input */ - 1 /* expected result code 1: success */ - ); - - /* very small data encoding test */ - - EVP_EncodeInit(ctx); - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff, - &outl, - plain0, - sizeof(plain0)-1), - 1 /* expected result code 1: success */ - ); - ExpectIntEQ(outl,0); - - if (EXPECT_SUCCESS()) { - EVP_EncodeFinal( - ctx, - encOutBuff + outl, - &outl); - } - - ExpectIntEQ( outl, sizeof(enc0)-1); - ExpectIntEQ( - XSTRNCMP( - (const char*)encOutBuff, - (const char*)enc0,sizeof(enc0) ), - 0); - - XMEMSET( encOutBuff,0, sizeof(encOutBuff)); - ExpectIntEQ(EVP_EncodeBlock(encOutBuff, plain0, sizeof(plain0)-1), - sizeof(encBlock0)-1); - ExpectStrEQ(encOutBuff, encBlock0); - - /* pass small size( < 48bytes ) input, then make sure they are not - * encoded and just stored in ctx - */ - - EVP_EncodeInit(ctx); - - total = 0; - outl = 0; - XMEMSET( encOutBuff,0, sizeof(encOutBuff)); - - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff, /* buffer for output */ - &outl, /* size of output */ - plain1, /* input */ - sizeof(plain1)-1), /* size of input */ - 1); /* expected result code 1:success */ - - total += outl; - - 
ExpectIntEQ(outl, 0); /* no output expected */ - ExpectIntEQ(ctx->remaining, sizeof(plain1) -1); - ExpectTrue( - XSTRNCMP((const char*)(ctx->data), - (const char*)plain1, - ctx->remaining) ==0 ); - ExpectTrue(encOutBuff[0] == 0); - - /* call wolfSSL_EVP_EncodeUpdate again to make it encode - * the stored data and the new input together - */ - ExpectIntEQ( - EVP_EncodeUpdate( - ctx, - encOutBuff + outl, /* buffer for output */ - &outl, /* size of output */ - plain2, /* additional input */ - sizeof(plain2) -1), /* size of additional input */ - 1); /* expected result code 1:success */ - - total += outl; - - ExpectIntNE(outl, 0); /* some output is expected this time*/ - ExpectIntEQ(outl, BASE64_ENCODE_RESULT_BLOCK_SIZE +1); /* 64 bytes and LF */ - ExpectIntEQ( - XSTRNCMP((const char*)encOutBuff,(const char*)enc1,sizeof(enc1) ),0); - - /* call wolfSSL_EVP_EncodeFinal to flush all the unprocessed input */ - EVP_EncodeFinal( - ctx, - encOutBuff + outl, - &outl); - - total += outl; - - ExpectIntNE(total,0); - ExpectIntNE(outl,0); - ExpectIntEQ(XSTRNCMP( - (const char*)encOutBuff,(const char*)enc2,sizeof(enc2) ),0); - - /* test with illeagal parameters */ - outl = 1; - EVP_EncodeFinal(NULL, encOutBuff + outl, &outl); - ExpectIntEQ(outl, 0); - outl = 1; - EVP_EncodeFinal(ctx, NULL, &outl); - ExpectIntEQ(outl, 0); - EVP_EncodeFinal(ctx, encOutBuff + outl, NULL); - EVP_EncodeFinal(NULL, NULL, NULL); - - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_EncodeFinal(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) - /* tests for wolfSSL_EVP_EncodeFinal are included in - * test_wolfSSL_EVP_EncodeUpdate - */ - res = TEST_SUCCESS; -#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ - return res; -} - - -static int test_wolfSSL_EVP_DecodeInit(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) - EVP_ENCODE_CTX* ctx = 
NULL; - - ExpectNotNull( ctx = EVP_ENCODE_CTX_new()); - ExpectIntEQ( ctx->remaining,0); - ExpectIntEQ( ctx->data[0],0); - ExpectIntEQ( ctx->data[sizeof(ctx->data) -1],0); - - if (ctx != NULL) { - /* make ctx dirty */ - ctx->remaining = 10; - XMEMSET( ctx->data, 0x77, sizeof(ctx->data)); - } - - EVP_DecodeInit(ctx); - - ExpectIntEQ( ctx->remaining,0); - ExpectIntEQ( ctx->data[0],0); - ExpectIntEQ( ctx->data[sizeof(ctx->data) -1],0); - - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_DecodeUpdate(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) - int outl; - unsigned char decOutBuff[300]; - - EVP_ENCODE_CTX* ctx = NULL; - - static const unsigned char enc1[] = - {"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg==\n"}; -/* const unsigned char plain1[] = - {"This is a base64 decoding test."} */ - - ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); - - EVP_DecodeInit(ctx); - - /* illegal parameter tests */ - - /* pass NULL as ctx */ - ExpectIntEQ( - EVP_DecodeUpdate( - NULL, /* pass NULL as ctx */ - decOutBuff, - &outl, - enc1, - sizeof(enc1)-1), - -1 /* expected result code -1: fail */ - ); - ExpectIntEQ( outl, 0); - - /* pass NULL as output */ - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - NULL, /* pass NULL as out buff */ - &outl, - enc1, - sizeof(enc1)-1), - -1 /* expected result code -1: fail */ - ); - ExpectIntEQ( outl, 0); - - /* pass NULL as outl */ - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - NULL, /* pass NULL as outl */ - enc1, - sizeof(enc1)-1), - -1 /* expected result code -1: fail */ - ); - - /* pass NULL as input */ - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - &outl, - NULL, /* pass NULL as in */ - sizeof(enc1)-1), - -1 /* expected result code -1: fail */ - ); - ExpectIntEQ( outl, 0); - - ExpectIntEQ(EVP_DecodeBlock(NULL, NULL, 0), -1); - - /* pass zero length input */ - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - 
&outl, - enc1, - 0), /* pass zero as input len */ - 1 /* expected result code 1: success */ - ); - - /* decode correct base64 string */ - - { - static const unsigned char enc2[] = - {"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg==\n"}; - static const unsigned char plain2[] = - {"This is a base64 decoding test."}; - - EVP_EncodeInit(ctx); - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - &outl, - enc2, - sizeof(enc2)-1), - 0 /* expected result code 0: success */ - ); - - ExpectIntEQ(outl,sizeof(plain2) -1); - - ExpectIntEQ( - EVP_DecodeFinal( - ctx, - decOutBuff + outl, - &outl), - 1 /* expected result code 1: success */ - ); - ExpectIntEQ(outl, 0); /* expected DecodeFinal output no data */ - - ExpectIntEQ(XSTRNCMP( (const char*)plain2,(const char*)decOutBuff, - sizeof(plain2) -1 ),0); - ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc2, sizeof(enc2)), - sizeof(plain2)-1); - ExpectIntEQ(XSTRNCMP( (const char*)plain2,(const char*)decOutBuff, - sizeof(plain2) -1 ),0); - } - - /* decode correct base64 string which does not have '\n' in its last*/ - - { - static const unsigned char enc3[] = - {"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg=="}; /* 44 chars */ - static const unsigned char plain3[] = - {"This is a base64 decoding test."}; /* 31 chars */ - - EVP_EncodeInit(ctx); - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - &outl, - enc3, - sizeof(enc3)-1), - 0 /* expected result code 0: success */ - ); - - ExpectIntEQ(outl,sizeof(plain3)-1); /* 31 chars should be output */ - - ExpectIntEQ(XSTRNCMP( (const char*)plain3,(const char*)decOutBuff, - sizeof(plain3) -1 ),0); - - ExpectIntEQ( - EVP_DecodeFinal( - ctx, - decOutBuff + outl, - &outl), - 1 /* expected result code 1: success */ - ); - - ExpectIntEQ(outl,0 ); - - ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc3, sizeof(enc3)-1), - sizeof(plain3)-1); - ExpectIntEQ(XSTRNCMP( (const char*)plain3,(const char*)decOutBuff, - sizeof(plain3) -1 ),0); - } - - /* decode string which has a padding char ('=') in the 
illegal position*/ - - { - static const unsigned char enc4[] = - {"VGhpcyBpcyBhIGJhc2U2N=CBkZWNvZGluZyB0ZXN0Lg==\n"}; - - EVP_EncodeInit(ctx); - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - &outl, - enc4, - sizeof(enc4)-1), - -1 /* expected result code -1: error */ - ); - ExpectIntEQ(outl,0); - ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc4, sizeof(enc4)-1), -1); - } - - /* small data decode test */ - - { - static const unsigned char enc00[] = {"VG"}; - static const unsigned char enc01[] = {"g=\n"}; - static const unsigned char plain4[] = {"Th"}; - - EVP_EncodeInit(ctx); - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff, - &outl, - enc00, - sizeof(enc00)-1), - 1 /* expected result code 1: success */ - ); - ExpectIntEQ(outl,0); - - ExpectIntEQ( - EVP_DecodeUpdate( - ctx, - decOutBuff + outl, - &outl, - enc01, - sizeof(enc01)-1), - 0 /* expected result code 0: success */ - ); - - ExpectIntEQ(outl,sizeof(plain4)-1); - - /* test with illegal parameters */ - ExpectIntEQ(EVP_DecodeFinal(NULL,decOutBuff + outl,&outl), -1); - ExpectIntEQ(EVP_DecodeFinal(ctx,NULL,&outl), -1); - ExpectIntEQ(EVP_DecodeFinal(ctx,decOutBuff + outl, NULL), -1); - ExpectIntEQ(EVP_DecodeFinal(NULL,NULL, NULL), -1); - - if (EXPECT_SUCCESS()) { - EVP_DecodeFinal( - ctx, - decOutBuff + outl, - &outl); - } - - ExpectIntEQ( outl, 0); - ExpectIntEQ( - XSTRNCMP( - (const char*)decOutBuff, - (const char*)plain4,sizeof(plain4)-1 ), - 0); - } - - EVP_ENCODE_CTX_free(ctx); -#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_DecodeFinal(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) - /* tests for wolfSSL_EVP_DecodeFinal are included in - * test_wolfSSL_EVP_DecodeUpdate - */ - res = TEST_SUCCESS; -#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ - return res; -} - -/* Test function for wolfSSL_EVP_get_cipherbynid. 
- */ - -#ifdef OPENSSL_EXTRA -static int test_wolfSSL_EVP_get_cipherbynid(void) -{ - EXPECT_DECLS; -#ifndef NO_AES - const WOLFSSL_EVP_CIPHER* c; - - c = wolfSSL_EVP_get_cipherbynid(419); - #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ - defined(WOLFSSL_AES_128) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_128_CBC", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(423); - #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ - defined(WOLFSSL_AES_192) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_192_CBC", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(427); - #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ - defined(WOLFSSL_AES_256) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_256_CBC", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(904); - #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_128) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_128_CTR", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(905); - #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_192) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_192_CTR", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(906); - #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_256) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_256_CTR", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(418); - #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_128) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_128_ECB", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(422); - #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_192) - ExpectNotNull(c); - ExpectNotNull(XSTRCMP("EVP_AES_192_ECB", c)); - #else - ExpectNull(c); - #endif - - c = wolfSSL_EVP_get_cipherbynid(426); - #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_256) - ExpectNotNull(c); - 
ExpectNotNull(XSTRCMP("EVP_AES_256_ECB", c)); - #else - ExpectNull(c); - #endif -#endif /* !NO_AES */ - -#ifndef NO_DES3 - ExpectNotNull(XSTRCMP("EVP_DES_CBC", wolfSSL_EVP_get_cipherbynid(31))); -#ifdef WOLFSSL_DES_ECB - ExpectNotNull(XSTRCMP("EVP_DES_ECB", wolfSSL_EVP_get_cipherbynid(29))); -#endif - ExpectNotNull(XSTRCMP("EVP_DES_EDE3_CBC", wolfSSL_EVP_get_cipherbynid(44))); -#ifdef WOLFSSL_DES_ECB - ExpectNotNull(XSTRCMP("EVP_DES_EDE3_ECB", wolfSSL_EVP_get_cipherbynid(33))); -#endif -#endif /* !NO_DES3 */ - -#if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) - ExpectNotNull(XSTRCMP("EVP_CHACHA20_POLY13O5", EVP_get_cipherbynid(1018))); -#endif - - /* test for nid is out of range */ - ExpectNull(wolfSSL_EVP_get_cipherbynid(1)); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_CTX(void) -{ - EXPECT_DECLS; -#if !defined(NO_AES) && defined(HAVE_AES_CBC) && defined(WOLFSSL_AES_128) - EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new(); - const EVP_CIPHER *init = EVP_aes_128_cbc(); - const EVP_CIPHER *test; - byte key[AES_BLOCK_SIZE] = {0}; - byte iv[AES_BLOCK_SIZE] = {0}; - - ExpectNotNull(ctx); - wolfSSL_EVP_CIPHER_CTX_init(ctx); - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - test = EVP_CIPHER_CTX_cipher(ctx); - ExpectTrue(init == test); - ExpectIntEQ(EVP_CIPHER_nid(test), NID_aes_128_cbc); - - ExpectIntEQ(EVP_CIPHER_CTX_reset(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_CIPHER_CTX_reset(NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - EVP_CIPHER_CTX_free(ctx); - /* test EVP_CIPHER_CTX_cleanup with NULL */ - ExpectIntEQ(EVP_CIPHER_CTX_cleanup(NULL), WOLFSSL_SUCCESS); -#endif /* !NO_AES && HAVE_AES_CBC && WOLFSSL_AES_128 */ - return EXPECT_RESULT(); -} -#endif /* OPENSSL_EXTRA */ /*----------------------------------------------------------------------------* | IO @@ -13147,410 +10072,6 @@ static int test_tls_bad_legacy_version(void) | X509 Tests *----------------------------------------------------------------------------*/ -/* Testing 
functions dealing with PKCS12 parsing out X509 certs */ -static int test_wolfSSL_PKCS12(void) -{ - EXPECT_DECLS; - /* .p12 file is encrypted with DES3 */ -#ifndef HAVE_FIPS /* Password used in cert "wolfSSL test" is only 12-bytes - * (96-bit) FIPS mode requires Minimum of 14-byte (112-bit) - * Password Key - */ -#if defined(OPENSSL_EXTRA) && !defined(NO_DES3) && !defined(NO_FILESYSTEM) && \ - !defined(NO_STDIO_FILESYSTEM) && !defined(NO_TLS) && \ - !defined(NO_ASN) && !defined(NO_PWDBASED) && !defined(NO_RSA) && \ - !defined(NO_SHA) && defined(HAVE_PKCS12) && !defined(NO_BIO) && \ - defined(WOLFSSL_AES_256) - byte buf[6000]; - char file[] = "./certs/test-servercert.p12"; - char order[] = "./certs/ecc-rsa-server.p12"; -#ifdef WC_RC2 - char rc2p12[] = "./certs/test-servercert-rc2.p12"; -#endif - char pass[] = "a password"; - const char goodPsw[] = "wolfSSL test"; - const char badPsw[] = "bad"; -#ifdef HAVE_ECC - WOLFSSL_X509_NAME *subject = NULL; - WOLFSSL_X509 *x509 = NULL; -#endif - XFILE f = XBADFILE; - int bytes = 0, ret = 0, goodPswLen = 0, badPswLen = 0; - WOLFSSL_BIO *bio = NULL; - WOLFSSL_EVP_PKEY *pkey = NULL; - WC_PKCS12 *pkcs12 = NULL; - WC_PKCS12 *pkcs12_2 = NULL; - WOLFSSL_X509 *cert = NULL; - WOLFSSL_X509 *tmp = NULL; - WOLF_STACK_OF(WOLFSSL_X509) *ca = NULL; -#if (defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ - || defined(WOLFSSL_NGINX)) && defined(SESSION_CERTS) - WOLFSSL_CTX *ctx = NULL; - WOLFSSL *ssl = NULL; - WOLF_STACK_OF(WOLFSSL_X509) *tmp_ca = NULL; -#endif - - ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); - ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); - if (f != XBADFILE) { - XFCLOSE(f); - f = XBADFILE; - } - - goodPswLen = (int)XSTRLEN(goodPsw); - badPswLen = (int)XSTRLEN(badPsw); - - ExpectNotNull(bio = wolfSSL_BIO_new(wolfSSL_BIO_s_mem())); - - ExpectIntEQ(BIO_write(bio, buf, bytes), bytes); /* d2i consumes BIO */ - ExpectNotNull(d2i_PKCS12_bio(bio, &pkcs12)); - ExpectNotNull(pkcs12); - 
BIO_free(bio); - bio = NULL; - - /* check verify MAC directly */ - ExpectIntEQ(ret = PKCS12_verify_mac(pkcs12, goodPsw, goodPswLen), 1); - - /* check verify MAC fail case directly */ - ExpectIntEQ(ret = PKCS12_verify_mac(pkcs12, badPsw, badPswLen), 0); - - /* check verify MAC fail case */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, &cert, NULL), 0); - ExpectNull(pkey); - ExpectNull(cert); - - /* check parse with no extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), - 1); - ExpectNotNull(pkey); - ExpectNotNull(cert); - - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; - wolfSSL_X509_free(cert); - cert = NULL; - - /* check parse with extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), - 1); - ExpectNotNull(pkey); - ExpectNotNull(cert); - ExpectNotNull(ca); - -#if (defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ - || defined(WOLFSSL_NGINX)) && defined(SESSION_CERTS) - - /* Check that SSL_CTX_set0_chain correctly sets the certChain buffer */ -#if !defined(NO_WOLFSSL_CLIENT) || !defined(NO_WOLFSSL_SERVER) -#if !defined(NO_WOLFSSL_CLIENT) && defined(SESSION_CERTS) - ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_client_method())); -#else - ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_server_method())); -#endif - /* Copy stack structure */ - ExpectNotNull(tmp_ca = X509_chain_up_ref(ca)); - ExpectIntEQ(SSL_CTX_set0_chain(ctx, tmp_ca), 1); - /* CTX now owns the tmp_ca stack structure */ - tmp_ca = NULL; - ExpectIntEQ(wolfSSL_CTX_get_extra_chain_certs(ctx, &tmp_ca), 1); - ExpectNotNull(tmp_ca); - ExpectIntEQ(sk_X509_num(tmp_ca), sk_X509_num(ca)); - /* Check that the main cert is also set */ - ExpectNotNull(SSL_CTX_get0_certificate(ctx)); - ExpectNotNull(ssl = SSL_new(ctx)); - ExpectNotNull(SSL_get_certificate(ssl)); - SSL_free(ssl); - SSL_CTX_free(ctx); - ctx = NULL; -#endif -#endif /* !NO_WOLFSSL_CLIENT || !NO_WOLFSSL_SERVER */ - /* should be 2 
other certs on stack */ - ExpectNotNull(tmp = sk_X509_pop(ca)); - X509_free(tmp); - ExpectNotNull(tmp = sk_X509_pop(ca)); - X509_free(tmp); - ExpectNull(sk_X509_pop(ca)); - - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(cert); - cert = NULL; - sk_X509_pop_free(ca, X509_free); - ca = NULL; - - /* check PKCS12_create */ - ExpectNull(PKCS12_create(pass, NULL, NULL, NULL, NULL, -1, -1, -1, -1,0)); - ExpectIntEQ(PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), - SSL_SUCCESS); - ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, ca, - -1, -1, 100, -1, 0))); - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(cert); - cert = NULL; - sk_X509_pop_free(ca, NULL); - ca = NULL; - - ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), - SSL_SUCCESS); - PKCS12_free(pkcs12_2); - pkcs12_2 = NULL; - ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, ca, - NID_pbe_WithSHA1And3_Key_TripleDES_CBC, - NID_pbe_WithSHA1And3_Key_TripleDES_CBC, - 2000, 1, 0))); - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(cert); - cert = NULL; - sk_X509_pop_free(ca, NULL); - ca = NULL; - - /* convert to DER then back and parse */ - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - ExpectIntEQ(i2d_PKCS12_bio(bio, pkcs12_2), SSL_SUCCESS); - PKCS12_free(pkcs12_2); - pkcs12_2 = NULL; - - ExpectNotNull(pkcs12_2 = d2i_PKCS12_bio(bio, NULL)); - BIO_free(bio); - bio = NULL; - ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), - SSL_SUCCESS); - - /* should be 2 other certs on stack */ - ExpectNotNull(tmp = sk_X509_pop(ca)); - X509_free(tmp); - ExpectNotNull(tmp = sk_X509_pop(ca)); - X509_free(tmp); - ExpectNull(sk_X509_pop(ca)); - - -#ifndef NO_RC4 - PKCS12_free(pkcs12_2); - pkcs12_2 = NULL; - ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, NULL, - NID_pbe_WithSHA1And128BitRC4, - NID_pbe_WithSHA1And128BitRC4, - 2000, 1, 0))); - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(cert); - cert = NULL; - sk_X509_pop_free(ca, NULL); - ca = 
NULL; - - ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), - SSL_SUCCESS); - -#endif /* NO_RC4 */ - - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(cert); - cert = NULL; - PKCS12_free(pkcs12); - pkcs12 = NULL; - PKCS12_free(pkcs12_2); - pkcs12_2 = NULL; - sk_X509_pop_free(ca, NULL); - ca = NULL; - -#ifdef HAVE_ECC - /* test order of parsing */ - ExpectTrue((f = XFOPEN(order, "rb")) != XBADFILE); - ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); - if (f != XBADFILE) { - XFCLOSE(f); - f = XBADFILE; - } - - ExpectNotNull(bio = BIO_new_mem_buf((void*)buf, bytes)); - ExpectNotNull(pkcs12 = d2i_PKCS12_bio(bio, NULL)); - ExpectIntEQ((ret = PKCS12_parse(pkcs12, "", &pkey, &cert, &ca)), - WOLFSSL_SUCCESS); - - /* check use of pkey after parse */ -#if (defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ - || defined(WOLFSSL_NGINX)) && defined(SESSION_CERTS) -#if !defined(NO_WOLFSSL_CLIENT) || !defined(NO_WOLFSSL_SERVER) -#if !defined(NO_WOLFSSL_CLIENT) && defined(SESSION_CERTS) - ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_client_method())); -#else - ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_server_method())); -#endif - ExpectIntEQ(SSL_CTX_use_PrivateKey(ctx, pkey), WOLFSSL_SUCCESS); - SSL_CTX_free(ctx); -#endif /* !NO_WOLFSSL_CLIENT || !NO_WOLFSSL_SERVER */ -#endif - - ExpectNotNull(pkey); - ExpectNotNull(cert); - ExpectNotNull(ca); - - /* compare subject lines of certificates */ - ExpectNotNull(subject = wolfSSL_X509_get_subject_name(cert)); - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccRsaCertFile, - SSL_FILETYPE_PEM)); - ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, - (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); - X509_free(x509); - x509 = NULL; - - /* test expected fail case */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccCertFile, - SSL_FILETYPE_PEM)); - ExpectIntNE(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, - 
(const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); - X509_free(x509); - x509 = NULL; - X509_free(cert); - cert = NULL; - - /* get subject line from ca stack */ - ExpectNotNull(cert = sk_X509_pop(ca)); - ExpectNotNull(subject = wolfSSL_X509_get_subject_name(cert)); - - /* compare subject from certificate in ca to expected */ - ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccCertFile, - SSL_FILETYPE_PEM)); - ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, - (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); - - /* modify case and compare subject from certificate in ca to expected. - * The first bit of the name is: - * /C=US/ST=Washington - * So we'll change subject->name[1] to 'c' (lower case) */ - if (subject != NULL) { - subject->name[1] = 'c'; - ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, - (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); - } - - EVP_PKEY_free(pkey); - pkey = NULL; - X509_free(x509); - x509 = NULL; - X509_free(cert); - cert = NULL; - BIO_free(bio); - bio = NULL; - PKCS12_free(pkcs12); - pkcs12 = NULL; - sk_X509_pop_free(ca, NULL); /* TEST d2i_PKCS12_fp */ - ca = NULL; - - /* test order of parsing */ - ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); - ExpectNotNull(pkcs12 = d2i_PKCS12_fp(f, NULL)); - if (f != XBADFILE) { - XFCLOSE(f); - f = XBADFILE; - } - - /* check verify MAC fail case */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, &cert, NULL), 0); - ExpectNull(pkey); - ExpectNull(cert); - - /* check parse with no extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), - 1); - ExpectNotNull(pkey); - ExpectNotNull(cert); - - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; - wolfSSL_X509_free(cert); - cert = NULL; - - /* check parse with extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), - 1); - ExpectNotNull(pkey); - ExpectNotNull(cert); - 
ExpectNotNull(ca); - - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; - wolfSSL_X509_free(cert); - cert = NULL; - sk_X509_pop_free(ca, NULL); - ca = NULL; - - PKCS12_free(pkcs12); - pkcs12 = NULL; -#endif /* HAVE_ECC */ - -#ifdef WC_RC2 - /* test PKCS#12 with RC2 encryption */ - ExpectTrue((f = XFOPEN(rc2p12, "rb")) != XBADFILE); - ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); - if (f != XBADFILE) { - XFCLOSE(f); - f = XBADFILE; - } - - ExpectNotNull(bio = BIO_new_mem_buf((void*)buf, bytes)); - ExpectNotNull(pkcs12 = d2i_PKCS12_bio(bio, NULL)); - - /* check verify MAC fail case */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, &cert, NULL), 0); - ExpectNull(pkey); - ExpectNull(cert); - - /* check parse with not extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), - WOLFSSL_SUCCESS); - ExpectNotNull(pkey); - ExpectNotNull(cert); - - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; - wolfSSL_X509_free(cert); - cert = NULL; - - /* check parse with extra certs kept */ - ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), - WOLFSSL_SUCCESS); - ExpectNotNull(pkey); - ExpectNotNull(cert); - ExpectNotNull(ca); - - wolfSSL_EVP_PKEY_free(pkey); - wolfSSL_X509_free(cert); - sk_X509_pop_free(ca, NULL); - - BIO_free(bio); - bio = NULL; - PKCS12_free(pkcs12); - pkcs12 = NULL; -#endif /* WC_RC2 */ - - /* Test i2d_PKCS12_bio */ - ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); - ExpectNotNull(pkcs12 = d2i_PKCS12_fp(f, NULL)); - if (f != XBADFILE) - XFCLOSE(f); - - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - - ExpectIntEQ(ret = i2d_PKCS12_bio(bio, pkcs12), 1); - - ExpectIntEQ(ret = i2d_PKCS12_bio(NULL, pkcs12), 0); - - ExpectIntEQ(ret = i2d_PKCS12_bio(bio, NULL), 0); - - PKCS12_free(pkcs12); - BIO_free(bio); - - (void)order; -#endif /* OPENSSL_EXTRA */ -#endif /* HAVE_FIPS */ - return EXPECT_RESULT(); -} - - #if !defined(NO_FILESYSTEM) && !defined(NO_ASN) && defined(HAVE_PKCS8) && \ 
defined(WOLFSSL_ENCRYPTED_KEYS) && !defined(NO_DES3) && !defined(NO_PWDBASED) && \ (!defined(NO_RSA) || defined(HAVE_ECC)) && !defined(NO_MD5) @@ -16270,754 +12791,6 @@ static int test_wolfSSL_ctrl(void) } -static int test_wolfSSL_EVP_PKEY_new_mac_key(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - static const unsigned char pw[] = "password"; - static const int pwSz = sizeof(pw) - 1; - size_t checkPwSz = 0; - const unsigned char* checkPw = NULL; - WOLFSSL_EVP_PKEY* key = NULL; - - ExpectNull(key = wolfSSL_EVP_PKEY_new_mac_key(0, NULL, pw, pwSz)); - ExpectNull(key = wolfSSL_EVP_PKEY_new_mac_key(0, NULL, NULL, pwSz)); - - ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, pw, - pwSz)); - if (key != NULL) { - ExpectIntEQ(key->type, EVP_PKEY_HMAC); - ExpectIntEQ(key->save_type, EVP_PKEY_HMAC); - ExpectIntEQ(key->pkey_sz, pwSz); - ExpectIntEQ(XMEMCMP(key->pkey.ptr, pw, pwSz), 0); - } - ExpectNotNull(checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz)); - ExpectIntEQ((int)checkPwSz, pwSz); - ExpectIntEQ(XMEMCMP(checkPw, pw, pwSz), 0); - wolfSSL_EVP_PKEY_free(key); - key = NULL; - - ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, pw, - 0)); - ExpectIntEQ(key->pkey_sz, 0); - if (EXPECT_SUCCESS()) { - /* Allocation for key->pkey.ptr may fail - OK key len is 0 */ - checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz); - } - ExpectTrue((checkPwSz == 0) || (checkPw != NULL)); - ExpectIntEQ((int)checkPwSz, 0); - wolfSSL_EVP_PKEY_free(key); - key = NULL; - - ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, NULL, - 0)); - ExpectIntEQ(key->pkey_sz, 0); - if (EXPECT_SUCCESS()) { - /* Allocation for key->pkey.ptr may fail - OK key len is 0 */ - checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz); - } - ExpectTrue((checkPwSz == 0) || (checkPw != NULL)); - ExpectIntEQ((int)checkPwSz, 0); - wolfSSL_EVP_PKEY_free(key); - key = NULL; -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} - - -static int 
test_wolfSSL_EVP_PKEY_new_CMAC_key(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA -#if defined(WOLFSSL_CMAC) && !defined(NO_AES) && \ - defined(WOLFSSL_AES_DIRECT) && defined(WOLFSSL_AES_128) - const char *priv = "ABCDEFGHIJKLMNOP"; - const WOLFSSL_EVP_CIPHER* cipher = EVP_aes_128_cbc(); - WOLFSSL_EVP_PKEY* key = NULL; - - ExpectNull(key = wolfSSL_EVP_PKEY_new_CMAC_key( - NULL, NULL, AES_128_KEY_SIZE, cipher)); - ExpectNull(key = wolfSSL_EVP_PKEY_new_CMAC_key( - NULL, (const unsigned char *)priv, 0, cipher)); - ExpectNull(key = wolfSSL_EVP_PKEY_new_CMAC_key( - NULL, (const unsigned char *)priv, AES_128_KEY_SIZE, NULL)); - - ExpectNotNull(key = wolfSSL_EVP_PKEY_new_CMAC_key( - NULL, (const unsigned char *)priv, AES_128_KEY_SIZE, cipher)); - wolfSSL_EVP_PKEY_free(key); -#endif /* WOLFSSL_CMAC && !NO_AES && WOLFSSL_AES_DIRECT && WOLFSSL_AES_128 */ -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_Digest(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) && !defined(NO_PWDBASED) - const char* in = "abc"; - int inLen = (int)XSTRLEN(in); - byte out[WC_SHA256_DIGEST_SIZE]; - unsigned int outLen; - const char* expOut = - "\xBA\x78\x16\xBF\x8F\x01\xCF\xEA\x41\x41\x40\xDE\x5D\xAE\x22" - "\x23\xB0\x03\x61\xA3\x96\x17\x7A\x9C\xB4\x10\xFF\x61\xF2\x00" - "\x15\xAD"; - - ExpectIntEQ(wolfSSL_EVP_Digest((unsigned char*)in, inLen, out, &outLen, - "SHA256", NULL), 1); - ExpectIntEQ(outLen, WC_SHA256_DIGEST_SIZE); - ExpectIntEQ(XMEMCMP(out, expOut, WC_SHA256_DIGEST_SIZE), 0); -#endif /* OPEN_EXTRA && ! 
NO_SHA256 */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_Digest_all(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - const char* digests[] = { -#ifndef NO_MD5 - "MD5", -#endif -#ifndef NO_SHA - "SHA", -#endif -#ifdef WOLFSSL_SHA224 - "SHA224", -#endif -#ifndef NO_SHA256 - "SHA256", -#endif -#ifdef WOLFSSL_SHA384 - "SHA384", -#endif -#ifdef WOLFSSL_SHA512 - "SHA512", -#endif -#if defined(WOLFSSL_SHA512) && !defined(WOLFSSL_NOSHA512_224) - "SHA512-224", -#endif -#if defined(WOLFSSL_SHA512) && !defined(WOLFSSL_NOSHA512_256) - "SHA512-256", -#endif -#ifdef WOLFSSL_SHA3 -#ifndef WOLFSSL_NOSHA3_224 - "SHA3-224", -#endif -#ifndef WOLFSSL_NOSHA3_256 - "SHA3-256", -#endif - "SHA3-384", -#ifndef WOLFSSL_NOSHA3_512 - "SHA3-512", -#endif -#endif /* WOLFSSL_SHA3 */ - NULL - }; - const char** d; - const unsigned char in[] = "abc"; - int inLen = XSTR_SIZEOF(in); - byte out[WC_MAX_DIGEST_SIZE]; - unsigned int outLen; - - for (d = digests; *d != NULL; d++) { - ExpectIntEQ(EVP_Digest(in, inLen, out, &outLen, *d, NULL), 1); - ExpectIntGT(outLen, 0); - ExpectIntEQ(EVP_MD_size(*d), outLen); - } -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_MD_size(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - WOLFSSL_EVP_MD_CTX mdCtx; - -#ifdef WOLFSSL_SHA3 -#ifndef WOLFSSL_NOSHA3_224 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-224"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_224_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_224_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#endif -#ifndef WOLFSSL_NOSHA3_256 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-256"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_256_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_256_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#endif - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - 
ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-384"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_384_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_384_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#ifndef WOLFSSL_NOSHA3_512 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-512"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_512_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_512_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#endif -#endif /* WOLFSSL_SHA3 */ - -#ifndef NO_SHA256 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA256"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA256_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA256_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA256_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA256_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#endif - -#ifndef NO_MD5 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "MD5"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_MD5_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_MD5_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_MD5_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_MD5_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#endif - -#ifdef WOLFSSL_SHA224 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA224"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA224_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA224_BLOCK_SIZE); - 
ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA224_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA224_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#endif - -#ifdef WOLFSSL_SHA384 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA384"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA384_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA384_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA384_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA384_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#endif - -#ifdef WOLFSSL_SHA512 - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA512"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA512_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA512_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA512_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA512_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#endif - -#ifndef NO_SHA - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA1"), 1); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA_DIGEST_SIZE); - 
ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), - WC_SHA_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA_DIGEST_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA_BLOCK_SIZE); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#endif - /* error case */ - wolfSSL_EVP_MD_CTX_init(&mdCtx); - - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, ""), 0); - ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), 0); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), 0); - /* Cleanup is valid on uninit'ed struct */ - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_MD_pkey_type(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - const WOLFSSL_EVP_MD* md; - -#ifndef NO_MD5 - ExpectNotNull(md = EVP_md5()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_md5WithRSAEncryption); -#endif -#ifndef NO_SHA - ExpectNotNull(md = EVP_sha1()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha1WithRSAEncryption); -#endif -#ifdef WOLFSSL_SHA224 - ExpectNotNull(md = EVP_sha224()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha224WithRSAEncryption); -#endif - ExpectNotNull(md = EVP_sha256()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha256WithRSAEncryption); -#ifdef WOLFSSL_SHA384 - ExpectNotNull(md = EVP_sha384()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha384WithRSAEncryption); -#endif -#ifdef WOLFSSL_SHA512 - ExpectNotNull(md = EVP_sha512()); - ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha512WithRSAEncryption); -#endif -#endif - return EXPECT_RESULT(); -} - -#ifdef OPENSSL_EXTRA -static int test_hmac_signing(const WOLFSSL_EVP_MD *type, const byte* testKey, - size_t testKeySz, const char* testData, size_t testDataSz, - const byte* testResult, size_t testResultSz) -{ - EXPECT_DECLS; - unsigned char check[WC_MAX_DIGEST_SIZE]; - size_t checkSz = 0; - WOLFSSL_EVP_PKEY* key = NULL; - WOLFSSL_EVP_MD_CTX mdCtx; - - ExpectNotNull(key = 
wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, - testKey, (int)testKeySz)); - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, type, NULL, key), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, - (unsigned int)testDataSz), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - ExpectIntEQ((int)checkSz, (int)testResultSz); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz,(int)testResultSz); - ExpectIntEQ(XMEMCMP(testResult, check, testResultSz), 0); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, type, NULL, key), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, - (unsigned int)testDataSz), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, testResult, checkSz), 1); - - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, type, NULL, key), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - ExpectIntEQ((int)checkSz, (int)testResultSz); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz,(int)testResultSz); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, - (unsigned int)testDataSz - 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz,(int)testResultSz); - ExpectIntEQ(XMEMCMP(testResult, check, testResultSz), 0); - - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, type, NULL, key), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); - 
ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, - (unsigned int)testDataSz - 4), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, testResult, checkSz), 1); - - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - - wolfSSL_EVP_PKEY_free(key); - - return EXPECT_RESULT(); -} -#endif - -static int test_wolfSSL_EVP_MD_hmac_signing(void) -{ - EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - static const unsigned char testKey[] = - { - 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, - 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, - 0x0b, 0x0b, 0x0b, 0x0b - }; - static const char testData[] = "Hi There"; -#ifdef WOLFSSL_SHA224 - static const unsigned char testResultSha224[] = - { - 0x89, 0x6f, 0xb1, 0x12, 0x8a, 0xbb, 0xdf, 0x19, - 0x68, 0x32, 0x10, 0x7c, 0xd4, 0x9d, 0xf3, 0x3f, - 0x47, 0xb4, 0xb1, 0x16, 0x99, 0x12, 0xba, 0x4f, - 0x53, 0x68, 0x4b, 0x22 - }; -#endif -#ifndef NO_SHA256 - static const unsigned char testResultSha256[] = - { - 0xb0, 0x34, 0x4c, 0x61, 0xd8, 0xdb, 0x38, 0x53, - 0x5c, 0xa8, 0xaf, 0xce, 0xaf, 0x0b, 0xf1, 0x2b, - 0x88, 0x1d, 0xc2, 0x00, 0xc9, 0x83, 0x3d, 0xa7, - 0x26, 0xe9, 0x37, 0x6c, 0x2e, 0x32, 0xcf, 0xf7 - }; -#endif -#ifdef WOLFSSL_SHA384 - static const unsigned char testResultSha384[] = - { - 0xaf, 0xd0, 0x39, 0x44, 0xd8, 0x48, 0x95, 0x62, - 0x6b, 0x08, 0x25, 0xf4, 0xab, 0x46, 0x90, 0x7f, - 0x15, 0xf9, 0xda, 0xdb, 0xe4, 0x10, 0x1e, 0xc6, - 0x82, 0xaa, 0x03, 0x4c, 0x7c, 0xeb, 0xc5, 0x9c, - 0xfa, 0xea, 0x9e, 0xa9, 0x07, 0x6e, 0xde, 0x7f, - 0x4a, 0xf1, 0x52, 0xe8, 0xb2, 0xfa, 0x9c, 0xb6 - }; -#endif -#ifdef WOLFSSL_SHA512 - static const unsigned char testResultSha512[] = - { - 0x87, 0xaa, 0x7c, 0xde, 0xa5, 0xef, 0x61, 0x9d, - 0x4f, 0xf0, 0xb4, 0x24, 0x1a, 0x1d, 0x6c, 0xb0, - 0x23, 0x79, 0xf4, 0xe2, 0xce, 0x4e, 0xc2, 0x78, - 0x7a, 0xd0, 0xb3, 0x05, 0x45, 0xe1, 0x7c, 0xde, - 0xda, 0xa8, 0x33, 0xb7, 0xd6, 0xb8, 0xa7, 0x02, - 0x03, 0x8b, 0x27, 0x4e, 0xae, 0xa3, 0xf4, 0xe4, - 0xbe, 0x9d, 0x91, 0x4e, 0xeb, 0x61, 0xf1, 0x70, - 0x2e, 
0x69, 0x6c, 0x20, 0x3a, 0x12, 0x68, 0x54 - }; -#endif -#ifdef WOLFSSL_SHA3 - #ifndef WOLFSSL_NOSHA3_224 - static const unsigned char testResultSha3_224[] = - { - 0x3b, 0x16, 0x54, 0x6b, 0xbc, 0x7b, 0xe2, 0x70, - 0x6a, 0x03, 0x1d, 0xca, 0xfd, 0x56, 0x37, 0x3d, - 0x98, 0x84, 0x36, 0x76, 0x41, 0xd8, 0xc5, 0x9a, - 0xf3, 0xc8, 0x60, 0xf7 - }; - #endif - #ifndef WOLFSSL_NOSHA3_256 - static const unsigned char testResultSha3_256[] = - { - 0xba, 0x85, 0x19, 0x23, 0x10, 0xdf, 0xfa, 0x96, - 0xe2, 0xa3, 0xa4, 0x0e, 0x69, 0x77, 0x43, 0x51, - 0x14, 0x0b, 0xb7, 0x18, 0x5e, 0x12, 0x02, 0xcd, - 0xcc, 0x91, 0x75, 0x89, 0xf9, 0x5e, 0x16, 0xbb - }; - #endif - #ifndef WOLFSSL_NOSHA3_384 - static const unsigned char testResultSha3_384[] = - { - 0x68, 0xd2, 0xdc, 0xf7, 0xfd, 0x4d, 0xdd, 0x0a, - 0x22, 0x40, 0xc8, 0xa4, 0x37, 0x30, 0x5f, 0x61, - 0xfb, 0x73, 0x34, 0xcf, 0xb5, 0xd0, 0x22, 0x6e, - 0x1b, 0xc2, 0x7d, 0xc1, 0x0a, 0x2e, 0x72, 0x3a, - 0x20, 0xd3, 0x70, 0xb4, 0x77, 0x43, 0x13, 0x0e, - 0x26, 0xac, 0x7e, 0x3d, 0x53, 0x28, 0x86, 0xbd - }; - #endif - #ifndef WOLFSSL_NOSHA3_512 - static const unsigned char testResultSha3_512[] = - { - 0xeb, 0x3f, 0xbd, 0x4b, 0x2e, 0xaa, 0xb8, 0xf5, - 0xc5, 0x04, 0xbd, 0x3a, 0x41, 0x46, 0x5a, 0xac, - 0xec, 0x15, 0x77, 0x0a, 0x7c, 0xab, 0xac, 0x53, - 0x1e, 0x48, 0x2f, 0x86, 0x0b, 0x5e, 0xc7, 0xba, - 0x47, 0xcc, 0xb2, 0xc6, 0xf2, 0xaf, 0xce, 0x8f, - 0x88, 0xd2, 0x2b, 0x6d, 0xc6, 0x13, 0x80, 0xf2, - 0x3a, 0x66, 0x8f, 0xd3, 0x88, 0x8b, 0xb8, 0x05, - 0x37, 0xc0, 0xa0, 0xb8, 0x64, 0x07, 0x68, 0x9e - }; - #endif -#endif - -#ifndef NO_SHA256 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha256(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha256, - sizeof(testResultSha256)), TEST_SUCCESS); -#endif -#ifdef WOLFSSL_SHA224 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha224(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha224, - sizeof(testResultSha224)), TEST_SUCCESS); -#endif -#ifdef WOLFSSL_SHA384 - 
ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha384(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha384, - sizeof(testResultSha384)), TEST_SUCCESS); -#endif -#ifdef WOLFSSL_SHA512 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha512(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha512, - sizeof(testResultSha512)), TEST_SUCCESS); -#endif -#ifdef WOLFSSL_SHA3 - #ifndef WOLFSSL_NOSHA3_224 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_224(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_224, - sizeof(testResultSha3_224)), TEST_SUCCESS); - #endif - #ifndef WOLFSSL_NOSHA3_256 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_256(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_256, - sizeof(testResultSha3_256)), TEST_SUCCESS); - #endif - #ifndef WOLFSSL_NOSHA3_384 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_384(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_384, - sizeof(testResultSha3_384)), TEST_SUCCESS); - #endif - #ifndef WOLFSSL_NOSHA3_512 - ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_512(), testKey, - sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_512, - sizeof(testResultSha3_512)), TEST_SUCCESS); - #endif -#endif -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} - - -static int test_wolfSSL_EVP_MD_rsa_signing(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048) - WOLFSSL_EVP_PKEY* privKey = NULL; - WOLFSSL_EVP_PKEY* pubKey = NULL; - WOLFSSL_EVP_PKEY_CTX* keyCtx = NULL; - const char testData[] = "Hi There"; - WOLFSSL_EVP_MD_CTX mdCtx; - WOLFSSL_EVP_MD_CTX mdCtxCopy; - int ret; - size_t checkSz = -1; - int sz = 2048 / 8; - const unsigned char* cp; - const unsigned char* p; - unsigned char check[2048/8]; - size_t i; - int paddings[] = { - RSA_PKCS1_PADDING, -#if !defined(HAVE_FIPS) && !defined(HAVE_SELFTEST) && defined(WC_RSA_PSS) - RSA_PKCS1_PSS_PADDING, 
-#endif - }; - - - cp = client_key_der_2048; - ExpectNotNull((privKey = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, &cp, - sizeof_client_key_der_2048))); - p = client_keypub_der_2048; - ExpectNotNull((pubKey = wolfSSL_d2i_PUBKEY(NULL, &p, - sizeof_client_keypub_der_2048))); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - wolfSSL_EVP_MD_CTX_init(&mdCtxCopy); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, privKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - ExpectIntEQ((int)checkSz, sz); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz,sz); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_copy_ex(&mdCtxCopy, &mdCtx), 1); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_copy_ex(&mdCtxCopy, &mdCtx), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtxCopy); - ExpectIntEQ(ret, 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, pubKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), - 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, privKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - ExpectIntEQ((int)checkSz, sz); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz, sz); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, - 
(unsigned int)XSTRLEN(testData) - 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz, sz); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, pubKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, - (unsigned int)XSTRLEN(testData) - 4), - 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - /* Check all signing padding types */ - for (i = 0; i < sizeof(paddings)/sizeof(int); i++) { - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, &keyCtx, - wolfSSL_EVP_sha256(), NULL, privKey), 1); - ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_padding(keyCtx, - paddings[i]), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - ExpectIntEQ((int)checkSz, sz); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ExpectIntEQ((int)checkSz,sz); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, &keyCtx, - wolfSSL_EVP_sha256(), NULL, pubKey), 1); - ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_padding(keyCtx, - paddings[i]), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - } - - wolfSSL_EVP_PKEY_free(pubKey); - wolfSSL_EVP_PKEY_free(privKey); -#endif - return 
EXPECT_RESULT(); -} - - -static int test_wolfSSL_EVP_MD_ecc_signing(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) - WOLFSSL_EVP_PKEY* privKey = NULL; - WOLFSSL_EVP_PKEY* pubKey = NULL; - const char testData[] = "Hi There"; - WOLFSSL_EVP_MD_CTX mdCtx; - int ret; - const unsigned char* cp; - const unsigned char* p; - unsigned char check[2048/8]; - size_t checkSz = sizeof(check); - - XMEMSET(check, 0, sizeof(check)); - - cp = ecc_clikey_der_256; - ExpectNotNull(privKey = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, &cp, - sizeof_ecc_clikey_der_256)); - p = ecc_clikeypub_der_256; - ExpectNotNull((pubKey = wolfSSL_d2i_PUBKEY(NULL, &p, - sizeof_ecc_clikeypub_der_256))); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, privKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, pubKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, - (unsigned int)XSTRLEN(testData)), - 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, privKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, 
check, &checkSz), 1); - ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, - (unsigned int)XSTRLEN(testData) - 4), 1); - checkSz = sizeof(check); - ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), - NULL, pubKey), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, - (unsigned int)XSTRLEN(testData) - 4), - 1); - ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); - ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); - ExpectIntEQ(ret, 1); - - wolfSSL_EVP_PKEY_free(pubKey); - wolfSSL_EVP_PKEY_free(privKey); -#endif - return EXPECT_RESULT(); -} - - static int test_wolfSSL_CTX_add_extra_chain_cert(void) { EXPECT_DECLS; @@ -17202,80 +12975,6 @@ static int test_wolfSSL_ERR_peek_last_error_line(void) } #endif /* !NO_WOLFSSL_CLIENT && !NO_WOLFSSL_SERVER */ -static int test_wolfSSL_PKCS7_certs(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && !defined(NO_CERTS) && !defined(NO_BIO) && \ - !defined(NO_FILESYSTEM) && !defined(NO_RSA) && defined(HAVE_PKCS7) - STACK_OF(X509)* sk = NULL; - STACK_OF(X509_INFO)* info_sk = NULL; - PKCS7 *p7 = NULL; - BIO* bio = NULL; - const byte* p = NULL; - int buflen = 0; - int i; - - /* Test twice. Once with d2i and once without to test - * that everything is free'd correctly. 
*/ - for (i = 0; i < 2; i++) { - ExpectNotNull(p7 = PKCS7_new()); - if (p7 != NULL) { - p7->version = 1; - #ifdef NO_SHA - p7->hashOID = SHA256h; - #else - p7->hashOID = SHAh; - #endif - } - ExpectNotNull(bio = BIO_new(BIO_s_file())); - ExpectIntGT(BIO_read_filename(bio, svrCertFile), 0); - ExpectNotNull(info_sk = PEM_X509_INFO_read_bio(bio, NULL, NULL, NULL)); - ExpectIntEQ(sk_X509_INFO_num(info_sk), 2); - ExpectNotNull(sk = sk_X509_new_null()); - while (EXPECT_SUCCESS() && (sk_X509_INFO_num(info_sk) > 0)) { - X509_INFO* info = NULL; - ExpectNotNull(info = sk_X509_INFO_shift(info_sk)); - if (EXPECT_SUCCESS() && info != NULL) { - ExpectIntGT(sk_X509_push(sk, info->x509), 0); - info->x509 = NULL; - } - X509_INFO_free(info); - } - sk_X509_INFO_pop_free(info_sk, X509_INFO_free); - info_sk = NULL; - BIO_free(bio); - bio = NULL; - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - ExpectIntEQ(wolfSSL_PKCS7_encode_certs(p7, sk, bio), 1); - if ((sk != NULL) && ((p7 == NULL) || (bio == NULL))) { - sk_X509_pop_free(sk, X509_free); - } - sk = NULL; - ExpectIntGT((buflen = BIO_get_mem_data(bio, &p)), 0); - - if (i == 0) { - PKCS7_free(p7); - p7 = NULL; - ExpectNotNull(d2i_PKCS7(&p7, &p, buflen)); - if (p7 != NULL) { - /* Reset certs to force wolfSSL_PKCS7_to_stack to regenerate - * them */ - ((WOLFSSL_PKCS7*)p7)->certs = NULL; - } - /* PKCS7_free free's the certs */ - ExpectNotNull(wolfSSL_PKCS7_to_stack(p7)); - } - - BIO_free(bio); - bio = NULL; - PKCS7_free(p7); - p7 = NULL; - } -#endif /* defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ - !defined(NO_FILESYSTEM) && !defined(NO_RSA) && defined(HAVE_PKCS7) */ - return EXPECT_RESULT(); -} - static int test_wolfSSL_CTX_get0_set1_param(void) { EXPECT_DECLS; @@ -18948,289 +14647,6 @@ static int test_wolfSSL_set_tlsext_status_type(void) return EXPECT_RESULT(); } -#ifndef NO_BIO - -#if defined(OPENSSL_EXTRA) -static long bioCallback(BIO *bio, int cmd, const char* argp, int argi, - long argl, long ret) -{ - (void)bio; - (void)cmd; - 
(void)argp; - (void)argi; - (void)argl; - return ret; -} -#endif - - -static int test_wolfSSL_BIO(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) - const unsigned char* p = NULL; - byte buff[20]; - BIO* bio1 = NULL; - BIO* bio2 = NULL; - BIO* bio3 = NULL; - char* bufPt = NULL; - int i; - - for (i = 0; i < 20; i++) { - buff[i] = i; - } - /* test BIO_free with NULL */ - ExpectIntEQ(BIO_free(NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Creating and testing type BIO_s_bio */ - ExpectNotNull(bio1 = BIO_new(BIO_s_bio())); - ExpectNotNull(bio2 = BIO_new(BIO_s_bio())); - ExpectNotNull(bio3 = BIO_new(BIO_s_bio())); - - /* read/write before set up */ - ExpectIntEQ(BIO_read(bio1, buff, 2), WOLFSSL_BIO_UNSET); - ExpectIntEQ(BIO_write(bio1, buff, 2), WOLFSSL_BIO_UNSET); - - ExpectIntEQ(BIO_set_nbio(bio1, 1), 1); - ExpectIntEQ(BIO_set_write_buf_size(bio1, 20), WOLFSSL_SUCCESS); - ExpectIntEQ(BIO_set_write_buf_size(bio2, 8), WOLFSSL_SUCCESS); - ExpectIntEQ(BIO_make_bio_pair(bio1, bio2), WOLFSSL_SUCCESS); - - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 10), 10); - ExpectNotNull(XMEMCPY(bufPt, buff, 10)); - ExpectIntEQ(BIO_write(bio1, buff + 10, 10), 10); - /* write buffer full */ - ExpectIntEQ(BIO_write(bio1, buff, 10), WOLFSSL_BIO_ERROR); - ExpectIntEQ(BIO_flush(bio1), WOLFSSL_SUCCESS); - ExpectIntEQ((int)BIO_ctrl_pending(bio1), 0); - - /* write the other direction with pair */ - ExpectIntEQ((int)BIO_nwrite(bio2, &bufPt, 10), 8); - ExpectNotNull(XMEMCPY(bufPt, buff, 8)); - ExpectIntEQ(BIO_write(bio2, buff, 10), WOLFSSL_BIO_ERROR); - - /* try read */ - ExpectIntEQ((int)BIO_ctrl_pending(bio1), 8); - ExpectIntEQ((int)BIO_ctrl_pending(bio2), 20); - - /* try read using ctrl function */ - ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_WPENDING, 0, NULL), 8); - ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_PENDING, 0, NULL), 8); - ExpectIntEQ((int)BIO_ctrl(bio2, BIO_CTRL_WPENDING, 0, NULL), 20); - ExpectIntEQ((int)BIO_ctrl(bio2, BIO_CTRL_PENDING, 0, NULL), 20); - - ExpectIntEQ(BIO_nread(bio2, 
&bufPt, (int)BIO_ctrl_pending(bio2)), 20); - for (i = 0; i < 20; i++) { - ExpectIntEQ((int)bufPt[i], i); - } - ExpectIntEQ(BIO_nread(bio2, &bufPt, 1), 0); - ExpectIntEQ(BIO_nread(bio1, &bufPt, (int)BIO_ctrl_pending(bio1)), 8); - for (i = 0; i < 8; i++) { - ExpectIntEQ((int)bufPt[i], i); - } - ExpectIntEQ(BIO_nread(bio1, &bufPt, 1), 0); - ExpectIntEQ(BIO_ctrl_reset_read_request(bio1), 1); - - /* new pair */ - ExpectIntEQ(BIO_make_bio_pair(bio1, bio3), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - BIO_free(bio2); /* free bio2 and automatically remove from pair */ - bio2 = NULL; - ExpectIntEQ(BIO_make_bio_pair(bio1, bio3), WOLFSSL_SUCCESS); - ExpectIntEQ((int)BIO_ctrl_pending(bio3), 0); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 10), 0); - - /* test wrap around... */ - ExpectIntEQ(BIO_reset(bio1), 1); - ExpectIntEQ(BIO_reset(bio3), 1); - - /* fill write buffer, read only small amount then write again */ - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); - ExpectNotNull(XMEMCPY(bufPt, buff, 20)); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 4), 4); - for (i = 0; i < 4; i++) { - ExpectIntEQ(bufPt[i], i); - } - - /* try writing over read index */ - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 5), 4); - ExpectNotNull(XMEMSET(bufPt, 0, 4)); - ExpectIntEQ((int)BIO_ctrl_pending(bio3), 20); - - /* read and write 0 bytes */ - ExpectIntEQ(BIO_nread(bio3, &bufPt, 0), 0); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 0), 0); - - /* should read only to end of write buffer then need to read again */ - ExpectIntEQ(BIO_nread(bio3, &bufPt, 20), 16); - for (i = 0; i < 16; i++) { - ExpectIntEQ(bufPt[i], buff[4 + i]); - } - - ExpectIntEQ(BIO_nread(bio3, NULL, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(BIO_nread0(bio3, &bufPt), 4); - for (i = 0; i < 4; i++) { - ExpectIntEQ(bufPt[i], 0); - } - - /* read index should not have advanced with nread0 */ - ExpectIntEQ(BIO_nread(bio3, &bufPt, 5), 4); - for (i = 0; i < 4; i++) { - ExpectIntEQ(bufPt[i], 0); - } - - /* write and fill up buffer checking reset of index state */ 
- ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); - ExpectNotNull(XMEMCPY(bufPt, buff, 20)); - - /* test reset on data in bio1 write buffer */ - ExpectIntEQ(BIO_reset(bio1), 1); - ExpectIntEQ((int)BIO_ctrl_pending(bio3), 0); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 3), 0); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); - ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_INFO, 0, &p), 20); - ExpectNotNull(p); - ExpectNotNull(XMEMCPY(bufPt, buff, 20)); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 6), 6); - for (i = 0; i < 6; i++) { - ExpectIntEQ(bufPt[i], i); - } - - /* test case of writing twice with offset read index */ - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 3), 3); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), 3); /* try overwriting */ - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 0), 0); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); - ExpectIntEQ(BIO_nread(bio3, &bufPt, 1), 1); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), 1); - ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); - - BIO_free(bio1); - bio1 = NULL; - BIO_free(bio3); - bio3 = NULL; - - #if defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) - { - BIO* bioA = NULL; - BIO* bioB = NULL; - ExpectIntEQ(BIO_new_bio_pair(NULL, 256, NULL, 256), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(BIO_new_bio_pair(&bioA, 256, &bioB, 256), WOLFSSL_SUCCESS); - BIO_free(bioA); - bioA = NULL; - BIO_free(bioB); - bioB = NULL; - } - #endif /* OPENSSL_ALL || WOLFSSL_ASIO */ - - /* BIOs with file pointers */ - #if !defined(NO_FILESYSTEM) - { - XFILE f1 = XBADFILE; - XFILE f2 = XBADFILE; - BIO* f_bio1 = NULL; - BIO* f_bio2 = NULL; - unsigned char cert[300]; - char testFile[] = "tests/bio_write_test.txt"; - char msg[] = "bio_write_test.txt contains the first 300 bytes of certs/server-cert.pem\ncreated by tests/unit.test\n\n"; - - ExpectNotNull(f_bio1 = BIO_new(BIO_s_file())); - ExpectNotNull(f_bio2 = BIO_new(BIO_s_file())); - - /* Failure due to wrong BIO type */ - 
ExpectIntEQ((int)BIO_set_mem_eof_return(f_bio1, -1), 0); - ExpectIntEQ((int)BIO_set_mem_eof_return(NULL, -1), 0); - - ExpectTrue((f1 = XFOPEN(svrCertFile, "rb+")) != XBADFILE); - ExpectIntEQ((int)BIO_set_fp(f_bio1, f1, BIO_CLOSE), WOLFSSL_SUCCESS); - ExpectIntEQ(BIO_write_filename(f_bio2, testFile), - WOLFSSL_SUCCESS); - - ExpectIntEQ(BIO_read(f_bio1, cert, sizeof(cert)), sizeof(cert)); - ExpectIntEQ(BIO_tell(f_bio1),sizeof(cert)); - ExpectIntEQ(BIO_write(f_bio2, msg, sizeof(msg)), sizeof(msg)); - ExpectIntEQ(BIO_tell(f_bio2),sizeof(msg)); - ExpectIntEQ(BIO_write(f_bio2, cert, sizeof(cert)), sizeof(cert)); - ExpectIntEQ(BIO_tell(f_bio2),sizeof(cert) + sizeof(msg)); - - ExpectIntEQ((int)BIO_get_fp(f_bio2, &f2), WOLFSSL_SUCCESS); - ExpectIntEQ(BIO_reset(f_bio2), 1); - ExpectIntEQ(BIO_tell(NULL),-1); - ExpectIntEQ(BIO_tell(f_bio2),0); - ExpectIntEQ(BIO_seek(f_bio2, 4), 0); - ExpectIntEQ(BIO_tell(f_bio2),4); - - BIO_free(f_bio1); - f_bio1 = NULL; - BIO_free(f_bio2); - f_bio2 = NULL; - - ExpectNotNull(f_bio1 = BIO_new_file(svrCertFile, "rb+")); - ExpectIntEQ((int)BIO_set_mem_eof_return(f_bio1, -1), 0); - ExpectIntEQ(BIO_read(f_bio1, cert, sizeof(cert)), sizeof(cert)); - BIO_free(f_bio1); - f_bio1 = NULL; - } - #endif /* !defined(NO_FILESYSTEM) */ - - /* BIO info callback */ - { - const char* testArg = "test"; - BIO* cb_bio = NULL; - ExpectNotNull(cb_bio = BIO_new(BIO_s_mem())); - - BIO_set_callback(cb_bio, bioCallback); - ExpectNotNull(BIO_get_callback(cb_bio)); - BIO_set_callback(cb_bio, NULL); - ExpectNull(BIO_get_callback(cb_bio)); - - BIO_set_callback_arg(cb_bio, (char*)testArg); - ExpectStrEQ(BIO_get_callback_arg(cb_bio), testArg); - ExpectNull(BIO_get_callback_arg(NULL)); - - BIO_free(cb_bio); - cb_bio = NULL; - } - - /* BIO_vfree */ - ExpectNotNull(bio1 = BIO_new(BIO_s_bio())); - BIO_vfree(NULL); - BIO_vfree(bio1); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_BIO_BIO_ring_read(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) - BIO* bio1 = 
NULL; - BIO* bio2 = NULL; - byte data[50]; - byte tmp[50]; - - XMEMSET(data, 42, sizeof(data)); - - - ExpectIntEQ(BIO_new_bio_pair(&bio1, sizeof(data), &bio2, sizeof(data)), - SSL_SUCCESS); - - ExpectIntEQ(BIO_write(bio1, data, 40), 40); - ExpectIntEQ(BIO_read(bio1, tmp, 20), -1); - ExpectIntEQ(BIO_read(bio2, tmp, 20), 20); - ExpectBufEQ(tmp, data, 20); - ExpectIntEQ(BIO_write(bio1, data, 20), 20); - ExpectIntEQ(BIO_read(bio2, tmp, 40), 40); - ExpectBufEQ(tmp, data, 40); - - BIO_free(bio1); - BIO_free(bio2); -#endif - return EXPECT_RESULT(); -} - -#endif /* !NO_BIO */ - static int test_wolfSSL_a2i_IPADDRESS(void) { @@ -19460,304 +14876,6 @@ static int test_wolfSSL_BUF(void) return EXPECT_RESULT(); } -#if defined(OPENSSL_EXTRA) && !defined(WOLFSSL_NO_OPENSSL_RAND_CB) -static int stub_rand_seed(const void *buf, int num) -{ - (void)buf; - (void)num; - - return 123; -} - -static int stub_rand_bytes(unsigned char *buf, int num) -{ - (void)buf; - (void)num; - - return 456; -} - -static byte* was_stub_rand_cleanup_called(void) -{ - static byte was_called = 0; - - return &was_called; -} - -static void stub_rand_cleanup(void) -{ - byte* was_called = was_stub_rand_cleanup_called(); - - *was_called = 1; - - return; -} - -static byte* was_stub_rand_add_called(void) -{ - static byte was_called = 0; - - return &was_called; -} - -static int stub_rand_add(const void *buf, int num, double entropy) -{ - byte* was_called = was_stub_rand_add_called(); - - (void)buf; - (void)num; - (void)entropy; - - *was_called = 1; - - return 0; -} - -static int stub_rand_pseudo_bytes(unsigned char *buf, int num) -{ - (void)buf; - (void)num; - - return 9876; -} - -static int stub_rand_status(void) -{ - return 5432; -} -#endif /* OPENSSL_EXTRA && !WOLFSSL_NO_OPENSSL_RAND_CB */ - -static int test_wolfSSL_RAND_set_rand_method(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(WOLFSSL_NO_OPENSSL_RAND_CB) - RAND_METHOD rand_methods = {NULL, NULL, NULL, NULL, NULL, NULL}; - unsigned char* buf 
= NULL; - int num = 0; - double entropy = 0; - int ret; - byte* was_cleanup_called = was_stub_rand_cleanup_called(); - byte* was_add_called = was_stub_rand_add_called(); - - ExpectNotNull(buf = (byte*)XMALLOC(32 * sizeof(byte), NULL, - DYNAMIC_TYPE_TMP_BUFFER)); - - ExpectIntNE(wolfSSL_RAND_status(), 5432); - ExpectIntEQ(*was_cleanup_called, 0); - RAND_cleanup(); - ExpectIntEQ(*was_cleanup_called, 0); - - - rand_methods.seed = &stub_rand_seed; - rand_methods.bytes = &stub_rand_bytes; - rand_methods.cleanup = &stub_rand_cleanup; - rand_methods.add = &stub_rand_add; - rand_methods.pseudorand = &stub_rand_pseudo_bytes; - rand_methods.status = &stub_rand_status; - - ExpectIntEQ(RAND_set_rand_method(&rand_methods), WOLFSSL_SUCCESS); - ExpectIntEQ(RAND_seed(buf, num), 123); - ExpectIntEQ(RAND_bytes(buf, num), 456); - ExpectIntEQ(RAND_pseudo_bytes(buf, num), 9876); - ExpectIntEQ(RAND_status(), 5432); - - ExpectIntEQ(*was_add_called, 0); - /* The function pointer for RAND_add returns int, but RAND_add itself - * returns void. 
*/ - RAND_add(buf, num, entropy); - ExpectIntEQ(*was_add_called, 1); - was_add_called = 0; - ExpectIntEQ(*was_cleanup_called, 0); - RAND_cleanup(); - ExpectIntEQ(*was_cleanup_called, 1); - *was_cleanup_called = 0; - - - ret = RAND_set_rand_method(NULL); - ExpectIntEQ(ret, WOLFSSL_SUCCESS); - ExpectIntNE(RAND_status(), 5432); - ExpectIntEQ(*was_cleanup_called, 0); - RAND_cleanup(); - ExpectIntEQ(*was_cleanup_called, 0); - - RAND_set_rand_method(NULL); - - XFREE(buf, NULL, DYNAMIC_TYPE_TMP_BUFFER); -#endif /* OPENSSL_EXTRA && !WOLFSSL_NO_OPENSSL_RAND_CB */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_RAND_bytes(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) - const int size1 = RNG_MAX_BLOCK_LEN; /* in bytes */ - const int size2 = RNG_MAX_BLOCK_LEN + 1; /* in bytes */ - const int size3 = RNG_MAX_BLOCK_LEN * 2; /* in bytes */ - const int size4 = RNG_MAX_BLOCK_LEN * 4; /* in bytes */ - int max_bufsize; - byte *my_buf = NULL; -#if defined(OPENSSL_EXTRA) && defined(HAVE_GETPID) && !defined(__MINGW64__) && \ - !defined(__MINGW32__) - byte seed[16] = {0}; - byte randbuf[8] = {0}; - int pipefds[2] = {0}; - pid_t pid = 0; -#endif - - /* sanity check */ - ExpectIntEQ(RAND_bytes(NULL, 16), 0); - ExpectIntEQ(RAND_bytes(NULL, 0), 0); - - max_bufsize = size4; - - ExpectNotNull(my_buf = (byte*)XMALLOC(max_bufsize * sizeof(byte), HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - - ExpectIntEQ(RAND_bytes(my_buf, 0), 1); - ExpectIntEQ(RAND_bytes(my_buf, -1), 0); - - ExpectNotNull(XMEMSET(my_buf, 0, max_bufsize)); - ExpectIntEQ(RAND_bytes(my_buf, size1), 1); - ExpectIntEQ(RAND_bytes(my_buf, size2), 1); - ExpectIntEQ(RAND_bytes(my_buf, size3), 1); - ExpectIntEQ(RAND_bytes(my_buf, size4), 1); - XFREE(my_buf, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - -#if defined(OPENSSL_EXTRA) && defined(HAVE_GETPID) && !defined(__MINGW64__) && \ - !defined(__MINGW32__) - XMEMSET(seed, 0, sizeof(seed)); - RAND_cleanup(); - - /* No global methods set. 
*/ - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - - ExpectIntEQ(pipe(pipefds), 0); - pid = fork(); - ExpectIntGE(pid, 0); - if (pid == 0) { - ssize_t n_written = 0; - - /* Child process. */ - close(pipefds[0]); - RAND_bytes(randbuf, sizeof(randbuf)); - n_written = write(pipefds[1], randbuf, sizeof(randbuf)); - close(pipefds[1]); - exit(n_written == sizeof(randbuf) ? 0 : 1); - } - else { - /* Parent process. */ - byte childrand[8] = {0}; - int waitstatus = 0; - - close(pipefds[1]); - ExpectIntEQ(RAND_bytes(randbuf, sizeof(randbuf)), 1); - ExpectIntEQ(read(pipefds[0], childrand, sizeof(childrand)), - sizeof(childrand)); - #ifdef WOLFSSL_NO_GETPID - ExpectBufEQ(randbuf, childrand, sizeof(randbuf)); - #else - ExpectBufNE(randbuf, childrand, sizeof(randbuf)); - #endif - close(pipefds[0]); - waitpid(pid, &waitstatus, 0); - } - RAND_cleanup(); -#endif -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_RAND(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) - byte seed[16]; - - XMEMSET(seed, 0, sizeof(seed)); - - /* No global methods set. 
*/ - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - ExpectIntEQ(RAND_poll(), 1); - RAND_cleanup(); - - ExpectIntEQ(RAND_egd(NULL), -1); -#ifndef NO_FILESYSTEM - { - char fname[100]; - - ExpectNotNull(RAND_file_name(fname, (sizeof(fname) - 1))); - ExpectIntEQ(RAND_write_file(NULL), 0); - } -#endif -#endif - return EXPECT_RESULT(); -} - - -#if defined(WC_RNG_SEED_CB) && defined(OPENSSL_EXTRA) -static int wc_DummyGenerateSeed(OS_Seed* os, byte* output, word32 sz) -{ - word32 i; - for (i = 0; i < sz; i++ ) - output[i] = (byte)i; - - (void)os; - - return 0; -} -#endif /* WC_RNG_SEED_CB */ - - -static int test_wolfSSL_RAND_poll(void) -{ - EXPECT_DECLS; - -#if defined(OPENSSL_EXTRA) - byte seed[16]; - byte rand1[16]; -#ifdef WC_RNG_SEED_CB - byte rand2[16]; -#endif - - XMEMSET(seed, 0, sizeof(seed)); - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - ExpectIntEQ(RAND_poll(), 1); - ExpectIntEQ(RAND_bytes(rand1, 16), 1); - RAND_cleanup(); - -#ifdef WC_RNG_SEED_CB - /* Test with custom seed and poll */ - wc_SetSeed_Cb(wc_DummyGenerateSeed); - - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - ExpectIntEQ(RAND_bytes(rand1, 16), 1); - RAND_cleanup(); - - /* test that the same value is generated twice with dummy seed function */ - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - ExpectIntEQ(RAND_bytes(rand2, 16), 1); - ExpectIntEQ(XMEMCMP(rand1, rand2, 16), 0); - RAND_cleanup(); - - /* test that doing a poll is reseeding RNG */ - ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); - ExpectIntEQ(RAND_poll(), 1); - ExpectIntEQ(RAND_bytes(rand2, 16), 1); - ExpectIntNE(XMEMCMP(rand1, rand2, 16), 0); - - /* reset the seed function used */ - wc_SetSeed_Cb(WC_GENERATE_SEED_DEFAULT); -#endif - RAND_cleanup(); - - ExpectIntEQ(RAND_egd(NULL), -1); -#endif - - return EXPECT_RESULT(); -} - - static int test_wolfSSL_PKCS8_Compat(void) { EXPECT_DECLS; @@ -20437,311 +15555,6 @@ static int test_wolfSSL_GetLoggingCb(void) #endif /* !NO_BIO */ -static int test_wolfSSL_OBJ(void) -{ -/* Password 
"wolfSSL test" is only 12 (96-bit) too short for testing in FIPS - * mode - */ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) && !defined(NO_ASN) && \ - !defined(HAVE_FIPS) && !defined(NO_SHA) && defined(WOLFSSL_CERT_EXT) && \ - defined(WOLFSSL_CERT_GEN) && !defined(NO_BIO) && \ - !defined(NO_FILESYSTEM) && !defined(NO_STDIO_FILESYSTEM) - ASN1_OBJECT *obj = NULL; - ASN1_OBJECT *obj2 = NULL; - char buf[50]; - - XFILE fp = XBADFILE; - X509 *x509 = NULL; - X509_NAME *x509Name = NULL; - X509_NAME_ENTRY *x509NameEntry = NULL; - ASN1_OBJECT *asn1Name = NULL; - int numNames = 0; - BIO *bio = NULL; - int nid; - int i, j; - const char *f[] = { - #ifndef NO_RSA - "./certs/ca-cert.der", - #endif - #ifdef HAVE_ECC - "./certs/ca-ecc-cert.der", - "./certs/ca-ecc384-cert.der", - #endif - NULL}; - ASN1_OBJECT *field_name_obj = NULL; - int lastpos = -1; - int tmp = -1; - ASN1_STRING *asn1 = NULL; - unsigned char *buf_dyn = NULL; - - ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectNotNull(obj = OBJ_nid2obj(NID_any_policy)); - ExpectIntEQ(OBJ_obj2nid(obj), NID_any_policy); - ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), 11); - ExpectIntGT(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - - ExpectNotNull(obj = OBJ_nid2obj(NID_sha256)); - ExpectIntEQ(OBJ_obj2nid(obj), NID_sha256); - ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), 22); -#ifdef WOLFSSL_CERT_EXT - ExpectIntEQ(OBJ_txt2nid(buf), NID_sha256); -#endif - ExpectIntGT(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0), 0); - ExpectNotNull(obj2 = OBJ_dup(obj)); - ExpectIntEQ(OBJ_cmp(obj, obj2), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - ASN1_OBJECT_free(obj2); - obj2 = NULL; - - for (i = 0; f[i] != NULL; i++) - { - ExpectTrue((fp = XFOPEN(f[i], "rb")) != XBADFILE); - ExpectNotNull(x509 = d2i_X509_fp(fp, NULL)); - if (fp != XBADFILE) { - XFCLOSE(fp); - fp = XBADFILE; - } - ExpectNotNull(x509Name = 
X509_get_issuer_name(x509)); - ExpectIntNE((numNames = X509_NAME_entry_count(x509Name)), 0); - - /* Get the Common Name by using OBJ_txt2obj */ - ExpectNotNull(field_name_obj = OBJ_txt2obj("CN", 0)); - ExpectIntEQ(X509_NAME_get_index_by_OBJ(NULL, NULL, 99), - WOLFSSL_FATAL_ERROR); - ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, NULL, 99), - WOLFSSL_FATAL_ERROR); - ExpectIntEQ(X509_NAME_get_index_by_OBJ(NULL, field_name_obj, 99), - WOLFSSL_FATAL_ERROR); - ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, field_name_obj, 99), - WOLFSSL_FATAL_ERROR); - ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, NULL, 0), - WOLFSSL_FATAL_ERROR); - do - { - lastpos = tmp; - tmp = X509_NAME_get_index_by_OBJ(x509Name, field_name_obj, lastpos); - } while (tmp > -1); - ExpectIntNE(lastpos, -1); - ASN1_OBJECT_free(field_name_obj); - field_name_obj = NULL; - ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, lastpos)); - ExpectNotNull(asn1 = X509_NAME_ENTRY_get_data(x509NameEntry)); - ExpectIntGE(ASN1_STRING_to_UTF8(&buf_dyn, asn1), 0); - /* - * All Common Names should be www.wolfssl.com - * This makes testing easier as we can test for the expected value. 
- */ - ExpectStrEQ((char*)buf_dyn, "www.wolfssl.com"); - OPENSSL_free(buf_dyn); - buf_dyn = NULL; - bio = BIO_new(BIO_s_mem()); - ExpectTrue(bio != NULL); - for (j = 0; j < numNames; j++) - { - ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, j)); - ExpectNotNull(asn1Name = X509_NAME_ENTRY_get_object(x509NameEntry)); - ExpectTrue((nid = OBJ_obj2nid(asn1Name)) > 0); - } - BIO_free(bio); - bio = NULL; - X509_free(x509); - x509 = NULL; - - } - -#ifdef HAVE_PKCS12 - { - PKCS12 *p12 = NULL; - int boolRet; - EVP_PKEY *pkey = NULL; - const char *p12_f[] = { - /* bundle uses AES-CBC 256 and PKCS7 key uses DES3 */ - #if !defined(NO_DES3) && defined(WOLFSSL_AES_256) && !defined(NO_RSA) - "./certs/test-servercert.p12", - #endif - NULL - }; - - for (i = 0; p12_f[i] != NULL; i++) - { - ExpectTrue((fp = XFOPEN(p12_f[i], "rb")) != XBADFILE); - ExpectNotNull(p12 = d2i_PKCS12_fp(fp, NULL)); - if (fp != XBADFILE) { - XFCLOSE(fp); - fp = XBADFILE; - } - ExpectTrue((boolRet = PKCS12_parse(p12, "wolfSSL test", - &pkey, &x509, NULL)) > 0); - wc_PKCS12_free(p12); - p12 = NULL; - EVP_PKEY_free(pkey); - x509Name = X509_get_issuer_name(x509); - ExpectNotNull(x509Name); - ExpectIntNE((numNames = X509_NAME_entry_count(x509Name)), 0); - ExpectTrue((bio = BIO_new(BIO_s_mem())) != NULL); - for (j = 0; j < numNames; j++) - { - ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, j)); - ExpectNotNull(asn1Name = - X509_NAME_ENTRY_get_object(x509NameEntry)); - ExpectTrue((nid = OBJ_obj2nid(asn1Name)) > 0); - } - BIO_free(bio); - bio = NULL; - X509_free(x509); - x509 = NULL; - } - } -#endif /* HAVE_PKCS12 */ -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_OBJ_cmp(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) - ASN1_OBJECT *obj = NULL; - ASN1_OBJECT *obj2 = NULL; - - ExpectNotNull(obj = OBJ_nid2obj(NID_any_policy)); - ExpectNotNull(obj2 = OBJ_nid2obj(NID_sha256)); - - ExpectIntEQ(OBJ_cmp(NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); 
- ExpectIntEQ(OBJ_cmp(obj, NULL), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(OBJ_cmp(NULL, obj2), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(OBJ_cmp(obj, obj2), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); - ExpectIntEQ(OBJ_cmp(obj, obj), 0); - ExpectIntEQ(OBJ_cmp(obj2, obj2), 0); - - ASN1_OBJECT_free(obj); - ASN1_OBJECT_free(obj2); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_OBJ_txt2nid(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) || \ - defined(WOLFSSL_APACHE_HTTPD) - int i; - static const struct { - const char* sn; - const char* ln; - const char* oid; - int nid; - } testVals[] = { -#ifdef WOLFSSL_APACHE_HTTPD - { "tlsfeature", "TLS Feature", "1.3.6.1.5.5.7.1.24", NID_tlsfeature }, - { "id-on-dnsSRV", "SRVName", "1.3.6.1.5.5.7.8.7", - NID_id_on_dnsSRV }, - { "msUPN", "Microsoft User Principal Name", - "1.3.6.1.4.1.311.20.2.3", NID_ms_upn }, -#endif - { NULL, NULL, NULL, NID_undef } - }; - - /* Invalid cases */ - ExpectIntEQ(OBJ_txt2nid(NULL), NID_undef); - ExpectIntEQ(OBJ_txt2nid("Bad name"), NID_undef); - - /* Valid cases */ - for (i = 0; testVals[i].sn != NULL; i++) { - ExpectIntEQ(OBJ_txt2nid(testVals[i].sn), testVals[i].nid); - ExpectIntEQ(OBJ_txt2nid(testVals[i].ln), testVals[i].nid); - ExpectIntEQ(OBJ_txt2nid(testVals[i].oid), testVals[i].nid); - } -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_OBJ_txt2obj(void) -{ - EXPECT_DECLS; -#if defined(WOLFSSL_APACHE_HTTPD) || (defined(OPENSSL_EXTRA) && \ - defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_CERT_GEN)) - int i; - char buf[50]; - ASN1_OBJECT* obj = NULL; - static const struct { - const char* oidStr; - const char* sn; - const char* ln; - } objs_list[] = { - #if defined(WOLFSSL_APACHE_HTTPD) - { "1.3.6.1.5.5.7.1.24", "tlsfeature", "TLS Feature" }, - { "1.3.6.1.5.5.7.8.7", "id-on-dnsSRV", "SRVName" }, - #endif - { "2.5.29.19", "basicConstraints", "X509v3 Basic Constraints"}, - { NULL, NULL, NULL } - }; - static 
const struct { - const char* numeric; - const char* name; - } objs_named[] = { - /* In dictionary but not in normal list. */ - { "1.3.6.1.5.5.7.3.8", "Time Stamping" }, - /* Made up OID. */ - { "1.3.5.7", "1.3.5.7" }, - { NULL, NULL } - }; - - ExpectNull(obj = OBJ_txt2obj("Bad name", 0)); - ASN1_OBJECT_free(obj); - obj = NULL; - ExpectNull(obj = OBJ_txt2obj(NULL, 0)); - ASN1_OBJECT_free(obj); - obj = NULL; - - for (i = 0; objs_list[i].oidStr != NULL; i++) { - /* Test numerical value of oid (oidStr) */ - ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].oidStr, 1)); - /* Convert object back to text to confirm oid is correct */ - wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); - ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - XMEMSET(buf, 0, sizeof(buf)); - - /* Test short name (sn) */ - ExpectNull(obj = OBJ_txt2obj(objs_list[i].sn, 1)); - ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].sn, 0)); - /* Convert object back to text to confirm oid is correct */ - wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); - ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - XMEMSET(buf, 0, sizeof(buf)); - - /* Test long name (ln) - should fail when no_name = 1 */ - ExpectNull(obj = OBJ_txt2obj(objs_list[i].ln, 1)); - ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].ln, 0)); - /* Convert object back to text to confirm oid is correct */ - wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); - ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - XMEMSET(buf, 0, sizeof(buf)); - } - - for (i = 0; objs_named[i].numeric != NULL; i++) { - ExpectNotNull(obj = OBJ_txt2obj(objs_named[i].numeric, 1)); - wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0); - ExpectIntEQ(XSTRNCMP(buf, objs_named[i].name, (int)XSTRLEN(buf)), 0); - wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); - ExpectIntEQ(XSTRNCMP(buf, 
objs_named[i].numeric, (int)XSTRLEN(buf)), 0); - ASN1_OBJECT_free(obj); - obj = NULL; - } -#endif - return EXPECT_RESULT(); -} - /* Note the lack of wolfSSL_ prefix...this is a compatibility layer test. */ static int test_GENERAL_NAME_set0_othername(void) { @@ -20844,6 +15657,104 @@ static int test_GENERAL_NAME_set0_othername(void) return EXPECT_RESULT(); } +/* Test RID (Registered ID) GENERAL_NAME creation and freeing */ +static int test_RID_GENERAL_NAME_free(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(WOLFSSL_RID_ALT_NAME) + /* RID OID: 1.2.3.4.5 in DER */ + const unsigned char ridData[] = { 0x06, 0x04, 0x2a, 0x03, 0x04, 0x05 }; + const unsigned char* p = ridData; + GENERAL_NAME* gn = NULL; + GENERAL_NAMES* gns = NULL; + ASN1_OBJECT* ridObj = NULL; + + /* Create RID ASN1_OBJECT from DER */ + ExpectNotNull(ridObj = wolfSSL_d2i_ASN1_OBJECT(NULL, &p, sizeof(ridData))); + + /* Create GENERAL_NAME and set up as RID */ + ExpectNotNull(gn = GENERAL_NAME_new()); + if (gn != NULL) { + /* GENERAL_NAME_new allocates ia5, must free before using as RID */ + gn->type = GEN_RID; + wolfSSL_ASN1_STRING_free(gn->d.ia5); + gn->d.ia5 = NULL; + gn->d.registeredID = ridObj; + ridObj = NULL; /* gn owns */ + } + if (EXPECT_FAIL()) { + wolfSSL_ASN1_OBJECT_free(ridObj); + } + + /* Add to stack */ + ExpectNotNull(gns = sk_GENERAL_NAME_new(NULL)); + ExpectIntEQ(sk_GENERAL_NAME_push(gns, gn), 1); + if (EXPECT_FAIL()) { + GENERAL_NAME_free(gn); + gn = NULL; + } + + /* Verify RID is set up correctly */ + ExpectNotNull(gn = sk_GENERAL_NAME_value(gns, 0)); + ExpectIntEQ(gn->type, GEN_RID); + ExpectNotNull(gn->d.registeredID); + + /* Free via sk_GENERAL_NAME_pop_free, exercises type_free for RID */ + sk_GENERAL_NAME_pop_free(gns, GENERAL_NAME_free); +#endif + return EXPECT_RESULT(); +} + +/* Test RID (Registered ID) SAN parsing via X509_get_ext_d2i(). + * Uses rid-cert.der which contains a RID SAN with OID 1.2.3.4.5. 
This tests + * that ASN_RID_TYPE case in wolfSSL_X509_get_ext_d2i() frees ia5 before + * allocating registeredID. */ +static int test_RID_X509_get_ext_d2i(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(WOLFSSL_RID_ALT_NAME) && \ + !defined(NO_RSA) && !defined(NO_FILESYSTEM) + int i; + int numNames; + int foundRID = 0; + const char* ridCert = "./certs/rid-cert.der"; + X509* x509 = NULL; + GENERAL_NAMES* gns = NULL; + GENERAL_NAME* gn = NULL; + XFILE f = XBADFILE; + + ExpectTrue((f = XFOPEN(ridCert, "rb")) != XBADFILE); + ExpectNotNull(x509 = d2i_X509_fp(f, NULL)); + if (f != XBADFILE) { + XFCLOSE(f); + f = XBADFILE; + } + + /* Get SANs, will exercise ASN_RID_TYPE case */ + ExpectNotNull(gns = (GENERAL_NAMES*)X509_get_ext_d2i(x509, + NID_subject_alt_name, NULL, NULL)); + + /* rid-cert.der contains: UPN, RID (1.2.3.4.5), DNS, URI, othername */ + numNames = sk_GENERAL_NAME_num(gns); + ExpectIntGE(numNames, 2); + + for (i = 0; i < numNames; i++) { + gn = sk_GENERAL_NAME_value(gns, i); + if (gn != NULL && gn->type == GEN_RID) { + ExpectNotNull(gn->d.registeredID); + foundRID = 1; + break; + } + } + ExpectIntEQ(foundRID, 1); + + /* Free via sk_GENERAL_NAME_pop_free, exercises type_free for RID */ + sk_GENERAL_NAME_pop_free(gns, GENERAL_NAME_free); + X509_free(x509); +#endif + return EXPECT_RESULT(); +} + /* Note the lack of wolfSSL_ prefix...this is a compatibility layer test. */ static int test_othername_and_SID_ext(void) { @@ -22810,248 +17721,6 @@ static int test_wolfSSL_msg_callback(void) return EXPECT_RESULT(); } -/* test_EVP_Cipher_extra, Extra-test on EVP_CipherUpdate/Final. 
see also test.c */ -#if (defined(OPENSSL_EXTRA) || defined(OPENSSL_ALL)) &&\ - (!defined(NO_AES) && defined(HAVE_AES_CBC) && defined(WOLFSSL_AES_128)) -static void binary_dump(void *ptr, int size) -{ - #ifdef WOLFSSL_EVP_PRINT - int i = 0; - unsigned char *p = (unsigned char *) ptr; - - fprintf(stderr, "{"); - while ((p != NULL) && (i < size)) { - if ((i % 8) == 0) { - fprintf(stderr, "\n"); - fprintf(stderr, " "); - } - fprintf(stderr, "0x%02x, ", p[i]); - i++; - } - fprintf(stderr, "\n};\n"); - #else - (void) ptr; - (void) size; - #endif -} - -static int last_val = 0x0f; - -static int check_result(unsigned char *data, int len) -{ - int i; - - for ( ; len; ) { - last_val = (last_val + 1) % 16; - for (i = 0; i < 16; len--, i++, data++) - if (*data != last_val) { - return -1; - } - } - return 0; -} - -static int r_offset; -static int w_offset; - -static void init_offset(void) -{ - r_offset = 0; - w_offset = 0; -} -static void get_record(unsigned char *data, unsigned char *buf, int len) -{ - XMEMCPY(buf, data+r_offset, len); - r_offset += len; -} - -static void set_record(unsigned char *data, unsigned char *buf, int len) -{ - XMEMCPY(data+w_offset, buf, len); - w_offset += len; -} - -static void set_plain(unsigned char *plain, int rec) -{ - int i, j; - unsigned char *p = plain; - - #define BLOCKSZ 16 - - for (i=0; i<(rec/BLOCKSZ); i++) { - for (j=0; j 0 && keylen != klen) { - ExpectIntNE(EVP_CIPHER_CTX_set_key_length(evp, keylen), 0); - } - ilen = EVP_CIPHER_CTX_iv_length(evp); - if (ilen > 0 && ivlen != ilen) { - ExpectIntNE(EVP_CIPHER_CTX_set_iv_length(evp, ivlen), 0); - } - - ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 1)), 0); - - for (j = 0; j 0) - set_record(cipher, outb, outl); - } - - for (i = 0; test_drive[i]; i++) { - last_val = 0x0f; - - ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 0)), 0); - - init_offset(); - - for (j = 0; test_drive[i][j]; j++) { - inl = test_drive[i][j]; - get_record(cipher, inb, inl); - - ExpectIntNE((ret = 
EVP_DecryptUpdate(evp, outb, &outl, inb, inl)), - 0); - - binary_dump(outb, outl); - ExpectIntEQ((ret = check_result(outb, outl)), 0); - ExpectFalse(outl > ((inl/16+1)*16) && outl > 16); - } - - ret = EVP_CipherFinal(evp, outb, &outl); - - binary_dump(outb, outl); - - ret = (((test_drive_len[i] % 16) != 0) && (ret == 0)) || - (((test_drive_len[i] % 16) == 0) && (ret == 1)); - ExpectTrue(ret); - } - - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(evp), WOLFSSL_SUCCESS); - - EVP_CIPHER_CTX_free(evp); - evp = NULL; - - /* Do an extra test to verify correct behavior with empty input. */ - - ExpectNotNull(evp = EVP_CIPHER_CTX_new()); - ExpectIntNE((ret = EVP_CipherInit(evp, type, NULL, iv, 0)), 0); - - ExpectIntEQ(EVP_CIPHER_CTX_nid(evp), NID_aes_128_cbc); - - klen = EVP_CIPHER_CTX_key_length(evp); - if (klen > 0 && keylen != klen) { - ExpectIntNE(EVP_CIPHER_CTX_set_key_length(evp, keylen), 0); - } - ilen = EVP_CIPHER_CTX_iv_length(evp); - if (ilen > 0 && ivlen != ilen) { - ExpectIntNE(EVP_CIPHER_CTX_set_iv_length(evp, ivlen), 0); - } - - ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 1)), 0); - - /* outl should be set to 0 after passing NULL, 0 for input args. 
*/ - outl = -1; - ExpectIntNE((ret = EVP_CipherUpdate(evp, outb, &outl, NULL, 0)), 0); - ExpectIntEQ(outl, 0); - - EVP_CIPHER_CTX_free(evp); -#endif /* test_EVP_Cipher */ - return EXPECT_RESULT(); -} - static int test_wolfSSL_X509_SEP(void) { EXPECT_DECLS; @@ -23272,183 +17941,6 @@ static int test_wolfSSL_get_ciphers_compat(void) return EXPECT_RESULT(); } -static int test_wolfSSL_EVP_PKEY_set1_get1_DSA(void) -{ - EXPECT_DECLS; -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) - DSA *dsa = NULL; - DSA *setDsa = NULL; - EVP_PKEY *pkey = NULL; - EVP_PKEY *set1Pkey = NULL; - - SHA_CTX sha; - byte signature[DSA_SIG_SIZE]; - byte hash[WC_SHA_DIGEST_SIZE]; - word32 bytes; - int answer; -#ifdef USE_CERT_BUFFERS_1024 - const unsigned char* dsaKeyDer = dsa_key_der_1024; - int dsaKeySz = sizeof_dsa_key_der_1024; - byte tmp[ONEK_BUF]; - - XMEMSET(tmp, 0, sizeof(tmp)); - XMEMCPY(tmp, dsaKeyDer , dsaKeySz); - bytes = dsaKeySz; -#elif defined(USE_CERT_BUFFERS_2048) - const unsigned char* dsaKeyDer = dsa_key_der_2048; - int dsaKeySz = sizeof_dsa_key_der_2048; - byte tmp[TWOK_BUF]; - - XMEMSET(tmp, 0, sizeof(tmp)); - XMEMCPY(tmp, dsaKeyDer , dsaKeySz); - bytes = (word32)dsaKeySz; -#else - byte tmp[TWOK_BUF]; - const unsigned char* dsaKeyDer = (const unsigned char*)tmp; - int dsaKeySz; - XFILE fp = XBADFILE; - - XMEMSET(tmp, 0, sizeof(tmp)); - ExpectTrue((fp = XFOPEN("./certs/dsa2048.der", "rb")) != XBADFILE); - ExpectIntGT(dsaKeySz = bytes = (word32) XFREAD(tmp, 1, sizeof(tmp), fp), 0); - if (fp != XBADFILE) - XFCLOSE(fp); -#endif /* END USE_CERT_BUFFERS_1024 */ - - /* Create hash to later Sign and Verify */ - ExpectIntEQ(SHA1_Init(&sha), WOLFSSL_SUCCESS); - ExpectIntEQ(SHA1_Update(&sha, tmp, bytes), WOLFSSL_SUCCESS); - ExpectIntEQ(SHA1_Final(hash,&sha), WOLFSSL_SUCCESS); - - /* Initialize pkey with der format dsa key */ - ExpectNotNull(d2i_PrivateKey(EVP_PKEY_DSA, &pkey, &dsaKeyDer, - (long)dsaKeySz)); - - /* Test wolfSSL_EVP_PKEY_get1_DSA */ - /* 
Should Fail: NULL argument */ - ExpectNull(dsa = EVP_PKEY_get0_DSA(NULL)); - ExpectNull(dsa = EVP_PKEY_get1_DSA(NULL)); - /* Should Pass: Initialized pkey argument */ - ExpectNotNull(dsa = EVP_PKEY_get0_DSA(pkey)); - ExpectNotNull(dsa = EVP_PKEY_get1_DSA(pkey)); - -#ifdef USE_CERT_BUFFERS_1024 - ExpectIntEQ(DSA_bits(dsa), 1024); -#else - ExpectIntEQ(DSA_bits(dsa), 2048); -#endif - - /* Sign */ - ExpectIntEQ(wolfSSL_DSA_do_sign(hash, signature, dsa), WOLFSSL_SUCCESS); - /* Verify. */ - ExpectIntEQ(wolfSSL_DSA_do_verify(hash, signature, dsa, &answer), - WOLFSSL_SUCCESS); - - /* Test wolfSSL_EVP_PKEY_set1_DSA */ - /* Should Fail: set1Pkey not initialized */ - ExpectIntNE(EVP_PKEY_set1_DSA(set1Pkey, dsa), WOLFSSL_SUCCESS); - - /* Initialize set1Pkey */ - set1Pkey = EVP_PKEY_new(); - - /* Should Fail Verify: setDsa not initialized from set1Pkey */ - ExpectIntNE(wolfSSL_DSA_do_verify(hash,signature,setDsa,&answer), - WOLFSSL_SUCCESS); - - /* Should Pass: set dsa into set1Pkey */ - ExpectIntEQ(EVP_PKEY_set1_DSA(set1Pkey, dsa), WOLFSSL_SUCCESS); - - DSA_free(dsa); - DSA_free(setDsa); - EVP_PKEY_free(pkey); - EVP_PKEY_free(set1Pkey); -#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ - return EXPECT_RESULT(); -} /* END test_EVP_PKEY_set1_get1_DSA */ - -static int test_wolfSSL_EVP_PKEY_set1_get1_EC_KEY (void) -{ - EXPECT_DECLS; -#ifdef HAVE_ECC - WOLFSSL_EC_KEY* ecKey = NULL; - WOLFSSL_EC_KEY* ecGet1 = NULL; - EVP_PKEY* pkey = NULL; - - ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - /* Test wolfSSL_EVP_PKEY_set1_EC_KEY */ - ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(NULL, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Should fail since ecKey is empty */ - ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); - 
ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); - - /* Test wolfSSL_EVP_PKEY_get1_EC_KEY */ - ExpectNull(wolfSSL_EVP_PKEY_get1_EC_KEY(NULL)); - ExpectNotNull(ecGet1 = wolfSSL_EVP_PKEY_get1_EC_KEY(pkey)); - - wolfSSL_EC_KEY_free(ecKey); - wolfSSL_EC_KEY_free(ecGet1); - EVP_PKEY_free(pkey); -#endif /* HAVE_ECC */ - return EXPECT_RESULT(); -} /* END test_EVP_PKEY_set1_get1_EC_KEY */ - -static int test_wolfSSL_EVP_PKEY_set1_get1_DH (void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) || defined(WOLFSSL_OPENSSH) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) -#if !defined(NO_DH) && defined(WOLFSSL_DH_EXTRA) && !defined(NO_FILESYSTEM) - DH *dh = NULL; - DH *setDh = NULL; - EVP_PKEY *pkey = NULL; - - XFILE f = XBADFILE; - unsigned char buf[4096]; - const unsigned char* pt = buf; - const char* dh2048 = "./certs/dh2048.der"; - long len = 0; - int code = -1; - - XMEMSET(buf, 0, sizeof(buf)); - - ExpectTrue((f = XFOPEN(dh2048, "rb")) != XBADFILE); - ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); - if (f != XBADFILE) - XFCLOSE(f); - - /* Load dh2048.der into DH with internal format */ - ExpectNotNull(setDh = wolfSSL_d2i_DHparams(NULL, &pt, len)); - - ExpectIntEQ(wolfSSL_DH_check(setDh, &code), WOLFSSL_SUCCESS); - ExpectIntEQ(code, 0); - code = -1; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - /* Set DH into PKEY */ - ExpectIntEQ(wolfSSL_EVP_PKEY_set1_DH(pkey, setDh), WOLFSSL_SUCCESS); - - /* Get DH from PKEY */ - ExpectNotNull(dh = wolfSSL_EVP_PKEY_get1_DH(pkey)); - - ExpectIntEQ(wolfSSL_DH_check(dh, &code), WOLFSSL_SUCCESS); - ExpectIntEQ(code, 0); - - EVP_PKEY_free(pkey); - DH_free(setDh); - setDh = NULL; - DH_free(dh); - dh = NULL; -#endif /* !NO_DH && WOLFSSL_DH_EXTRA && !NO_FILESYSTEM */ -#endif /* !HAVE_FIPS || HAVE_FIPS_VERSION > 2 */ -#endif /* OPENSSL_ALL || WOLFSSL_QT || WOLFSSL_OPENSSH */ - return EXPECT_RESULT(); -} /* END test_EVP_PKEY_set1_get1_DH */ - static 
int test_wolfSSL_CTX_ctrl(void) { EXPECT_DECLS; @@ -23624,1542 +18116,6 @@ static int test_wolfSSL_CTX_ctrl(void) return EXPECT_RESULT(); } -static int test_wolfSSL_EVP_PKEY_assign(void) -{ - EXPECT_DECLS; -#if !defined(NO_RSA) || !defined(NO_DSA) || defined(HAVE_ECC) - int type; - WOLFSSL_EVP_PKEY* pkey = NULL; -#ifndef NO_RSA - WOLFSSL_RSA* rsa = NULL; -#endif -#ifndef NO_DSA - WOLFSSL_DSA* dsa = NULL; -#endif -#ifdef HAVE_ECC - WOLFSSL_EC_KEY* ecKey = NULL; -#endif - -#ifndef NO_RSA - type = EVP_PKEY_RSA; - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(rsa = wolfSSL_RSA_new()); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, rsa), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, rsa), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, rsa), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_RSA_free(rsa); - } - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; -#endif /* NO_RSA */ - -#ifndef NO_DSA - type = EVP_PKEY_DSA; - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(dsa = wolfSSL_DSA_new()); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, dsa), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, dsa), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, dsa), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_DSA_free(dsa); - } - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; -#endif /* NO_DSA */ - -#ifdef HAVE_ECC - type = EVP_PKEY_EC; - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - 
ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, ecKey), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_EC_KEY_free(ecKey); - } - wolfSSL_EVP_PKEY_free(pkey); - pkey = NULL; -#endif /* HAVE_ECC */ -#endif /* !NO_RSA || !NO_DSA || HAVE_ECC */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_assign_DH(void) -{ - EXPECT_DECLS; -#if !defined(NO_DH) && \ - !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - XFILE f = XBADFILE; - unsigned char buf[4096]; - const unsigned char* pt = buf; - const char* params1 = "./certs/dh2048.der"; - long len = 0; - WOLFSSL_DH* dh = NULL; - WOLFSSL_EVP_PKEY* pkey = NULL; - XMEMSET(buf, 0, sizeof(buf)); - - /* Load DH parameters DER. */ - ExpectTrue((f = XFOPEN(params1, "rb")) != XBADFILE); - ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); - if (f != XBADFILE) - XFCLOSE(f); - - ExpectNotNull(dh = wolfSSL_d2i_DHparams(NULL, &pt, len)); - ExpectIntEQ(DH_generate_key(dh), WOLFSSL_SUCCESS); - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - /* Bad cases */ - ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(NULL, dh), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(pkey, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Good case */ - ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(pkey, dh), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_DH_free(dh); - } - - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_base_id(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - ExpectIntEQ(wolfSSL_EVP_PKEY_base_id(NULL), NID_undef); - - 
ExpectIntEQ(wolfSSL_EVP_PKEY_base_id(pkey), EVP_PKEY_RSA); - - EVP_PKEY_free(pkey); - - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_PKEY_id(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - ExpectIntEQ(wolfSSL_EVP_PKEY_id(NULL), 0); - - ExpectIntEQ(wolfSSL_EVP_PKEY_id(pkey), EVP_PKEY_RSA); - - EVP_PKEY_free(pkey); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_paramgen(void) -{ - EXPECT_DECLS; - /* ECC check taken from ecc.c. It is the condition that defines ECC256 */ -#if defined(OPENSSL_ALL) && !defined(NO_ECC_SECP) && \ - ((!defined(NO_ECC256) || defined(HAVE_ALL_CURVES)) && \ - ECC_MIN_KEY_SZ <= 256) - EVP_PKEY_CTX* ctx = NULL; - EVP_PKEY* pkey = NULL; - - /* Test error conditions. */ - ExpectIntEQ(EVP_PKEY_paramgen(NULL, &pkey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)); - ExpectIntEQ(EVP_PKEY_paramgen(ctx, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - -#ifndef NO_RSA - EVP_PKEY_CTX_free(ctx); - /* Parameter generation for RSA not supported yet. 
*/ - ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)); - ExpectIntEQ(EVP_PKEY_paramgen(ctx, &pkey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); -#endif - -#ifdef HAVE_ECC - EVP_PKEY_CTX_free(ctx); - ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)); - ExpectIntEQ(EVP_PKEY_paramgen_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_ec_paramgen_curve_nid(ctx, - NID_X9_62_prime256v1), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_paramgen(ctx, &pkey), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_ec_param_enc(ctx, OPENSSL_EC_NAMED_CURVE), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_keygen_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_keygen(ctx, &pkey), WOLFSSL_SUCCESS); -#endif - - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_keygen(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_PKEY* pkey = NULL; - EVP_PKEY_CTX* ctx = NULL; -#if !defined(NO_DH) && (!defined(HAVE_FIPS) || FIPS_VERSION_GT(2,0)) - WOLFSSL_EVP_PKEY* params = NULL; - DH* dh = NULL; - const BIGNUM* pubkey = NULL; - const BIGNUM* privkey = NULL; - ASN1_INTEGER* asn1int = NULL; - unsigned int length = 0; - byte* derBuffer = NULL; -#endif - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - - /* Bad cases */ - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(NULL, &pkey), 0); - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(ctx, NULL), 0); - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(NULL, NULL), 0); - - /* Good case */ - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(ctx, &pkey), 0); - - EVP_PKEY_CTX_free(ctx); - ctx = NULL; - EVP_PKEY_free(pkey); - pkey = NULL; - -#if !defined(NO_DH) && (!defined(HAVE_FIPS) || FIPS_VERSION_GT(2,0)) - /* Test DH keygen */ - { - ExpectNotNull(params = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(dh = DH_get_2048_256()); - ExpectIntEQ(EVP_PKEY_set1_DH(params, dh), WOLFSSL_SUCCESS); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(params, NULL)); - ExpectIntEQ(EVP_PKEY_keygen_init(ctx), 
WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_keygen(ctx, &pkey), WOLFSSL_SUCCESS); - - DH_free(dh); - dh = NULL; - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(params); - - /* try exporting generated key to DER, to verify */ - ExpectNotNull(dh = EVP_PKEY_get1_DH(pkey)); - DH_get0_key(dh, &pubkey, &privkey); - ExpectNotNull(pubkey); - ExpectNotNull(privkey); - ExpectNotNull(asn1int = BN_to_ASN1_INTEGER(pubkey, NULL)); - ExpectIntGT((length = i2d_ASN1_INTEGER(asn1int, &derBuffer)), 0); - - ASN1_INTEGER_free(asn1int); - DH_free(dh); - dh = NULL; - XFREE(derBuffer, NULL, DYNAMIC_TYPE_TMP_BUFFER); - - EVP_PKEY_free(pkey); - } -#endif - - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_PKEY_keygen_init(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_PKEY* pkey = NULL; - EVP_PKEY_CTX *ctx = NULL; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_PKEY_keygen_init(NULL), WOLFSSL_SUCCESS); - - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(pkey); - - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_PKEY_missing_parameters(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && !defined(NO_WOLFSSL_STUB) - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - - ExpectIntEQ(wolfSSL_EVP_PKEY_missing_parameters(pkey), 0); - ExpectIntEQ(wolfSSL_EVP_PKEY_missing_parameters(NULL), 0); - - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_PKEY_copy_parameters(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_DH) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) && (defined(OPENSSL_ALL) || defined(WOLFSSL_QT) || \ - defined(WOLFSSL_OPENSSH)) && defined(WOLFSSL_DH_EXTRA) && \ - !defined(NO_FILESYSTEM) - WOLFSSL_EVP_PKEY* params = NULL; - WOLFSSL_EVP_PKEY* copy = NULL; - DH* dh = NULL; - BIGNUM* p1; - BIGNUM* g1; - BIGNUM* q1; - BIGNUM* p2; - BIGNUM* g2; - 
BIGNUM* q2; - - /* create DH with DH_get_2048_256 params */ - ExpectNotNull(params = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(dh = DH_get_2048_256()); - ExpectIntEQ(EVP_PKEY_set1_DH(params, dh), WOLFSSL_SUCCESS); - DH_get0_pqg(dh, (const BIGNUM**)&p1, - (const BIGNUM**)&q1, - (const BIGNUM**)&g1); - DH_free(dh); - dh = NULL; - - /* create DH with random generated DH params */ - ExpectNotNull(copy = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(dh = DH_generate_parameters(2048, 2, NULL, NULL)); - ExpectIntEQ(EVP_PKEY_set1_DH(copy, dh), WOLFSSL_SUCCESS); - DH_free(dh); - dh = NULL; - - ExpectIntEQ(EVP_PKEY_copy_parameters(copy, params), WOLFSSL_SUCCESS); - ExpectNotNull(dh = EVP_PKEY_get1_DH(copy)); - ExpectNotNull(dh->p); - ExpectNotNull(dh->g); - ExpectNotNull(dh->q); - DH_get0_pqg(dh, (const BIGNUM**)&p2, - (const BIGNUM**)&q2, - (const BIGNUM**)&g2); - - ExpectIntEQ(BN_cmp(p1, p2), 0); - ExpectIntEQ(BN_cmp(q1, q2), 0); - ExpectIntEQ(BN_cmp(g1, g2), 0); - - DH_free(dh); - dh = NULL; - EVP_PKEY_free(copy); - EVP_PKEY_free(params); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_PKEY* pkey = NULL; - EVP_PKEY_CTX* ctx = NULL; - int bits = 2048; - - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - - ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, bits), - WOLFSSL_SUCCESS); - - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(pkey); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_CTX_iv_length(void) -{ - EXPECT_DECLS; - /* This is large enough to be used for all key sizes */ - byte key[AES_256_KEY_SIZE] = {0}; - byte iv[AES_BLOCK_SIZE] = {0}; - int i; - int nids[] = { - #ifdef HAVE_AES_CBC - NID_aes_128_cbc, - #endif - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - #ifdef HAVE_AESGCM - NID_aes_128_gcm, - #endif - #endif /* (HAVE_FIPS && 
!HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - NID_aes_128_ctr, - #endif - #ifndef NO_DES3 - NID_des_cbc, - NID_des_ede3_cbc, - #endif - }; - int iv_lengths[] = { - #ifdef HAVE_AES_CBC - AES_BLOCK_SIZE, - #endif - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - #ifdef HAVE_AESGCM - GCM_NONCE_MID_SZ, - #endif - #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - AES_BLOCK_SIZE, - #endif - #ifndef NO_DES3 - DES_BLOCK_SIZE, - DES_BLOCK_SIZE, - #endif - }; - int nidsLen = (sizeof(nids)/sizeof(int)); - - for (i = 0; i < nidsLen; i++) { - const EVP_CIPHER* init = wolfSSL_EVP_get_cipherbynid(nids[i]); - EVP_CIPHER_CTX* ctx = EVP_CIPHER_CTX_new(); - wolfSSL_EVP_CIPHER_CTX_init(ctx); - - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_iv_length(ctx), iv_lengths[i]); - - EVP_CIPHER_CTX_free(ctx); - } - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_CTX_key_length(void) -{ - EXPECT_DECLS; - byte key[AES_256_KEY_SIZE] = {0}; - byte iv[AES_BLOCK_SIZE] = {0}; - int i; - int nids[] = { - #ifdef HAVE_AES_CBC - NID_aes_128_cbc, - #ifdef WOLFSSL_AES_256 - NID_aes_256_cbc, - #endif - #endif - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - #ifdef HAVE_AESGCM - NID_aes_128_gcm, - #ifdef WOLFSSL_AES_256 - NID_aes_256_gcm, - #endif - #endif - #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - NID_aes_128_ctr, - #ifdef WOLFSSL_AES_256 - NID_aes_256_ctr, - #endif - #endif - #ifndef NO_DES3 - NID_des_cbc, - NID_des_ede3_cbc, - #endif - }; - int key_lengths[] = { - #ifdef HAVE_AES_CBC - AES_128_KEY_SIZE, - #ifdef WOLFSSL_AES_256 - AES_256_KEY_SIZE, - #endif - #endif - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) 
&& (HAVE_FIPS_VERSION > 2)) - #ifdef HAVE_AESGCM - AES_128_KEY_SIZE, - #ifdef WOLFSSL_AES_256 - AES_256_KEY_SIZE, - #endif - #endif - #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - AES_128_KEY_SIZE, - #ifdef WOLFSSL_AES_256 - AES_256_KEY_SIZE, - #endif - #endif - #ifndef NO_DES3 - DES_KEY_SIZE, - DES3_KEY_SIZE, - #endif - }; - int nidsLen = (sizeof(nids)/sizeof(int)); - - for (i = 0; i < nidsLen; i++) { - const EVP_CIPHER *init = wolfSSL_EVP_get_cipherbynid(nids[i]); - EVP_CIPHER_CTX* ctx = EVP_CIPHER_CTX_new(); - wolfSSL_EVP_CIPHER_CTX_init(ctx); - - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_key_length(ctx), key_lengths[i]); - - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_key_length(ctx, key_lengths[i]), - WOLFSSL_SUCCESS); - - EVP_CIPHER_CTX_free(ctx); - } - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_CTX_set_iv(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESGCM) && !defined(NO_DES3) - int ivLen, keyLen; - EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new(); -#ifdef HAVE_AESGCM - byte key[AES_128_KEY_SIZE] = {0}; - byte iv[AES_BLOCK_SIZE] = {0}; - const EVP_CIPHER *init = EVP_aes_128_gcm(); -#else - byte key[DES3_KEY_SIZE] = {0}; - byte iv[DES_BLOCK_SIZE] = {0}; - const EVP_CIPHER *init = EVP_des_ede3_cbc(); -#endif - - wolfSSL_EVP_CIPHER_CTX_init(ctx); - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - - ivLen = wolfSSL_EVP_CIPHER_CTX_iv_length(ctx); - keyLen = wolfSSL_EVP_CIPHER_CTX_key_length(ctx); - - /* Bad cases */ - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(NULL, iv, ivLen), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, NULL, ivLen), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(NULL, NULL, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - 
ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, keyLen), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Good case */ - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, ivLen), 1); - - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_CTX_new_id(void) -{ - EXPECT_DECLS; - WOLFSSL_ENGINE* e = NULL; - int id = 0; - EVP_PKEY_CTX *ctx = NULL; - - ExpectNotNull(ctx = wolfSSL_EVP_PKEY_CTX_new_id(id, e)); - - EVP_PKEY_CTX_free(ctx); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_rc4(void) -{ - EXPECT_DECLS; -#if !defined(NO_RC4) - ExpectNotNull(wolfSSL_EVP_rc4()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_enc_null(void) -{ - EXPECT_DECLS; - ExpectNotNull(wolfSSL_EVP_enc_null()); - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_rc2_cbc(void) - -{ - EXPECT_DECLS; -#if defined(WOLFSSL_QT) && !defined(NO_WOLFSSL_STUB) - ExpectNull(wolfSSL_EVP_rc2_cbc()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_mdc2(void) -{ - EXPECT_DECLS; -#if !defined(NO_WOLFSSL_STUB) - ExpectNull(wolfSSL_EVP_mdc2()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_md4(void) -{ - EXPECT_DECLS; -#if !defined(NO_MD4) - ExpectNotNull(wolfSSL_EVP_md4()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_aes_256_gcm(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESGCM) && defined(WOLFSSL_AES_256) - ExpectNotNull(wolfSSL_EVP_aes_256_gcm()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_aes_192_gcm(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESGCM) && defined(WOLFSSL_AES_192) - ExpectNotNull(wolfSSL_EVP_aes_192_gcm()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_aes_256_ccm(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESCCM) && defined(WOLFSSL_AES_256) - ExpectNotNull(wolfSSL_EVP_aes_256_ccm()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_aes_192_ccm(void) -{ - EXPECT_DECLS; -#if 
defined(HAVE_AESCCM) && defined(WOLFSSL_AES_192) - ExpectNotNull(wolfSSL_EVP_aes_192_ccm()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_aes_128_ccm(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESCCM) && defined(WOLFSSL_AES_128) - ExpectNotNull(wolfSSL_EVP_aes_128_ccm()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_ripemd160(void) -{ - EXPECT_DECLS; -#if !defined(NO_WOLFSSL_STUB) - ExpectNull(wolfSSL_EVP_ripemd160()); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_get_digestbynid(void) -{ - EXPECT_DECLS; - -#ifndef NO_MD5 - ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_md5)); -#endif -#ifndef NO_SHA - ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_sha1)); -#endif -#ifndef NO_SHA256 - ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_sha256)); -#endif - ExpectNull(wolfSSL_EVP_get_digestbynid(0)); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_MD_nid(void) -{ - EXPECT_DECLS; - -#ifndef NO_MD5 - ExpectIntEQ(EVP_MD_nid(EVP_md5()), NID_md5); -#endif -#ifndef NO_SHA - ExpectIntEQ(EVP_MD_nid(EVP_sha1()), NID_sha1); -#endif -#ifndef NO_SHA256 - ExpectIntEQ(EVP_MD_nid(EVP_sha256()), NID_sha256); -#endif - ExpectIntEQ(EVP_MD_nid(NULL), NID_undef); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_get0_EC_KEY(void) -{ - EXPECT_DECLS; -#if defined(HAVE_ECC) - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNull(EVP_PKEY_get0_EC_KEY(NULL)); - - ExpectNotNull(pkey = EVP_PKEY_new()); - ExpectNull(EVP_PKEY_get0_EC_KEY(pkey)); - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_X_STATE(void) -{ - EXPECT_DECLS; -#if !defined(NO_DES3) && !defined(NO_RC4) - byte key[DES3_KEY_SIZE] = {0}; - byte iv[DES_IV_SIZE] = {0}; - EVP_CIPHER_CTX *ctx = NULL; - const EVP_CIPHER *init = NULL; - - /* Bad test cases */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - ExpectNotNull(init = EVP_des_ede3_cbc()); - - wolfSSL_EVP_CIPHER_CTX_init(ctx); - 
ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - - ExpectNull(wolfSSL_EVP_X_STATE(NULL)); - ExpectNull(wolfSSL_EVP_X_STATE(ctx)); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Good test case */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - ExpectNotNull(init = wolfSSL_EVP_rc4()); - - wolfSSL_EVP_CIPHER_CTX_init(ctx); - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - - ExpectNotNull(wolfSSL_EVP_X_STATE(ctx)); - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_X_STATE_LEN(void) -{ - EXPECT_DECLS; -#if !defined(NO_DES3) && !defined(NO_RC4) - byte key[DES3_KEY_SIZE] = {0}; - byte iv[DES_IV_SIZE] = {0}; - EVP_CIPHER_CTX *ctx = NULL; - const EVP_CIPHER *init = NULL; - - /* Bad test cases */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - ExpectNotNull(init = EVP_des_ede3_cbc()); - - wolfSSL_EVP_CIPHER_CTX_init(ctx); - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(NULL), 0); - ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(ctx), 0); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Good test case */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - ExpectNotNull(init = wolfSSL_EVP_rc4()); - - wolfSSL_EVP_CIPHER_CTX_init(ctx); - ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(ctx), sizeof(Arc4)); - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_block_size(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AES_CBC) || defined(HAVE_AESGCM) || \ - defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AES_ECB) || \ - defined(WOLFSSL_AES_OFB) || !defined(NO_RC4) || \ - (defined(HAVE_CHACHA) && defined(HAVE_POLY1305)) - -#ifdef HAVE_AES_CBC - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_cbc()), AES_BLOCK_SIZE); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_cbc()), AES_BLOCK_SIZE); - 
#endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_cbc()), AES_BLOCK_SIZE); - #endif -#endif - -#ifdef HAVE_AESGCM - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_gcm()), 1); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_gcm()), 1); - #endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_gcm()), 1); - #endif -#endif - -#ifdef HAVE_AESCCM - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ccm()), 1); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ccm()), 1); - #endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ccm()), 1); - #endif -#endif - -#ifdef WOLFSSL_AES_COUNTER - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ctr()), 1); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ctr()), 1); - #endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ctr()), 1); - #endif -#endif - -#ifdef HAVE_AES_ECB - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ecb()), AES_BLOCK_SIZE); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ecb()), AES_BLOCK_SIZE); - #endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ecb()), AES_BLOCK_SIZE); - #endif -#endif - -#ifdef WOLFSSL_AES_OFB - #ifdef WOLFSSL_AES_128 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ofb()), 1); - #endif - #ifdef WOLFSSL_AES_192 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ofb()), 1); - #endif - #ifdef WOLFSSL_AES_256 - ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ofb()), 1); - #endif -#endif - -#ifndef NO_RC4 - ExpectIntEQ(EVP_CIPHER_block_size(wolfSSL_EVP_rc4()), 1); -#endif - -#if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) - ExpectIntEQ(EVP_CIPHER_block_size(wolfSSL_EVP_chacha20_poly1305()), 1); -#endif -#endif - -#ifdef WOLFSSL_SM4_ECB - 
ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ecb()), SM4_BLOCK_SIZE); -#endif -#ifdef WOLFSSL_SM4_CBC - ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_cbc()), SM4_BLOCK_SIZE); -#endif -#ifdef WOLFSSL_SM4_CTR - ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ctr()), 1); -#endif -#ifdef WOLFSSL_SM4_GCM - ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_gcm()), 1); -#endif -#ifdef WOLFSSL_SM4_CCM - ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ccm()), 1); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_CIPHER_iv_length(void) -{ - EXPECT_DECLS; - int nids[] = { - #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) - #ifdef WOLFSSL_AES_128 - NID_aes_128_cbc, - #endif - #ifdef WOLFSSL_AES_192 - NID_aes_192_cbc, - #endif - #ifdef WOLFSSL_AES_256 - NID_aes_256_cbc, - #endif - #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - #ifdef HAVE_AESGCM - #ifdef WOLFSSL_AES_128 - NID_aes_128_gcm, - #endif - #ifdef WOLFSSL_AES_192 - NID_aes_192_gcm, - #endif - #ifdef WOLFSSL_AES_256 - NID_aes_256_gcm, - #endif - #endif /* HAVE_AESGCM */ - #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - #ifdef WOLFSSL_AES_128 - NID_aes_128_ctr, - #endif - #ifdef WOLFSSL_AES_192 - NID_aes_192_ctr, - #endif - #ifdef WOLFSSL_AES_256 - NID_aes_256_ctr, - #endif - #endif - #ifndef NO_DES3 - NID_des_cbc, - NID_des_ede3_cbc, - #endif - #if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) - NID_chacha20_poly1305, - #endif - }; - int iv_lengths[] = { - #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) - #ifdef WOLFSSL_AES_128 - AES_BLOCK_SIZE, - #endif - #ifdef WOLFSSL_AES_192 - AES_BLOCK_SIZE, - #endif - #ifdef WOLFSSL_AES_256 - AES_BLOCK_SIZE, - #endif - #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ - #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ - (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) - #ifdef 
HAVE_AESGCM - #ifdef WOLFSSL_AES_128 - GCM_NONCE_MID_SZ, - #endif - #ifdef WOLFSSL_AES_192 - GCM_NONCE_MID_SZ, - #endif - #ifdef WOLFSSL_AES_256 - GCM_NONCE_MID_SZ, - #endif - #endif /* HAVE_AESGCM */ - #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ - #ifdef WOLFSSL_AES_COUNTER - #ifdef WOLFSSL_AES_128 - AES_BLOCK_SIZE, - #endif - #ifdef WOLFSSL_AES_192 - AES_BLOCK_SIZE, - #endif - #ifdef WOLFSSL_AES_256 - AES_BLOCK_SIZE, - #endif - #endif - #ifndef NO_DES3 - DES_BLOCK_SIZE, - DES_BLOCK_SIZE, - #endif - #if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) - CHACHA20_POLY1305_AEAD_IV_SIZE, - #endif - }; - int i; - int nidsLen = (sizeof(nids)/sizeof(int)); - - for (i = 0; i < nidsLen; i++) { - const EVP_CIPHER *c = EVP_get_cipherbynid(nids[i]); - ExpectIntEQ(EVP_CIPHER_iv_length(c), iv_lengths[i]); - } - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_SignInit_ex(void) -{ - EXPECT_DECLS; - WOLFSSL_EVP_MD_CTX mdCtx; - WOLFSSL_ENGINE* e = 0; - const EVP_MD* md = EVP_sha256(); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_SignInit_ex(&mdCtx, md, e), WOLFSSL_SUCCESS); - - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_DigestFinalXOF(void) -{ - EXPECT_DECLS; -#if defined(WOLFSSL_SHA3) && defined(WOLFSSL_SHAKE256) && defined(OPENSSL_ALL) - WOLFSSL_EVP_MD_CTX mdCtx; - unsigned char shake[256]; - unsigned char zeros[10]; - unsigned char data[] = "Test data"; - unsigned int sz; - - XMEMSET(zeros, 0, sizeof(zeros)); - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake256()), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_MD_flags(EVP_shake256()), EVP_MD_FLAG_XOF); - ExpectIntEQ(EVP_MD_flags(EVP_sha3_256()), 0); - ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), WOLFSSL_SUCCESS); - XMEMSET(shake, 0, sizeof(shake)); - ExpectIntEQ(EVP_DigestFinalXOF(&mdCtx, shake, 10), WOLFSSL_SUCCESS); - - /* make sure was only size of 10 */ - 
ExpectIntEQ(XMEMCMP(&shake[11], zeros, 10), 0); - ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); - - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake256()), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestFinal(&mdCtx, shake, &sz), WOLFSSL_SUCCESS); - ExpectIntEQ(sz, 32); - ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); - - #if defined(WOLFSSL_SHAKE128) - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake128()), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestFinal(&mdCtx, shake, &sz), WOLFSSL_SUCCESS); - ExpectIntEQ(sz, 16); - ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); - #endif -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_DigestFinal_ex(void) -{ - EXPECT_DECLS; -#if !defined(NO_SHA256) - WOLFSSL_EVP_MD_CTX mdCtx; - unsigned int s = 0; - unsigned char md[WC_SHA256_DIGEST_SIZE]; - unsigned char md2[WC_SHA256_DIGEST_SIZE]; - - /* Bad Case */ -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && \ - (HAVE_FIPS_VERSION > 2)) - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md, &s), 0); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); - -#else - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md, &s), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); - -#endif - - /* Good Case */ - wolfSSL_EVP_MD_CTX_init(&mdCtx); - ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, EVP_sha256()), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md2, &s), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_QT_EVP_PKEY_CTX_free(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) - EVP_PKEY* pkey = NULL; - EVP_PKEY_CTX* ctx = NULL; 
- - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - -#if defined(OPENSSL_VERSION_NUMBER) && OPENSSL_VERSION_NUMBER >= 0x10100000L - /* void */ - EVP_PKEY_CTX_free(ctx); -#else - /* int */ - ExpectIntEQ(EVP_PKEY_CTX_free(ctx), WOLFSSL_SUCCESS); -#endif - - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PKEY_param_check(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) -#if !defined(NO_DH) && defined(WOLFSSL_DH_EXTRA) && !defined(NO_FILESYSTEM) - - DH *dh = NULL; - DH *setDh = NULL; - EVP_PKEY *pkey = NULL; - EVP_PKEY_CTX* ctx = NULL; - - FILE* f = NULL; - unsigned char buf[512]; - const unsigned char* pt = buf; - const char* dh2048 = "./certs/dh2048.der"; - long len = 0; - int code = -1; - - XMEMSET(buf, 0, sizeof(buf)); - - ExpectTrue((f = XFOPEN(dh2048, "rb")) != XBADFILE); - ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); - if (f != XBADFILE) - XFCLOSE(f); - - /* Load dh2048.der into DH with internal format */ - ExpectNotNull(setDh = d2i_DHparams(NULL, &pt, len)); - ExpectIntEQ(DH_check(setDh, &code), WOLFSSL_SUCCESS); - ExpectIntEQ(code, 0); - code = -1; - - pkey = wolfSSL_EVP_PKEY_new(); - /* Set DH into PKEY */ - ExpectIntEQ(EVP_PKEY_set1_DH(pkey, setDh), WOLFSSL_SUCCESS); - /* create ctx from pkey */ - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_param_check(ctx), 1/* valid */); - - /* TODO: more invalid cases */ - ExpectIntEQ(EVP_PKEY_param_check(NULL), 0); - - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(pkey); - DH_free(setDh); - setDh = NULL; - DH_free(dh); - dh = NULL; -#endif -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_BytesToKey(void) -{ - EXPECT_DECLS; -#if !defined(NO_AES) && defined(HAVE_AES_CBC) - byte key[AES_BLOCK_SIZE] = {0}; - byte iv[AES_BLOCK_SIZE] = {0}; - int count = 0; - const EVP_MD* md = EVP_sha256(); - const EVP_CIPHER *type; - const unsigned char *salt = 
(unsigned char *)"salt1234"; - int sz = 5; - const byte data[] = { - 0x48,0x65,0x6c,0x6c,0x6f,0x20,0x57,0x6f, - 0x72,0x6c,0x64 - }; - - type = wolfSSL_EVP_get_cipherbynid(NID_aes_128_cbc); - - /* Bad cases */ - ExpectIntEQ(EVP_BytesToKey(NULL, md, salt, data, sz, count, key, iv), - 0); - ExpectIntEQ(EVP_BytesToKey(type, md, salt, NULL, sz, count, key, iv), - 16); - md = "2"; - ExpectIntEQ(EVP_BytesToKey(type, md, salt, data, sz, count, key, iv), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Good case */ - md = EVP_sha256(); - ExpectIntEQ(EVP_BytesToKey(type, md, salt, data, sz, count, key, iv), - 16); -#endif - return EXPECT_RESULT(); -} - -static int test_evp_cipher_aes_gcm(void) -{ - EXPECT_DECLS; -#if defined(HAVE_AESGCM) && ((!defined(HAVE_FIPS) && \ - !defined(HAVE_SELFTEST)) || (defined(HAVE_FIPS_VERSION) && \ - (HAVE_FIPS_VERSION >= 2))) && defined(WOLFSSL_AES_256) - /* - * This test checks data at various points in the encrypt/decrypt process - * against known values produced using the same test with OpenSSL. This - * interop testing is critical for verifying the correctness of our - * EVP_Cipher implementation with AES-GCM. Specifically, this test exercises - * a flow supported by OpenSSL that uses the control command - * EVP_CTRL_GCM_IV_GEN to increment the IV between cipher operations without - * the need to call EVP_CipherInit. OpenSSH uses this flow, for example. We - * had a bug with OpenSSH where wolfSSL OpenSSH servers could only talk to - * wolfSSL OpenSSH clients because there was a bug in this flow that - * happened to "cancel out" if both sides of the connection had the bug. 
- */ - enum { - NUM_ENCRYPTIONS = 3, - AAD_SIZE = 4 - }; - static const byte plainText1[] = { - 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, - 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23 - }; - static const byte plainText2[] = { - 0x42, 0x49, 0x3b, 0x27, 0x03, 0x35, 0x59, 0x14, 0x41, 0x47, 0x37, 0x14, - 0x0e, 0x34, 0x0d, 0x28, 0x63, 0x09, 0x0a, 0x5b, 0x22, 0x57, 0x42, 0x22, - 0x0f, 0x5c, 0x1e, 0x53, 0x45, 0x15, 0x62, 0x08, 0x60, 0x43, 0x50, 0x2c - }; - static const byte plainText3[] = { - 0x36, 0x0d, 0x2b, 0x09, 0x4a, 0x56, 0x3b, 0x4c, 0x21, 0x22, 0x58, 0x0e, - 0x5b, 0x57, 0x10 - }; - static const byte* plainTexts[NUM_ENCRYPTIONS] = { - plainText1, - plainText2, - plainText3 - }; - static const int plainTextSzs[NUM_ENCRYPTIONS] = { - sizeof(plainText1), - sizeof(plainText2), - sizeof(plainText3) - }; - static const byte aad1[AAD_SIZE] = { - 0x00, 0x00, 0x00, 0x01 - }; - static const byte aad2[AAD_SIZE] = { - 0x00, 0x00, 0x00, 0x10 - }; - static const byte aad3[AAD_SIZE] = { - 0x00, 0x00, 0x01, 0x00 - }; - static const byte* aads[NUM_ENCRYPTIONS] = { - aad1, - aad2, - aad3 - }; - const byte iv[GCM_NONCE_MID_SZ] = { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF - }; - byte currentIv[GCM_NONCE_MID_SZ]; - const byte key[] = { - 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, - 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, - 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f - }; - const byte expIvs[NUM_ENCRYPTIONS][GCM_NONCE_MID_SZ] = { - { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, - 0xEF - }, - { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, - 0xF0 - }, - { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, - 0xF1 - } - }; - const byte expTags[NUM_ENCRYPTIONS][AES_BLOCK_SIZE] = { - { - 0x65, 0x4F, 0xF7, 
0xA0, 0xBB, 0x7B, 0x90, 0xB7, 0x9C, 0xC8, 0x14, - 0x3D, 0x32, 0x18, 0x34, 0xA9 - }, - { - 0x50, 0x3A, 0x13, 0x8D, 0x91, 0x1D, 0xEC, 0xBB, 0xBA, 0x5B, 0x57, - 0xA2, 0xFD, 0x2D, 0x6B, 0x7F - }, - { - 0x3B, 0xED, 0x18, 0x9C, 0xB3, 0xE3, 0x61, 0x1E, 0x11, 0xEB, 0x13, - 0x5B, 0xEC, 0x52, 0x49, 0x32, - } - }; - static const byte expCipherText1[] = { - 0xCB, 0x93, 0x4F, 0xC8, 0x22, 0xE2, 0xC0, 0x35, 0xAA, 0x6B, 0x41, 0x15, - 0x17, 0x30, 0x2F, 0x97, 0x20, 0x74, 0x39, 0x28, 0xF8, 0xEB, 0xC5, 0x51, - 0x7B, 0xD9, 0x8A, 0x36, 0xB8, 0xDA, 0x24, 0x80, 0xE7, 0x9E, 0x09, 0xDE - }; - static const byte expCipherText2[] = { - 0xF9, 0x32, 0xE1, 0x87, 0x37, 0x0F, 0x04, 0xC1, 0xB5, 0x59, 0xF0, 0x45, - 0x3A, 0x0D, 0xA0, 0x26, 0xFF, 0xA6, 0x8D, 0x38, 0xFE, 0xB8, 0xE5, 0xC2, - 0x2A, 0x98, 0x4A, 0x54, 0x8F, 0x1F, 0xD6, 0x13, 0x03, 0xB2, 0x1B, 0xC0 - }; - static const byte expCipherText3[] = { - 0xD0, 0x37, 0x59, 0x1C, 0x2F, 0x85, 0x39, 0x4D, 0xED, 0xC2, 0x32, 0x5B, - 0x80, 0x5E, 0x6B, - }; - static const byte* expCipherTexts[NUM_ENCRYPTIONS] = { - expCipherText1, - expCipherText2, - expCipherText3 - }; - byte* cipherText = NULL; - byte* calcPlainText = NULL; - byte tag[AES_BLOCK_SIZE]; - EVP_CIPHER_CTX* encCtx = NULL; - EVP_CIPHER_CTX* decCtx = NULL; - int i, j, outl; - - /****************************************************/ - for (i = 0; i < 3; ++i) { - ExpectNotNull(encCtx = EVP_CIPHER_CTX_new()); - ExpectNotNull(decCtx = EVP_CIPHER_CTX_new()); - - /* First iteration, set key before IV. */ - if (i == 0) { - ExpectIntEQ(EVP_CipherInit(encCtx, EVP_aes_256_gcm(), key, NULL, 1), - SSL_SUCCESS); - - /* - * The call to EVP_CipherInit below (with NULL key) should clear the - * authIvGenEnable flag set by EVP_CTRL_GCM_SET_IV_FIXED. As such, a - * subsequent EVP_CTRL_GCM_IV_GEN should fail. This matches OpenSSL - * behavior. 
- */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_SET_IV_FIXED, -1, - (void*)iv), SSL_SUCCESS); - ExpectIntEQ(EVP_CipherInit(encCtx, NULL, NULL, iv, 1), - SSL_SUCCESS); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, - currentIv), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - ExpectIntEQ(EVP_CipherInit(decCtx, EVP_aes_256_gcm(), key, NULL, 0), - SSL_SUCCESS); - ExpectIntEQ(EVP_CipherInit(decCtx, NULL, NULL, iv, 0), - SSL_SUCCESS); - } - /* Second iteration, IV before key. */ - else { - ExpectIntEQ(EVP_CipherInit(encCtx, EVP_aes_256_gcm(), NULL, iv, 1), - SSL_SUCCESS); - ExpectIntEQ(EVP_CipherInit(encCtx, NULL, key, NULL, 1), - SSL_SUCCESS); - ExpectIntEQ(EVP_CipherInit(decCtx, EVP_aes_256_gcm(), NULL, iv, 0), - SSL_SUCCESS); - ExpectIntEQ(EVP_CipherInit(decCtx, NULL, key, NULL, 0), - SSL_SUCCESS); - } - - /* - * EVP_CTRL_GCM_IV_GEN should fail if EVP_CTRL_GCM_SET_IV_FIXED hasn't - * been issued first. - */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, - currentIv), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_SET_IV_FIXED, -1, - (void*)iv), SSL_SUCCESS); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_SET_IV_FIXED, -1, - (void*)iv), SSL_SUCCESS); - - for (j = 0; j < NUM_ENCRYPTIONS; ++j) { - /*************** Encrypt ***************/ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, - currentIv), SSL_SUCCESS); - /* Check current IV against expected. */ - ExpectIntEQ(XMEMCMP(currentIv, expIvs[j], GCM_NONCE_MID_SZ), 0); - - /* Add AAD. */ - if (i == 2) { - /* Test streaming API. */ - ExpectIntEQ(EVP_CipherUpdate(encCtx, NULL, &outl, aads[j], - AAD_SIZE), SSL_SUCCESS); - } - else { - ExpectIntEQ(EVP_Cipher(encCtx, NULL, (byte *)aads[j], AAD_SIZE), - AAD_SIZE); - } - - ExpectNotNull(cipherText = (byte*)XMALLOC(plainTextSzs[j], NULL, - DYNAMIC_TYPE_TMP_BUFFER)); - - /* Encrypt plaintext. 
*/ - if (i == 2) { - ExpectIntEQ(EVP_CipherUpdate(encCtx, cipherText, &outl, - plainTexts[j], plainTextSzs[j]), - SSL_SUCCESS); - } - else { - ExpectIntEQ(EVP_Cipher(encCtx, cipherText, (byte *)plainTexts[j], - plainTextSzs[j]), plainTextSzs[j]); - } - - if (i == 2) { - ExpectIntEQ(EVP_CipherFinal(encCtx, cipherText, &outl), - SSL_SUCCESS); - } - else { - /* - * Calling EVP_Cipher with NULL input and output for AES-GCM is - * akin to calling EVP_CipherFinal. - */ - ExpectIntGE(EVP_Cipher(encCtx, NULL, NULL, 0), 0); - } - - /* Check ciphertext against expected. */ - ExpectIntEQ(XMEMCMP(cipherText, expCipherTexts[j], plainTextSzs[j]), - 0); - - /* Get and check tag against expected. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_GET_TAG, - sizeof(tag), tag), SSL_SUCCESS); - ExpectIntEQ(XMEMCMP(tag, expTags[j], sizeof(tag)), 0); - - /*************** Decrypt ***************/ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_IV_GEN, -1, - currentIv), SSL_SUCCESS); - /* Check current IV against expected. */ - ExpectIntEQ(XMEMCMP(currentIv, expIvs[j], GCM_NONCE_MID_SZ), 0); - - /* Add AAD. */ - if (i == 2) { - /* Test streaming API. */ - ExpectIntEQ(EVP_CipherUpdate(decCtx, NULL, &outl, aads[j], - AAD_SIZE), SSL_SUCCESS); - } - else { - ExpectIntEQ(EVP_Cipher(decCtx, NULL, (byte *)aads[j], AAD_SIZE), - AAD_SIZE); - } - - /* Set expected tag. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_SET_TAG, - sizeof(tag), tag), SSL_SUCCESS); - - /* Decrypt ciphertext. */ - ExpectNotNull(calcPlainText = (byte*)XMALLOC(plainTextSzs[j], NULL, - DYNAMIC_TYPE_TMP_BUFFER)); - if (i == 2) { - ExpectIntEQ(EVP_CipherUpdate(decCtx, calcPlainText, &outl, - cipherText, plainTextSzs[j]), - SSL_SUCCESS); - } - else { - /* This first EVP_Cipher call will check the tag, too. 
*/ - ExpectIntEQ(EVP_Cipher(decCtx, calcPlainText, cipherText, - plainTextSzs[j]), plainTextSzs[j]); - } - - if (i == 2) { - ExpectIntEQ(EVP_CipherFinal(decCtx, calcPlainText, &outl), - SSL_SUCCESS); - } - else { - ExpectIntGE(EVP_Cipher(decCtx, NULL, NULL, 0), 0); - } - - /* Check plaintext against expected. */ - ExpectIntEQ(XMEMCMP(calcPlainText, plainTexts[j], plainTextSzs[j]), - 0); - - XFREE(cipherText, NULL, DYNAMIC_TYPE_TMP_BUFFER); - cipherText = NULL; - XFREE(calcPlainText, NULL, DYNAMIC_TYPE_TMP_BUFFER); - calcPlainText = NULL; - } - - EVP_CIPHER_CTX_free(encCtx); - encCtx = NULL; - EVP_CIPHER_CTX_free(decCtx); - decCtx = NULL; - } -#endif - return EXPECT_RESULT(); -} -static int test_wolfSSL_OBJ_ln(void) -{ - EXPECT_DECLS; - const int nid_set[] = { - NID_commonName, - NID_serialNumber, - NID_countryName, - NID_localityName, - NID_stateOrProvinceName, - NID_organizationName, - NID_organizationalUnitName, - NID_domainComponent, - NID_businessCategory, - NID_jurisdictionCountryName, - NID_jurisdictionStateOrProvinceName, - NID_emailAddress - }; - const char* ln_set[] = { - "commonName", - "serialNumber", - "countryName", - "localityName", - "stateOrProvinceName", - "organizationName", - "organizationalUnitName", - "domainComponent", - "businessCategory", - "jurisdictionCountryName", - "jurisdictionStateOrProvinceName", - "emailAddress", - }; - size_t i = 0, maxIdx = sizeof(ln_set)/sizeof(char*); - - ExpectIntEQ(OBJ_ln2nid(NULL), NID_undef); - -#ifdef HAVE_ECC -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - { - EC_builtin_curve r[27]; - size_t nCurves = sizeof(r) / sizeof(r[0]); - nCurves = EC_get_builtin_curves(r, nCurves); - - for (i = 0; i < nCurves; i++) { - /* skip ECC_CURVE_INVALID */ - if (r[i].nid != ECC_CURVE_INVALID) { - ExpectIntEQ(OBJ_ln2nid(r[i].comment), r[i].nid); - ExpectStrEQ(OBJ_nid2ln(r[i].nid), r[i].comment); - } - } - } -#endif -#endif - - for (i = 0; i < maxIdx; i++) { - 
ExpectIntEQ(OBJ_ln2nid(ln_set[i]), nid_set[i]); - ExpectStrEQ(OBJ_nid2ln(nid_set[i]), ln_set[i]); - } - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_OBJ_sn(void) -{ - EXPECT_DECLS; - int i = 0, maxIdx = 7; - const int nid_set[] = {NID_commonName,NID_countryName,NID_localityName, - NID_stateOrProvinceName,NID_organizationName, - NID_organizationalUnitName,NID_emailAddress}; - const char* sn_open_set[] = {"CN","C","L","ST","O","OU","emailAddress"}; - - ExpectIntEQ(wolfSSL_OBJ_sn2nid(NULL), NID_undef); - for (i = 0; i < maxIdx; i++) { - ExpectIntEQ(wolfSSL_OBJ_sn2nid(sn_open_set[i]), nid_set[i]); - ExpectStrEQ(wolfSSL_OBJ_nid2sn(nid_set[i]), sn_open_set[i]); - } - - return EXPECT_RESULT(); -} - #if !defined(NO_BIO) static word32 TXT_DB_hash(const WOLFSSL_STRING *s) { @@ -25255,24 +18211,6 @@ static int test_wolfSSL_NCONF(void) } #endif /* OPENSSL_ALL */ -static int test_wolfSSL_EVP_PKEY_up_ref(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) - EVP_PKEY* pkey; - - pkey = EVP_PKEY_new(); - ExpectNotNull(pkey); - ExpectIntEQ(EVP_PKEY_up_ref(NULL), 0); - ExpectIntEQ(EVP_PKEY_up_ref(pkey), 1); - EVP_PKEY_free(pkey); - ExpectIntEQ(EVP_PKEY_up_ref(pkey), 1); - EVP_PKEY_free(pkey); - EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - static int test_wolfSSL_d2i_and_i2d_PublicKey(void) { EXPECT_DECLS; @@ -26092,155 +19030,6 @@ static int test_wolfSSL_OCSP_REQ_CTX(void) return EXPECT_RESULT(); } -static int test_wolfSSL_EVP_PKEY_derive(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) || defined(WOLFSSL_OPENSSH) -#if (!defined(NO_DH) && defined(WOLFSSL_DH_EXTRA)) || defined(HAVE_ECC) - EVP_PKEY_CTX *ctx = NULL; - unsigned char *skey = NULL; - size_t skeylen; - EVP_PKEY *pkey = NULL; - EVP_PKEY *peerkey = NULL; - const unsigned char* key; - -#if !defined(NO_DH) && defined(WOLFSSL_DH_EXTRA) - /* DH */ - key = dh_key_der_2048; - ExpectNotNull((pkey = d2i_PrivateKey(EVP_PKEY_DH, NULL, &key, - sizeof_dh_key_der_2048))); - 
ExpectIntEQ(DH_generate_key(EVP_PKEY_get0_DH(pkey)), 1); - key = dh_key_der_2048; - ExpectNotNull((peerkey = d2i_PrivateKey(EVP_PKEY_DH, NULL, &key, - sizeof_dh_key_der_2048))); - ExpectIntEQ(DH_generate_key(EVP_PKEY_get0_DH(peerkey)), 1); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_derive_init(ctx), 1); - ExpectIntEQ(EVP_PKEY_derive_set_peer(ctx, peerkey), 1); - ExpectIntEQ(EVP_PKEY_derive(ctx, NULL, &skeylen), 1); - ExpectNotNull(skey = (unsigned char*)XMALLOC(skeylen, NULL, - DYNAMIC_TYPE_OPENSSL)); - ExpectIntEQ(EVP_PKEY_derive(ctx, skey, &skeylen), 1); - - EVP_PKEY_CTX_free(ctx); - ctx = NULL; - EVP_PKEY_free(peerkey); - peerkey = NULL; - EVP_PKEY_free(pkey); - pkey = NULL; - XFREE(skey, NULL, DYNAMIC_TYPE_OPENSSL); - skey = NULL; -#endif - -#ifdef HAVE_ECC - /* ECDH */ - key = ecc_clikey_der_256; - ExpectNotNull((pkey = d2i_PrivateKey(EVP_PKEY_EC, NULL, &key, - sizeof_ecc_clikey_der_256))); - key = ecc_clikeypub_der_256; - ExpectNotNull((peerkey = d2i_PUBKEY(NULL, &key, - sizeof_ecc_clikeypub_der_256))); - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_derive_init(ctx), 1); - ExpectIntEQ(EVP_PKEY_derive_set_peer(ctx, peerkey), 1); - ExpectIntEQ(EVP_PKEY_derive(ctx, NULL, &skeylen), 1); - ExpectNotNull(skey = (unsigned char*)XMALLOC(skeylen, NULL, - DYNAMIC_TYPE_OPENSSL)); - ExpectIntEQ(EVP_PKEY_derive(ctx, skey, &skeylen), 1); - - EVP_PKEY_CTX_free(ctx); - EVP_PKEY_free(peerkey); - EVP_PKEY_free(pkey); - XFREE(skey, NULL, DYNAMIC_TYPE_OPENSSL); -#endif /* HAVE_ECC */ -#endif /* (!NO_DH && WOLFSSL_DH_EXTRA) || HAVE_ECC */ -#endif /* OPENSSL_ALL || WOLFSSL_QT || WOLFSSL_OPENSSH */ - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_PBE_scrypt(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_SCRYPT) && defined(HAVE_PBKDF2) && \ - (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 5)) -#if !defined(NO_PWDBASED) && !defined(NO_SHA256) - int ret; - - const char pwd[] = 
{'p','a','s','s','w','o','r','d'}; - int pwdlen = sizeof(pwd); - const byte salt[] = {'N','a','C','l'}; - int saltlen = sizeof(salt); - byte key[80]; - word64 numOvr32 = (word64)INT32_MAX + 1; - - /* expected derived key for N:16, r:1, p:1 */ - const byte expectedKey[] = { - 0xAE, 0xC6, 0xB7, 0x48, 0x3E, 0xD2, 0x6E, 0x08, 0x80, 0x2B, - 0x41, 0xF4, 0x03, 0x20, 0x86, 0xA0, 0xE8, 0x86, 0xBE, 0x7A, - 0xC4, 0x8F, 0xCF, 0xD9, 0x2F, 0xF0, 0xCE, 0xF8, 0x10, 0x97, - 0x52, 0xF4, 0xAC, 0x74, 0xB0, 0x77, 0x26, 0x32, 0x56, 0xA6, - 0x5A, 0x99, 0x70, 0x1B, 0x7A, 0x30, 0x4D, 0x46, 0x61, 0x1C, - 0x8A, 0xA3, 0x91, 0xE7, 0x99, 0xCE, 0x10, 0xA2, 0x77, 0x53, - 0xE7, 0xE9, 0xC0, 0x9A}; - - /* N r p mx key keylen */ - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 0, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* N must be greater than 1 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 3, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* N must be power of 2 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 0, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* r must be greater than 0 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 0, 0, key, 64); - ExpectIntEQ(ret, 0); /* p must be greater than 0 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, key, 0); - ExpectIntEQ(ret, 0); /* keylen must be greater than 0 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 9, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* r must be smaller than 9 */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, NULL, 64); - ExpectIntEQ(ret, 1); /* should succeed if key is NULL */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 1); /* should succeed */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, numOvr32, 1, 0, - key, 64); - ExpectIntEQ(ret, 0); /* should fail since r is greater than INT32_MAC */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, numOvr32, 0, - key, 64); - ExpectIntEQ(ret, 0); /* 
should fail since p is greater than INT32_MAC */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, NULL, 0, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 1); /* should succeed even if salt is NULL */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, NULL, 4, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* if salt is NULL, saltlen must be 0, otherwise fail*/ - - ret = EVP_PBE_scrypt(NULL, 0, salt, saltlen, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 1); /* should succeed if pwd is NULL and pwdlen is 0*/ - - ret = EVP_PBE_scrypt(NULL, 4, salt, saltlen, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 0); /* if pwd is NULL, pwdlen must be 0 */ - - ret = EVP_PBE_scrypt(NULL, 0, NULL, 0, 2, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 1); /* should succeed even both pwd and salt are NULL */ - - ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 16, 1, 1, 0, key, 64); - ExpectIntEQ(ret, 1); - - ret = XMEMCMP(expectedKey, key, sizeof(expectedKey)); - ExpectIntEQ(ret, 0); /* derived key must be the same as expected-key */ -#endif /* !NO_PWDBASED && !NO_SHA256 */ -#endif /* OPENSSL_EXTRA && HAVE_SCRYPT && HAVE_PBKDF2 */ - return EXPECT_RESULT(); -} - static int test_no_op_functions(void) { EXPECT_DECLS; @@ -27029,511 +19818,6 @@ static int test_MakeCertWithCaFalse(void) return EXPECT_RESULT(); } -static int test_wolfSSL_EVP_PKEY_encrypt(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) - WOLFSSL_RSA* rsa = NULL; - WOLFSSL_EVP_PKEY* pkey = NULL; - WOLFSSL_EVP_PKEY_CTX* ctx = NULL; - const char* in = "What is easy to do is easy not to do."; - size_t inlen = XSTRLEN(in); - size_t outEncLen = 0; - byte* outEnc = NULL; - byte* outDec = NULL; - size_t outDecLen = 0; - size_t rsaKeySz = 2048/8; /* Bytes */ -#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) - byte* inTmp = NULL; - byte* outEncTmp = NULL; - byte* outDecTmp = NULL; -#endif - - ExpectNotNull(outEnc = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - if (outEnc != NULL) { - XMEMSET(outEnc, 0, 
rsaKeySz); - } - ExpectNotNull(outDec = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - if (outDec != NULL) { - XMEMSET(outDec, 0, rsaKeySz); - } - - ExpectNotNull(rsa = RSA_generate_key(2048, 3, NULL, NULL)); - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - RSA_free(rsa); - } - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_encrypt_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING), - WOLFSSL_SUCCESS); - - /* Test pkey references count is decremented. pkey shouldn't be destroyed - since ctx uses it.*/ - ExpectIntEQ(pkey->ref.count, 2); - EVP_PKEY_free(pkey); - ExpectIntEQ(pkey->ref.count, 1); - - /* Encrypt data */ - /* Check that we can get the required output buffer length by passing in a - * NULL output buffer. */ - ExpectIntEQ(EVP_PKEY_encrypt(ctx, NULL, &outEncLen, - (const unsigned char*)in, inlen), WOLFSSL_SUCCESS); - ExpectIntEQ(rsaKeySz, outEncLen); - /* Now do the actual encryption. */ - ExpectIntEQ(EVP_PKEY_encrypt(ctx, outEnc, &outEncLen, - (const unsigned char*)in, inlen), WOLFSSL_SUCCESS); - - /* Decrypt data */ - ExpectIntEQ(EVP_PKEY_decrypt_init(ctx), WOLFSSL_SUCCESS); - /* Check that we can get the required output buffer length by passing in a - * NULL output buffer. */ - ExpectIntEQ(EVP_PKEY_decrypt(ctx, NULL, &outDecLen, outEnc, outEncLen), - WOLFSSL_SUCCESS); - ExpectIntEQ(rsaKeySz, outDecLen); - /* Now do the actual decryption. 
*/ - ExpectIntEQ(EVP_PKEY_decrypt(ctx, outDec, &outDecLen, outEnc, outEncLen), - WOLFSSL_SUCCESS); - - ExpectIntEQ(XMEMCMP(in, outDec, outDecLen), 0); - -#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) - /* The input length must be the same size as the RSA key.*/ - ExpectNotNull(inTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - if (inTmp != NULL) { - XMEMSET(inTmp, 9, rsaKeySz); - } - ExpectNotNull(outEncTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - if (outEncTmp != NULL) { - XMEMSET(outEncTmp, 0, rsaKeySz); - } - ExpectNotNull(outDecTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, - DYNAMIC_TYPE_TMP_BUFFER)); - if (outDecTmp != NULL) { - XMEMSET(outDecTmp, 0, rsaKeySz); - } - ExpectIntEQ(EVP_PKEY_encrypt_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_NO_PADDING), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_encrypt(ctx, outEncTmp, &outEncLen, inTmp, rsaKeySz), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_decrypt_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_decrypt(ctx, outDecTmp, &outDecLen, outEncTmp, - outEncLen), WOLFSSL_SUCCESS); - ExpectIntEQ(XMEMCMP(inTmp, outDecTmp, outDecLen), 0); -#endif - EVP_PKEY_CTX_free(ctx); - XFREE(outEnc, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - XFREE(outDec, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); -#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) - XFREE(inTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - XFREE(outEncTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - XFREE(outDecTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); -#endif -#endif - return EXPECT_RESULT(); -} - -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #endif -#endif -#endif -#if defined(OPENSSL_EXTRA) -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && 
defined(WOLFSSL_KEY_GEN) - #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #endif -#endif -#endif -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY - #endif -#endif -#endif - -#ifdef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY -static int test_wolfSSL_EVP_PKEY_sign_verify(int keyType) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - WOLFSSL_RSA* rsa = NULL; -#endif -#endif -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) - WOLFSSL_DSA* dsa = NULL; -#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - WOLFSSL_EC_KEY* ecKey = NULL; -#endif -#endif - WOLFSSL_EVP_PKEY* pkey = NULL; - WOLFSSL_EVP_PKEY_CTX* ctx = NULL; - WOLFSSL_EVP_PKEY_CTX* ctx_verify = NULL; - const char* in = "What is easy to do is easy not to do."; - size_t inlen = XSTRLEN(in); - byte hash[SHA256_DIGEST_LENGTH] = {0}; - byte zero[SHA256_DIGEST_LENGTH] = {0}; - SHA256_CTX c; - byte* sig = NULL; - byte* sigVerify = NULL; - size_t siglen; - size_t siglenOnlyLen; - size_t keySz = 2048/8; /* Bytes */ - - ExpectNotNull(sig = - (byte*)XMALLOC(keySz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); - ExpectNotNull(sigVerify = - (byte*)XMALLOC(keySz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); - - siglen = keySz; - ExpectNotNull(XMEMSET(sig, 0, keySz)); - ExpectNotNull(XMEMSET(sigVerify, 0, keySz)); - - /* Generate hash */ - SHA256_Init(&c); - SHA256_Update(&c, in, inlen); - SHA256_Final(hash, &c); -#ifdef WOLFSSL_SMALL_STACK_CACHE - /* workaround for small stack cache case */ - 
wc_Sha256Free((wc_Sha256*)&c); -#endif - - /* Generate key */ - ExpectNotNull(pkey = EVP_PKEY_new()); - switch (keyType) { - case EVP_PKEY_RSA: -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - { - ExpectNotNull(rsa = RSA_generate_key(2048, 3, NULL, NULL)); - ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); - } -#endif -#endif - break; - case EVP_PKEY_DSA: -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) - ExpectNotNull(dsa = DSA_new()); - ExpectIntEQ(DSA_generate_parameters_ex(dsa, 2048, - NULL, 0, NULL, NULL, NULL), 1); - ExpectIntEQ(DSA_generate_key(dsa), 1); - ExpectIntEQ(EVP_PKEY_set1_DSA(pkey, dsa), WOLFSSL_SUCCESS); -#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ - break; - case EVP_PKEY_EC: -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - { - ExpectNotNull(ecKey = EC_KEY_new()); - ExpectIntEQ(EC_KEY_generate_key(ecKey), 1); - ExpectIntEQ( - EVP_PKEY_assign_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - EC_KEY_free(ecKey); - } - } -#endif -#endif - break; - } - ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - if (keyType == EVP_PKEY_RSA) - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING), - WOLFSSL_SUCCESS); -#endif -#endif - - /* Check returning only length */ - ExpectIntEQ(EVP_PKEY_sign(ctx, NULL, &siglenOnlyLen, hash, - SHA256_DIGEST_LENGTH), WOLFSSL_SUCCESS); - ExpectIntGT(siglenOnlyLen, 0); - /* Sign data */ - ExpectIntEQ(EVP_PKEY_sign(ctx, sig, &siglen, hash, - SHA256_DIGEST_LENGTH), WOLFSSL_SUCCESS); 
- ExpectIntGE(siglenOnlyLen, siglen); - - /* Verify signature */ - ExpectNotNull(ctx_verify = EVP_PKEY_CTX_new(pkey, NULL)); - ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - if (keyType == EVP_PKEY_RSA) - ExpectIntEQ( - EVP_PKEY_CTX_set_rsa_padding(ctx_verify, RSA_PKCS1_PADDING), - WOLFSSL_SUCCESS); -#endif -#endif - ExpectIntEQ(EVP_PKEY_verify( - ctx_verify, sig, siglen, hash, SHA256_DIGEST_LENGTH), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_verify( - ctx_verify, sig, siglen, zero, SHA256_DIGEST_LENGTH), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - if (keyType == EVP_PKEY_RSA) { - #if defined(WC_RSA_NO_PADDING) || defined(WC_RSA_DIRECT) - /* Try RSA sign/verify with no padding. */ - ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_NO_PADDING), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_sign(ctx, sigVerify, &siglen, sig, - siglen), WOLFSSL_SUCCESS); - ExpectIntGE(siglenOnlyLen, siglen); - ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, - RSA_NO_PADDING), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_verify(ctx_verify, sigVerify, siglen, sig, - siglen), WOLFSSL_SUCCESS); - #endif - - /* Wrong padding schemes. 
*/ - ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, - RSA_PKCS1_OAEP_PADDING), WOLFSSL_SUCCESS); - ExpectIntNE(EVP_PKEY_sign(ctx, sigVerify, &siglen, sig, - siglen), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, - RSA_PKCS1_OAEP_PADDING), WOLFSSL_SUCCESS); - ExpectIntNE(EVP_PKEY_verify(ctx_verify, sigVerify, siglen, sig, - siglen), WOLFSSL_SUCCESS); - - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, - RSA_PKCS1_PADDING), WOLFSSL_SUCCESS); - } -#endif -#endif - - /* error cases */ - siglen = keySz; /* Reset because sig size may vary slightly */ - ExpectIntNE(EVP_PKEY_sign_init(NULL), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); - ExpectIntNE(EVP_PKEY_sign(NULL, sig, &siglen, (byte*)in, inlen), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_sign(ctx, sig, &siglen, (byte*)in, inlen), - WOLFSSL_SUCCESS); - - EVP_PKEY_free(pkey); - pkey = NULL; -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) - DSA_free(dsa); - dsa = NULL; -#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ - EVP_PKEY_CTX_free(ctx_verify); - ctx_verify = NULL; - EVP_PKEY_CTX_free(ctx); - ctx = NULL; - - XFREE(sig, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - XFREE(sigVerify, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); -#endif /* OPENSSL_EXTRA */ - return EXPECT_RESULT(); -} -#endif - -static int test_wolfSSL_EVP_PKEY_sign_verify_rsa(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ - !defined(HAVE_SELFTEST) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_RSA), TEST_SUCCESS); -#endif -#endif - return EXPECT_RESULT(); -} -static int 
test_wolfSSL_EVP_PKEY_sign_verify_dsa(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) -#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) - ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_DSA), TEST_SUCCESS); -#endif -#endif - return EXPECT_RESULT(); -} -static int test_wolfSSL_EVP_PKEY_sign_verify_ec(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_EC), TEST_SUCCESS); -#endif -#endif - return EXPECT_RESULT(); -} - -static int test_EVP_PKEY_rsa(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) - WOLFSSL_RSA* rsa = NULL; - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNotNull(rsa = wolfSSL_RSA_new()); - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectIntEQ(EVP_PKEY_assign_RSA(NULL, rsa), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_RSA_free(rsa); - } - ExpectPtrEq(EVP_PKEY_get0_RSA(pkey), rsa); - wolfSSL_EVP_PKEY_free(pkey); -#endif - return EXPECT_RESULT(); -} - -static int test_EVP_PKEY_ec(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) -#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) - WOLFSSL_EC_KEY* ecKey = NULL; - WOLFSSL_EVP_PKEY* pkey = NULL; - - ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); - ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); - ExpectIntEQ(EVP_PKEY_assign_EC_KEY(NULL, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Should fail since ecKey is empty */ - ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, ecKey), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); - 
ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); - if (EXPECT_FAIL()) { - wolfSSL_EC_KEY_free(ecKey); - } - wolfSSL_EVP_PKEY_free(pkey); -#endif -#endif - return EXPECT_RESULT(); -} - -static int test_EVP_PKEY_cmp(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) - EVP_PKEY *a = NULL; - EVP_PKEY *b = NULL; - const unsigned char *in; - -#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048) - in = client_key_der_2048; - ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - &in, (long)sizeof_client_key_der_2048)); - in = client_key_der_2048; - ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - &in, (long)sizeof_client_key_der_2048)); - - /* Test success case RSA */ -#if defined(WOLFSSL_ERROR_CODE_OPENSSL) - ExpectIntEQ(EVP_PKEY_cmp(a, b), 1); -#else - ExpectIntEQ(EVP_PKEY_cmp(a, b), 0); -#endif /* WOLFSSL_ERROR_CODE_OPENSSL */ - - EVP_PKEY_free(b); - b = NULL; - EVP_PKEY_free(a); - a = NULL; -#endif - -#if defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) - in = ecc_clikey_der_256; - ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, - &in, (long)sizeof_ecc_clikey_der_256)); - in = ecc_clikey_der_256; - ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, - &in, (long)sizeof_ecc_clikey_der_256)); - - /* Test success case ECC */ -#if defined(WOLFSSL_ERROR_CODE_OPENSSL) - ExpectIntEQ(EVP_PKEY_cmp(a, b), 1); -#else - ExpectIntEQ(EVP_PKEY_cmp(a, b), 0); -#endif /* WOLFSSL_ERROR_CODE_OPENSSL */ - - EVP_PKEY_free(b); - b = NULL; - EVP_PKEY_free(a); - a = NULL; -#endif - - /* Test failure cases */ -#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048) && \ - defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) - - in = client_key_der_2048; - ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, - &in, (long)sizeof_client_key_der_2048)); - in = ecc_clikey_der_256; - ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, - &in, (long)sizeof_ecc_clikey_der_256)); - -#if defined(WOLFSSL_ERROR_CODE_OPENSSL) - 
ExpectIntEQ(EVP_PKEY_cmp(a, b), -1); -#else - ExpectIntNE(EVP_PKEY_cmp(a, b), 0); -#endif /* WOLFSSL_ERROR_CODE_OPENSSL */ - EVP_PKEY_free(b); - b = NULL; - EVP_PKEY_free(a); - a = NULL; -#endif - - /* invalid or empty failure cases */ - a = EVP_PKEY_new(); - b = EVP_PKEY_new(); -#if defined(WOLFSSL_ERROR_CODE_OPENSSL) - ExpectIntEQ(EVP_PKEY_cmp(NULL, NULL), 0); - ExpectIntEQ(EVP_PKEY_cmp(a, NULL), 0); - ExpectIntEQ(EVP_PKEY_cmp(NULL, b), 0); -#ifdef NO_RSA - /* Type check will fail since RSA is the default EVP key type */ - ExpectIntEQ(EVP_PKEY_cmp(a, b), -2); -#else - ExpectIntEQ(EVP_PKEY_cmp(a, b), 0); -#endif -#else - ExpectIntNE(EVP_PKEY_cmp(NULL, NULL), 0); - ExpectIntNE(EVP_PKEY_cmp(a, NULL), 0); - ExpectIntNE(EVP_PKEY_cmp(NULL, b), 0); - ExpectIntNE(EVP_PKEY_cmp(a, b), 0); -#endif - EVP_PKEY_free(b); - EVP_PKEY_free(a); - - (void)in; -#endif - return EXPECT_RESULT(); -} - static int test_ERR_load_crypto_strings(void) { #if defined(OPENSSL_ALL) @@ -27710,10 +19994,25 @@ static int test_sk_X509_CRL(void) ExpectIntEQ(BIO_get_mem_data(bio, NULL), 1324); #endif BIO_free(bio); - + bio = NULL; wolfSSL_X509_CRL_free(crl); crl = NULL; -#endif + +#ifndef NO_ASN_TIME + /* Test CRL with invalid GeneralizedTime */ + ExpectNotNull(bio = BIO_new_file("./certs/crl/bad_time_fmt.pem", "rb")); + ExpectNotNull(crl = PEM_read_bio_X509_CRL(bio, NULL, NULL, NULL)); + BIO_free(bio); + bio = NULL; + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + ExpectIntEQ(wolfSSL_X509_CRL_print(bio, crl), WOLFSSL_FAILURE); + + BIO_free(bio); + bio = NULL; + wolfSSL_X509_CRL_free(crl); + crl = NULL; +#endif /* !NO_ASN_TIME */ +#endif /* !NO_BIO */ #if !defined(NO_FILESYSTEM) && !defined(NO_STDIO_FILESYSTEM) ExpectTrue((fp = XFOPEN("./certs/crl/crl.der", "rb")) != XBADFILE); @@ -28127,817 +20426,6 @@ static int test_wolfSSL_X509_REQ_print(void) return EXPECT_RESULT(); } -static int test_wolfssl_PKCS7(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_BIO) && \ - 
!defined(NO_RSA) - PKCS7* pkcs7 = NULL; - byte data[FOURK_BUF]; - word32 len = sizeof(data); - const byte* p = data; - byte content[] = "Test data to encode."; -#if !defined(NO_RSA) & defined(USE_CERT_BUFFERS_2048) - BIO* bio = NULL; - byte key[sizeof(client_key_der_2048)]; - word32 keySz = (word32)sizeof(key); - byte* out = NULL; -#endif - - ExpectIntGT((len = (word32)CreatePKCS7SignedData(data, (int)len, content, - (word32)sizeof(content), 0, 0, 0, RSA_TYPE)), 0); - - ExpectNull(pkcs7 = d2i_PKCS7(NULL, NULL, (int)len)); - ExpectNull(pkcs7 = d2i_PKCS7(NULL, &p, 0)); - ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); - ExpectIntEQ(wolfSSL_PKCS7_verify(NULL, NULL, NULL, NULL, NULL, - PKCS7_NOVERIFY), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* fail case, without PKCS7_NOVERIFY */ - p = data; - ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, NULL, NULL, - 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* success case, with PKCS7_NOVERIFY */ - p = data; - ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, NULL, NULL, - PKCS7_NOVERIFY), WOLFSSL_SUCCESS); - -#if !defined(NO_RSA) & defined(USE_CERT_BUFFERS_2048) - /* test i2d */ - XMEMCPY(key, client_key_der_2048, keySz); - if (pkcs7 != NULL) { - pkcs7->privateKey = key; - pkcs7->privateKeySz = (word32)sizeof(key); - pkcs7->encryptOID = RSAk; - #ifdef NO_SHA - pkcs7->hashOID = SHA256h; - #else - pkcs7->hashOID = SHAh; - #endif - } - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - ExpectIntEQ(i2d_PKCS7_bio(bio, pkcs7), 1); -#ifndef NO_ASN_TIME - ExpectIntEQ(i2d_PKCS7(pkcs7, &out), 655); -#else - ExpectIntEQ(i2d_PKCS7(pkcs7, &out), 625); -#endif - XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); - BIO_free(bio); -#endif - - PKCS7_free(NULL); - PKCS7_free(pkcs7); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_PKCS7_sign(void) -{ 
- EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_BIO) && \ - !defined(NO_FILESYSTEM) && !defined(NO_RSA) - - PKCS7* p7 = NULL; - PKCS7* p7Ver = NULL; - byte* out = NULL; - byte* tmpPtr = NULL; - int outLen = 0; - int flags = 0; - byte data[] = "Test data to encode."; - - const char* cert = "./certs/server-cert.pem"; - const char* key = "./certs/server-key.pem"; - const char* ca = "./certs/ca-cert.pem"; - - WOLFSSL_BIO* certBio = NULL; - WOLFSSL_BIO* keyBio = NULL; - WOLFSSL_BIO* caBio = NULL; - WOLFSSL_BIO* inBio = NULL; - X509* signCert = NULL; - EVP_PKEY* signKey = NULL; - X509* caCert = NULL; - X509_STORE* store = NULL; -#ifndef NO_PKCS7_STREAM - int z; - int ret; -#endif /* !NO_PKCS7_STREAM */ - - /* read signer cert/key into BIO */ - ExpectNotNull(certBio = BIO_new_file(cert, "r")); - ExpectNotNull(keyBio = BIO_new_file(key, "r")); - ExpectNotNull(signCert = PEM_read_bio_X509(certBio, NULL, 0, NULL)); - ExpectNotNull(signKey = PEM_read_bio_PrivateKey(keyBio, NULL, 0, NULL)); - - /* read CA cert into store (for verify) */ - ExpectNotNull(caBio = BIO_new_file(ca, "r")); - ExpectNotNull(caCert = PEM_read_bio_X509(caBio, NULL, 0, NULL)); - ExpectNotNull(store = X509_STORE_new()); - ExpectIntEQ(X509_STORE_add_cert(store, caCert), 1); - - /* data to be signed into BIO */ - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - /* PKCS7_sign, bad args: signer NULL */ - ExpectNull(p7 = PKCS7_sign(NULL, signKey, NULL, inBio, 0)); - /* PKCS7_sign, bad args: signer key NULL */ - ExpectNull(p7 = PKCS7_sign(signCert, NULL, NULL, inBio, 0)); - /* PKCS7_sign, bad args: in data NULL without PKCS7_STREAM */ - ExpectNull(p7 = PKCS7_sign(signCert, signKey, NULL, NULL, 0)); - /* PKCS7_sign, bad args: PKCS7_NOCERTS flag not supported */ - ExpectNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, PKCS7_NOCERTS)); - /* PKCS7_sign, bad args: PKCS7_PARTIAL flag not supported */ - ExpectNull(p7 = 
PKCS7_sign(signCert, signKey, NULL, inBio, PKCS7_PARTIAL)); - - /* TEST SUCCESS: Not detached, not streaming, not MIME */ - { - flags = PKCS7_BINARY; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); - - /* verify with d2i_PKCS7 */ - tmpPtr = out; - ExpectNotNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); - PKCS7_free(p7Ver); - p7Ver = NULL; - - /* verify with wc_PKCS7_VerifySignedData */ - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - ExpectIntEQ(wc_PKCS7_Init(p7Ver, HEAP_HINT, INVALID_DEVID), 0); - ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); - - #ifndef NO_PKCS7_STREAM - /* verify with wc_PKCS7_VerifySignedData streaming */ - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - ExpectIntEQ(wc_PKCS7_Init(p7Ver, HEAP_HINT, INVALID_DEVID), 0); - /* test for streaming */ - ret = -1; - for (z = 0; z < outLen && ret != 0; z++) { - ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); - if (ret < 0){ - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); - } - } - ExpectIntEQ(ret, 0); - #endif /* !NO_PKCS7_STREAM */ - - /* compare the signer found to expected signer */ - ExpectIntNE(p7Ver->verifyCertSz, 0); - tmpPtr = NULL; - ExpectIntEQ(i2d_X509(signCert, &tmpPtr), p7Ver->verifyCertSz); - ExpectIntEQ(XMEMCMP(tmpPtr, p7Ver->verifyCert, p7Ver->verifyCertSz), 0); - XFREE(tmpPtr, NULL, DYNAMIC_TYPE_OPENSSL); - tmpPtr = NULL; - - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - - ExpectNotNull(out); - XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); - out = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* TEST SUCCESS: Not detached, streaming, not MIME. 
Also bad arg - * tests for PKCS7_final() while we have a PKCS7 pointer to use */ - { - /* re-populate input BIO, may have been consumed */ - BIO_free(inBio); - inBio = NULL; - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_BINARY | PKCS7_STREAM; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectIntEQ(PKCS7_final(p7, inBio, flags), 1); - ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); - - /* PKCS7_final, bad args: PKCS7 null */ - ExpectIntEQ(PKCS7_final(NULL, inBio, 0), 0); - /* PKCS7_final, bad args: PKCS7 null */ - ExpectIntEQ(PKCS7_final(p7, NULL, 0), 0); - - tmpPtr = out; - ExpectNotNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); - PKCS7_free(p7Ver); - p7Ver = NULL; - - ExpectNotNull(out); - XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); - out = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* TEST SUCCESS: Detached, not streaming, not MIME */ - { - /* re-populate input BIO, may have been consumed */ - BIO_free(inBio); - inBio = NULL; - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_BINARY | PKCS7_DETACHED; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); - ExpectNotNull(out); - - /* verify with wolfCrypt, d2i_PKCS7 does not support detached content */ - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - if (p7Ver != NULL) { - p7Ver->content = data; - p7Ver->contentSz = sizeof(data); - } - ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - - #ifndef NO_PKCS7_STREAM - /* verify with wc_PKCS7_VerifySignedData streaming */ - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - if (p7Ver != NULL) { - p7Ver->content = data; - p7Ver->contentSz = sizeof(data); - } - 
/* test for streaming */ - if (EXPECT_SUCCESS()) { - ret = -1; - for (z = 0; z < outLen && ret != 0; z++) { - ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); - if (ret < 0){ - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); - } - } - ExpectIntEQ(ret, 0); - } - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - #endif /* !NO_PKCS7_STREAM */ - - /* verify expected failure (NULL return) from d2i_PKCS7, it does not - * yet support detached content */ - tmpPtr = out; - ExpectNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); - PKCS7_free(p7Ver); - p7Ver = NULL; - - XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); - out = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* TEST SUCCESS: Detached, streaming, not MIME */ - { - /* re-populate input BIO, may have been consumed */ - BIO_free(inBio); - inBio = NULL; - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_BINARY | PKCS7_DETACHED | PKCS7_STREAM; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectIntEQ(PKCS7_final(p7, inBio, flags), 1); - ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); - - /* verify with wolfCrypt, d2i_PKCS7 does not support detached content */ - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - if (p7Ver != NULL) { - p7Ver->content = data; - p7Ver->contentSz = sizeof(data); - } - ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - - ExpectNotNull(out); - - #ifndef NO_PKCS7_STREAM - /* verify with wc_PKCS7_VerifySignedData streaming */ - ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); - if (p7Ver != NULL) { - p7Ver->content = data; - p7Ver->contentSz = sizeof(data); - } - /* test for streaming */ - if (EXPECT_SUCCESS()) { - ret = -1; - for (z = 0; z < outLen && ret != 0; z++) { - ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); - if (ret < 0){ - ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); - } - } - 
ExpectIntEQ(ret, 0); - } - wc_PKCS7_Free(p7Ver); - p7Ver = NULL; - #endif /* !NO_PKCS7_STREAM */ - - XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); - PKCS7_free(p7); - p7 = NULL; - } - - X509_STORE_free(store); - X509_free(caCert); - X509_free(signCert); - EVP_PKEY_free(signKey); - BIO_free(inBio); - BIO_free(keyBio); - BIO_free(certBio); - BIO_free(caBio); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_PKCS7_SIGNED_new(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) - PKCS7_SIGNED* pkcs7 = NULL; - - ExpectNotNull(pkcs7 = PKCS7_SIGNED_new()); - ExpectIntEQ(pkcs7->contentOID, SIGNED_DATA); - - PKCS7_SIGNED_free(pkcs7); -#endif - return EXPECT_RESULT(); -} - -#ifndef NO_BIO - -static int test_wolfSSL_PEM_write_bio_encryptedKey(void) -{ - EXPECT_DECLS; -#if (defined(OPENSSL_EXTRA) || defined(OPENSSL_ALL)) && \ - defined(WOLFSSL_KEY_GEN) && !defined(NO_RSA) && \ - defined(WOLFSSL_ENCRYPTED_KEYS) && \ - (defined(WOLFSSL_PEM_TO_DER) || defined(WOLFSSL_DER_TO_PEM)) && \ - !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ - !defined(NO_DES3) - RSA* rsaKey = NULL; - RSA* retKey = NULL; - const EVP_CIPHER *cipher = NULL; - BIO* bio = NULL; - BIO* retbio = NULL; - byte* out; - const char* password = "wolfssl"; - word32 passwordSz =(word32)XSTRLEN((char*)password); - int membufSz = 0; - -#if defined(USE_CERT_BUFFERS_2048) - const byte* key = client_key_der_2048; - word32 keySz = sizeof_client_key_der_2048; -#elif defined(USE_CERT_BUFFERS_1024) - const byte* key = client_key_der_1024; - word32 keySz = sizeof_client_key_der_1024; -#endif - /* Import Rsa Key */ - ExpectNotNull(rsaKey = wolfSSL_RSA_new()); - ExpectIntEQ(wolfSSL_RSA_LoadDer_ex(rsaKey, key, keySz, - WOLFSSL_RSA_LOAD_PRIVATE), 1); - - ExpectNotNull(cipher = EVP_des_ede3_cbc()); - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - ExpectIntEQ(PEM_write_bio_RSAPrivateKey(bio, rsaKey, cipher, - (byte*)password, passwordSz, NULL, NULL), 1); - ExpectIntGT((membufSz = BIO_get_mem_data(bio, 
&out)), 0); - ExpectNotNull(retbio = BIO_new_mem_buf(out, membufSz)); - ExpectNotNull((retKey = PEM_read_bio_RSAPrivateKey(retbio, NULL, - NULL, (void*)password))); - if (bio != NULL) { - BIO_free(bio); - } - if (retbio != NULL) { - BIO_free(retbio); - } - if (retKey != NULL) { - RSA_free(retKey); - } - if (rsaKey != NULL) { - RSA_free(rsaKey); - } -#endif - return EXPECT_RESULT(); -} - -static int test_wolfSSL_PEM_write_bio_PKCS7(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_FILESYSTEM) - PKCS7* pkcs7 = NULL; - BIO* bio = NULL; - const byte* cert_buf = NULL; - int ret = 0; - WC_RNG rng; - const byte data[] = { /* Hello World */ - 0x48,0x65,0x6c,0x6c,0x6f,0x20,0x57,0x6f, - 0x72,0x6c,0x64 - }; -#ifndef NO_RSA - #if defined(USE_CERT_BUFFERS_2048) - byte key[sizeof(client_key_der_2048)]; - byte cert[sizeof(client_cert_der_2048)]; - word32 keySz = (word32)sizeof(key); - word32 certSz = (word32)sizeof(cert); - XMEMSET(key, 0, keySz); - XMEMSET(cert, 0, certSz); - XMEMCPY(key, client_key_der_2048, keySz); - XMEMCPY(cert, client_cert_der_2048, certSz); - #elif defined(USE_CERT_BUFFERS_1024) - byte key[sizeof_client_key_der_1024]; - byte cert[sizeof(sizeof_client_cert_der_1024)]; - word32 keySz = (word32)sizeof(key); - word32 certSz = (word32)sizeof(cert); - XMEMSET(key, 0, keySz); - XMEMSET(cert, 0, certSz); - XMEMCPY(key, client_key_der_1024, keySz); - XMEMCPY(cert, client_cert_der_1024, certSz); - #else - unsigned char cert[ONEK_BUF]; - unsigned char key[ONEK_BUF]; - XFILE fp = XBADFILE; - int certSz; - int keySz; - - ExpectTrue((fp = XFOPEN("./certs/1024/client-cert.der", "rb")) != - XBADFILE); - ExpectIntGT(certSz = (int)XFREAD(cert, 1, sizeof_client_cert_der_1024, - fp), 0); - if (fp != XBADFILE) { - XFCLOSE(fp); - fp = XBADFILE; - } - - ExpectTrue((fp = XFOPEN("./certs/1024/client-key.der", "rb")) != - XBADFILE); - ExpectIntGT(keySz = (int)XFREAD(key, 1, sizeof_client_key_der_1024, fp), - 0); - if (fp != XBADFILE) { - 
XFCLOSE(fp); - fp = XBADFILE; - } - #endif -#elif defined(HAVE_ECC) - #if defined(USE_CERT_BUFFERS_256) - unsigned char cert[sizeof(cliecc_cert_der_256)]; - unsigned char key[sizeof(ecc_clikey_der_256)]; - int certSz = (int)sizeof(cert); - int keySz = (int)sizeof(key); - XMEMSET(cert, 0, certSz); - XMEMSET(key, 0, keySz); - XMEMCPY(cert, cliecc_cert_der_256, sizeof_cliecc_cert_der_256); - XMEMCPY(key, ecc_clikey_der_256, sizeof_ecc_clikey_der_256); - #else - unsigned char cert[ONEK_BUF]; - unsigned char key[ONEK_BUF]; - XFILE fp = XBADFILE; - int certSz, keySz; - - ExpectTrue((fp = XFOPEN("./certs/client-ecc-cert.der", "rb")) != - XBADFILE); - ExpectIntGT(certSz = (int)XFREAD(cert, 1, sizeof_cliecc_cert_der_256, - fp), 0); - if (fp != XBADFILE) { - XFCLOSE(fp); - fp = XBADFILE; - } - - ExpectTrue((fp = XFOPEN("./certs/client-ecc-key.der", "rb")) != - XBADFILE); - ExpectIntGT(keySz = (int)XFREAD(key, 1, sizeof_ecc_clikey_der_256, fp), - 0); - if (fp != XBADFILE) { - XFCLOSE(fp); - fp = XBADFILE; - } - #endif -#else - #error PKCS7 requires ECC or RSA -#endif - - ExpectNotNull(pkcs7 = wc_PKCS7_New(HEAP_HINT, testDevId)); - /* initialize with DER encoded cert */ - ExpectIntEQ(wc_PKCS7_InitWithCert(pkcs7, (byte*)cert, (word32)certSz), 0); - - /* init rng */ - XMEMSET(&rng, 0, sizeof(WC_RNG)); - ExpectIntEQ(wc_InitRng(&rng), 0); - - if (pkcs7 != NULL) { - pkcs7->rng = &rng; - pkcs7->content = (byte*)data; /* not used for ex */ - pkcs7->contentSz = (word32)sizeof(data); - pkcs7->contentOID = SIGNED_DATA; - pkcs7->privateKey = key; - pkcs7->privateKeySz = (word32)sizeof(key); - pkcs7->encryptOID = RSAk; - #ifdef NO_SHA - pkcs7->hashOID = SHA256h; - #else - pkcs7->hashOID = SHAh; - #endif - pkcs7->signedAttribs = NULL; - pkcs7->signedAttribsSz = 0; - } - - ExpectNotNull(bio = BIO_new(BIO_s_mem())); - /* Write PKCS#7 PEM to BIO, the function converts the DER to PEM cert*/ - ExpectIntEQ(PEM_write_bio_PKCS7(bio, pkcs7), WOLFSSL_SUCCESS); - - /* Read PKCS#7 PEM from BIO */ - 
ret = wolfSSL_BIO_get_mem_data(bio, &cert_buf); - ExpectIntGE(ret, 0); - - BIO_free(bio); - wc_PKCS7_Free(pkcs7); - wc_FreeRng(&rng); -#endif - return EXPECT_RESULT(); -} - -#ifdef HAVE_SMIME -/* // NOLINTBEGIN(clang-analyzer-unix.Stream) */ -static int test_wolfSSL_SMIME_read_PKCS7(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_FILESYSTEM) && \ - !defined(NO_RSA) - PKCS7* pkcs7 = NULL; - BIO* bio = NULL; - BIO* bcont = NULL; - BIO* out = NULL; - const byte* outBuf = NULL; - int outBufLen = 0; - static const char contTypeText[] = "Content-Type: text/plain\r\n\r\n"; - XFILE smimeTestFile = XBADFILE; - - ExpectTrue((smimeTestFile = XFOPEN("./certs/test/smime-test.p7s", "rb")) != - XBADFILE); - - /* smime-test.p7s */ - bio = wolfSSL_BIO_new(wolfSSL_BIO_s_file()); - ExpectNotNull(bio); - ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); - pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); - ExpectNotNull(pkcs7); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, - PKCS7_NOVERIFY), SSL_SUCCESS); - if (smimeTestFile != XBADFILE) { - XFCLOSE(smimeTestFile); - smimeTestFile = XBADFILE; - } - if (bcont) BIO_free(bcont); - bcont = NULL; - wolfSSL_PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* smime-test-multipart.p7s */ - smimeTestFile = XFOPEN("./certs/test/smime-test-multipart.p7s", "rb"); - ExpectFalse(smimeTestFile == XBADFILE); - ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); - pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); - ExpectNotNull(pkcs7); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, - PKCS7_NOVERIFY), SSL_SUCCESS); - if (smimeTestFile != XBADFILE) { - XFCLOSE(smimeTestFile); - smimeTestFile = XBADFILE; - } - if (bcont) BIO_free(bcont); - bcont = NULL; - wolfSSL_PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* smime-test-multipart-badsig.p7s */ - smimeTestFile = XFOPEN("./certs/test/smime-test-multipart-badsig.p7s", - "rb"); - 
ExpectFalse(smimeTestFile == XBADFILE); - ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); - pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); - ExpectNotNull(pkcs7); /* can read in the unverified smime bundle */ - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, - PKCS7_NOVERIFY), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - if (smimeTestFile != XBADFILE) { - XFCLOSE(smimeTestFile); - smimeTestFile = XBADFILE; - } - if (bcont) BIO_free(bcont); - bcont = NULL; - wolfSSL_PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* smime-test-canon.p7s */ - smimeTestFile = XFOPEN("./certs/test/smime-test-canon.p7s", "rb"); - ExpectFalse(smimeTestFile == XBADFILE); - ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); - pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); - ExpectNotNull(pkcs7); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, - PKCS7_NOVERIFY), SSL_SUCCESS); - if (smimeTestFile != XBADFILE) { - XFCLOSE(smimeTestFile); - smimeTestFile = XBADFILE; - } - if (bcont) BIO_free(bcont); - bcont = NULL; - wolfSSL_PKCS7_free(pkcs7); - pkcs7 = NULL; - - /* Test PKCS7_TEXT, PKCS7_verify() should remove Content-Type: text/plain */ - smimeTestFile = XFOPEN("./certs/test/smime-test-canon.p7s", "rb"); - ExpectFalse(smimeTestFile == XBADFILE); - ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); - pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); - ExpectNotNull(pkcs7); - out = wolfSSL_BIO_new(BIO_s_mem()); - ExpectNotNull(out); - ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, out, - PKCS7_NOVERIFY | PKCS7_TEXT), SSL_SUCCESS); - ExpectIntGT((outBufLen = BIO_get_mem_data(out, &outBuf)), 0); - /* Content-Type should not show up at beginning of output buffer */ - ExpectIntGT(outBufLen, XSTRLEN(contTypeText)); - ExpectIntGT(XMEMCMP(outBuf, contTypeText, XSTRLEN(contTypeText)), 0); - - BIO_free(out); - BIO_free(bio); - if (bcont) BIO_free(bcont); - wolfSSL_PKCS7_free(pkcs7); -#endif - 
return EXPECT_RESULT(); -} -/* // NOLINTEND(clang-analyzer-unix.Stream) */ - -static int test_wolfSSL_SMIME_write_PKCS7(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_RSA) - PKCS7* p7 = NULL; - PKCS7* p7Ver = NULL; - int flags = 0; - byte data[] = "Test data to encode."; - - const char* cert = "./certs/server-cert.pem"; - const char* key = "./certs/server-key.pem"; - const char* ca = "./certs/ca-cert.pem"; - - WOLFSSL_BIO* certBio = NULL; - WOLFSSL_BIO* keyBio = NULL; - WOLFSSL_BIO* caBio = NULL; - WOLFSSL_BIO* inBio = NULL; - WOLFSSL_BIO* outBio = NULL; - WOLFSSL_BIO* content = NULL; - X509* signCert = NULL; - EVP_PKEY* signKey = NULL; - X509* caCert = NULL; - X509_STORE* store = NULL; - - /* read signer cert/key into BIO */ - ExpectNotNull(certBio = BIO_new_file(cert, "r")); - ExpectNotNull(keyBio = BIO_new_file(key, "r")); - ExpectNotNull(signCert = PEM_read_bio_X509(certBio, NULL, 0, NULL)); - ExpectNotNull(signKey = PEM_read_bio_PrivateKey(keyBio, NULL, 0, NULL)); - - /* read CA cert into store (for verify) */ - ExpectNotNull(caBio = BIO_new_file(ca, "r")); - ExpectNotNull(caCert = PEM_read_bio_X509(caBio, NULL, 0, NULL)); - ExpectNotNull(store = X509_STORE_new()); - ExpectIntEQ(X509_STORE_add_cert(store, caCert), 1); - - - /* generate and verify SMIME: not detached */ - { - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_STREAM; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectNotNull(outBio = BIO_new(BIO_s_mem())); - ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); - - /* bad arg: out NULL */ - ExpectIntEQ(SMIME_write_PKCS7(NULL, p7, inBio, flags), 0); - /* bad arg: pkcs7 NULL */ - ExpectIntEQ(SMIME_write_PKCS7(outBio, NULL, inBio, flags), 0); - - ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); - - BIO_free(content); - 
content = NULL; - BIO_free(inBio); - inBio = NULL; - BIO_free(outBio); - outBio = NULL; - PKCS7_free(p7Ver); - p7Ver = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* generate and verify SMIME: not detached, add Content-Type */ - { - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_STREAM | PKCS7_TEXT; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectNotNull(outBio = BIO_new(BIO_s_mem())); - ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); - - ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); - - BIO_free(content); - content = NULL; - BIO_free(inBio); - inBio = NULL; - BIO_free(outBio); - outBio = NULL; - PKCS7_free(p7Ver); - p7Ver = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* generate and verify SMIME: detached */ - { - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_DETACHED | PKCS7_STREAM; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectNotNull(outBio = BIO_new(BIO_s_mem())); - ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); - - ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, content, NULL, flags), 1); - - BIO_free(content); - content = NULL; - BIO_free(inBio); - inBio = NULL; - BIO_free(outBio); - outBio = NULL; - PKCS7_free(p7Ver); - p7Ver = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - /* generate and verify SMIME: PKCS7_TEXT to add Content-Type header */ - { - ExpectNotNull(inBio = BIO_new(BIO_s_mem())); - ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); - - flags = PKCS7_STREAM | PKCS7_DETACHED | PKCS7_TEXT; - ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); - ExpectNotNull(outBio = BIO_new(BIO_s_mem())); - ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 
1); - - ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); - ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, content, NULL, flags), 1); - - BIO_free(content); - content = NULL; - BIO_free(inBio); - inBio = NULL; - BIO_free(outBio); - outBio = NULL; - PKCS7_free(p7Ver); - p7Ver = NULL; - PKCS7_free(p7); - p7 = NULL; - } - - X509_STORE_free(store); - X509_free(caCert); - X509_free(signCert); - EVP_PKEY_free(signKey); - BIO_free(keyBio); - BIO_free(certBio); - BIO_free(caBio); -#endif - return EXPECT_RESULT(); -} -#endif /* HAVE_SMIME */ -#endif /* !NO_BIO */ - - /*----------------------------------------------------------------------------* | Certificate Failure Checks *----------------------------------------------------------------------------*/ @@ -29667,1436 +21155,6 @@ static int test_wolfSSL_PEM_read(void) return EXPECT_RESULT(); } -static int test_wolfssl_EVP_aes_gcm_AAD_2_parts(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) - const byte iv[12] = { 0 }; - const byte key[16] = { 0 }; - const byte cleartext[16] = { 0 }; - const byte aad[] = { - 0x01, 0x10, 0x00, 0x2a, 0x08, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, - 0x00, 0x00, 0xdc, 0x4d, 0xad, 0x6b, 0x06, 0x93, - 0x4f - }; - byte out1Part[16]; - byte outTag1Part[16]; - byte out2Part[16]; - byte outTag2Part[16]; - byte decryptBuf[16]; - int len = 0; - int tlen; - EVP_CIPHER_CTX* ctx = NULL; - - /* ENCRYPT */ - /* Send AAD and data in 1 part */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - tlen = 0; - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), - 1); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), 1); - ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad, sizeof(aad)), 1); - ExpectIntEQ(EVP_EncryptUpdate(ctx, out1Part, &len, cleartext, - sizeof(cleartext)), 1); - tlen += len; - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, out1Part, &len), 1); - 
tlen += len; - ExpectIntEQ(tlen, sizeof(cleartext)); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, - outTag1Part), 1); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* DECRYPT */ - /* Send AAD and data in 1 part */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - tlen = 0; - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), - 1); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), 1); - ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad, sizeof(aad)), 1); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf, &len, out1Part, - sizeof(cleartext)), 1); - tlen += len; - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, - outTag1Part), 1); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptBuf, &len), 1); - tlen += len; - ExpectIntEQ(tlen, sizeof(cleartext)); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - ExpectIntEQ(XMEMCMP(decryptBuf, cleartext, len), 0); - - /* ENCRYPT */ - /* Send AAD and data in 2 parts */ - ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); - tlen = 0; - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), - 1); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), 1); - ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad, 1), 1); - ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad + 1, sizeof(aad) - 1), - 1); - ExpectIntEQ(EVP_EncryptUpdate(ctx, out2Part, &len, cleartext, 1), 1); - tlen += len; - ExpectIntEQ(EVP_EncryptUpdate(ctx, out2Part + tlen, &len, cleartext + 1, - sizeof(cleartext) - 1), 1); - tlen += len; - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, out2Part + tlen, &len), 1); - tlen += len; - ExpectIntEQ(tlen, sizeof(cleartext)); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, - outTag2Part), 1); - - ExpectIntEQ(XMEMCMP(out1Part, out2Part, sizeof(out1Part)), 0); - ExpectIntEQ(XMEMCMP(outTag1Part, outTag2Part, sizeof(outTag1Part)), 0); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* DECRYPT */ - /* Send AAD and data in 2 parts */ - ExpectNotNull(ctx = 
EVP_CIPHER_CTX_new()); - tlen = 0; - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), - 1); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), 1); - ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad, 1), 1); - ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad + 1, sizeof(aad) - 1), - 1); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf, &len, out1Part, 1), 1); - tlen += len; - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf + tlen, &len, out1Part + 1, - sizeof(cleartext) - 1), 1); - tlen += len; - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, - outTag1Part), 1); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptBuf + tlen, &len), 1); - tlen += len; - ExpectIntEQ(tlen, sizeof(cleartext)); - - ExpectIntEQ(XMEMCMP(decryptBuf, cleartext, len), 0); - - /* Test AAD reuse */ - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_aes_gcm_zeroLen(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) && defined(WOLFSSL_AES_256) - /* Zero length plain text */ - byte key[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte iv[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte plaintxt[1]; - int ivSz = 12; - int plaintxtSz = 0; - unsigned char tag[16]; - unsigned char tag_kat[] = { - 0x53,0x0f,0x8a,0xfb,0xc7,0x45,0x36,0xb9, - 0xa9,0x63,0xb4,0xf1,0xc4,0xcb,0x73,0x8b - }; - - byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - - EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); - EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); - - ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_aes_256_gcm(), NULL, key, iv)); - ExpectIntEQ(1, 
EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, - plaintxtSz)); - ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_GET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); - - ExpectIntEQ(0, ciphertxtSz); - ExpectIntEQ(0, XMEMCMP(tag, tag_kat, sizeof(tag))); - - EVP_CIPHER_CTX_init(de); - ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_aes_256_gcm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(0, decryptedtxtSz); - - EVP_CIPHER_CTX_free(en); - EVP_CIPHER_CTX_free(de); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_aes_gcm(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) - /* A 256 bit key, AES_128 will use the first 128 bit*/ - byte *key = (byte*)"01234567890123456789012345678901"; - /* A 128 bit IV */ - byte *iv = (byte*)"0123456789012345"; - int ivSz = AES_BLOCK_SIZE; - /* Message to be encrypted */ - byte *plaintxt = (byte*)"for things to change you have to change"; - /* Additional non-confidential data */ - byte *aad = (byte*)"Don't spend major time on minor things."; - - unsigned char tag[AES_BLOCK_SIZE] = {0}; - int plaintxtSz = (int)XSTRLEN((char*)plaintxt); - int aadSz = (int)XSTRLEN((char*)aad); - byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - int i = 0; - EVP_CIPHER_CTX en[2]; - EVP_CIPHER_CTX de[2]; - - for (i = 0; i < 2; i++) { - 
EVP_CIPHER_CTX_init(&en[i]); - if (i == 0) { - /* Default uses 96-bits IV length */ -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_gcm(), NULL, - key, iv)); -#endif - } - else { -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_gcm(), NULL, - NULL, NULL)); -#endif - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - } - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, - plaintxtSz)); - ciphertxtSz = len; - ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, - AES_BLOCK_SIZE, tag)); - wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]); - - EVP_CIPHER_CTX_init(&de[i]); - if (i == 0) { - /* Default uses 96-bits IV length */ -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, - key, iv)); -#endif - } - else { -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, 
EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, - NULL, NULL)); -#endif - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - - } - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, - AES_BLOCK_SIZE, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(ciphertxtSz, decryptedtxtSz); - ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); - - /* modify tag*/ - if (i == 0) { - /* Default uses 96-bits IV length */ -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, - key, iv)); -#endif - } - else { -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, - NULL, NULL)); -#endif - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - - } - tag[AES_BLOCK_SIZE-1]+=0xBB; - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, 
EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, - AES_BLOCK_SIZE, tag)); - /* fail due to wrong tag */ - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - ExpectIntEQ(0, len); - - wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]); - } -#endif /* OPENSSL_EXTRA && !NO_AES && HAVE_AESGCM */ - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_aria_gcm(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(HAVE_ARIA) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) - - /* A 256 bit key, AES_128 will use the first 128 bit*/ - byte *key = (byte*)"01234567890123456789012345678901"; - /* A 128 bit IV */ - byte *iv = (byte*)"0123456789012345"; - int ivSz = ARIA_BLOCK_SIZE; - /* Message to be encrypted */ - const int plaintxtSz = 40; - byte plaintxt[WC_ARIA_GCM_GET_CIPHERTEXT_SIZE(plaintxtSz)]; - XMEMCPY(plaintxt,"for things to change you have to change",plaintxtSz); - /* Additional non-confidential data */ - byte *aad = (byte*)"Don't spend major time on minor things."; - - unsigned char tag[ARIA_BLOCK_SIZE] = {0}; - int aadSz = (int)XSTRLEN((char*)aad); - byte ciphertxt[WC_ARIA_GCM_GET_CIPHERTEXT_SIZE(plaintxtSz)]; - byte decryptedtxt[plaintxtSz]; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - int i = 0; - #define TEST_ARIA_GCM_COUNT 6 - EVP_CIPHER_CTX en[TEST_ARIA_GCM_COUNT]; - EVP_CIPHER_CTX de[TEST_ARIA_GCM_COUNT]; - - for (i = 0; i < TEST_ARIA_GCM_COUNT; i++) { - - EVP_CIPHER_CTX_init(&en[i]); - switch (i) { - case 0: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_128_gcm(), NULL, key, iv)); - break; - case 1: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_192_gcm(), NULL, key, iv)); - break; - case 2: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_256_gcm(), NULL, key, iv)); - break; - case 
3: - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_128_gcm(), NULL, NULL, NULL)); - /* non-default must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - break; - case 4: - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_192_gcm(), NULL, NULL, NULL)); - /* non-default must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - break; - case 5: - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_256_gcm(), NULL, NULL, NULL)); - /* non-default must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - break; - } - XMEMSET(ciphertxt,0,sizeof(ciphertxt)); - AssertIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); - AssertIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, plaintxtSz)); - ciphertxtSz = len; - AssertIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); - AssertIntNE(0, XMEMCMP(plaintxt, ciphertxt, plaintxtSz)); - ciphertxtSz += len; - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, ARIA_BLOCK_SIZE, tag)); - AssertIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); - - EVP_CIPHER_CTX_init(&de[i]); - switch (i) { - case 0: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_128_gcm(), NULL, key, iv)); - break; - case 1: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_192_gcm(), NULL, key, iv)); - break; - case 2: - /* Default uses 96-bits IV length */ - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_256_gcm(), NULL, key, iv)); - break; - case 3: - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_128_gcm(), NULL, NULL, NULL)); - /* non-default 
must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - break; - case 4: - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_192_gcm(), NULL, NULL, NULL)); - /* non-default must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - break; - case 5: - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_256_gcm(), NULL, NULL, NULL)); - /* non-default must to set the IV length first */ - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - break; - } - XMEMSET(decryptedtxt,0,sizeof(decryptedtxt)); - AssertIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - AssertIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, ciphertxtSz)); - decryptedtxtSz = len; - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, ARIA_BLOCK_SIZE, tag)); - AssertIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - decryptedtxtSz += len; - AssertIntEQ(plaintxtSz, decryptedtxtSz); - AssertIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); - - XMEMSET(decryptedtxt,0,sizeof(decryptedtxt)); - /* modify tag*/ - tag[AES_BLOCK_SIZE-1]+=0xBB; - AssertIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, ARIA_BLOCK_SIZE, tag)); - /* fail due to wrong tag */ - AssertIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, ciphertxtSz)); - AssertIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - AssertIntEQ(0, len); - AssertIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); - } - - res = TEST_RES_CHECK(1); -#endif /* OPENSSL_EXTRA && !NO_AES && HAVE_AESGCM */ - return res; -} - -static int 
test_wolfssl_EVP_aes_ccm_zeroLen(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESCCM) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) && defined(WOLFSSL_AES_256) - /* Zero length plain text */ - byte key[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte iv[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte plaintxt[1]; - int ivSz = 12; - int plaintxtSz = 0; - unsigned char tag[16]; - - byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - - EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); - EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); - - ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_aes_256_ccm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, - plaintxtSz)); - ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_GET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); - - ExpectIntEQ(0, ciphertxtSz); - - EVP_CIPHER_CTX_init(de); - ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_aes_256_ccm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(0, decryptedtxtSz); - - EVP_CIPHER_CTX_free(en); - EVP_CIPHER_CTX_free(de); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_aes_ccm(void) -{ - EXPECT_DECLS; -#if 
defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESCCM) && \ - !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) - /* A 256 bit key, AES_128 will use the first 128 bit*/ - byte *key = (byte*)"01234567890123456789012345678901"; - /* A 128 bit IV */ - byte *iv = (byte*)"0123456789012"; - int ivSz = (int)XSTRLEN((char*)iv); - /* Message to be encrypted */ - byte *plaintxt = (byte*)"for things to change you have to change"; - /* Additional non-confidential data */ - byte *aad = (byte*)"Don't spend major time on minor things."; - - unsigned char tag[AES_BLOCK_SIZE] = {0}; - int plaintxtSz = (int)XSTRLEN((char*)plaintxt); - int aadSz = (int)XSTRLEN((char*)aad); - byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - int i = 0; - int ret; - EVP_CIPHER_CTX en[2]; - EVP_CIPHER_CTX de[2]; - - for (i = 0; i < 2; i++) { - EVP_CIPHER_CTX_init(&en[i]); - - if (i == 0) { - /* Default uses 96-bits IV length */ -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_ccm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_ccm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_ccm(), NULL, - key, iv)); -#endif - } - else { -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_ccm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_ccm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_ccm(), NULL, - NULL, NULL)); -#endif - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - } - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); 
- ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, - plaintxtSz)); - ciphertxtSz = len; - ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_GET_TAG, - AES_BLOCK_SIZE, tag)); - ret = wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]); - ExpectIntEQ(ret, 1); - - EVP_CIPHER_CTX_init(&de[i]); - if (i == 0) { - /* Default uses 96-bits IV length */ -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_ccm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_ccm(), NULL, - key, iv)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_ccm(), NULL, - key, iv)); -#endif - } - else { -#ifdef WOLFSSL_AES_128 - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_ccm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_192) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_ccm(), NULL, - NULL, NULL)); -#elif defined(WOLFSSL_AES_256) - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_ccm(), NULL, - NULL, NULL)); -#endif - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - - } - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, - AES_BLOCK_SIZE, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(ciphertxtSz, decryptedtxtSz); - ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); - - /* modify tag*/ - tag[AES_BLOCK_SIZE-1]+=0xBB; - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, 
EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, - AES_BLOCK_SIZE, tag)); - /* fail due to wrong tag */ - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - ExpectIntEQ(0, len); - ret = wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]); - ExpectIntEQ(ret, 1); - } -#endif /* OPENSSL_EXTRA && !NO_AES && HAVE_AESCCM */ - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_chacha20_poly1305(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_CHACHA) && defined(HAVE_POLY1305) - byte key[CHACHA20_POLY1305_AEAD_KEYSIZE]; - byte iv [CHACHA20_POLY1305_AEAD_IV_SIZE]; - byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; - byte aad[] = {0xAA, 0XBB, 0xCC, 0xDD, 0xEE, 0xFF}; - byte cipherText[sizeof(plainText)]; - byte decryptedText[sizeof(plainText)]; - byte tag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]; - EVP_CIPHER_CTX* ctx = NULL; - int outSz; - - XMEMSET(key, 0, sizeof(key)); - XMEMSET(iv, 0, sizeof(iv)); - - /* Encrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_chacha20_poly1305(), NULL, NULL, - NULL), WOLFSSL_SUCCESS); - /* Invalid IV length. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, - CHACHA20_POLY1305_AEAD_IV_SIZE-1, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Valid IV length. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, - CHACHA20_POLY1305_AEAD_IV_SIZE, NULL), WOLFSSL_SUCCESS); - /* Invalid tag length. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, - CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE-1, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Valid tag length. 
*/ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, - CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, NULL), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &outSz, aad, sizeof(aad)), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(aad)); - ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, - sizeof(plainText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - /* Invalid tag length. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, - CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE-1, tag), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Valid tag length. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, - CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, tag), WOLFSSL_SUCCESS); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Decrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_chacha20_poly1305(), NULL, NULL, - NULL), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, - CHACHA20_POLY1305_AEAD_IV_SIZE, NULL), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, - CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, tag), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &outSz, aad, sizeof(aad)), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(aad)); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Test partial Inits. CipherInit() allow setting of key and iv - * in separate calls. 
*/ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_chacha20_poly1305(), - key, NULL, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CipherUpdate(ctx, NULL, &outSz, - aad, sizeof(aad)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(aad)); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_chacha20(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_CHACHA) - byte key[CHACHA_MAX_KEY_SZ]; - byte iv [WOLFSSL_EVP_CHACHA_IV_BYTES]; - byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; - byte cipherText[sizeof(plainText)]; - byte decryptedText[sizeof(plainText)]; - EVP_CIPHER_CTX* ctx = NULL; - int outSz; - - XMEMSET(key, 0, sizeof(key)); - XMEMSET(iv, 0, sizeof(iv)); - /* Encrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_chacha20(), NULL, NULL, - NULL), WOLFSSL_SUCCESS); - /* Any tag length must fail - not an AEAD cipher. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, - 16, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, - sizeof(plainText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Decrypt. 
*/ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_chacha20(), NULL, NULL, - NULL), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - EVP_CIPHER_CTX_free(ctx); - ctx = NULL; - - /* Test partial Inits. CipherInit() allow setting of key and iv - * in separate calls. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_chacha20(), - key, NULL, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - EVP_CIPHER_CTX_free(ctx); -#endif - return EXPECT_RESULT(); -} - -static int test_wolfssl_EVP_sm4_ecb(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_ECB) - EXPECT_DECLS; - byte key[SM4_KEY_SIZE]; - byte plainText[SM4_BLOCK_SIZE] = { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF - }; - byte cipherText[sizeof(plainText) + SM4_BLOCK_SIZE]; - byte decryptedText[sizeof(plainText) + SM4_BLOCK_SIZE]; - EVP_CIPHER_CTX* ctx; - int outSz; - - XMEMSET(key, 0, sizeof(key)); - - /* Encrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_ecb(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - /* Any tag length must fail - not an AEAD cipher. 
*/ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, NULL), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, - sizeof(plainText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText + outSz, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, SM4_BLOCK_SIZE); - ExpectBufNE(cipherText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - /* Decrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_ecb(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, NULL), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - res = EXPECT_RESULT(); -#endif - return res; -} - -static int test_wolfssl_EVP_sm4_cbc(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CBC) - EXPECT_DECLS; - byte key[SM4_KEY_SIZE]; - byte iv[SM4_BLOCK_SIZE]; - byte plainText[SM4_BLOCK_SIZE] = { - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, - 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF - }; - byte cipherText[sizeof(plainText) + SM4_BLOCK_SIZE]; - byte decryptedText[sizeof(plainText) + SM4_BLOCK_SIZE]; - EVP_CIPHER_CTX* ctx; - int outSz; - - XMEMSET(key, 0, sizeof(key)); - XMEMSET(iv, 0, sizeof(iv)); - - /* Encrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_cbc(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - /* Any tag length must fail - not an AEAD cipher. 
*/ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, - sizeof(plainText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText + outSz, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, SM4_BLOCK_SIZE); - ExpectBufNE(cipherText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - /* Decrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_cbc(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - /* Test partial Inits. CipherInit() allow setting of key and iv - * in separate calls. 
*/ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_sm4_cbc(), key, NULL, 0), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 0), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - res = EXPECT_RESULT(); -#endif - return res; -} - -static int test_wolfssl_EVP_sm4_ctr(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CTR) - EXPECT_DECLS; - byte key[SM4_KEY_SIZE]; - byte iv[SM4_BLOCK_SIZE]; - byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; - byte cipherText[sizeof(plainText)]; - byte decryptedText[sizeof(plainText)]; - EVP_CIPHER_CTX* ctx; - int outSz; - - XMEMSET(key, 0, sizeof(key)); - XMEMSET(iv, 0, sizeof(iv)); - - /* Encrypt. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_ctr(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - /* Any tag length must fail - not an AEAD cipher. */ - ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, - sizeof(plainText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(plainText)); - ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufNE(cipherText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - /* Decrypt. 
*/ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_ctr(), NULL, NULL, NULL), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - /* Test partial Inits. CipherInit() allow setting of key and iv - * in separate calls. */ - ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_sm4_ctr(), key, NULL, 1), - WOLFSSL_SUCCESS); - ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, - sizeof(cipherText)), WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, sizeof(cipherText)); - ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), - WOLFSSL_SUCCESS); - ExpectIntEQ(outSz, 0); - ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); - EVP_CIPHER_CTX_free(ctx); - - res = EXPECT_RESULT(); -#endif - return res; -} - -static int test_wolfssl_EVP_sm4_gcm_zeroLen(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_GCM) - /* Zero length plain text */ - EXPECT_DECLS; - byte key[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte iv[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte plaintxt[1]; - int ivSz = 12; - int plaintxtSz = 0; - unsigned char tag[16]; - unsigned char tag_kat[16] = { - 0x23,0x2f,0x0c,0xfe,0x30,0x8b,0x49,0xea, - 0x6f,0xc8,0x82,0x29,0xb5,0xdc,0x85,0x8d - }; - - byte 
ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - - EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); - EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); - - ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_sm4_gcm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, - plaintxtSz)); - ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_GET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); - - ExpectIntEQ(0, ciphertxtSz); - ExpectIntEQ(0, XMEMCMP(tag, tag_kat, sizeof(tag))); - - EVP_CIPHER_CTX_init(de); - ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_sm4_gcm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(0, decryptedtxtSz); - - EVP_CIPHER_CTX_free(en); - EVP_CIPHER_CTX_free(de); - - res = EXPECT_RESULT(); -#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_GCM */ - return res; -} - -static int test_wolfssl_EVP_sm4_gcm(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_GCM) - EXPECT_DECLS; - byte *key = (byte*)"0123456789012345"; - /* A 128 bit IV */ - byte *iv = (byte*)"0123456789012345"; - int ivSz = SM4_BLOCK_SIZE; - /* Message to be encrypted */ - byte *plaintxt = (byte*)"for things to change you have to change"; - /* Additional non-confidential data */ - byte *aad = (byte*)"Don't spend major time on minor things."; - - unsigned char tag[SM4_BLOCK_SIZE] = {0}; - int plaintxtSz = (int)XSTRLEN((char*)plaintxt); - int aadSz = 
(int)XSTRLEN((char*)aad); - byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - int i = 0; - EVP_CIPHER_CTX en[2]; - EVP_CIPHER_CTX de[2]; - - for (i = 0; i < 2; i++) { - EVP_CIPHER_CTX_init(&en[i]); - - if (i == 0) { - /* Default uses 96-bits IV length */ - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_gcm(), NULL, key, - iv)); - } - else { - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_gcm(), NULL, NULL, - NULL)); - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - } - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, - plaintxtSz)); - ciphertxtSz = len; - ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, - SM4_BLOCK_SIZE, tag)); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); - - EVP_CIPHER_CTX_init(&de[i]); - if (i == 0) { - /* Default uses 96-bits IV length */ - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_gcm(), NULL, key, - iv)); - } - else { - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_gcm(), NULL, NULL, - NULL)); - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - - } - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, - SM4_BLOCK_SIZE, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - decryptedtxtSz += len; - 
ExpectIntEQ(ciphertxtSz, decryptedtxtSz); - ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); - - /* modify tag*/ - tag[SM4_BLOCK_SIZE-1]+=0xBB; - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, - SM4_BLOCK_SIZE, tag)); - /* fail due to wrong tag */ - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - ExpectIntEQ(0, len); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); - } - - res = EXPECT_RESULT(); -#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_GCM */ - return res; -} - -static int test_wolfssl_EVP_sm4_ccm_zeroLen(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CCM) - /* Zero length plain text */ - EXPECT_DECLS; - byte key[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte iv[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 - }; /* align */ - byte plaintxt[1]; - int ivSz = 12; - int plaintxtSz = 0; - unsigned char tag[16]; - - byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - - EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); - EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); - - ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_sm4_ccm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, - plaintxtSz)); - ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_GET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); - - ExpectIntEQ(0, ciphertxtSz); - - 
EVP_CIPHER_CTX_init(de); - ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_sm4_ccm(), NULL, key, iv)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_TAG, 16, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(0, decryptedtxtSz); - - EVP_CIPHER_CTX_free(en); - EVP_CIPHER_CTX_free(de); - - res = EXPECT_RESULT(); -#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_CCM */ - return res; -} - -static int test_wolfssl_EVP_sm4_ccm(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CCM) - EXPECT_DECLS; - byte *key = (byte*)"0123456789012345"; - byte *iv = (byte*)"0123456789012"; - int ivSz = (int)XSTRLEN((char*)iv); - /* Message to be encrypted */ - byte *plaintxt = (byte*)"for things to change you have to change"; - /* Additional non-confidential data */ - byte *aad = (byte*)"Don't spend major time on minor things."; - - unsigned char tag[SM4_BLOCK_SIZE] = {0}; - int plaintxtSz = (int)XSTRLEN((char*)plaintxt); - int aadSz = (int)XSTRLEN((char*)aad); - byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; - byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; - int ciphertxtSz = 0; - int decryptedtxtSz = 0; - int len = 0; - int i = 0; - EVP_CIPHER_CTX en[2]; - EVP_CIPHER_CTX de[2]; - - for (i = 0; i < 2; i++) { - EVP_CIPHER_CTX_init(&en[i]); - - if (i == 0) { - /* Default uses 96-bits IV length */ - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_ccm(), NULL, key, - iv)); - } - else { - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_ccm(), NULL, NULL, - NULL)); - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); - } - ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); - 
ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, - plaintxtSz)); - ciphertxtSz = len; - ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); - ciphertxtSz += len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_GET_TAG, - SM4_BLOCK_SIZE, tag)); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); - - EVP_CIPHER_CTX_init(&de[i]); - if (i == 0) { - /* Default uses 96-bits IV length */ - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_ccm(), NULL, key, - iv)); - } - else { - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_ccm(), NULL, NULL, - NULL)); - /* non-default must to set the IV length first */ - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_IVLEN, - ivSz, NULL)); - ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); - - } - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - decryptedtxtSz = len; - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, - SM4_BLOCK_SIZE, tag)); - ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - decryptedtxtSz += len; - ExpectIntEQ(ciphertxtSz, decryptedtxtSz); - ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); - - /* modify tag*/ - tag[SM4_BLOCK_SIZE-1]+=0xBB; - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); - ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, - SM4_BLOCK_SIZE, tag)); - /* fail due to wrong tag */ - ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, - ciphertxtSz)); - ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); - ExpectIntEQ(0, len); - ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); - } - - res = EXPECT_RESULT(); -#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_CCM */ - return res; -} - -static int test_wolfSSL_EVP_PKEY_hkdf(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(HAVE_HKDF) - EVP_PKEY_CTX* 
ctx = NULL; - byte salt[] = {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F}; - byte key[] = {0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F}; - byte info[] = {0X01, 0x02, 0x03, 0x04, 0x05}; - byte info2[] = {0X06, 0x07, 0x08, 0x09, 0x0A}; - byte outKey[34]; - size_t outKeySz = sizeof(outKey); - /* These expected outputs were gathered by running the same test below using - * OpenSSL. */ - const byte extractAndExpand[] = { - 0x8B, 0xEB, 0x90, 0xA9, 0x04, 0xFF, 0x05, 0x10, 0xE4, 0xB5, 0xB1, 0x10, - 0x31, 0x34, 0xFF, 0x07, 0x5B, 0xE3, 0xC6, 0x93, 0xD4, 0xF8, 0xC7, 0xEE, - 0x96, 0xDA, 0x78, 0x7A, 0xE2, 0x9A, 0x2D, 0x05, 0x4B, 0xF6 - }; - const byte extractOnly[] = { - 0xE7, 0x6B, 0x9E, 0x0F, 0xE4, 0x02, 0x1D, 0x62, 0xEA, 0x97, 0x74, 0x5E, - 0xF4, 0x3C, 0x65, 0x4D, 0xC1, 0x46, 0x98, 0xAA, 0x79, 0x9A, 0xCB, 0x9C, - 0xCC, 0x3E, 0x7F, 0x2A, 0x2B, 0x41, 0xA1, 0x9E - }; - const byte expandOnly[] = { - 0xFF, 0x29, 0x29, 0x56, 0x9E, 0xA7, 0x66, 0x02, 0xDB, 0x4F, 0xDB, 0x53, - 0x7D, 0x21, 0x67, 0x52, 0xC3, 0x0E, 0xF3, 0xFC, 0x71, 0xCE, 0x67, 0x2B, - 0xEA, 0x3B, 0xE9, 0xFC, 0xDD, 0xC8, 0xCC, 0xB7, 0x42, 0x74 - }; - const byte extractAndExpandAddInfo[] = { - 0x5A, 0x74, 0x79, 0x83, 0xA3, 0xA4, 0x2E, 0xB7, 0xD4, 0x08, 0xC2, 0x6A, - 0x2F, 0xA5, 0xE3, 0x4E, 0xF1, 0xF4, 0x87, 0x3E, 0xA6, 0xC7, 0x88, 0x45, - 0xD7, 0xE2, 0x15, 0xBC, 0xB8, 0x10, 0xEF, 0x6C, 0x4D, 0x7A - }; - - ExpectNotNull((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_HKDF, NULL))); - ExpectIntEQ(EVP_PKEY_derive_init(ctx), WOLFSSL_SUCCESS); - /* NULL ctx. */ - ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(NULL, EVP_sha256()), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* NULL md. */ - ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(ctx, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(ctx, EVP_sha256()), WOLFSSL_SUCCESS); - /* NULL ctx. 
*/ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(NULL, salt, sizeof(salt)), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* NULL salt is ok. */ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, NULL, sizeof(salt)), - WOLFSSL_SUCCESS); - /* Salt length <= 0. */ - /* Length 0 salt is ok. */ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, 0), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, -1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, sizeof(salt)), - WOLFSSL_SUCCESS); - /* NULL ctx. */ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(NULL, key, sizeof(key)), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* NULL key. */ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, NULL, sizeof(key)), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Key length <= 0 */ - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, -1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, sizeof(key)), - WOLFSSL_SUCCESS); - /* NULL ctx. */ - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(NULL, info, sizeof(info)), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* NULL info is ok. */ - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, NULL, sizeof(info)), - WOLFSSL_SUCCESS); - /* Info length <= 0 */ - /* Length 0 info is ok. */ - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, 0), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, -1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, sizeof(info)), - WOLFSSL_SUCCESS); - /* NULL ctx. */ - ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(NULL, EVP_PKEY_HKDEF_MODE_EXTRACT_ONLY), - WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Extract and expand (default). */ - ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); - ExpectIntEQ(outKeySz, sizeof(extractAndExpand)); - ExpectIntEQ(XMEMCMP(outKey, extractAndExpand, outKeySz), 0); - /* Extract only. 
*/ - ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, EVP_PKEY_HKDEF_MODE_EXTRACT_ONLY), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); - ExpectIntEQ(outKeySz, sizeof(extractOnly)); - ExpectIntEQ(XMEMCMP(outKey, extractOnly, outKeySz), 0); - outKeySz = sizeof(outKey); - /* Expand only. */ - ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, EVP_PKEY_HKDEF_MODE_EXPAND_ONLY), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); - ExpectIntEQ(outKeySz, sizeof(expandOnly)); - ExpectIntEQ(XMEMCMP(outKey, expandOnly, outKeySz), 0); - outKeySz = sizeof(outKey); - /* Extract and expand with appended additional info. */ - ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info2, sizeof(info2)), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, - EVP_PKEY_HKDEF_MODE_EXTRACT_AND_EXPAND), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); - ExpectIntEQ(outKeySz, sizeof(extractAndExpandAddInfo)); - ExpectIntEQ(XMEMCMP(outKey, extractAndExpandAddInfo, outKeySz), 0); - - EVP_PKEY_CTX_free(ctx); -#endif /* OPENSSL_EXTRA && HAVE_HKDF */ - return EXPECT_RESULT(); -} - static int test_wolfSSL_dup_CA_list(void) { int res = TEST_SKIPPED; @@ -31612,6 +21670,58 @@ static int test_wolfSSL_CTX_LoadCRL(void) return EXPECT_RESULT(); } +static int test_wolfSSL_CTX_LoadCRL_largeCRLnum(void) +{ + EXPECT_DECLS; +#if defined(HAVE_CRL) && !defined(NO_RSA) && !defined(NO_FILESYSTEM) && \ + defined(HAVE_CRL_UPDATE_CB) + WOLFSSL_CERT_MANAGER* cm = NULL; + const char* caCert = "./certs/ca-cert.pem"; + const char* crl_lrgcrlnum = "./certs/crl/extra-crls/large_crlnum.pem"; + const char* crl_lrgcrlnum2 = "./certs/crl/extra-crls/large_crlnum2.pem"; + const char* exp_crlnum = "D8AFADA7F08B38E6178BD0E5CD7B0DF80071BA74"; + byte *crlLrgCrlNumBuff = NULL; + word32 crlLrgCrlNumSz; + CrlInfo crlInfo; + XFILE f; + word32 sz; + + cm = wolfSSL_CertManagerNew(); + ExpectNotNull(cm); + 
ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, caCert, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_lrgcrlnum, + WOLFSSL_FILETYPE_PEM), + WOLFSSL_SUCCESS); + + AssertTrue((f = XFOPEN(crl_lrgcrlnum, "rb")) != XBADFILE); + AssertTrue(XFSEEK(f, 0, XSEEK_END) == 0); + AssertIntGE(sz = (word32) XFTELL(f), 1); + AssertTrue(XFSEEK(f, 0, XSEEK_SET) == 0); + AssertTrue( \ + (crlLrgCrlNumBuff = + (byte*)XMALLOC(sz, NULL, DYNAMIC_TYPE_FILE)) != NULL); + AssertTrue(XFREAD(crlLrgCrlNumBuff, 1, sz, f) == sz); + XFCLOSE(f); + crlLrgCrlNumSz = sz; + + AssertIntEQ(wolfSSL_CertManagerGetCRLInfo( + cm, &crlInfo, crlLrgCrlNumBuff, crlLrgCrlNumSz, WOLFSSL_FILETYPE_PEM), + WOLFSSL_SUCCESS); + AssertIntEQ(XMEMCMP( + crlInfo.crlNumber, exp_crlnum, XSTRLEN(exp_crlnum)), 0); + /* Expect to fail loading CRL because of >21 octets CRL number */ + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_lrgcrlnum2, + WOLFSSL_FILETYPE_PEM), + ASN_PARSE_E); + + XFREE(crlLrgCrlNumBuff, NULL, DYNAMIC_TYPE_FILE); + wolfSSL_CertManagerFree(cm); +#endif + return EXPECT_RESULT(); + +} + #if defined(HAVE_CRL) && !defined(NO_RSA) && !defined(NO_FILESYSTEM) && \ defined(HAVE_CRL_UPDATE_CB) int crlUpdateTestStatus = 0; @@ -31669,7 +21779,7 @@ static void updateCrlCb(CrlInfo* old, CrlInfo* cnew) AssertIntEQ(crl1Info.nextDateMaxLen, old->nextDateMaxLen); AssertIntEQ(crl1Info.nextDateFormat, old->nextDateFormat); AssertIntEQ(XMEMCMP( - crl1Info.crlNumber, old->crlNumber, CRL_MAX_NUM_SZ), 0); + crl1Info.crlNumber, old->crlNumber, sizeof(old->crlNumber)), 0); AssertIntEQ(XMEMCMP( crl1Info.issuerHash, old->issuerHash, old->issuerHashLen), 0); AssertIntEQ(XMEMCMP( @@ -31684,7 +21794,7 @@ static void updateCrlCb(CrlInfo* old, CrlInfo* cnew) AssertIntEQ(crlRevInfo.nextDateMaxLen, cnew->nextDateMaxLen); AssertIntEQ(crlRevInfo.nextDateFormat, cnew->nextDateFormat); AssertIntEQ(XMEMCMP( - crlRevInfo.crlNumber, cnew->crlNumber, CRL_MAX_NUM_SZ), 0); + crlRevInfo.crlNumber, cnew->crlNumber, 
sizeof(cnew->crlNumber)), 0); AssertIntEQ(XMEMCMP( crlRevInfo.issuerHash, cnew->issuerHash, cnew->issuerHashLen), 0); AssertIntEQ(XMEMCMP( @@ -32849,253 +22959,6 @@ static int test_wolfSSL_dtls_stateless(void) #endif /* WOLFSSL_DTLS13 && WOLFSSL_SEND_HRR_COOKIE && * HAVE_IO_TESTS_DEPENDENCIES && !SINGLE_THREADED */ -#ifdef HAVE_CERT_CHAIN_VALIDATION -#ifndef WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION -#ifdef WOLFSSL_PEM_TO_DER -#ifndef NO_SHA256 -static int load_ca_into_cm(WOLFSSL_CERT_MANAGER* cm, char* certA) -{ - int ret; - - if ((ret = wolfSSL_CertManagerLoadCA(cm, certA, 0)) != WOLFSSL_SUCCESS) { - fprintf(stderr, "loading cert %s failed\n", certA); - fprintf(stderr, "Error: (%d): %s\n", ret, - wolfSSL_ERR_reason_error_string((word32)ret)); - return -1; - } - - return 0; -} - -static int verify_cert_with_cm(WOLFSSL_CERT_MANAGER* cm, char* certA) -{ - int ret; - if ((ret = wolfSSL_CertManagerVerify(cm, certA, CERT_FILETYPE)) - != WOLFSSL_SUCCESS) { - fprintf(stderr, "could not verify the cert: %s\n", certA); - fprintf(stderr, "Error: (%d): %s\n", ret, - wolfSSL_ERR_reason_error_string((word32)ret)); - return -1; - } - else { - fprintf(stderr, "successfully verified: %s\n", certA); - } - - return 0; -} -#define LOAD_ONE_CA(a, b, c, d) \ - do { \ - (a) = load_ca_into_cm(c, d); \ - if ((a) != 0) \ - return (b); \ - else \ - (b)--; \ - } while(0) - -#define VERIFY_ONE_CERT(a, b, c, d) \ - do { \ - (a) = verify_cert_with_cm(c, d);\ - if ((a) != 0) \ - return (b); \ - else \ - (b)--; \ - } while(0) - -static int test_chainG(WOLFSSL_CERT_MANAGER* cm) -{ - int ret; - int i = -1; - /* Chain G is a valid chain per RFC 5280 section 4.2.1.9 */ - char chainGArr[9][50] = {"certs/ca-cert.pem", - "certs/test-pathlen/chainG-ICA7-pathlen100.pem", - "certs/test-pathlen/chainG-ICA6-pathlen10.pem", - "certs/test-pathlen/chainG-ICA5-pathlen20.pem", - "certs/test-pathlen/chainG-ICA4-pathlen5.pem", - "certs/test-pathlen/chainG-ICA3-pathlen99.pem", - 
"certs/test-pathlen/chainG-ICA2-pathlen1.pem", - "certs/test-pathlen/chainG-ICA1-pathlen0.pem", - "certs/test-pathlen/chainG-entity.pem"}; - - LOAD_ONE_CA(ret, i, cm, chainGArr[0]); /* if failure, i = -1 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[1]); /* if failure, i = -2 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[2]); /* if failure, i = -3 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[3]); /* if failure, i = -4 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[4]); /* if failure, i = -5 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[5]); /* if failure, i = -6 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[6]); /* if failure, i = -7 here */ - LOAD_ONE_CA(ret, i, cm, chainGArr[7]); /* if failure, i = -8 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[1]); /* if failure, i = -9 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[2]); /* if failure, i = -10 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[3]); /* if failure, i = -11 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[4]); /* if failure, i = -12 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[5]); /* if failure, i = -13 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[6]); /* if failure, i = -14 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[7]); /* if failure, i = -15 here */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[8]); /* if failure, i = -16 here */ - - /* test validating the entity twice, should have no effect on pathLen since - * entity/leaf cert */ - VERIFY_ONE_CERT(ret, i, cm, chainGArr[8]); /* if failure, i = -17 here */ - - return ret; -} - -static int test_chainH(WOLFSSL_CERT_MANAGER* cm) -{ - int ret; - int i = -1; - /* Chain H is NOT a valid chain per RFC5280 section 4.2.1.9: - * ICA4-pathlen of 2 signing ICA3-pathlen of 2 (reduce max path len to 2) - * ICA3-pathlen of 2 signing ICA2-pathlen of 2 (reduce max path len to 1) - * ICA2-pathlen of 2 signing ICA1-pathlen of 0 (reduce max path len to 0) - * ICA1-pathlen of 0 signing entity (pathlen is already 0, ERROR) - * Test should successfully verify ICA4, ICA3, 
ICA2 and then fail on ICA1 - */ - char chainHArr[6][50] = {"certs/ca-cert.pem", - "certs/test-pathlen/chainH-ICA4-pathlen2.pem", - "certs/test-pathlen/chainH-ICA3-pathlen2.pem", - "certs/test-pathlen/chainH-ICA2-pathlen2.pem", - "certs/test-pathlen/chainH-ICA1-pathlen0.pem", - "certs/test-pathlen/chainH-entity.pem"}; - - LOAD_ONE_CA(ret, i, cm, chainHArr[0]); /* if failure, i = -1 here */ - LOAD_ONE_CA(ret, i, cm, chainHArr[1]); /* if failure, i = -2 here */ - LOAD_ONE_CA(ret, i, cm, chainHArr[2]); /* if failure, i = -3 here */ - LOAD_ONE_CA(ret, i, cm, chainHArr[3]); /* if failure, i = -4 here */ - LOAD_ONE_CA(ret, i, cm, chainHArr[4]); /* if failure, i = -5 here */ - VERIFY_ONE_CERT(ret, i, cm, chainHArr[1]); /* if failure, i = -6 here */ - VERIFY_ONE_CERT(ret, i, cm, chainHArr[2]); /* if failure, i = -7 here */ - VERIFY_ONE_CERT(ret, i, cm, chainHArr[3]); /* if failure, i = -8 here */ - VERIFY_ONE_CERT(ret, i, cm, chainHArr[4]); /* if failure, i = -9 here */ - VERIFY_ONE_CERT(ret, i, cm, chainHArr[5]); /* if failure, i = -10 here */ - - return ret; -} - -static int test_chainI(WOLFSSL_CERT_MANAGER* cm) -{ - int ret; - int i = -1; - /* Chain I is a valid chain per RFC5280 section 4.2.1.9: - * ICA3-pathlen of 2 signing ICA2 without a pathlen (reduce maxPathLen to 2) - * ICA2-no_pathlen signing ICA1-no_pathlen (reduce maxPathLen to 1) - * ICA1-no_pathlen signing entity (reduce maxPathLen to 0) - * Test should successfully verify ICA4, ICA3, ICA2 and then fail on ICA1 - */ - char chainIArr[5][50] = {"certs/ca-cert.pem", - "certs/test-pathlen/chainI-ICA3-pathlen2.pem", - "certs/test-pathlen/chainI-ICA2-no_pathlen.pem", - "certs/test-pathlen/chainI-ICA1-no_pathlen.pem", - "certs/test-pathlen/chainI-entity.pem"}; - - LOAD_ONE_CA(ret, i, cm, chainIArr[0]); /* if failure, i = -1 here */ - LOAD_ONE_CA(ret, i, cm, chainIArr[1]); /* if failure, i = -2 here */ - LOAD_ONE_CA(ret, i, cm, chainIArr[2]); /* if failure, i = -3 here */ - LOAD_ONE_CA(ret, i, cm, chainIArr[3]); /* 
if failure, i = -4 here */ - VERIFY_ONE_CERT(ret, i, cm, chainIArr[1]); /* if failure, i = -5 here */ - VERIFY_ONE_CERT(ret, i, cm, chainIArr[2]); /* if failure, i = -6 here */ - VERIFY_ONE_CERT(ret, i, cm, chainIArr[3]); /* if failure, i = -7 here */ - VERIFY_ONE_CERT(ret, i, cm, chainIArr[4]); /* if failure, i = -8 here */ - - return ret; -} - -static int test_chainJ(WOLFSSL_CERT_MANAGER* cm) -{ - int ret; - int i = -1; - /* Chain J is NOT a valid chain per RFC5280 section 4.2.1.9: - * ICA4-pathlen of 2 signing ICA3 without a pathlen (reduce maxPathLen to 2) - * ICA3-pathlen of 2 signing ICA2 without a pathlen (reduce maxPathLen to 1) - * ICA2-no_pathlen signing ICA1-no_pathlen (reduce maxPathLen to 0) - * ICA1-no_pathlen signing entity (ERROR, pathlen zero and non-leaf cert) - */ - char chainJArr[6][50] = {"certs/ca-cert.pem", - "certs/test-pathlen/chainJ-ICA4-pathlen2.pem", - "certs/test-pathlen/chainJ-ICA3-no_pathlen.pem", - "certs/test-pathlen/chainJ-ICA2-no_pathlen.pem", - "certs/test-pathlen/chainJ-ICA1-no_pathlen.pem", - "certs/test-pathlen/chainJ-entity.pem"}; - - LOAD_ONE_CA(ret, i, cm, chainJArr[0]); /* if failure, i = -1 here */ - LOAD_ONE_CA(ret, i, cm, chainJArr[1]); /* if failure, i = -2 here */ - LOAD_ONE_CA(ret, i, cm, chainJArr[2]); /* if failure, i = -3 here */ - LOAD_ONE_CA(ret, i, cm, chainJArr[3]); /* if failure, i = -4 here */ - LOAD_ONE_CA(ret, i, cm, chainJArr[4]); /* if failure, i = -5 here */ - VERIFY_ONE_CERT(ret, i, cm, chainJArr[1]); /* if failure, i = -6 here */ - VERIFY_ONE_CERT(ret, i, cm, chainJArr[2]); /* if failure, i = -7 here */ - VERIFY_ONE_CERT(ret, i, cm, chainJArr[3]); /* if failure, i = -8 here */ - VERIFY_ONE_CERT(ret, i, cm, chainJArr[4]); /* if failure, i = -9 here */ - VERIFY_ONE_CERT(ret, i, cm, chainJArr[5]); /* if failure, i = -10 here */ - - return ret; -} -#endif - -static int test_various_pathlen_chains(void) -{ - EXPECT_DECLS; -#ifndef NO_SHA256 - WOLFSSL_CERT_MANAGER* cm = NULL; - - /* Test chain G (large 
chain with varying pathLens) */ - ExpectNotNull(cm = wolfSSL_CertManagerNew()); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(test_chainG(cm), -1); -#else - ExpectIntEQ(test_chainG(cm), 0); -#endif /* NO_WOLFSSL_CLIENT && NO_WOLFSSL_SERVER */ - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - /* end test chain G */ - - /* Test chain H (5 chain with same pathLens) */ - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntLT(test_chainH(cm), 0); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - /* end test chain H */ - - /* Test chain I (only first ICA has pathLen set and it's set to 2, - * followed by 2 ICA's, should pass) */ - ExpectNotNull(cm = wolfSSL_CertManagerNew()); -#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) - ExpectIntEQ(test_chainI(cm), -1); -#else - ExpectIntEQ(test_chainI(cm), 0); -#endif /* NO_WOLFSSL_CLIENT && NO_WOLFSSL_SERVER */ - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - cm = NULL; - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - cm = NULL; - - /* Test chain J (Again only first ICA has pathLen set and it's set to 2, - * this time followed by 3 ICA's, should fail */ - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntLT(test_chainJ(cm), 0); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); - cm = NULL; - - ExpectNotNull(cm = wolfSSL_CertManagerNew()); - ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); - wolfSSL_CertManagerFree(cm); -#endif - - return EXPECT_RESULT(); -} -#endif -#endif -#endif /* !NO_RSA && !NO_SHA && !NO_FILESYSTEM && 
!NO_CERTS */ - #if defined(HAVE_KEYING_MATERIAL) && defined(HAVE_SSL_MEMIO_TESTS_DEPENDENCIES) static int test_export_keying_material_cb(WOLFSSL_CTX *ctx, WOLFSSL *ssl) { @@ -34844,6 +24707,7 @@ static int error_test(void) if (EXPECT_FAIL()) return OPEN_RAN_E; #else + int start_idx = 0; int i; int j = 0; /* Values that are not or no longer error codes. */ @@ -34857,14 +24721,12 @@ static int error_test(void) #if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) || \ defined(HAVE_WEBSERVER) || defined(HAVE_MEMCACHED) - { -11, -12 }, - { -15, -17 }, - { -19, -19 }, - { -26, -27 }, - { -30, WC_SPAN1_FIRST_E + 1 }, -#else - { -9, WC_SPAN1_FIRST_E + 1 }, + {11, 11}, + {17, 15}, + {19, 19}, + {27, 26 }, #endif + { -9, WC_SPAN1_FIRST_E + 1 }, { -124, -124 }, { -167, -169 }, { -300, -300 }, @@ -34882,7 +24744,10 @@ static int error_test(void) * APIs. Check that the values that are not errors map to the unknown * string. */ - for (i = 0; i >= MIN_CODE_E; i--) { +#if defined(OPENSSL_EXTRA) + start_idx = WC_OSSL_V509_V_ERR_MAX - 1; +#endif + for (i = start_idx; i >= MIN_CODE_E; i--) { int this_missing = 0; for (j = 0; j < (int)XELEM_CNT(missing); ++j) { if ((i <= missing[j].first) && (i >= missing[j].last)) { @@ -34953,243 +24818,6 @@ static int test_wolfSSL_ERR_strings(void) return EXPECT_RESULT(); } -static int test_wolfSSL_EVP_shake128(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SHA3) && \ - defined(WOLFSSL_SHAKE128) - const EVP_MD* md = NULL; - - ExpectNotNull(md = EVP_shake128()); - ExpectIntEQ(XSTRNCMP(md, "SHAKE128", XSTRLEN("SHAKE128")), 0); -#endif - - return EXPECT_RESULT(); -} - -static int test_wolfSSL_EVP_shake256(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SHA3) && \ - defined(WOLFSSL_SHAKE256) - const EVP_MD* md = NULL; - - ExpectNotNull(md = EVP_shake256()); - ExpectIntEQ(XSTRNCMP(md, "SHAKE256", XSTRLEN("SHAKE256")), 0); -#endif - - return EXPECT_RESULT(); -} - -/* - * Testing EVP digest API 
with SM3 - */ -static int test_wolfSSL_EVP_sm3(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM3) - EXPECT_DECLS; - const EVP_MD* md = NULL; - EVP_MD_CTX* mdCtx = NULL; - byte data[WC_SM3_BLOCK_SIZE * 4]; - byte hash[WC_SM3_DIGEST_SIZE]; - byte calcHash[WC_SM3_DIGEST_SIZE]; - byte expHash[WC_SM3_DIGEST_SIZE] = { - 0x38, 0x48, 0x15, 0xa7, 0x0e, 0xae, 0x0b, 0x27, - 0x5c, 0xde, 0x9d, 0xa5, 0xd1, 0xa4, 0x30, 0xa1, - 0xca, 0xd4, 0x54, 0x58, 0x44, 0xa2, 0x96, 0x1b, - 0xd7, 0x14, 0x80, 0x3f, 0x80, 0x1a, 0x07, 0xb6 - }; - word32 chunk; - word32 i; - unsigned int sz; - int ret; - - XMEMSET(data, 0, sizeof(data)); - - md = EVP_sm3(); - ExpectTrue(md != NULL); - ExpectIntEQ(XSTRNCMP(md, "SM3", XSTRLEN("SM3")), 0); - mdCtx = EVP_MD_CTX_new(); - ExpectTrue(mdCtx != NULL); - - /* Invalid Parameters */ - ExpectIntEQ(EVP_DigestInit(NULL, md), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - /* Valid Parameters */ - ExpectIntEQ(EVP_DigestInit(mdCtx, md), WOLFSSL_SUCCESS); - - ExpectIntEQ(EVP_DigestUpdate(NULL, NULL, 1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, NULL, 1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestUpdate(NULL, data, 1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Valid Parameters */ - ExpectIntEQ(EVP_DigestUpdate(mdCtx, NULL, 0), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, 1), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE - 2), - WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE * 2), - WOLFSSL_SUCCESS); - /* Ensure too many bytes for lengths. 
*/ - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_PAD_SIZE), - WOLFSSL_SUCCESS); - - /* Invalid Parameters */ - ExpectIntEQ(EVP_DigestFinal(NULL, NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestFinal(mdCtx, NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestFinal(NULL, hash, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestFinal(NULL, hash, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - ExpectIntEQ(EVP_DigestFinal(mdCtx, NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); - - /* Valid Parameters */ - ExpectIntEQ(EVP_DigestFinal(mdCtx, hash, NULL), WOLFSSL_SUCCESS); - ExpectBufEQ(hash, expHash, WC_SM3_DIGEST_SIZE); - - /* Chunk tests. */ - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, sizeof(data)), WOLFSSL_SUCCESS); - ExpectIntEQ(EVP_DigestFinal(mdCtx, calcHash, &sz), WOLFSSL_SUCCESS); - ExpectIntEQ(sz, WC_SM3_DIGEST_SIZE); - for (chunk = 1; chunk <= WC_SM3_BLOCK_SIZE + 1; chunk++) { - for (i = 0; i + chunk <= (word32)sizeof(data); i += chunk) { - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data + i, chunk), - WOLFSSL_SUCCESS); - } - if (i < (word32)sizeof(data)) { - ExpectIntEQ(EVP_DigestUpdate(mdCtx, data + i, - (word32)sizeof(data) - i), WOLFSSL_SUCCESS); - } - ExpectIntEQ(EVP_DigestFinal(mdCtx, hash, NULL), WOLFSSL_SUCCESS); - ExpectBufEQ(hash, calcHash, WC_SM3_DIGEST_SIZE); - } - - /* Not testing when the low 32-bit length overflows. 
*/ - - ret = EVP_MD_CTX_cleanup(mdCtx); - ExpectIntEQ(ret, WOLFSSL_SUCCESS); - wolfSSL_EVP_MD_CTX_free(mdCtx); - - res = EXPECT_RESULT(); -#endif - return res; -} /* END test_EVP_sm3 */ - -static int test_EVP_blake2(void) -{ - EXPECT_DECLS; -#if defined(OPENSSL_EXTRA) && (defined(HAVE_BLAKE2) || defined(HAVE_BLAKE2S)) - const EVP_MD* md = NULL; - (void)md; - -#if defined(HAVE_BLAKE2) - ExpectNotNull(md = EVP_blake2b512()); - ExpectIntEQ(XSTRNCMP(md, "BLAKE2b512", XSTRLEN("BLAKE2b512")), 0); -#endif - -#if defined(HAVE_BLAKE2S) - ExpectNotNull(md = EVP_blake2s256()); - ExpectIntEQ(XSTRNCMP(md, "BLAKE2s256", XSTRLEN("BLAKE2s256")), 0); -#endif -#endif - - return EXPECT_RESULT(); -} - -#if defined(OPENSSL_EXTRA) -static void list_md_fn(const EVP_MD* m, const char* from, - const char* to, void* arg) -{ - const char* mn; - BIO *bio; - - (void) from; - (void) to; - (void) arg; - (void) mn; - (void) bio; - - if (!m) { - /* alias */ - AssertNull(m); - AssertNotNull(to); - } - else { - AssertNotNull(m); - AssertNull(to); - } - - AssertNotNull(from); - -#if !defined(NO_FILESYSTEM) && defined(DEBUG_WOLFSSL_VERBOSE) - mn = EVP_get_digestbyname(from); - /* print to stderr */ - AssertNotNull(arg); - - bio = BIO_new(BIO_s_file()); - BIO_set_fp(bio, arg, BIO_NOCLOSE); - BIO_printf(bio, "Use %s message digest algorithm\n", mn); - BIO_free(bio); -#endif -} -#endif - -static int test_EVP_MD_do_all(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) - EVP_MD_do_all(NULL, stderr); - - EVP_MD_do_all(list_md_fn, stderr); - - res = TEST_SUCCESS; -#endif - - return res; -} - -#if defined(OPENSSL_EXTRA) -static void obj_name_t(const OBJ_NAME* nm, void* arg) -{ - (void)arg; - (void)nm; - - AssertIntGT(nm->type, OBJ_NAME_TYPE_UNDEF); - -#if !defined(NO_FILESYSTEM) && defined(DEBUG_WOLFSSL_VERBOSE) - /* print to stderr */ - AssertNotNull(arg); - - BIO *bio = BIO_new(BIO_s_file()); - BIO_set_fp(bio, arg, BIO_NOCLOSE); - BIO_printf(bio, "%s\n", nm); - BIO_free(bio); -#endif -} - 
-#endif -static int test_OBJ_NAME_do_all(void) -{ - int res = TEST_SKIPPED; -#if defined(OPENSSL_EXTRA) - - OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, NULL, NULL); - - OBJ_NAME_do_all(OBJ_NAME_TYPE_CIPHER_METH, NULL, stderr); - - OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, obj_name_t, stderr); - OBJ_NAME_do_all(OBJ_NAME_TYPE_PKEY_METH, obj_name_t, stderr); - OBJ_NAME_do_all(OBJ_NAME_TYPE_COMP_METH, obj_name_t, stderr); - OBJ_NAME_do_all(OBJ_NAME_TYPE_NUM, obj_name_t, stderr); - OBJ_NAME_do_all(OBJ_NAME_TYPE_UNDEF, obj_name_t, stderr); - OBJ_NAME_do_all(OBJ_NAME_TYPE_CIPHER_METH, obj_name_t, stderr); - OBJ_NAME_do_all(-1, obj_name_t, stderr); - - res = TEST_SUCCESS; -#endif - - return res; -} static int test_SSL_CIPHER_get_xxx(void) { @@ -41713,11 +31341,6 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_lhash), -#ifndef NO_BIO - TEST_DECL(test_wolfSSL_BIO), - TEST_DECL(test_wolfSSL_BIO_BIO_ring_read), -#endif - TEST_DECL(test_wolfSSL_certs), TEST_DECL(test_wolfSSL_X509_ext_d2i), @@ -41725,90 +31348,11 @@ TEST_CASE testCases[] = { TEST_SSL_PEM_DECLS, /* EVP API testing */ - TEST_DECL(test_wolfSSL_EVP_ENCODE_CTX_new), - TEST_DECL(test_wolfSSL_EVP_ENCODE_CTX_free), - TEST_DECL(test_wolfSSL_EVP_EncodeInit), - TEST_DECL(test_wolfSSL_EVP_EncodeUpdate), - TEST_DECL(test_wolfSSL_EVP_CipherUpdate_Null), - TEST_DECL(test_wolfSSL_EVP_CIPHER_type_string), - TEST_DECL(test_wolfSSL_EVP_EncodeFinal), - TEST_DECL(test_wolfSSL_EVP_DecodeInit), - TEST_DECL(test_wolfSSL_EVP_DecodeUpdate), - TEST_DECL(test_wolfSSL_EVP_DecodeFinal), + TEST_EVP_ENC_DECLS, + TEST_EVP_DIGEST_DECLS, + TEST_EVP_CIPHER_DECLS, + TEST_EVP_PKEY_DECLS, - TEST_DECL(test_wolfSSL_EVP_shake128), - TEST_DECL(test_wolfSSL_EVP_shake256), - TEST_DECL(test_wolfSSL_EVP_sm3), - TEST_DECL(test_EVP_blake2), -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_EVP_md4), - TEST_DECL(test_wolfSSL_EVP_ripemd160), - TEST_DECL(test_wolfSSL_EVP_get_digestbynid), - TEST_DECL(test_wolfSSL_EVP_MD_nid), - - 
TEST_DECL(test_wolfSSL_EVP_DigestFinal_ex), - TEST_DECL(test_wolfSSL_EVP_DigestFinalXOF), -#endif - - TEST_DECL(test_EVP_MD_do_all), - TEST_DECL(test_wolfSSL_EVP_MD_size), - TEST_DECL(test_wolfSSL_EVP_MD_pkey_type), - TEST_DECL(test_wolfSSL_EVP_Digest), - TEST_DECL(test_wolfSSL_EVP_Digest_all), - TEST_DECL(test_wolfSSL_EVP_MD_hmac_signing), - TEST_DECL(test_wolfSSL_EVP_MD_rsa_signing), - TEST_DECL(test_wolfSSL_EVP_MD_ecc_signing), - - TEST_DECL(test_wolfssl_EVP_aes_gcm), - TEST_DECL(test_wolfssl_EVP_aes_gcm_AAD_2_parts), - TEST_DECL(test_wolfssl_EVP_aes_gcm_zeroLen), - TEST_DECL(test_wolfssl_EVP_aes_ccm), - TEST_DECL(test_wolfssl_EVP_aes_ccm_zeroLen), - TEST_DECL(test_wolfssl_EVP_chacha20), - TEST_DECL(test_wolfssl_EVP_chacha20_poly1305), - TEST_DECL(test_wolfssl_EVP_sm4_ecb), - TEST_DECL(test_wolfssl_EVP_sm4_cbc), - TEST_DECL(test_wolfssl_EVP_sm4_ctr), - TEST_DECL(test_wolfssl_EVP_sm4_gcm_zeroLen), - TEST_DECL(test_wolfssl_EVP_sm4_gcm), - TEST_DECL(test_wolfssl_EVP_sm4_ccm_zeroLen), - TEST_DECL(test_wolfssl_EVP_sm4_ccm), -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_EVP_aes_256_gcm), - TEST_DECL(test_wolfSSL_EVP_aes_192_gcm), - TEST_DECL(test_wolfSSL_EVP_aes_256_ccm), - TEST_DECL(test_wolfSSL_EVP_aes_192_ccm), - TEST_DECL(test_wolfSSL_EVP_aes_128_ccm), - TEST_DECL(test_wolfSSL_EVP_rc4), - TEST_DECL(test_wolfSSL_EVP_enc_null), - TEST_DECL(test_wolfSSL_EVP_rc2_cbc), - TEST_DECL(test_wolfSSL_EVP_mdc2), - - TEST_DECL(test_evp_cipher_aes_gcm), -#endif - TEST_DECL(test_wolfssl_EVP_aria_gcm), - TEST_DECL(test_wolfSSL_EVP_Cipher_extra), -#ifdef OPENSSL_EXTRA - TEST_DECL(test_wolfSSL_EVP_get_cipherbynid), - TEST_DECL(test_wolfSSL_EVP_CIPHER_CTX), -#endif -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_EVP_CIPHER_CTX_iv_length), - TEST_DECL(test_wolfSSL_EVP_CIPHER_CTX_key_length), - TEST_DECL(test_wolfSSL_EVP_CIPHER_CTX_set_iv), - TEST_DECL(test_wolfSSL_EVP_CIPHER_block_size), - TEST_DECL(test_wolfSSL_EVP_CIPHER_iv_length), - TEST_DECL(test_wolfSSL_EVP_X_STATE), - 
TEST_DECL(test_wolfSSL_EVP_X_STATE_LEN), - TEST_DECL(test_wolfSSL_EVP_BytesToKey), -#endif - - TEST_DECL(test_wolfSSL_EVP_PKEY_print_public), - TEST_DECL(test_wolfSSL_EVP_PKEY_new_mac_key), - TEST_DECL(test_wolfSSL_EVP_PKEY_new_CMAC_key), - TEST_DECL(test_wolfSSL_EVP_PKEY_up_ref), - TEST_DECL(test_wolfSSL_EVP_PKEY_hkdf), - TEST_DECL(test_wolfSSL_EVP_PKEY_derive), TEST_DECL(test_wolfSSL_d2i_and_i2d_PublicKey), TEST_DECL(test_wolfSSL_d2i_and_i2d_PublicKey_ecc), #ifndef NO_BIO @@ -41821,43 +31365,9 @@ TEST_CASE testCases[] = { #ifndef NO_BIO TEST_DECL(test_wolfSSL_d2i_PrivateKeys_bio), #endif /* !NO_BIO */ -#endif -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_EVP_PKEY_set1_get1_DSA), - TEST_DECL(test_wolfSSL_EVP_PKEY_set1_get1_EC_KEY), - TEST_DECL(test_wolfSSL_EVP_PKEY_set1_get1_DH), - TEST_DECL(test_wolfSSL_EVP_PKEY_assign), - TEST_DECL(test_wolfSSL_EVP_PKEY_assign_DH), - TEST_DECL(test_wolfSSL_EVP_PKEY_base_id), - TEST_DECL(test_wolfSSL_EVP_PKEY_id), - TEST_DECL(test_wolfSSL_EVP_PKEY_paramgen), - TEST_DECL(test_wolfSSL_EVP_PKEY_keygen), - TEST_DECL(test_wolfSSL_EVP_PKEY_keygen_init), - TEST_DECL(test_wolfSSL_EVP_PKEY_missing_parameters), - TEST_DECL(test_wolfSSL_EVP_PKEY_copy_parameters), - TEST_DECL(test_wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits), - TEST_DECL(test_wolfSSL_EVP_PKEY_CTX_new_id), - TEST_DECL(test_wolfSSL_EVP_PKEY_get0_EC_KEY), #endif - TEST_DECL(test_EVP_PKEY_rsa), - TEST_DECL(test_wc_RsaPSS_DigitalSignVerify), - TEST_DECL(test_EVP_PKEY_ec), - TEST_DECL(test_wolfSSL_EVP_PKEY_encrypt), - TEST_DECL(test_wolfSSL_EVP_PKEY_sign_verify_rsa), - TEST_DECL(test_wolfSSL_EVP_PKEY_sign_verify_dsa), - TEST_DECL(test_wolfSSL_EVP_PKEY_sign_verify_ec), - TEST_DECL(test_EVP_PKEY_cmp), -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_EVP_SignInit_ex), - TEST_DECL(test_wolfSSL_EVP_PKEY_param_check), - TEST_DECL(test_wolfSSL_QT_EVP_PKEY_CTX_free), -#endif - - TEST_DECL(test_wolfSSL_EVP_PBE_scrypt), - - TEST_DECL(test_wolfSSL_CTX_add_extra_chain_cert), #if 
!defined(NO_WOLFSSL_CLIENT) && !defined(NO_WOLFSSL_SERVER) TEST_DECL(test_wolfSSL_ERR_peek_last_error_line), #endif @@ -41902,6 +31412,8 @@ TEST_CASE testCases[] = { TEST_OSSL_X509_LOOKUP_DECLS, TEST_DECL(test_GENERAL_NAME_set0_othername), + TEST_DECL(test_RID_GENERAL_NAME_free), + TEST_DECL(test_RID_X509_get_ext_d2i), TEST_DECL(test_othername_and_SID_ext), TEST_DECL(test_wolfSSL_dup_CA_list), /* OpenSSL sk_X509 API test */ @@ -41915,10 +31427,7 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_X509_REQ_print), /* RAND compatibility API */ - TEST_DECL(test_wolfSSL_RAND_set_rand_method), - TEST_DECL(test_wolfSSL_RAND_bytes), - TEST_DECL(test_wolfSSL_RAND), - TEST_DECL(test_wolfSSL_RAND_poll), + TEST_OSSL_RAND_DECLS, /* BN compatibility API */ TEST_OSSL_ASN1_BN_DECLS, @@ -41931,21 +31440,10 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_PKCS8_d2i), /* OpenSSL PKCS7 API test */ - TEST_DECL(test_wolfssl_PKCS7), - TEST_DECL(test_wolfSSL_PKCS7_certs), - TEST_DECL(test_wolfSSL_PKCS7_sign), - TEST_DECL(test_wolfSSL_PKCS7_SIGNED_new), -#ifndef NO_BIO - TEST_DECL(test_wolfSSL_PEM_write_bio_PKCS7), - TEST_DECL(test_wolfSSL_PEM_write_bio_encryptedKey), -#ifdef HAVE_SMIME - TEST_DECL(test_wolfSSL_SMIME_read_PKCS7), - TEST_DECL(test_wolfSSL_SMIME_write_PKCS7), -#endif /* HAVE_SMIME */ -#endif /* !NO_BIO */ - + TEST_OSSL_PKCS7_DECLS, + TEST_OSSL_SMIME_DECLS, /* OpenSSL PKCS12 API test */ - TEST_DECL(test_wolfSSL_PKCS12), + TEST_OSSL_PKCS12_DECLS, /* Can't memory test as callbacks use Assert. 
*/ TEST_DECL(test_error_queue_per_thread), @@ -41955,15 +31453,7 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_ERR_print_errors), #endif - TEST_DECL(test_OBJ_NAME_do_all), - TEST_DECL(test_wolfSSL_OBJ), - TEST_DECL(test_wolfSSL_OBJ_cmp), - TEST_DECL(test_wolfSSL_OBJ_txt2nid), - TEST_DECL(test_wolfSSL_OBJ_txt2obj), -#ifdef OPENSSL_ALL - TEST_DECL(test_wolfSSL_OBJ_ln), - TEST_DECL(test_wolfSSL_OBJ_sn), -#endif + TEST_OSSL_OBJ_DECLS, #ifndef NO_BIO TEST_OSSL_BIO_DECLS, @@ -42057,26 +31547,9 @@ TEST_CASE testCases[] = { /********************************* * CertManager API tests *********************************/ + TEST_CERTMAN_DECLS, - TEST_DECL(test_wolfSSL_CertManagerAPI), - TEST_DECL(test_wolfSSL_CertManagerLoadCABuffer), - TEST_DECL(test_wolfSSL_CertManagerLoadCABuffer_ex), - TEST_DECL(test_wolfSSL_CertManagerLoadCABufferType), - TEST_DECL(test_wolfSSL_CertManagerGetCerts), - TEST_DECL(test_wolfSSL_CertManagerSetVerify), - TEST_DECL(test_wolfSSL_CertManagerNameConstraint), - TEST_DECL(test_wolfSSL_CertManagerNameConstraint2), - TEST_DECL(test_wolfSSL_CertManagerNameConstraint3), - TEST_DECL(test_wolfSSL_CertManagerNameConstraint4), - TEST_DECL(test_wolfSSL_CertManagerNameConstraint5), - TEST_DECL(test_wolfSSL_CertManagerCRL), - TEST_DECL(test_wolfSSL_CRL_duplicate_extensions), - TEST_DECL(test_wolfSSL_CertManagerCheckOCSPResponse), TEST_DECL(test_wolfSSL_CheckOCSPResponse), -#if defined(HAVE_CERT_CHAIN_VALIDATION) && !defined(WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION) && \ - defined(WOLFSSL_PEM_TO_DER) - TEST_DECL(test_various_pathlen_chains), -#endif /********************************* * SSL/TLS API tests @@ -42169,6 +31642,7 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_CTX_load_verify_locations), /* Large number of memory allocations. 
*/ TEST_DECL(test_wolfSSL_CTX_load_system_CA_certs), + TEST_DECL(test_wolfSSL_CTX_add_extra_chain_cert), #if defined(HAVE_CERT_CHAIN_VALIDATION) && \ !defined(WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION) @@ -42183,6 +31657,7 @@ TEST_CASE testCases[] = { TEST_DECL(test_wolfSSL_use_certificate_chain_file), TEST_DECL(test_wolfSSL_CTX_trust_peer_cert), TEST_DECL(test_wolfSSL_CTX_LoadCRL), + TEST_DECL(test_wolfSSL_CTX_LoadCRL_largeCRLnum), TEST_DECL(test_wolfSSL_crl_update_cb), TEST_DECL(test_wolfSSL_CTX_SetTmpDH_file), TEST_DECL(test_wolfSSL_CTX_SetTmpDH_buffer), diff --git a/tests/api/api.h b/tests/api/api.h index 125ec273d..fa14484c9 100644 --- a/tests/api/api.h +++ b/tests/api/api.h @@ -52,6 +52,11 @@ #define FOURK_BUF 4096 #endif +#if !defined(NO_RSA) && !defined(NO_SHA) && !defined(NO_FILESYSTEM) && \ + !defined(NO_CERTS) && \ + (!defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH)) + #define HAVE_CERT_CHAIN_VALIDATION +#endif #ifndef NO_RSA #define GEN_BUF 294 diff --git a/tests/api/include.am b/tests/api/include.am index 79d15c0ef..59091b081 100644 --- a/tests/api/include.am +++ b/tests/api/include.am @@ -93,6 +93,16 @@ tests_unit_test_SOURCES += tests/api/test_ossl_x509_str.c tests_unit_test_SOURCES += tests/api/test_ossl_x509_lu.c # SSL PEM tests_unit_test_SOURCES += tests/api/test_ossl_pem.c +# SSL Random +tests_unit_test_SOURCES += tests/api/test_ossl_rand.c +tests_unit_test_SOURCES += tests/api/test_ossl_obj.c +tests_unit_test_SOURCES += tests/api/test_ossl_p7p12.c +# EVP APIs +tests_unit_test_SOURCES += tests/api/test_evp_digest.c +tests_unit_test_SOURCES += tests/api/test_evp_cipher.c +tests_unit_test_SOURCES += tests/api/test_evp_pkey.c +# CertificateManager +tests_unit_test_SOURCES += tests/api/test_certman.c # TLS 1.3 specific tests_unit_test_SOURCES += tests/api/test_tls13.c endif @@ -174,5 +184,12 @@ EXTRA_DIST += tests/api/test_ossl_x509_info.h EXTRA_DIST += tests/api/test_ossl_x509_str.h EXTRA_DIST += tests/api/test_ossl_x509_lu.h 
EXTRA_DIST += tests/api/test_ossl_pem.h +EXTRA_DIST += tests/api/test_ossl_rand.h +EXTRA_DIST += tests/api/test_ossl_obj.h +EXTRA_DIST += tests/api/test_ossl_p7p12.h +EXTRA_DIST += tests/api/test_evp_digest.h +EXTRA_DIST += tests/api/test_evp_cipher.h +EXTRA_DIST += tests/api/test_evp_pkey.h +EXTRA_DIST += tests/api/test_certman.h EXTRA_DIST += tests/api/test_tls13.h diff --git a/tests/api/test_asn.c b/tests/api/test_asn.c index 794029da1..c3907b394 100644 --- a/tests/api/test_asn.c +++ b/tests/api/test_asn.c @@ -23,6 +23,8 @@ #include +#include + #if defined(WC_ENABLE_ASYM_KEY_EXPORT) && defined(HAVE_ED25519) static int test_SetAsymKeyDer_once(byte* privKey, word32 privKeySz, byte* pubKey, word32 pubKeySz, byte* trueDer, word32 trueDerSz) @@ -638,3 +640,150 @@ int test_wc_IndexSequenceOf(void) return EXPECT_RESULT(); } + +int test_wolfssl_local_MatchBaseName(void) +{ + EXPECT_DECLS; + +#if !defined(NO_CERTS) && !defined(NO_ASN) && !defined(IGNORE_NAME_CONSTRAINTS) + /* + * Tests for DNS type (ASN_DNS_TYPE = 0x02) + */ + + /* Positive tests - should match */ + /* Exact match */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "domain.com", 10, "domain.com", 10), 1); + /* Case insensitive match */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "DOMAIN.COM", 10, "domain.com", 10), 1); + /* Subdomain match (RFC 5280: adding labels to the left) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "sub.domain.com", 14, "domain.com", 10), 1); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "a.b.domain.com", 14, "domain.com", 10), 1); + /* Leading dot constraint with subdomain (not RFC 5280 compliant for DNS, + * but kept for backwards compatibility) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "sub.domain.com", 14, ".domain.com", 11), 1); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "a.b.domain.com", 14, ".domain.com", 11), 1); + + /* Negative tests - should NOT match */ + /* Bug #3: fakedomain.com should 
NOT match domain.com (no dot boundary) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "fakedomain.com", 14, "domain.com", 10), 0); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "notdomain.com", 13, "domain.com", 10), 0); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "xexample.com", 12, "example.com", 11), 0); + /* Bug #3: fakedomain.com should NOT match .domain.com */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "fakedomain.com", 14, ".domain.com", 11), 0); + /* domain.com should NOT match .domain.com (leading dot requires subdomain) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "domain.com", 10, ".domain.com", 11), 0); + /* Different domain */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "other.com", 9, "domain.com", 10), 0); + /* Name starting with dot */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + ".domain.com", 11, "domain.com", 10), 0); + + /* + * Tests for email type (ASN_RFC822_TYPE = 0x01) + */ + + /* Positive tests - should match */ + /* Exact email match */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain.com", 15, "user@domain.com", 15), 1); + /* Email with domain constraint (leading dot) - subdomain present */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@sub.domain.com", 19, ".domain.com", 11), 1); + /* Email with domain constraint (no leading dot) - exact domain */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain.com", 15, "domain.com", 10), 1); + + /* Negative tests - should NOT match */ + /* user@domain.com should NOT match .domain.com (subdomain required) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain.com", 15, ".domain.com", 11), 0); + /* user@sub.domain.com should NOT match domain.com (exact domain only) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@sub.domain.com", 19, "domain.com", 10), 0); + /* @ at start is invalid */ + 
ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "@domain.com", 11, ".domain.com", 11), 0); + /* @ at end is invalid */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@", 5, ".domain.com", 11), 0); + /* double @ is invalid */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@@domain.com", 16, ".domain.com", 11), 0); + /* multiple @ is invalid */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain@extra.com", 21, ".domain.com", 11), 0); + /* No @ in email name */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "userdomain.com", 14, ".domain.com", 11), 0); + /* Email domain doesn't match constraint */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@other.com", 14, ".domain.com", 11), 0); + /* Email suffix without dot boundary (fakedomain) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@fakedomain.com", 19, ".domain.com", 11), 0); + /* Base constraint with invalid @ position */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain.com", 15, "@domain.com", 11), 0); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_RFC822_TYPE, + "user@domain.com", 15, "user@", 5), 0); + + /* + * Tests for directory type (ASN_DIR_TYPE = 0x04) + */ + + /* Positive tests - should match */ + /* Exact match */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DIR_TYPE, + "CN=test", 7, "CN=test", 7), 1); + /* Prefix match (name longer than base) */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DIR_TYPE, + "CN=test,O=org", 13, "CN=test", 7), 1); + + /* Negative tests - should NOT match */ + /* Different content */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DIR_TYPE, + "CN=other", 8, "CN=test", 7), 0); + /* Case sensitive for directory */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DIR_TYPE, + "CN=TEST", 7, "CN=test", 7), 0); + + /* + * Edge cases and error handling + */ + + /* NULL pointers */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + NULL, 
10, "domain.com", 10), 0); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "domain.com", 10, NULL, 10), 0); + /* Empty/zero size */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "", 0, "domain.com", 10), 0); + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "domain.com", 10, "", 0), 0); + /* Invalid type */ + ExpectIntEQ(wolfssl_local_MatchBaseName(0xFF, + "domain.com", 10, "domain.com", 10), 0); + /* Name starting with dot */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + ".", 1, ".", 1), 0); + /* Name shorter than base */ + ExpectIntEQ(wolfssl_local_MatchBaseName(ASN_DNS_TYPE, + "a.com", 5, "domain.com", 10), 0); + +#endif /* !NO_CERTS && !NO_ASN && !IGNORE_NAME_CONSTRAINTS */ + + return EXPECT_RESULT(); +} diff --git a/tests/api/test_asn.h b/tests/api/test_asn.h index 1d2e20b2f..e78bb145b 100644 --- a/tests/api/test_asn.h +++ b/tests/api/test_asn.h @@ -27,10 +27,12 @@ int test_SetAsymKeyDer(void); int test_GetSetShortInt(void); int test_wc_IndexSequenceOf(void); +int test_wolfssl_local_MatchBaseName(void); #define TEST_ASN_DECLS \ TEST_DECL_GROUP("asn", test_SetAsymKeyDer), \ TEST_DECL_GROUP("asn", test_GetSetShortInt), \ - TEST_DECL_GROUP("asn", test_wc_IndexSequenceOf) + TEST_DECL_GROUP("asn", test_wc_IndexSequenceOf), \ + TEST_DECL_GROUP("asn", test_wolfssl_local_MatchBaseName) #endif /* WOLFCRYPT_TEST_ASN_H */ diff --git a/tests/api/test_certman.c b/tests/api/test_certman.c new file mode 100644 index 000000000..a2ff33373 --- /dev/null +++ b/tests/api/test_certman.c @@ -0,0 +1,2370 @@ +/* test_certman.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include +#include +#include + +int test_wolfSSL_CertManagerAPI(void) +{ + EXPECT_DECLS; +#ifndef NO_CERTS + WOLFSSL_CERT_MANAGER* cm = NULL; + unsigned char c = 0; + + ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); + + wolfSSL_CertManagerFree(NULL); + ExpectIntEQ(wolfSSL_CertManager_up_ref(NULL), 0); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); +#ifdef WOLFSSL_TRUST_PEER_CERT + ExpectIntEQ(wolfSSL_CertManagerUnload_trust_peers(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); +#endif + + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer_ex(NULL, &c, 1, + WOLFSSL_FILETYPE_ASN1, 0, 0), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + +#if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, NULL, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, NULL, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, &c, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, NULL, 1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(NULL, &c, 1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, NULL, 1, + 
WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, &c, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, &c, 1, -1), + WC_NO_ERR_TRACE(WOLFSSL_BAD_FILETYPE)); +#endif + +#if !defined(NO_FILESYSTEM) + { + #ifdef WOLFSSL_PEM_TO_DER + const char* ca_cert = "./certs/ca-cert.pem"; + #if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) + const char* ca_cert_der = "./certs/ca-cert.der"; + #endif + #else + const char* ca_cert = "./certs/ca-cert.der"; + #endif + const char* ca_path = "./certs"; + + #if !defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH) + ExpectIntEQ(wolfSSL_CertManagerVerify(NULL, NULL, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, NULL, WOLFSSL_FILETYPE_ASN1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerify(NULL, ca_cert, + WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, ca_cert, -1), + WC_NO_ERR_TRACE(WOLFSSL_BAD_FILETYPE)); +#ifdef WOLFSSL_PEM_TO_DER + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, ca_cert_der, + WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(ASN_NO_PEM_HEADER)); +#endif + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, "no-file", + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(WOLFSSL_BAD_FILE)); + #endif + + ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, NULL, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, ca_cert, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, NULL, ca_path), + WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(wolfSSL_CertManagerLoadCA(NULL, ca_cert, ca_path), + WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + } +#endif + +#ifdef OPENSSL_COMPATIBLE_DEFAULTS + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 0), 1); +#elif !defined(HAVE_CRL) + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 0), + 
WC_NO_ERR_TRACE(NOT_COMPILED_IN)); +#endif + + ExpectIntEQ(wolfSSL_CertManagerDisableCRL(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerDisableCRL(cm), 1); +#ifdef HAVE_CRL + /* Test APIs when CRL is disabled. */ +#ifdef HAVE_CRL_IO + ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(cm, NULL), 1); +#endif + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, + sizeof_server_cert_der_2048), 1); + ExpectIntEQ(wolfSSL_CertManagerFreeCRL(cm), 1); +#endif + + /* OCSP */ + ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(NULL, 0), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSP(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); +#if !defined(HAVE_CERTIFICATE_STATUS_REQUEST) && \ + !defined(HAVE_CERTIFICATE_STATUS_REQUEST_V2) + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(cm), + WC_NO_ERR_TRACE(NOT_COMPILED_IN)); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(cm), + WC_NO_ERR_TRACE(NOT_COMPILED_IN)); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(cm), + WC_NO_ERR_TRACE(NOT_COMPILED_IN)); +#endif + +#ifdef HAVE_OCSP + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, NULL, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, NULL, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, &c, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, NULL, 1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(NULL, &c, 1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, NULL, 1), + 
WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, &c, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + + ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(NULL, NULL, 0, + NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, NULL, 1, + NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(NULL, &c, 1, + NULL, NULL, NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + + ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(NULL, ""), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, NULL), 1); + + ExpectIntEQ(wolfSSL_CertManagerSetOCSP_Cb(NULL, NULL, NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerSetOCSP_Cb(cm, NULL, NULL, NULL), 1); + + ExpectIntEQ(wolfSSL_CertManagerDisableOCSP(cm), 1); + /* Test APIs when OCSP is disabled. 
*/ + ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, &c, 1, + NULL, NULL, NULL, NULL), 1); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, &c, 1), 1); + +#endif + + ExpectIntEQ(wolfSSL_CertManager_up_ref(cm), 1); + if (EXPECT_SUCCESS()) { + wolfSSL_CertManagerFree(cm); + } + wolfSSL_CertManagerFree(cm); + cm = NULL; + + ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); + +#ifdef HAVE_OCSP + ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(cm, WOLFSSL_OCSP_URL_OVERRIDE | + WOLFSSL_OCSP_CHECKALL), 1); +#if defined(HAVE_CERTIFICATE_STATUS_REQUEST) || \ + defined(HAVE_CERTIFICATE_STATUS_REQUEST_V2) + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPStapling(cm), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPStapling(cm), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSPMustStaple(cm), 1); + ExpectIntEQ(wolfSSL_CertManagerDisableOCSPMustStaple(cm), 1); +#endif + + ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, ""), 1); + ExpectIntEQ(wolfSSL_CertManagerSetOCSPOverrideURL(cm, ""), 1); +#endif + +#ifdef WOLFSSL_TRUST_PEER_CERT + ExpectIntEQ(wolfSSL_CertManagerUnload_trust_peers(cm), 1); +#endif + wolfSSL_CertManagerFree(cm); +#endif + return EXPECT_RESULT(); +} + +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) +static int test_cm_load_ca_buffer(const byte* cert_buf, size_t cert_sz, + int file_type) +{ + int ret; + WOLFSSL_CERT_MANAGER* cm; + + cm = wolfSSL_CertManagerNew(); + if (cm == NULL) { + fprintf(stderr, "test_cm_load_ca failed\n"); + return -1; + } + + ret = wolfSSL_CertManagerLoadCABuffer(cm, cert_buf, (sword32)cert_sz, + file_type); + + wolfSSL_CertManagerFree(cm); + + return ret; +} + +static int test_cm_load_ca_file(const char* ca_cert_file) +{ + int ret = 0; + byte* cert_buf = NULL; + size_t cert_sz = 0; +#if defined(WOLFSSL_PEM_TO_DER) + DerBuffer* pDer = NULL; +#endif + + ret = load_file(ca_cert_file, &cert_buf, 
&cert_sz); + if (ret == 0) { + /* normal test */ + ret = test_cm_load_ca_buffer(cert_buf, cert_sz, CERT_FILETYPE); + + if (ret == WOLFSSL_SUCCESS) { + /* test including null terminator in length */ + byte* tmp = (byte*)realloc(cert_buf, cert_sz+1); + if (tmp == NULL) { + ret = MEMORY_E; + } + else { + cert_buf = tmp; + cert_buf[cert_sz] = '\0'; + ret = test_cm_load_ca_buffer(cert_buf, cert_sz+1, + CERT_FILETYPE); + } + + } + + #if defined(WOLFSSL_PEM_TO_DER) + if (ret == WOLFSSL_SUCCESS) { + /* test loading DER */ + ret = wc_PemToDer(cert_buf, (sword32)cert_sz, CA_TYPE, &pDer, + NULL, NULL, NULL); + if (ret == 0 && pDer != NULL) { + ret = test_cm_load_ca_buffer(pDer->buffer, pDer->length, + WOLFSSL_FILETYPE_ASN1); + + wc_FreeDer(&pDer); + } + } + #endif + + } + free(cert_buf); + + return ret; +} + +static int test_cm_load_ca_buffer_ex(const byte* cert_buf, size_t cert_sz, + int file_type, word32 flags) +{ + int ret; + WOLFSSL_CERT_MANAGER* cm; + + cm = wolfSSL_CertManagerNew(); + if (cm == NULL) { + fprintf(stderr, "test_cm_load_ca failed\n"); + return -1; + } + + ret = wolfSSL_CertManagerLoadCABuffer_ex(cm, cert_buf, (sword32)cert_sz, + file_type, 0, flags); + + wolfSSL_CertManagerFree(cm); + + return ret; +} + +static int test_cm_load_ca_file_ex(const char* ca_cert_file, word32 flags) +{ + int ret = 0; + byte* cert_buf = NULL; + size_t cert_sz = 0; +#if defined(WOLFSSL_PEM_TO_DER) + DerBuffer* pDer = NULL; +#endif + + ret = load_file(ca_cert_file, &cert_buf, &cert_sz); + if (ret == 0) { + /* normal test */ + ret = test_cm_load_ca_buffer_ex(cert_buf, cert_sz, + CERT_FILETYPE, flags); + + if (ret == WOLFSSL_SUCCESS) { + /* test including null terminator in length */ + byte* tmp = (byte*)realloc(cert_buf, cert_sz+1); + if (tmp == NULL) { + ret = MEMORY_E; + } + else { + cert_buf = tmp; + cert_buf[cert_sz] = '\0'; + ret = test_cm_load_ca_buffer_ex(cert_buf, cert_sz+1, + CERT_FILETYPE, flags); + } + + } + + #if defined(WOLFSSL_PEM_TO_DER) + if (ret == WOLFSSL_SUCCESS) 
{ + /* test loading DER */ + ret = wc_PemToDer(cert_buf, (sword32)cert_sz, CA_TYPE, &pDer, + NULL, NULL, NULL); + if (ret == 0 && pDer != NULL) { + ret = test_cm_load_ca_buffer_ex(pDer->buffer, pDer->length, + WOLFSSL_FILETYPE_ASN1, flags); + + wc_FreeDer(&pDer); + } + } + #endif + + } + free(cert_buf); + + return ret; +} + +#endif /* !NO_FILESYSTEM && !NO_CERTS */ + +int test_wolfSSL_CertManagerLoadCABuffer(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) +#if defined(WOLFSSL_PEM_TO_DER) + const char* ca_cert = "./certs/ca-cert.pem"; + const char* ca_expired_cert = "./certs/test/expired/expired-ca.pem"; +#else + const char* ca_cert = "./certs/ca-cert.der"; + const char* ca_expired_cert = "./certs/test/expired/expired-ca.der"; +#endif + int ret; + + ExpectIntLE(ret = test_cm_load_ca_file(ca_cert), 1); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); +#elif defined(NO_RSA) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); +#else + ExpectIntEQ(ret, WOLFSSL_SUCCESS); +#endif + + ExpectIntLE(ret = test_cm_load_ca_file(ca_expired_cert), 1); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); +#elif defined(NO_RSA) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); +#elif !(WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS && \ + WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY) && !defined(NO_ASN_TIME) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_AFTER_DATE_E)); +#else + ExpectIntEQ(ret, WOLFSSL_SUCCESS); +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerLoadCABuffer_ex(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) +#if defined(WOLFSSL_PEM_TO_DER) + const char* ca_cert = "./certs/ca-cert.pem"; + const char* ca_expired_cert = "./certs/test/expired/expired-ca.pem"; +#else + const char* ca_cert = "./certs/ca-cert.der"; + const char* 
ca_expired_cert = "./certs/test/expired/expired-ca.der"; +#endif + int ret; + + ExpectIntLE(ret = test_cm_load_ca_file_ex(ca_cert, WOLFSSL_LOAD_FLAG_NONE), + 1); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); +#elif defined(NO_RSA) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); +#else + ExpectIntEQ(ret, WOLFSSL_SUCCESS); +#endif + + ExpectIntLE(ret = test_cm_load_ca_file_ex(ca_expired_cert, + WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY), 1); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); +#elif defined(NO_RSA) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_UNKNOWN_OID_E)); +#elif !(WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS && \ + WOLFSSL_LOAD_FLAG_DATE_ERR_OKAY) && !defined(NO_ASN_TIME) && \ + defined(WOLFSSL_TRUST_PEER_CERT) && defined(OPENSSL_COMPATIBLE_DEFAULTS) + ExpectIntEQ(ret, WC_NO_ERR_TRACE(ASN_AFTER_DATE_E)); +#else + ExpectIntEQ(ret, WOLFSSL_SUCCESS); +#endif + +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerLoadCABufferType(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) && \ + !defined(NO_RSA) && !defined(NO_SHA256) && \ + !defined(WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION) +#if defined(WOLFSSL_PEM_TO_DER) + const char* ca_cert = "./certs/ca-cert.pem"; + const char* int1_cert = "./certs/intermediate/ca-int-cert.pem"; + const char* int2_cert = "./certs/intermediate/ca-int2-cert.pem"; + const char* client_cert = "./certs/intermediate/client-int-cert.pem"; +#else + const char* ca_cert = "./certs/ca-cert.der"; + const char* int1_cert = "./certs/intermediate/ca-int-cert.der"; + const char* int2_cert = "./certs/intermediate/ca-int2-cert.der"; + const char* client_cert = "./certs/intermediate/client-int-cert.der"; +#endif + byte* ca_cert_buf = NULL; + byte* int1_cert_buf = NULL; + byte* int2_cert_buf = NULL; + byte* client_cert_buf = NULL; + size_t ca_cert_sz = 0; + 
size_t int1_cert_sz = 0; + size_t int2_cert_sz = 0; + size_t client_cert_sz = 0; + WOLFSSL_CERT_MANAGER* cm = NULL; + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntEQ(load_file(ca_cert, &ca_cert_buf, &ca_cert_sz), 0); + ExpectIntEQ(load_file(int1_cert, &int1_cert_buf, &int1_cert_sz), 0); + ExpectIntEQ(load_file(int2_cert, &int2_cert_buf, &int2_cert_sz), 0); + ExpectIntEQ(load_file(client_cert, &client_cert_buf, &client_cert_sz), 0); + + ExpectIntNE(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, + (sword32)ca_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, 0), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, + (sword32)ca_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, 5), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, ca_cert_buf, + (sword32)ca_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_CA), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int1_cert_buf, + (sword32)int1_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int2_cert_buf, + (sword32)int2_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, client_cert_buf, + (sword32)client_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + + /* 
Intermediate certs have been unloaded, but CA cert is still + loaded. Expect first level intermediate to verify, rest to fail. */ + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int1_cert_buf, + (sword32)int1_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_TEMP_CA), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, int2_cert_buf, + (sword32)int2_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_CHAIN_CA), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCABufferType(cm, client_cert_buf, + (sword32)client_cert_sz, CERT_FILETYPE, 0, + WOLFSSL_LOAD_VERIFY_DEFAULT_FLAGS, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_INTER), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_CHAIN_CA), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, 
int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_TEMP_CA), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_CertManagerUnloadTypeCerts(cm, WOLFSSL_USER_CA), + WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int1_cert_buf, + int1_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, int2_cert_buf, + int2_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + ExpectIntNE(wolfSSL_CertManagerVerifyBuffer(cm, client_cert_buf, + client_cert_sz, CERT_FILETYPE), WOLFSSL_SUCCESS); + + if (cm) + wolfSSL_CertManagerFree(cm); + if (ca_cert_buf) + free(ca_cert_buf); + if (int1_cert_buf) + free(int1_cert_buf); + if (int2_cert_buf) + free(int2_cert_buf); + if (client_cert_buf) + free(client_cert_buf); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerGetCerts(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ + !defined(NO_FILESYSTEM) && !defined(NO_RSA) && \ + defined(WOLFSSL_SIGNER_DER_CERT) + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_STACK* sk = NULL; + X509* x509 = NULL; + X509* cert1 = NULL; + FILE* file1 = NULL; +#ifdef DEBUG_WOLFSSL_VERBOSE + WOLFSSL_BIO* bio = NULL; +#endif + int i = 0; + int ret = 0; + const byte* der = NULL; + int derSz = 0; + + ExpectNotNull(file1 = fopen("./certs/ca-cert.pem", "rb")); + + ExpectNotNull(cert1 = wolfSSL_PEM_read_X509(file1, NULL, NULL, NULL)); + if (file1 != NULL) { + fclose(file1); + } + + ExpectNull(sk = 
wolfSSL_CertManagerGetCerts(NULL)); + ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); + ExpectNull(sk = wolfSSL_CertManagerGetCerts(cm)); + + ExpectNotNull(der = wolfSSL_X509_get_der(cert1, &derSz)); +#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) + /* Check that ASN_SELF_SIGNED_E is returned for a self-signed cert for QT + * and full OpenSSL compatibility */ + ExpectIntEQ(ret = wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_SELF_SIGNED_E)); +#else + ExpectIntEQ(ret = wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); +#endif + + ExpectIntEQ(WOLFSSL_SUCCESS, wolfSSL_CertManagerLoadCA(cm, + "./certs/ca-cert.pem", NULL)); + + ExpectNotNull(sk = wolfSSL_CertManagerGetCerts(cm)); + + for (i = 0; EXPECT_SUCCESS() && i < sk_X509_num(sk); i++) { + ExpectNotNull(x509 = sk_X509_value(sk, i)); + ExpectIntEQ(0, wolfSSL_X509_cmp(x509, cert1)); + +#ifdef DEBUG_WOLFSSL_VERBOSE + bio = BIO_new(wolfSSL_BIO_s_file()); + if (bio != NULL) { + BIO_set_fp(bio, stderr, BIO_NOCLOSE); + X509_print(bio, x509); + BIO_free(bio); + } +#endif /* DEBUG_WOLFSSL_VERBOSE */ + } + wolfSSL_X509_free(cert1); + sk_X509_pop_free(sk, NULL); + wolfSSL_CertManagerFree(cm); +#endif /* defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ + !defined(NO_FILESYSTEM) && !defined(NO_RSA) && \ + defined(WOLFSSL_SIGNER_DER_CERT) */ + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerSetVerify(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && !defined(NO_TLS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + (!defined(NO_WOLFSSL_CLIENT) || !defined(WOLFSSL_NO_CLIENT_AUTH)) + WOLFSSL_CERT_MANAGER* cm = NULL; + int tmp = myVerifyAction; +#ifdef WOLFSSL_PEM_TO_DER + const char* ca_cert = "./certs/ca-cert.pem"; + const char* expiredCert = "./certs/test/expired/expired-cert.pem"; +#else + const char* ca_cert = "./certs/ca-cert.der"; + const char* expiredCert = 
"./certs/test/expired/expired-cert.der"; +#endif + + wolfSSL_CertManagerSetVerify(NULL, NULL); + wolfSSL_CertManagerSetVerify(NULL, myVerify); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + + wolfSSL_CertManagerSetVerify(cm, myVerify); + +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL), -1); +#else + ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL), + WOLFSSL_SUCCESS); +#endif + /* Use the test CB that always accepts certs */ + myVerifyAction = VERIFY_OVERRIDE_ERROR; + + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, expiredCert, + CERT_FILETYPE), WOLFSSL_SUCCESS); + +#ifdef WOLFSSL_ALWAYS_VERIFY_CB + { + const char* verifyCert = "./certs/server-cert.der"; + /* Use the test CB that always fails certs */ + myVerifyAction = VERIFY_FORCE_FAIL; + + ExpectIntEQ(wolfSSL_CertManagerVerify(cm, verifyCert, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(VERIFY_CERT_ERROR)); + } +#endif + + wolfSSL_CertManagerFree(cm); + myVerifyAction = tmp; +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerNameConstraint(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ + !defined(NO_SHA256) + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_EVP_PKEY *priv = NULL; + WOLFSSL_X509_NAME* name = NULL; + const char* ca_cert = "./certs/test/cert-ext-nc.der"; + const char* server_cert = "./certs/test/server-goodcn.pem"; + int i = 0; + static const byte extNameConsOid[] = {85, 29, 30}; + + RsaKey key; + WC_RNG rng; + byte *der = NULL; + int derSz = 0; + word32 idx = 0; + byte *pt; + WOLFSSL_X509 *x509 = NULL; + WOLFSSL_X509 *ca = NULL; + + wc_InitRng(&rng); + + /* load in CA private key for signing */ + ExpectIntEQ(wc_InitRsaKey_ex(&key, HEAP_HINT, testDevId), 0); + 
ExpectIntEQ(wc_RsaPrivateKeyDecode(server_key_der_2048, &idx, &key, + sizeof_server_key_der_2048), 0); + + /* get ca certificate then alter it */ + ExpectNotNull(der = + (byte*)XMALLOC(FOURK_BUF, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull(pt = (byte*)wolfSSL_X509_get_tbs(x509, &derSz)); + if (EXPECT_SUCCESS() && (der != NULL)) { + XMEMCPY(der, pt, (size_t)derSz); + + /* find the name constraint extension and alter it */ + pt = der; + for (i = 0; i < derSz - 3; i++) { + if (XMEMCMP(pt, extNameConsOid, 3) == 0) { + pt += 3; + break; + } + pt++; + } + ExpectIntNE(i, derSz - 3); /* did not find OID if this case is hit */ + + /* go to the length value and set it to 0 */ + while (i < derSz && *pt != 0x81) { + pt++; + i++; + } + ExpectIntNE(i, derSz); /* did not place to alter */ + pt++; + *pt = 0x00; + } + + /* resign the altered certificate */ + ExpectIntGT((derSz = wc_SignCert(derSz, CTC_SHA256wRSA, der, + FOURK_BUF, &key, NULL, &rng)), 0); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_PARSE_E)); + wolfSSL_CertManagerFree(cm); + + XFREE(der, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + wolfSSL_X509_free(x509); + wc_FreeRsaKey(&key); + wc_FreeRng(&rng); + + /* add email alt name to satisfy constraint */ + pt = (byte*)server_key_der_2048; + ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, + (const unsigned char**)&pt, sizeof_server_key_der_2048)); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); + DEBUG_WRITE_DER(der, derSz, "ca.der"); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + /* Good cert test with proper alt email name 
*/ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, + (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); + + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + + /* Cert with bad alt name list */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, + (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + 
X509_NAME_free(name); + + wolfSSL_X509_add_altname(x509, "wolfssl@info.com", ASN_RFC822_TYPE); + wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); + + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + + wolfSSL_CertManagerFree(cm); + wolfSSL_X509_free(x509); + wolfSSL_X509_free(ca); + wolfSSL_EVP_PKEY_free(priv); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerNameConstraint2(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) + const char* ca_cert = "./certs/test/cert-ext-ndir.der"; + const char* ca_cert2 = "./certs/test/cert-ext-ndir-exc.der"; + const char* server_cert = "./certs/server-cert.pem"; + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_X509 *x509 = NULL; + WOLFSSL_X509 *ca = NULL; + + const unsigned char *der = NULL; + const unsigned char *pt; + WOLFSSL_EVP_PKEY *priv = NULL; + WOLFSSL_X509_NAME* name = NULL; + int derSz = 0; + + /* C=US*/ + char altName[] = { + 0x30, 0x0D, 0x31, 0x0B, 0x30, 0x09, + 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53 + }; + + /* C=ID */ + char altNameFail[] = { + 0x30, 0x0D, 0x31, 0x0B, 0x30, 0x09, + 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x49, 0x44 + }; + + /* C=US ST=California*/ + char altNameExc[] = { + 0x30, 0x22, + 0x31, 0x0B, + 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53, + 0x31, 0x13, + 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x0A, + 0x43, 0x61, 0x6c, 0x69, 0x66, 0x6f, 0x72, 0x6e, 0x69, 0x61 + }; + /* load in CA private key for signing */ + pt = ca_key_der_2048; + ExpectNotNull(priv = 
wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, &pt, + sizeof_ca_key_der_2048)); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull((der = wolfSSL_X509_get_der(ca, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + /* Test no name case. */ + ExpectIntEQ(wolfSSL_X509_add_altname_ex(x509, NULL, 0, ASN_DIR_TYPE), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_add_altname(x509, "", ASN_DIR_TYPE), + WOLFSSL_SUCCESS); + /* IP not supported. 
*/ + ExpectIntEQ(wolfSSL_X509_add_altname(x509, "127.0.0.1", ASN_IP_TYPE), + WOLFSSL_FAILURE); + + /* add in matching DIR alt name and resign */ + wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check verify fail */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + + /* add in miss matching DIR alt name and resign */ + wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), + ASN_DIR_TYPE); + +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); +#ifndef WOLFSSL_NO_ASN_STRICT + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); +#else + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); +#endif + + /* check that it still fails if one bad altname and one good altname is in + * the certificate */ + wolfSSL_X509_free(x509); + x509 = NULL; + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); + 
wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), + ASN_DIR_TYPE); + +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); +#ifndef WOLFSSL_NO_ASN_STRICT + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); +#else + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); +#endif + + /* check it fails with switching position of bad altname */ + wolfSSL_X509_free(x509); + x509 = NULL; + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + wolfSSL_X509_add_altname_ex(x509, altNameFail, sizeof(altNameFail), + ASN_DIR_TYPE); + wolfSSL_X509_add_altname_ex(x509, altName, sizeof(altName), ASN_DIR_TYPE); + +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); +#ifndef WOLFSSL_NO_ASN_STRICT + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); +#else + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); +#endif + wolfSSL_CertManagerFree(cm); + + wolfSSL_X509_free(x509); + x509 = NULL; + wolfSSL_X509_free(ca); + ca = NULL; + + /* now test with excluded name constraint */ + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert2, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull((der = wolfSSL_X509_get_der(ca, &derSz))); + 
ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + wolfSSL_X509_add_altname_ex(x509, altNameExc, sizeof(altNameExc), + ASN_DIR_TYPE); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + +#if defined(WOLFSSL_SHA3) && !defined(WOLFSSL_NOSHA3_256) + wolfSSL_X509_sign(x509, priv, EVP_sha3_256()); +#else + wolfSSL_X509_sign(x509, priv, EVP_sha256()); +#endif + ExpectNotNull((der = wolfSSL_X509_get_der(x509, &derSz))); +#ifndef WOLFSSL_NO_ASN_STRICT + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); +#else + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); +#endif + wolfSSL_CertManagerFree(cm); + wolfSSL_X509_free(x509); + wolfSSL_X509_free(ca); + wolfSSL_EVP_PKEY_free(priv); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerNameConstraint3(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ + !defined(NO_SHA256) + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_EVP_PKEY *priv = NULL; + WOLFSSL_X509_NAME* name = NULL; + const char* ca_cert = "./certs/test/cert-ext-mnc.der"; + const char* server_cert = "./certs/test/server-goodcn.pem"; + + byte *der = NULL; + int derSz = 0; + byte *pt; + WOLFSSL_X509 *x509 = NULL; + WOLFSSL_X509 *ca = NULL; + + pt = (byte*)server_key_der_2048; + ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, + (const unsigned char**)&pt, sizeof_server_key_der_2048)); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = 
wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); + DEBUG_WRITE_DER(der, derSz, "ca.der"); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + /* check satisfying .wolfssl.com constraint passes */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, + (byte*)"support@info.wolfssl.com", 24, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "wolfssl@info.wolfssl.com", ASN_RFC822_TYPE); + + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-1st-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check satisfying .random.com constraint passes */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, 
-1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, + (byte*)"support@info.example.com", 24, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "wolfssl@info.example.com", ASN_RFC822_TYPE); + + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-2nd-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check fail case when neither constraint is matched */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "emailAddress", MBSTRING_UTF8, + (byte*)"support@info.com", 16, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + + wolfSSL_X509_add_altname(x509, "wolfssl@info.com", ASN_RFC822_TYPE); + + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), 
WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + + wolfSSL_CertManagerFree(cm); + wolfSSL_X509_free(x509); + wolfSSL_X509_free(ca); + wolfSSL_EVP_PKEY_free(priv); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerNameConstraint4(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ + !defined(NO_SHA256) + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_EVP_PKEY *priv = NULL; + WOLFSSL_X509_NAME* name = NULL; + const char* ca_cert = "./certs/test/cert-ext-ncdns.der"; + const char* server_cert = "./certs/test/server-goodcn.pem"; + + byte *der = NULL; + int derSz; + byte *pt; + WOLFSSL_X509 *x509 = NULL; + WOLFSSL_X509 *ca = NULL; + + pt = (byte*)server_key_der_2048; + ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, + (const unsigned char**)&pt, sizeof_server_key_der_2048)); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); + DEBUG_WRITE_DER(der, derSz, "ca.der"); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + /* check satisfying wolfssl.com constraint passes */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + 
ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-1st-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check satisfying example.com constraint passes */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"example.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "www.example.com", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-2nd-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check satisfying wolfssl.com constraint passes with list of DNS's */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = 
X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "www.info.wolfssl.com", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "extra.wolfssl.com", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-multiple-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check fail when one DNS in the list is bad */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "www.wolfssl.com", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "www.nomatch.com", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "www.info.wolfssl.com", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-multiple-constraint-cert.pem"); + + ExpectNotNull((der = 
(byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* check fail case when neither constraint is matched */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"common", 6, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + + wolfSSL_X509_add_altname(x509, "www.random.com", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + + wolfSSL_CertManagerFree(cm); + wolfSSL_X509_free(x509); + wolfSSL_X509_free(ca); + wolfSSL_EVP_PKEY_free(priv); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerNameConstraint5(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ + !defined(NO_WOLFSSL_CM_VERIFY) && !defined(NO_RSA) && \ + defined(OPENSSL_EXTRA) && defined(WOLFSSL_CERT_GEN) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_ALT_NAMES) && \ + !defined(NO_SHA256) + WOLFSSL_CERT_MANAGER* cm = NULL; + WOLFSSL_EVP_PKEY *priv = NULL; + WOLFSSL_X509_NAME* name = NULL; + const char* ca_cert = "./certs/test/cert-ext-ncmixed.der"; + const char* server_cert = "./certs/test/server-goodcn.pem"; + + byte *der = 
NULL; + int derSz; + byte *pt; + WOLFSSL_X509 *x509 = NULL; + WOLFSSL_X509 *ca = NULL; + + pt = (byte*)server_key_der_2048; + ExpectNotNull(priv = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, + (const unsigned char**)&pt, sizeof_server_key_der_2048)); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(ca_cert, + WOLFSSL_FILETYPE_ASN1)); + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(ca, &derSz))); + DEBUG_WRITE_DER(der, derSz, "ca.der"); + + ExpectIntEQ(wolfSSL_CertManagerLoadCABuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + + /* check satisfying wolfssl.com constraint passes */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"example", 7, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "good.example", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "facts@into.wolfssl.com", ASN_RFC822_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* fail with DNS check because of common name */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + 
ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "commonName", MBSTRING_UTF8, + (byte*)"wolfssl.com", 11, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "facts@wolfssl.com", ASN_RFC822_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-cn-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* fail on permitted DNS name constraint */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "www.example", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "www.wolfssl", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "info@wolfssl.com", ASN_RFC822_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-1st-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + 
WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* fail on permitted email name constraint */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + name = NULL; + + wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); + wolfSSL_X509_add_altname(x509, "info@wolfssl.com", ASN_RFC822_TYPE); + wolfSSL_X509_add_altname(x509, "info@example.com", ASN_RFC822_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "bad-2nd-constraint-cert.pem"); + + ExpectNotNull((der = (byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(ASN_NAME_INVALID_E)); + wolfSSL_X509_free(x509); + x509 = NULL; + + /* success with empty email name */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(server_cert, + WOLFSSL_FILETYPE_PEM)); + ExpectNotNull(name = wolfSSL_X509_get_subject_name(ca)); + ExpectIntEQ(wolfSSL_X509_set_issuer_name(x509, name), WOLFSSL_SUCCESS); + name = NULL; + + ExpectNotNull(name = X509_NAME_new()); + ExpectIntEQ(X509_NAME_add_entry_by_txt(name, "countryName", MBSTRING_UTF8, + (byte*)"US", 2, -1, 0), SSL_SUCCESS); + ExpectIntEQ(wolfSSL_X509_set_subject_name(x509, name), WOLFSSL_SUCCESS); + X509_NAME_free(name); + + wolfSSL_X509_add_altname(x509, "example", ASN_DNS_TYPE); + ExpectIntGT(wolfSSL_X509_sign(x509, priv, EVP_sha256()), 0); + DEBUG_WRITE_CERT_X509(x509, "good-missing-constraint-cert.pem"); + + ExpectNotNull((der = 
(byte*)wolfSSL_X509_get_der(x509, &derSz))); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, der, derSz, + WOLFSSL_FILETYPE_ASN1), WOLFSSL_SUCCESS); + wolfSSL_X509_free(x509); + + wolfSSL_CertManagerFree(cm); + wolfSSL_X509_free(ca); + wolfSSL_EVP_PKEY_free(priv); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerCRL(void) +{ + EXPECT_DECLS; +#if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && defined(HAVE_CRL) && \ + !defined(NO_RSA) + const char* ca_cert = "./certs/ca-cert.pem"; + const char* crl1 = "./certs/crl/crl.pem"; + const char* crl2 = "./certs/crl/crl2.pem"; +#ifdef WC_RSA_PSS + const char* crl_rsapss = "./certs/crl/crl_rsapss.pem"; + const char* ca_rsapss = "./certs/rsapss/ca-rsapss.pem"; +#endif + /* ./certs/crl/crl.der */ + const unsigned char crl_buff[] = { + 0x30, 0x82, 0x02, 0x04, 0x30, 0x81, 0xED, 0x02, + 0x01, 0x01, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, + 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, + 0x00, 0x30, 0x81, 0x94, 0x31, 0x0B, 0x30, 0x09, + 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, + 0x53, 0x31, 0x10, 0x30, 0x0E, 0x06, 0x03, 0x55, + 0x04, 0x08, 0x0C, 0x07, 0x4D, 0x6F, 0x6E, 0x74, + 0x61, 0x6E, 0x61, 0x31, 0x10, 0x30, 0x0E, 0x06, + 0x03, 0x55, 0x04, 0x07, 0x0C, 0x07, 0x42, 0x6F, + 0x7A, 0x65, 0x6D, 0x61, 0x6E, 0x31, 0x11, 0x30, + 0x0F, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x08, + 0x53, 0x61, 0x77, 0x74, 0x6F, 0x6F, 0x74, 0x68, + 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, + 0x0B, 0x0C, 0x0A, 0x43, 0x6F, 0x6E, 0x73, 0x75, + 0x6C, 0x74, 0x69, 0x6E, 0x67, 0x31, 0x18, 0x30, + 0x16, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x0F, + 0x77, 0x77, 0x77, 0x2E, 0x77, 0x6F, 0x6C, 0x66, + 0x73, 0x73, 0x6C, 0x2E, 0x63, 0x6F, 0x6D, 0x31, + 0x1F, 0x30, 0x1D, 0x06, 0x09, 0x2A, 0x86, 0x48, + 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x10, + 0x69, 0x6E, 0x66, 0x6F, 0x40, 0x77, 0x6F, 0x6C, + 0x66, 0x73, 0x73, 0x6C, 0x2E, 0x63, 0x6F, 0x6D, + 0x17, 0x0D, 0x32, 0x34, 0x30, 0x31, 0x30, 0x39, + 0x30, 0x30, 0x33, 0x34, 0x33, 0x30, 0x5A, 
0x17, + 0x0D, 0x32, 0x36, 0x31, 0x30, 0x30, 0x35, 0x30, + 0x30, 0x33, 0x34, 0x33, 0x30, 0x5A, 0x30, 0x14, + 0x30, 0x12, 0x02, 0x01, 0x02, 0x17, 0x0D, 0x32, + 0x34, 0x30, 0x31, 0x30, 0x39, 0x30, 0x30, 0x33, + 0x34, 0x33, 0x30, 0x5A, 0xA0, 0x0E, 0x30, 0x0C, + 0x30, 0x0A, 0x06, 0x03, 0x55, 0x1D, 0x14, 0x04, + 0x03, 0x02, 0x01, 0x02, 0x30, 0x0D, 0x06, 0x09, + 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, + 0x0B, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, + 0xB3, 0x6F, 0xED, 0x72, 0xD2, 0x73, 0x6A, 0x77, + 0xBF, 0x3A, 0x55, 0xBC, 0x54, 0x18, 0x6A, 0x71, + 0xBC, 0x6A, 0xCC, 0xCD, 0x5D, 0x90, 0xF5, 0x64, + 0x8D, 0x1B, 0xF0, 0xE0, 0x48, 0x7B, 0xF2, 0x7B, + 0x06, 0x86, 0x53, 0x63, 0x9B, 0xD8, 0x24, 0x15, + 0x10, 0xB1, 0x19, 0x96, 0x9B, 0xD2, 0x75, 0xA8, + 0x25, 0xA2, 0x35, 0xA9, 0x14, 0xD6, 0xD5, 0x5E, + 0x53, 0xE3, 0x34, 0x9D, 0xF2, 0x8B, 0x07, 0x19, + 0x9B, 0x1F, 0xF1, 0x02, 0x0F, 0x04, 0x46, 0xE8, + 0xB8, 0xB6, 0xF2, 0x8D, 0xC7, 0xC0, 0x15, 0x3E, + 0x3E, 0x8E, 0x96, 0x73, 0x15, 0x1E, 0x62, 0xF6, + 0x4E, 0x2A, 0xF7, 0xAA, 0xA0, 0x91, 0x80, 0x12, + 0x7F, 0x81, 0x0C, 0x65, 0xCC, 0x38, 0xBE, 0x58, + 0x6C, 0x14, 0xA5, 0x21, 0xA1, 0x8D, 0xF7, 0x8A, + 0xB9, 0x24, 0xF4, 0x2D, 0xCA, 0xC0, 0x67, 0x43, + 0x0B, 0xC8, 0x1C, 0xB4, 0x7D, 0x12, 0x7F, 0xA2, + 0x1B, 0x19, 0x0E, 0x94, 0xCF, 0x7B, 0x9F, 0x75, + 0xA0, 0x08, 0x9A, 0x67, 0x3F, 0x87, 0x89, 0x3E, + 0xF8, 0x58, 0xA5, 0x8A, 0x1B, 0x2D, 0xDA, 0x9B, + 0xD0, 0x1B, 0x18, 0x92, 0xC3, 0xD2, 0x6A, 0xD7, + 0x1C, 0xFC, 0x45, 0x69, 0x77, 0xC3, 0x57, 0x65, + 0x75, 0x99, 0x9E, 0x47, 0x2A, 0x20, 0x25, 0xEF, + 0x90, 0xF2, 0x5F, 0x3B, 0x7D, 0x9C, 0x7D, 0x00, + 0xEA, 0x92, 0x54, 0xEB, 0x0B, 0xE7, 0x17, 0xAF, + 0x24, 0x1A, 0xF9, 0x7C, 0x83, 0x50, 0x68, 0x1D, + 0xDC, 0x5B, 0x60, 0x12, 0xA7, 0x52, 0x78, 0xD9, + 0xA9, 0xB0, 0x1F, 0x59, 0x48, 0x36, 0xC7, 0xA6, + 0x97, 0x34, 0xC7, 0x87, 0x3F, 0xAE, 0xFD, 0xA9, + 0x56, 0x5D, 0x48, 0xCC, 0x89, 0x7A, 0x79, 0x60, + 0x8F, 0x9B, 0x2B, 0x63, 0x3C, 0xB3, 0x04, 0x1D, + 0x5F, 0xF7, 0x20, 0xD2, 0xFD, 0xF2, 0x51, 
0xB1, + 0x96, 0x93, 0x13, 0x5B, 0xAB, 0x74, 0x82, 0x8B + }; + + WOLFSSL_CERT_MANAGER* cm = NULL; + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(NULL, 0), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECKALL), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECK), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, + WOLFSSL_CRL_CHECK | WOLFSSL_CRL_CHECKALL), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, 16), 1); + ExpectIntEQ(wolfSSL_CertManagerEnableCRL(cm, WOLFSSL_CRL_CHECKALL), 1); + + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, NULL, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, NULL, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, server_cert_der_2048, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, NULL, 1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(NULL, server_cert_der_2048, 1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, NULL, 1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, -1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, + sizeof_server_cert_der_2048), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); + + ExpectIntEQ(wolfSSL_CertManagerSetCRL_Cb(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerSetCRL_Cb(cm, NULL), 1); +#ifdef HAVE_CRL_IO + ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerSetCRL_IOCb(cm, NULL), 1); +#endif + +#ifndef NO_FILESYSTEM + ExpectIntEQ(wolfSSL_CertManagerLoadCRL(NULL, NULL, WOLFSSL_FILETYPE_ASN1, + 0), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRL(cm, NULL, WOLFSSL_FILETYPE_ASN1, + 0), 
WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + /* -1 seen as !WOLFSSL_FILETYPE_PEM */ + ExpectIntEQ(wolfSSL_CertManagerLoadCRL(cm, "./certs/crl", -1, 0), 1); + + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(NULL, NULL, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, NULL, WOLFSSL_FILETYPE_ASN1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + /* -1 seen as !WOLFSSL_FILETYPE_PEM */ + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, "./certs/crl/crl.pem", -1), + WC_NO_ERR_TRACE(ASN_PARSE_E)); +#endif + + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, NULL, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, NULL, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, crl_buff, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, NULL, 1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(NULL, crl_buff, 1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, NULL, 1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, crl_buff, -1, + WOLFSSL_FILETYPE_ASN1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + + ExpectIntEQ(wolfSSL_CertManagerFreeCRL(NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + DoExpectIntEQ(wolfSSL_CertManagerFreeCRL(cm), 1); + + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCRL(cm, crl1, WOLFSSL_FILETYPE_PEM, 0)); + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCRL(cm, crl2, WOLFSSL_FILETYPE_PEM, 0)); + wolfSSL_CertManagerFreeCRL(cm); + +#ifndef WOLFSSL_CRL_ALLOW_MISSING_CDP + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCRL(cm, crl1, WOLFSSL_FILETYPE_PEM, 0)); + 
ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); + ExpectIntEQ(wolfSSL_CertManagerCheckCRL(cm, server_cert_der_2048, + sizeof_server_cert_der_2048), WC_NO_ERR_TRACE(CRL_MISSING)); + ExpectIntEQ(wolfSSL_CertManagerVerifyBuffer(cm, server_cert_der_2048, + sizeof_server_cert_der_2048, WOLFSSL_FILETYPE_ASN1), + WC_NO_ERR_TRACE(CRL_MISSING)); +#endif /* !WOLFSSL_CRL_ALLOW_MISSING_CDP */ + + ExpectIntEQ(wolfSSL_CertManagerLoadCRLBuffer(cm, crl_buff, sizeof(crl_buff), + WOLFSSL_FILETYPE_ASN1), 1); + +#if !defined(NO_FILESYSTEM) && defined(WC_RSA_PSS) + /* loading should fail without the CA set */ + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_rsapss, + WOLFSSL_FILETYPE_PEM), WC_NO_ERR_TRACE(ASN_CRL_NO_SIGNER_E)); + + /* now successfully load the RSA-PSS crl once loading in it's CA */ + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCA(cm, ca_rsapss, NULL)); + ExpectIntEQ(wolfSSL_CertManagerLoadCRLFile(cm, crl_rsapss, + WOLFSSL_FILETYPE_PEM), WOLFSSL_SUCCESS); +#endif + + wolfSSL_CertManagerFree(cm); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_CRL_duplicate_extensions(void) +{ + EXPECT_DECLS; +#if defined(WOLFSSL_ASN_TEMPLATE) && !defined(NO_CERTS) && \ + defined(HAVE_CRL) && !defined(NO_RSA) && \ + !defined(WOLFSSL_NO_ASN_STRICT) && \ + (defined(WC_ASN_RUNTIME_DATE_CHECK_CONTROL) || defined(NO_ASN_TIME_CHECK)) + const unsigned char crl_duplicate_akd[] = + "-----BEGIN X509 CRL-----\n" + "MIICCDCB8QIBATANBgkqhkiG9w0BAQsFADB5MQswCQYDVQQGEwJVUzETMBEGA1UE\n" + "CAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzETMBEGA1UECgwK\n" + "TXkgQ29tcGFueTETMBEGA1UEAwwKTXkgUm9vdCBDQTETMBEGA1UECwwKTXkgUm9v\n" + "dCBDQRcNMjQwOTAxMDAwMDAwWhcNMjUxMjAxMDAwMDAwWqBEMEIwHwYDVR0jBBgw\n" + "FoAU72ng99Ud5pns3G3Q9+K5XGRxgzUwHwYDVR0jBBgwFoAU72ng99Ud5pns3G3Q\n" + "9+K5XGRxgzUwDQYJKoZIhvcNAQELBQADggEBAIFVw4jrS4taSXR/9gPzqGrqFeHr\n" + "IXCnFtHJTLxqa8vUOAqSwqysvNpepVKioMVoGrLjFMjANjWQqTEiMROAnLfJ/+L8\n" + 
"FHZkV/mZwOKAXMhIC9MrJzifxBICwmvD028qnwQm09EP8z4ICZptD6wPdRTDzduc\n" + "KBuAX+zn8pNrJgyrheRKpPgno9KsbCzK4D/RIt1sTK2M3vVOtY+vpsN70QYUXvQ4\n" + "r2RZac3omlT43x5lddPxIlcouQpwWcVvr/K+Va770MRrjn88PBrJmvsEw/QYVBXp\n" + "Gxv2b78HFDacba80sMIm8ltRdqUCa5qIc6OATsz7izCQXEbkTEeESrcK1MA=\n" + "-----END X509 CRL-----\n"; + + WOLFSSL_CERT_MANAGER* cm = NULL; + int ret; + + (void)wc_AsnSetSkipDateCheck(1); + + cm = wolfSSL_CertManagerNew(); + ExpectNotNull(cm); + + /* Test loading CRL with duplicate extensions */ + WOLFSSL_MSG("Testing CRL with duplicate Authority Key Identifier " + "extensions"); + ret = wolfSSL_CertManagerLoadCRLBuffer(cm, crl_duplicate_akd, + sizeof(crl_duplicate_akd), + WOLFSSL_FILETYPE_PEM); + ExpectIntEQ(ret, ASN_PARSE_E); + + wolfSSL_CertManagerFree(cm); + + (void)wc_AsnSetSkipDateCheck(0); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_CertManagerCheckOCSPResponse(void) +{ + EXPECT_DECLS; +#if defined(HAVE_OCSP) && !defined(NO_RSA) && !defined(NO_SHA) +/* Need one of these for wolfSSL_OCSP_REQUEST_new. */ +#if defined(OPENSSL_ALL) || defined(WOLFSSL_NGINX) || \ + defined(WOLFSSL_HAPROXY) || defined(WOLFSSL_APACHE_HTTPD) || \ + defined(HAVE_LIGHTY) + WOLFSSL_CERT_MANAGER* cm = NULL; + /* Raw OCSP response bytes captured using the following setup: + * - Run responder with + * openssl ocsp -port 9999 -ndays 9999 + * -index certs/ocsp/index-intermediate1-ca-issued-certs.txt + * -rsigner certs/ocsp/ocsp-responder-cert.pem + * -rkey certs/ocsp/ocsp-responder-key.pem + * -CA certs/ocsp/intermediate1-ca-cert.pem + * - Run client with + * openssl ocsp -host 127.0.0.1:9999 -respout resp.out + * -issuer certs/ocsp/intermediate1-ca-cert.pem + * -cert certs/ocsp/server1-cert.pem + * -CAfile certs/ocsp/root-ca-cert.pem -noverify + * - Select the response packet in Wireshark, and export it using + * "File->Export Packet Dissection->As "C" Arrays". Select "Selected + * packets only". 
After importing into the editor, remove the initial + * ~148 bytes of header, ending with the Content-Length and the \r\n\r\n. + */ + static const byte response[] = { + 0x30, 0x82, 0x07, 0x40, /* ....0..@ */ + 0x0a, 0x01, 0x00, 0xa0, 0x82, 0x07, 0x39, 0x30, /* ......90 */ + 0x82, 0x07, 0x35, 0x06, 0x09, 0x2b, 0x06, 0x01, /* ..5..+.. */ + 0x05, 0x05, 0x07, 0x30, 0x01, 0x01, 0x04, 0x82, /* ...0.... */ + 0x07, 0x26, 0x30, 0x82, 0x07, 0x22, 0x30, 0x82, /* .&0.."0. */ + 0x01, 0x40, 0xa1, 0x81, 0xa1, 0x30, 0x81, 0x9e, /* .@...0.. */ + 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. */ + 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, /* ...US1.0 */ + 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, 0x0a, /* ...U.... */ + 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, /* Washingt */ + 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, /* on1.0... */ + 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, 0x65, 0x61, /* U....Sea */ + 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, 0x30, 0x0e, /* ttle1.0. */ + 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, 0x07, 0x77, /* ..U....w */ + 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x31, 0x14, /* olfSSL1. */ + 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x0c, /* 0...U... */ + 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x65, /* .Enginee */ + 0x72, 0x69, 0x6e, 0x67, 0x31, 0x1f, 0x30, 0x1d, /* ring1.0. */ + 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x16, 0x77, /* ..U....w */ + 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x20, 0x4f, /* olfSSL O */ + 0x43, 0x53, 0x50, 0x20, 0x52, 0x65, 0x73, 0x70, /* CSP Resp */ + 0x6f, 0x6e, 0x64, 0x65, 0x72, 0x31, 0x1f, 0x30, /* onder1.0 */ + 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, /* ...*.H.. */ + 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, 0x69, 0x6e, /* ......in */ + 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, 0x66, 0x73, /* fo@wolfs */ + 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, 0x18, 0x0f, /* sl.com.. 
*/ + 0x32, 0x30, 0x32, 0x34, 0x31, 0x32, 0x32, 0x30, /* 20241220 */ + 0x31, 0x37, 0x30, 0x37, 0x30, 0x34, 0x5a, 0x30, /* 170704Z0 */ + 0x64, 0x30, 0x62, 0x30, 0x3a, 0x30, 0x09, 0x06, /* d0b0:0.. */ + 0x05, 0x2b, 0x0e, 0x03, 0x02, 0x1a, 0x05, 0x00, /* .+...... */ + 0x04, 0x14, 0x71, 0x4d, 0x82, 0x23, 0x40, 0x59, /* ..qM.#@Y */ + 0xc0, 0x96, 0xa1, 0x37, 0x43, 0xfa, 0x31, 0xdb, /* ...7C.1. */ + 0xba, 0xb1, 0x43, 0x18, 0xda, 0x04, 0x04, 0x14, /* ..C..... */ + 0x83, 0xc6, 0x3a, 0x89, 0x2c, 0x81, 0xf4, 0x02, /* ..:.,... */ + 0xd7, 0x9d, 0x4c, 0xe2, 0x2a, 0xc0, 0x71, 0x82, /* ..L.*.q. */ + 0x64, 0x44, 0xda, 0x0e, 0x02, 0x01, 0x05, 0x80, /* dD...... */ + 0x00, 0x18, 0x0f, 0x32, 0x30, 0x32, 0x34, 0x31, /* ...20241 */ + 0x32, 0x32, 0x30, 0x31, 0x37, 0x30, 0x37, 0x30, /* 22017070 */ + 0x34, 0x5a, 0xa0, 0x11, 0x18, 0x0f, 0x32, 0x30, /* 4Z....20 */ + 0x35, 0x32, 0x30, 0x35, 0x30, 0x36, 0x31, 0x37, /* 52050617 */ + 0x30, 0x37, 0x30, 0x34, 0x5a, 0xa1, 0x23, 0x30, /* 0704Z.#0 */ + 0x21, 0x30, 0x1f, 0x06, 0x09, 0x2b, 0x06, 0x01, /* !0...+.. */ + 0x05, 0x05, 0x07, 0x30, 0x01, 0x02, 0x04, 0x12, /* ...0.... */ + 0x04, 0x10, 0x12, 0x7c, 0x27, 0xbd, 0x22, 0x28, /* ...|'."( */ + 0x5e, 0x62, 0x81, 0xed, 0x6d, 0x2c, 0x2d, 0x59, /* ^b..m,-Y */ + 0x42, 0xd7, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, /* B.0...*. */ + 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, /* H....... */ + 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, 0x6c, 0xce, /* ......l. */ + 0xa8, 0xe8, 0xfe, 0xaf, 0x33, 0xe2, 0xce, 0x4e, /* ....3..N */ + 0x63, 0x8d, 0x61, 0x16, 0x0f, 0x70, 0xb2, 0x0c, /* c.a..p.. */ + 0x9a, 0xe3, 0x01, 0xd5, 0xca, 0xe5, 0x9b, 0x70, /* .......p */ + 0x81, 0x6f, 0x94, 0x09, 0xe8, 0x88, 0x98, 0x1a, /* .o...... */ + 0x67, 0xa0, 0xc2, 0xe7, 0x8f, 0x9b, 0x5f, 0x13, /* g....._. */ + 0x17, 0x8d, 0x93, 0x8c, 0x31, 0x61, 0x7d, 0x72, /* ....1a}r */ + 0x34, 0xbd, 0x21, 0x48, 0xca, 0xb2, 0xc9, 0xae, /* 4.!H.... */ + 0x28, 0x5f, 0x97, 0x19, 0xcb, 0xdf, 0xed, 0xd4, /* (_...... 
*/ + 0x6e, 0x89, 0x30, 0x89, 0x11, 0xd1, 0x05, 0x08, /* n.0..... */ + 0x81, 0xe9, 0xa7, 0xba, 0xf7, 0x16, 0x0c, 0xbe, /* ........ */ + 0x48, 0x2e, 0xc0, 0x05, 0xac, 0x90, 0xc2, 0x35, /* H......5 */ + 0xce, 0x6c, 0x94, 0x5d, 0x2b, 0xad, 0x4f, 0x19, /* .l.]+.O. */ + 0xea, 0x7b, 0xd9, 0x4f, 0x49, 0x20, 0x8d, 0x98, /* .{.OI .. */ + 0xa9, 0xe4, 0x53, 0x6d, 0xca, 0x34, 0xdb, 0x4a, /* ..Sm.4.J */ + 0x28, 0xb3, 0x33, 0xfb, 0xfd, 0xcc, 0x4b, 0xfa, /* (.3...K. */ + 0xdb, 0x70, 0xe1, 0x96, 0xc8, 0xd4, 0xf1, 0x85, /* .p...... */ + 0x99, 0xaf, 0x06, 0xeb, 0xfd, 0x96, 0x21, 0x86, /* ......!. */ + 0x81, 0xee, 0xcf, 0xd2, 0xf4, 0x83, 0xc9, 0x1d, /* ........ */ + 0x8f, 0x42, 0xd1, 0xc1, 0xbc, 0x50, 0x0a, 0xfb, /* .B...P.. */ + 0x95, 0x39, 0x4c, 0x36, 0xa8, 0xfe, 0x2b, 0x8e, /* .9L6..+. */ + 0xc5, 0xb5, 0xe0, 0xab, 0xdb, 0xc0, 0xbf, 0x1d, /* ........ */ + 0x35, 0x4d, 0xc0, 0x52, 0xfb, 0x08, 0x04, 0x4c, /* 5M.R...L */ + 0x98, 0xf0, 0xb5, 0x5b, 0xff, 0x99, 0x74, 0xce, /* ...[..t. */ + 0xb7, 0xc9, 0xe3, 0xe5, 0x70, 0x2e, 0xd3, 0x1d, /* ....p... */ + 0x46, 0x38, 0xf9, 0x51, 0x17, 0x73, 0xd1, 0x08, /* F8.Q.s.. */ + 0x8d, 0x3d, 0x12, 0x47, 0xd0, 0x66, 0x77, 0xaf, /* .=.G.fw. */ + 0xfd, 0x4c, 0x75, 0x1f, 0xe9, 0x6c, 0xf4, 0x5a, /* .Lu..l.Z */ + 0xde, 0xec, 0x37, 0xc7, 0xc4, 0x0a, 0xbe, 0x91, /* ..7..... */ + 0xbc, 0x05, 0x08, 0x86, 0x47, 0x30, 0x2a, 0xc6, /* ....G0*. */ + 0x85, 0x4b, 0x55, 0x6c, 0xef, 0xdf, 0x2d, 0x5a, /* .KUl..-Z */ + 0xf7, 0x5b, 0xb5, 0xba, 0xed, 0x38, 0xb0, 0xcb, /* .[...8.. */ + 0xeb, 0x7e, 0x84, 0x3a, 0x69, 0x2c, 0xa0, 0x82, /* .~.:i,.. */ + 0x04, 0xc6, 0x30, 0x82, 0x04, 0xc2, 0x30, 0x82, /* ..0...0. */ + 0x04, 0xbe, 0x30, 0x82, 0x03, 0xa6, 0xa0, 0x03, /* ..0..... */ + 0x02, 0x01, 0x02, 0x02, 0x01, 0x04, 0x30, 0x0d, /* ......0. */ + 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, /* ..*.H... */ + 0x01, 0x01, 0x0b, 0x05, 0x00, 0x30, 0x81, 0x97, /* .....0.. */ + 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. 
*/ + 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, /* ...US1.0 */ + 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, 0x0a, /* ...U.... */ + 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, /* Washingt */ + 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, /* on1.0... */ + 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, 0x65, 0x61, /* U....Sea */ + 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, 0x30, 0x0e, /* ttle1.0. */ + 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, 0x07, 0x77, /* ..U....w */ + 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x31, 0x14, /* olfSSL1. */ + 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x0c, /* 0...U... */ + 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x65, /* .Enginee */ + 0x72, 0x69, 0x6e, 0x67, 0x31, 0x18, 0x30, 0x16, /* ring1.0. */ + 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x0f, 0x77, /* ..U....w */ + 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, 0x20, 0x72, /* olfSSL r */ + 0x6f, 0x6f, 0x74, 0x20, 0x43, 0x41, 0x31, 0x1f, /* oot CA1. */ + 0x30, 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, /* 0...*.H. */ + 0xf7, 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, 0x69, /* .......i */ + 0x6e, 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, 0x66, /* nfo@wolf */ + 0x73, 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, 0x30, /* ssl.com0 */ + 0x1e, 0x17, 0x0d, 0x32, 0x34, 0x31, 0x32, 0x31, /* ...24121 */ + 0x38, 0x32, 0x31, 0x32, 0x35, 0x33, 0x31, 0x5a, /* 8212531Z */ + 0x17, 0x0d, 0x32, 0x37, 0x30, 0x39, 0x31, 0x34, /* ..270914 */ + 0x32, 0x31, 0x32, 0x35, 0x33, 0x31, 0x5a, 0x30, /* 212531Z0 */ + 0x81, 0x9e, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, /* ..1.0... */ + 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, /* U....US1 */ + 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, /* .0...U.. */ + 0x0c, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, /* ..Washin */ + 0x67, 0x74, 0x6f, 0x6e, 0x31, 0x10, 0x30, 0x0e, /* gton1.0. */ + 0x06, 0x03, 0x55, 0x04, 0x07, 0x0c, 0x07, 0x53, /* ..U....S */ + 0x65, 0x61, 0x74, 0x74, 0x6c, 0x65, 0x31, 0x10, /* eattle1. */ + 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, /* 0...U... 
*/ + 0x07, 0x77, 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, /* .wolfSSL */ + 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, /* 1.0...U. */ + 0x0b, 0x0c, 0x0b, 0x45, 0x6e, 0x67, 0x69, 0x6e, /* ...Engin */ + 0x65, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x31, 0x1f, /* eering1. */ + 0x30, 0x1d, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, /* 0...U... */ + 0x16, 0x77, 0x6f, 0x6c, 0x66, 0x53, 0x53, 0x4c, /* .wolfSSL */ + 0x20, 0x4f, 0x43, 0x53, 0x50, 0x20, 0x52, 0x65, /* OCSP Re */ + 0x73, 0x70, 0x6f, 0x6e, 0x64, 0x65, 0x72, 0x31, /* sponder1 */ + 0x1f, 0x30, 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, /* .0...*.H */ + 0x86, 0xf7, 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, /* ........ */ + 0x69, 0x6e, 0x66, 0x6f, 0x40, 0x77, 0x6f, 0x6c, /* info@wol */ + 0x66, 0x73, 0x73, 0x6c, 0x2e, 0x63, 0x6f, 0x6d, /* fssl.com */ + 0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, /* 0.."0... */ + 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, /* *.H..... */ + 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00, /* ........ */ + 0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, /* 0....... */ + 0x00, 0xb8, 0xba, 0x23, 0xb4, 0xf6, 0xc3, 0x7b, /* ...#...{ */ + 0x14, 0xc3, 0xa4, 0xf5, 0x1d, 0x61, 0xa1, 0xf5, /* .....a.. */ + 0x1e, 0x63, 0xb9, 0x85, 0x23, 0x34, 0x50, 0x6d, /* .c..#4Pm */ + 0xf8, 0x7c, 0xa2, 0x8a, 0x04, 0x8b, 0xd5, 0x75, /* .|.....u */ + 0x5c, 0x2d, 0xf7, 0x63, 0x88, 0xd1, 0x07, 0x7a, /* \-.c...z */ + 0xea, 0x0b, 0x45, 0x35, 0x2b, 0xeb, 0x1f, 0xb1, /* ..E5+... */ + 0x22, 0xb4, 0x94, 0x41, 0x38, 0xe2, 0x9d, 0x74, /* "..A8..t */ + 0xd6, 0x8b, 0x30, 0x22, 0x10, 0x51, 0xc5, 0xdb, /* ..0".Q.. */ + 0xca, 0x3f, 0x46, 0x2b, 0xfe, 0xe5, 0x5a, 0x3f, /* .?F+..Z? */ + 0x41, 0x74, 0x67, 0x75, 0x95, 0xa9, 0x94, 0xd5, /* Atgu.... */ + 0xc3, 0xee, 0x42, 0xf8, 0x8d, 0xeb, 0x92, 0x95, /* ..B..... */ + 0xe1, 0xd9, 0x65, 0xb7, 0x43, 0xc4, 0x18, 0xde, /* ..e.C... */ + 0x16, 0x80, 0x90, 0xce, 0x24, 0x35, 0x21, 0xc4, /* ....$5!. */ + 0x55, 0xac, 0x5a, 0x51, 0xe0, 0x2e, 0x2d, 0xb3, /* U.ZQ..-. 
*/ + 0x0a, 0x5a, 0x4f, 0x4a, 0x73, 0x31, 0x50, 0xee, /* .ZOJs1P. */ + 0x4a, 0x16, 0xbd, 0x39, 0x8b, 0xad, 0x05, 0x48, /* J..9...H */ + 0x87, 0xb1, 0x99, 0xe2, 0x10, 0xa7, 0x06, 0x72, /* .......r */ + 0x67, 0xca, 0x5c, 0xd1, 0x97, 0xbd, 0xc8, 0xf1, /* g.\..... */ + 0x76, 0xf8, 0xe0, 0x4a, 0xec, 0xbc, 0x93, 0xf4, /* v..J.... */ + 0x66, 0x4c, 0x28, 0x71, 0xd1, 0xd8, 0x66, 0x03, /* fL(q..f. */ + 0xb4, 0x90, 0x30, 0xbb, 0x17, 0xb0, 0xfe, 0x97, /* ..0..... */ + 0xf5, 0x1e, 0xe8, 0xc7, 0x5d, 0x9b, 0x8b, 0x11, /* ....]... */ + 0x19, 0x12, 0x3c, 0xab, 0x82, 0x71, 0x78, 0xff, /* ..<..qx. */ + 0xae, 0x3f, 0x32, 0xb2, 0x08, 0x71, 0xb2, 0x1b, /* .?2..q.. */ + 0x8c, 0x27, 0xac, 0x11, 0xb8, 0xd8, 0x43, 0x49, /* .'....CI */ + 0xcf, 0xb0, 0x70, 0xb1, 0xf0, 0x8c, 0xae, 0xda, /* ..p..... */ + 0x24, 0x87, 0x17, 0x3b, 0xd8, 0x04, 0x65, 0x6c, /* $..;..el */ + 0x00, 0x76, 0x50, 0xef, 0x15, 0x08, 0xd7, 0xb4, /* .vP..... */ + 0x73, 0x68, 0x26, 0x14, 0x87, 0x95, 0xc3, 0x5f, /* sh&...._ */ + 0x6e, 0x61, 0xb8, 0x87, 0x84, 0xfa, 0x80, 0x1a, /* na...... */ + 0x0a, 0x8b, 0x98, 0xf3, 0xe3, 0xff, 0x4e, 0x44, /* ......ND */ + 0x1c, 0x65, 0x74, 0x7c, 0x71, 0x54, 0x65, 0xe5, /* .et|qTe. */ + 0x39, 0x02, 0x03, 0x01, 0x00, 0x01, 0xa3, 0x82, /* 9....... */ + 0x01, 0x0a, 0x30, 0x82, 0x01, 0x06, 0x30, 0x09, /* ..0...0. */ + 0x06, 0x03, 0x55, 0x1d, 0x13, 0x04, 0x02, 0x30, /* ..U....0 */ + 0x00, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x1d, 0x0e, /* .0...U.. */ + 0x04, 0x16, 0x04, 0x14, 0x32, 0x67, 0xe1, 0xb1, /* ....2g.. */ + 0x79, 0xd2, 0x81, 0xfc, 0x9f, 0x23, 0x0c, 0x70, /* y....#.p */ + 0x40, 0x50, 0xb5, 0x46, 0x56, 0xb8, 0x30, 0x36, /* @P.FV.06 */ + 0x30, 0x81, 0xc4, 0x06, 0x03, 0x55, 0x1d, 0x23, /* 0....U.# */ + 0x04, 0x81, 0xbc, 0x30, 0x81, 0xb9, 0x80, 0x14, /* ...0.... */ + 0x73, 0xb0, 0x1c, 0xa4, 0x2f, 0x82, 0xcb, 0xcf, /* s.../... */ + 0x47, 0xa5, 0x38, 0xd7, 0xb0, 0x04, 0x82, 0x3a, /* G.8....: */ + 0x7e, 0x72, 0x15, 0x21, 0xa1, 0x81, 0x9d, 0xa4, /* ~r.!.... 
*/ + 0x81, 0x9a, 0x30, 0x81, 0x97, 0x31, 0x0b, 0x30, /* ..0..1.0 */ + 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, /* ...U.... */ + 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, /* US1.0... */ + 0x55, 0x04, 0x08, 0x0c, 0x0a, 0x57, 0x61, 0x73, /* U....Was */ + 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, /* hington1 */ + 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, /* .0...U.. */ + 0x0c, 0x07, 0x53, 0x65, 0x61, 0x74, 0x74, 0x6c, /* ..Seattl */ + 0x65, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, /* e1.0...U */ + 0x04, 0x0a, 0x0c, 0x07, 0x77, 0x6f, 0x6c, 0x66, /* ....wolf */ + 0x53, 0x53, 0x4c, 0x31, 0x14, 0x30, 0x12, 0x06, /* SSL1.0.. */ + 0x03, 0x55, 0x04, 0x0b, 0x0c, 0x0b, 0x45, 0x6e, /* .U....En */ + 0x67, 0x69, 0x6e, 0x65, 0x65, 0x72, 0x69, 0x6e, /* gineerin */ + 0x67, 0x31, 0x18, 0x30, 0x16, 0x06, 0x03, 0x55, /* g1.0...U */ + 0x04, 0x03, 0x0c, 0x0f, 0x77, 0x6f, 0x6c, 0x66, /* ....wolf */ + 0x53, 0x53, 0x4c, 0x20, 0x72, 0x6f, 0x6f, 0x74, /* SSL root */ + 0x20, 0x43, 0x41, 0x31, 0x1f, 0x30, 0x1d, 0x06, /* CA1.0.. */ + 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, /* .*.H.... */ + 0x09, 0x01, 0x16, 0x10, 0x69, 0x6e, 0x66, 0x6f, /* ....info */ + 0x40, 0x77, 0x6f, 0x6c, 0x66, 0x73, 0x73, 0x6c, /* @wolfssl */ + 0x2e, 0x63, 0x6f, 0x6d, 0x82, 0x01, 0x63, 0x30, /* .com..c0 */ + 0x13, 0x06, 0x03, 0x55, 0x1d, 0x25, 0x04, 0x0c, /* ...U.%.. */ + 0x30, 0x0a, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, /* 0...+... */ + 0x05, 0x07, 0x03, 0x09, 0x30, 0x0d, 0x06, 0x09, /* ....0... */ + 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, /* *.H..... */ + 0x0b, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, /* ........ */ + 0x4d, 0xa2, 0xd8, 0x55, 0xe0, 0x2b, 0xf4, 0xad, /* M..U.+.. */ + 0x65, 0xe2, 0x92, 0x35, 0xcb, 0x60, 0xa0, 0xa2, /* e..5.`.. */ + 0x6b, 0xa6, 0x88, 0xc1, 0x86, 0x58, 0x57, 0x37, /* k....XW7 */ + 0xbd, 0x2e, 0x28, 0x6e, 0x1c, 0x56, 0x2a, 0x35, /* ..(n.V*5 */ + 0xde, 0xff, 0x3e, 0x8e, 0x3d, 0x47, 0x21, 0x1a, /* ..>.=G!. 
*/ + 0xe9, 0xd3, 0xc6, 0xb4, 0xe2, 0xcb, 0x3e, 0xc6, /* ......>. */ + 0xaf, 0x9b, 0xef, 0x23, 0x88, 0x56, 0x95, 0x73, /* ...#.V.s */ + 0x2e, 0xb3, 0xed, 0xc5, 0x11, 0x4b, 0x69, 0xf7, /* .....Ki. */ + 0x13, 0x3a, 0x05, 0xe1, 0xaf, 0xba, 0xc9, 0x59, /* .:.....Y */ + 0xfd, 0xe2, 0xa0, 0x81, 0xa0, 0x4c, 0x0c, 0x2c, /* .....L., */ + 0xcb, 0x57, 0xad, 0x96, 0x3a, 0x8c, 0x32, 0xa6, /* .W..:.2. */ + 0x4a, 0xf8, 0x72, 0xb8, 0xec, 0xb3, 0x26, 0x69, /* J.r...&i */ + 0xd6, 0x6a, 0x4c, 0x4c, 0x78, 0x18, 0x3c, 0xca, /* .jLLx.<. */ + 0x19, 0xf1, 0xb5, 0x8e, 0x23, 0x81, 0x5b, 0x27, /* ....#.[' */ + 0x90, 0xe0, 0x5c, 0x2b, 0x17, 0x4d, 0x78, 0x99, /* ..\+.Mx. */ + 0x6b, 0x25, 0xbd, 0x2f, 0xae, 0x1b, 0xaa, 0xce, /* k%./.... */ + 0x84, 0xb9, 0x44, 0x21, 0x46, 0xc0, 0x34, 0x6b, /* ..D!F.4k */ + 0x5b, 0xb9, 0x1b, 0xca, 0x5c, 0x60, 0xf1, 0xef, /* [...\`.. */ + 0xe6, 0x66, 0xbc, 0x84, 0x63, 0x56, 0x50, 0x7d, /* .f..cVP} */ + 0xbb, 0x2c, 0x2f, 0x7b, 0x47, 0xb4, 0xfd, 0x58, /* .,/{G..X */ + 0x77, 0x87, 0xee, 0x27, 0x20, 0x96, 0x72, 0x8e, /* w..' .r. */ + 0x4c, 0x7e, 0x4f, 0x93, 0xeb, 0x5f, 0x8f, 0x9c, /* L~O.._.. */ + 0x1e, 0x59, 0x7a, 0x96, 0xaa, 0x53, 0x77, 0x22, /* .Yz..Sw" */ + 0x41, 0xd8, 0xd3, 0xf9, 0x89, 0x8f, 0xe8, 0x9d, /* A....... */ + 0x65, 0xbd, 0x0c, 0x71, 0x3c, 0xbb, 0xa3, 0x07, /* e..q<... */ + 0xbf, 0xfb, 0xa8, 0xd1, 0x18, 0x0a, 0xb4, 0xc4, /* ........ */ + 0xf7, 0x83, 0xb3, 0x86, 0x2b, 0xf0, 0x5b, 0x05, /* ....+.[. */ + 0x28, 0xc1, 0x01, 0x31, 0x73, 0x5c, 0x2b, 0xbd, /* (..1s\+. */ + 0x60, 0x97, 0xa3, 0x36, 0x82, 0x96, 0xd7, 0x83, /* `..6.... */ + 0xdf, 0x75, 0xee, 0x29, 0x42, 0x97, 0x86, 0x41, /* .u.)B..A */ + 0x55, 0xb9, 0x70, 0x87, 0xd5, 0x02, 0x85, 0x13, /* U.p..... 
*/ + 0x41, 0xf8, 0x25, 0x05, 0xab, 0x6a, 0xaa, 0x57 /* A.%..j.W */ + }; + OcspEntry entry[1]; + CertStatus status[1]; + OcspRequest* request = NULL; +#ifndef NO_FILESYSTEM + const char* ca_cert = "./certs/ca-cert.pem"; +#endif + + byte serial[] = {0x05}; + byte issuerHash[] = { + 0x71, 0x4d, 0x82, 0x23, 0x40, 0x59, 0xc0, 0x96, + 0xa1, 0x37, 0x43, 0xfa, 0x31, 0xdb, 0xba, 0xb1, + 0x43, 0x18, 0xda, 0x04 + }; + byte issuerKeyHash[] = { + 0x83, 0xc6, 0x3a, 0x89, 0x2c, 0x81, 0xf4, 0x02, + 0xd7, 0x9d, 0x4c, 0xe2, 0x2a, 0xc0, 0x71, 0x82, + 0x64, 0x44, 0xda, 0x0e + }; + + + XMEMSET(entry, 0, sizeof(OcspEntry)); + XMEMSET(status, 0, sizeof(CertStatus)); + + ExpectNotNull(request = wolfSSL_OCSP_REQUEST_new()); + ExpectNotNull(request->serial = (byte*)XMALLOC(sizeof(serial), NULL, + DYNAMIC_TYPE_OCSP_REQUEST)); + + if ((request != NULL) && (request->serial != NULL)) { + request->serialSz = sizeof(serial); + XMEMCPY(request->serial, serial, sizeof(serial)); + XMEMCPY(request->issuerHash, issuerHash, sizeof(issuerHash)); + XMEMCPY(request->issuerKeyHash, issuerKeyHash, sizeof(issuerKeyHash)); + } + + ExpectNotNull(cm = wolfSSL_CertManagerNew_ex(NULL)); + ExpectIntEQ(wolfSSL_CertManagerEnableOCSP(cm, 0), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_CertManagerLoadCA(cm, + "./certs/ocsp/intermediate1-ca-cert.pem", NULL), WOLFSSL_SUCCESS); + + /* Response should be valid. */ + ExpectIntEQ(wolfSSL_CertManagerCheckOCSPResponse(cm, (byte *)response, + sizeof(response), NULL, status, entry, request), WOLFSSL_SUCCESS); + + /* Flip a byte in the request serial number, response should be invalid + * now. 
*/ + if ((request != NULL) && (request->serial != NULL)) + request->serial[0] ^= request->serial[0]; + ExpectIntNE(wolfSSL_CertManagerCheckOCSPResponse(cm, (byte *)response, + sizeof(response), NULL, status, entry, request), WOLFSSL_SUCCESS); + +#ifndef NO_FILESYSTEM + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, server_cert_der_2048, + sizeof(server_cert_der_2048)), WC_NO_ERR_TRACE(ASN_NO_SIGNER_E)); + ExpectIntEQ(WOLFSSL_SUCCESS, + wolfSSL_CertManagerLoadCA(cm, ca_cert, NULL)); + ExpectIntEQ(wolfSSL_CertManagerCheckOCSP(cm, server_cert_der_2048, + sizeof(server_cert_der_2048)), 1); +#endif + + wolfSSL_OCSP_REQUEST_free(request); + wolfSSL_CertManagerFree(cm); +#endif /* OPENSSL_ALL || WOLFSSL_NGINX || WOLFSSL_HAPROXY || + * WOLFSSL_APACHE_HTTPD || HAVE_LIGHTY */ +#endif /* HAVE_OCSP */ + return EXPECT_RESULT(); +} + +#ifdef HAVE_CERT_CHAIN_VALIDATION +#ifndef WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION +#ifdef WOLFSSL_PEM_TO_DER +#ifndef NO_SHA256 +static int load_ca_into_cm(WOLFSSL_CERT_MANAGER* cm, char* certA) +{ + int ret; + + if ((ret = wolfSSL_CertManagerLoadCA(cm, certA, 0)) != WOLFSSL_SUCCESS) { + fprintf(stderr, "loading cert %s failed\n", certA); + fprintf(stderr, "Error: (%d): %s\n", ret, + wolfSSL_ERR_reason_error_string((word32)ret)); + return -1; + } + + return 0; +} + +static int verify_cert_with_cm(WOLFSSL_CERT_MANAGER* cm, char* certA) +{ + int ret; + if ((ret = wolfSSL_CertManagerVerify(cm, certA, CERT_FILETYPE)) + != WOLFSSL_SUCCESS) { + fprintf(stderr, "could not verify the cert: %s\n", certA); + fprintf(stderr, "Error: (%d): %s\n", ret, + wolfSSL_ERR_reason_error_string((word32)ret)); + return -1; + } + else { + fprintf(stderr, "successfully verified: %s\n", certA); + } + + return 0; +} +#define LOAD_ONE_CA(a, b, c, d) \ + do { \ + (a) = load_ca_into_cm(c, d); \ + if ((a) != 0) \ + return (b); \ + else \ + (b)--; \ + } while(0) + +#define VERIFY_ONE_CERT(a, b, c, d) \ + do { \ + (a) = verify_cert_with_cm(c, d);\ + if ((a) != 0) \ + return (b); 
\ + else \ + (b)--; \ + } while(0) + +static int test_chainG(WOLFSSL_CERT_MANAGER* cm) +{ + int ret; + int i = -1; + /* Chain G is a valid chain per RFC 5280 section 4.2.1.9 */ + char chainGArr[9][50] = {"certs/ca-cert.pem", + "certs/test-pathlen/chainG-ICA7-pathlen100.pem", + "certs/test-pathlen/chainG-ICA6-pathlen10.pem", + "certs/test-pathlen/chainG-ICA5-pathlen20.pem", + "certs/test-pathlen/chainG-ICA4-pathlen5.pem", + "certs/test-pathlen/chainG-ICA3-pathlen99.pem", + "certs/test-pathlen/chainG-ICA2-pathlen1.pem", + "certs/test-pathlen/chainG-ICA1-pathlen0.pem", + "certs/test-pathlen/chainG-entity.pem"}; + + LOAD_ONE_CA(ret, i, cm, chainGArr[0]); /* if failure, i = -1 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[1]); /* if failure, i = -2 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[2]); /* if failure, i = -3 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[3]); /* if failure, i = -4 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[4]); /* if failure, i = -5 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[5]); /* if failure, i = -6 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[6]); /* if failure, i = -7 here */ + LOAD_ONE_CA(ret, i, cm, chainGArr[7]); /* if failure, i = -8 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[1]); /* if failure, i = -9 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[2]); /* if failure, i = -10 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[3]); /* if failure, i = -11 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[4]); /* if failure, i = -12 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[5]); /* if failure, i = -13 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[6]); /* if failure, i = -14 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[7]); /* if failure, i = -15 here */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[8]); /* if failure, i = -16 here */ + + /* test validating the entity twice, should have no effect on pathLen since + * entity/leaf cert */ + VERIFY_ONE_CERT(ret, i, cm, chainGArr[8]); /* if failure, i = -17 here */ + + return ret; +} + 
+static int test_chainH(WOLFSSL_CERT_MANAGER* cm)
+{
+    int ret;
+    int i = -1;
+    /* Chain H is NOT a valid chain per RFC5280 section 4.2.1.9:
+     * ICA4-pathlen of 2 signing ICA3-pathlen of 2 (reduce max path len to 2)
+     * ICA3-pathlen of 2 signing ICA2-pathlen of 2 (reduce max path len to 1)
+     * ICA2-pathlen of 2 signing ICA1-pathlen of 0 (reduce max path len to 0)
+     * ICA1-pathlen of 0 signing entity (pathlen is already 0, ERROR)
+     * Test should successfully verify ICA4, ICA3, ICA2 and then fail on ICA1
+     */
+    char chainHArr[6][50] = {"certs/ca-cert.pem",
+        "certs/test-pathlen/chainH-ICA4-pathlen2.pem",
+        "certs/test-pathlen/chainH-ICA3-pathlen2.pem",
+        "certs/test-pathlen/chainH-ICA2-pathlen2.pem",
+        "certs/test-pathlen/chainH-ICA1-pathlen0.pem",
+        "certs/test-pathlen/chainH-entity.pem"};
+
+    LOAD_ONE_CA(ret, i, cm, chainHArr[0]); /* if failure, i = -1 here */
+    LOAD_ONE_CA(ret, i, cm, chainHArr[1]); /* if failure, i = -2 here */
+    LOAD_ONE_CA(ret, i, cm, chainHArr[2]); /* if failure, i = -3 here */
+    LOAD_ONE_CA(ret, i, cm, chainHArr[3]); /* if failure, i = -4 here */
+    LOAD_ONE_CA(ret, i, cm, chainHArr[4]); /* if failure, i = -5 here */
+    VERIFY_ONE_CERT(ret, i, cm, chainHArr[1]); /* if failure, i = -6 here */
+    VERIFY_ONE_CERT(ret, i, cm, chainHArr[2]); /* if failure, i = -7 here */
+    VERIFY_ONE_CERT(ret, i, cm, chainHArr[3]); /* if failure, i = -8 here */
+    VERIFY_ONE_CERT(ret, i, cm, chainHArr[4]); /* if failure, i = -9 here */
+    VERIFY_ONE_CERT(ret, i, cm, chainHArr[5]); /* if failure, i = -10 here */
+
+    return ret;
+}
+
+static int test_chainI(WOLFSSL_CERT_MANAGER* cm)
+{
+    int ret;
+    int i = -1;
+    /* Chain I is a valid chain per RFC5280 section 4.2.1.9:
+     * ICA3-pathlen of 2 signing ICA2 without a pathlen (reduce maxPathLen to 2)
+     * ICA2-no_pathlen signing ICA1-no_pathlen (reduce maxPathLen to 1)
+     * ICA1-no_pathlen signing entity (reduce maxPathLen to 0)
+     * Test should successfully verify ICA3, ICA2, ICA1 and then the entity
+     */
+    char
chainIArr[5][50] = {"certs/ca-cert.pem", + "certs/test-pathlen/chainI-ICA3-pathlen2.pem", + "certs/test-pathlen/chainI-ICA2-no_pathlen.pem", + "certs/test-pathlen/chainI-ICA1-no_pathlen.pem", + "certs/test-pathlen/chainI-entity.pem"}; + + LOAD_ONE_CA(ret, i, cm, chainIArr[0]); /* if failure, i = -1 here */ + LOAD_ONE_CA(ret, i, cm, chainIArr[1]); /* if failure, i = -2 here */ + LOAD_ONE_CA(ret, i, cm, chainIArr[2]); /* if failure, i = -3 here */ + LOAD_ONE_CA(ret, i, cm, chainIArr[3]); /* if failure, i = -4 here */ + VERIFY_ONE_CERT(ret, i, cm, chainIArr[1]); /* if failure, i = -5 here */ + VERIFY_ONE_CERT(ret, i, cm, chainIArr[2]); /* if failure, i = -6 here */ + VERIFY_ONE_CERT(ret, i, cm, chainIArr[3]); /* if failure, i = -7 here */ + VERIFY_ONE_CERT(ret, i, cm, chainIArr[4]); /* if failure, i = -8 here */ + + return ret; +} + +static int test_chainJ(WOLFSSL_CERT_MANAGER* cm) +{ + int ret; + int i = -1; + /* Chain J is NOT a valid chain per RFC5280 section 4.2.1.9: + * ICA4-pathlen of 2 signing ICA3 without a pathlen (reduce maxPathLen to 2) + * ICA3-pathlen of 2 signing ICA2 without a pathlen (reduce maxPathLen to 1) + * ICA2-no_pathlen signing ICA1-no_pathlen (reduce maxPathLen to 0) + * ICA1-no_pathlen signing entity (ERROR, pathlen zero and non-leaf cert) + */ + char chainJArr[6][50] = {"certs/ca-cert.pem", + "certs/test-pathlen/chainJ-ICA4-pathlen2.pem", + "certs/test-pathlen/chainJ-ICA3-no_pathlen.pem", + "certs/test-pathlen/chainJ-ICA2-no_pathlen.pem", + "certs/test-pathlen/chainJ-ICA1-no_pathlen.pem", + "certs/test-pathlen/chainJ-entity.pem"}; + + LOAD_ONE_CA(ret, i, cm, chainJArr[0]); /* if failure, i = -1 here */ + LOAD_ONE_CA(ret, i, cm, chainJArr[1]); /* if failure, i = -2 here */ + LOAD_ONE_CA(ret, i, cm, chainJArr[2]); /* if failure, i = -3 here */ + LOAD_ONE_CA(ret, i, cm, chainJArr[3]); /* if failure, i = -4 here */ + LOAD_ONE_CA(ret, i, cm, chainJArr[4]); /* if failure, i = -5 here */ + VERIFY_ONE_CERT(ret, i, cm, chainJArr[1]); /* if failure, i 
= -6 here */ + VERIFY_ONE_CERT(ret, i, cm, chainJArr[2]); /* if failure, i = -7 here */ + VERIFY_ONE_CERT(ret, i, cm, chainJArr[3]); /* if failure, i = -8 here */ + VERIFY_ONE_CERT(ret, i, cm, chainJArr[4]); /* if failure, i = -9 here */ + VERIFY_ONE_CERT(ret, i, cm, chainJArr[5]); /* if failure, i = -10 here */ + + return ret; +} +#endif +#endif +#endif +#endif + +int test_various_pathlen_chains(void) +{ + EXPECT_DECLS; +#if defined(WOLFSSL_PEM_TO_DER) && defined(HAVE_CERT_CHAIN_VALIDATION) && \ + !defined(WOLFSSL_TEST_APPLE_NATIVE_CERT_VALIDATION) +#ifndef NO_SHA256 + WOLFSSL_CERT_MANAGER* cm = NULL; + + /* Test chain G (large chain with varying pathLens) */ + ExpectNotNull(cm = wolfSSL_CertManagerNew()); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(test_chainG(cm), -1); +#else + ExpectIntEQ(test_chainG(cm), 0); +#endif /* NO_WOLFSSL_CLIENT && NO_WOLFSSL_SERVER */ + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + /* end test chain G */ + + /* Test chain H (5 chain with same pathLens) */ + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntLT(test_chainH(cm), 0); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + /* end test chain H */ + + /* Test chain I (only first ICA has pathLen set and it's set to 2, + * followed by 2 ICA's, should pass) */ + ExpectNotNull(cm = wolfSSL_CertManagerNew()); +#if defined(NO_WOLFSSL_CLIENT) && defined(NO_WOLFSSL_SERVER) + ExpectIntEQ(test_chainI(cm), -1); +#else + ExpectIntEQ(test_chainI(cm), 0); +#endif /* NO_WOLFSSL_CLIENT && NO_WOLFSSL_SERVER */ + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + cm = NULL; + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), 
WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + cm = NULL; + + /* Test chain J (Again only first ICA has pathLen set and it's set to 2, + * this time followed by 3 ICA's, should fail */ + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntLT(test_chainJ(cm), 0); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); + cm = NULL; + + ExpectNotNull(cm = wolfSSL_CertManagerNew()); + ExpectIntEQ(wolfSSL_CertManagerUnloadCAs(cm), WOLFSSL_SUCCESS); + wolfSSL_CertManagerFree(cm); +#endif +#endif + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_certman.h b/tests/api/test_certman.h new file mode 100644 index 000000000..e278b76a1 --- /dev/null +++ b/tests/api/test_certman.h @@ -0,0 +1,61 @@ +/* test_certman.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_CERTMAN_H +#define WOLFCRYPT_TEST_CERTMAN_H + +#include + +int test_wolfSSL_CertManagerAPI(void); +int test_wolfSSL_CertManagerLoadCABuffer(void); +int test_wolfSSL_CertManagerLoadCABuffer_ex(void); +int test_wolfSSL_CertManagerLoadCABufferType(void); +int test_wolfSSL_CertManagerGetCerts(void); +int test_wolfSSL_CertManagerSetVerify(void); +int test_wolfSSL_CertManagerNameConstraint(void); +int test_wolfSSL_CertManagerNameConstraint2(void); +int test_wolfSSL_CertManagerNameConstraint3(void); +int test_wolfSSL_CertManagerNameConstraint4(void); +int test_wolfSSL_CertManagerNameConstraint5(void); +int test_wolfSSL_CertManagerCRL(void); +int test_wolfSSL_CRL_duplicate_extensions(void); +int test_wolfSSL_CertManagerCheckOCSPResponse(void); +int test_various_pathlen_chains(void); + +#define TEST_CERTMAN_DECLS \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerAPI), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerLoadCABuffer), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerLoadCABuffer_ex), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerLoadCABufferType), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerGetCerts), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerSetVerify), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerNameConstraint), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerNameConstraint2), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerNameConstraint3), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerNameConstraint4), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerNameConstraint5), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerCRL), \ + TEST_DECL_GROUP("certman", test_wolfSSL_CRL_duplicate_extensions), \ + 
TEST_DECL_GROUP("certman", test_wolfSSL_CertManagerCheckOCSPResponse), \ + TEST_DECL_GROUP("certman", test_various_pathlen_chains) + +#endif /* WOLFCRYPT_TEST_CERTMAN_H */ + diff --git a/tests/api/test_chacha.c b/tests/api/test_chacha.c index 5a843a7e9..6da3266e3 100644 --- a/tests/api/test_chacha.c +++ b/tests/api/test_chacha.c @@ -186,3 +186,184 @@ int test_wc_Chacha_Process(void) return EXPECT_RESULT(); } /* END test_wc_Chacha_Process */ + +#define CHACHA_LEN 1024 +/* + * Testing wc_Chacha_Process() + */ +int test_wc_Chacha_Process_Chunking(void) +{ + EXPECT_DECLS; +#ifdef HAVE_CHACHA + ChaCha enc; + WC_DECLARE_VAR(plain, byte, CHACHA_LEN, NULL); + WC_DECLARE_VAR(cipher, byte, CHACHA_LEN, NULL); + byte key[CHACHA_MAX_KEY_SZ]; + byte iv[CHACHA_IV_BYTES]; + int i; + int cnt; + int sz; + const byte expected[CHACHA_LEN] = { + 0xbc, 0xf5, 0x3b, 0xf2, 0x75, 0x85, 0x9e, 0x0a, + 0x09, 0x58, 0x83, 0x50, 0x33, 0x12, 0x01, 0xa1, + 0xb4, 0xaf, 0x8a, 0xe8, 0x4d, 0x3d, 0xa5, 0x68, + 0xf7, 0x6d, 0x3e, 0xe0, 0x62, 0x7e, 0x62, 0x66, + 0xdd, 0x07, 0xe9, 0x36, 0x6f, 0x4d, 0xe9, 0x7a, + 0x16, 0x48, 0xa1, 0x83, 0x9e, 0x67, 0x4d, 0xa3, + 0xfe, 0x7e, 0x4a, 0x31, 0xdd, 0xb6, 0x50, 0x39, + 0xd2, 0x2b, 0x93, 0xf2, 0x4d, 0x51, 0x44, 0x42, + 0x5d, 0xf1, 0xd9, 0x24, 0xd7, 0xef, 0x4b, 0xa4, + 0xfd, 0x6a, 0x53, 0xa5, 0x1e, 0x4a, 0xc8, 0x68, + 0x11, 0x69, 0xc6, 0xbd, 0xe1, 0x59, 0xe4, 0xca, + 0x5b, 0xa9, 0x77, 0xfe, 0x4f, 0x82, 0x9f, 0xcf, + 0x55, 0x16, 0x3c, 0xd5, 0x83, 0xee, 0xc7, 0x53, + 0xaf, 0xca, 0x8a, 0xe2, 0xcf, 0xf1, 0x4b, 0x3b, + 0x44, 0xf6, 0xc9, 0x6c, 0x5b, 0xd3, 0x28, 0x8a, + 0x7e, 0x67, 0xaa, 0x9e, 0xad, 0xce, 0x96, 0xc4, + 0x6e, 0x95, 0x8c, 0xf8, 0xf6, 0xb6, 0x42, 0x8e, + 0xe7, 0xab, 0xc8, 0x2c, 0x66, 0x8b, 0x80, 0xcf, + 0x78, 0xfe, 0x35, 0x8b, 0x59, 0x18, 0x45, 0xcb, + 0x18, 0xd4, 0x09, 0x88, 0xa9, 0xf9, 0x27, 0xd1, + 0x3b, 0x9d, 0x2b, 0xff, 0x89, 0x21, 0xb0, 0xd2, + 0xa7, 0x7e, 0x35, 0x61, 0xae, 0x1c, 0xc3, 0x1c, + 0x07, 0x5c, 0x10, 0x5d, 0x71, 0x3a, 0x3a, 0xe8, + 0x4c, 0xba, 
0x00, 0xde, 0xd1, 0xf9, 0xa1, 0xae, + 0x7b, 0x91, 0x9d, 0x66, 0x31, 0x18, 0x55, 0x39, + 0xec, 0x1d, 0x83, 0x85, 0x1e, 0x5b, 0x35, 0x17, + 0x2e, 0xbc, 0x7a, 0x22, 0x79, 0x09, 0xa7, 0x02, + 0xf7, 0x3b, 0x93, 0x2c, 0x89, 0x1b, 0x69, 0xde, + 0x80, 0xc8, 0xdf, 0xce, 0xf9, 0xcd, 0xc8, 0x58, + 0xd6, 0x4b, 0x65, 0x9a, 0xc4, 0x4f, 0x27, 0xdb, + 0x9a, 0x6c, 0x3a, 0xef, 0x20, 0x0b, 0x00, 0x5c, + 0x9f, 0x91, 0xc1, 0xf6, 0x80, 0x53, 0x6c, 0x42, + 0xe3, 0xd0, 0xfb, 0x3b, 0x23, 0x75, 0x45, 0xa7, + 0x5b, 0x9b, 0xaa, 0xcd, 0x1e, 0x03, 0x35, 0x68, + 0x17, 0xee, 0xff, 0xd7, 0x4f, 0x77, 0x2f, 0xd0, + 0x1d, 0x5e, 0x89, 0x16, 0x50, 0x6f, 0x22, 0x44, + 0x10, 0x64, 0x37, 0x66, 0x70, 0x7f, 0x4d, 0x58, + 0x36, 0xec, 0x56, 0x4e, 0xfd, 0x22, 0x8d, 0x77, + 0xb1, 0x37, 0x07, 0x13, 0xdf, 0x34, 0x40, 0x1c, + 0x65, 0x95, 0x9b, 0xb9, 0xac, 0x11, 0xfe, 0x7a, + 0xae, 0x1f, 0x17, 0x94, 0xd4, 0xdd, 0x5b, 0x4f, + 0x69, 0xa8, 0x04, 0x8e, 0x80, 0x87, 0x7d, 0x96, + 0x25, 0x37, 0x83, 0x0e, 0xca, 0xa4, 0xb3, 0x29, + 0x2f, 0x4b, 0x83, 0xa4, 0x01, 0x36, 0x0d, 0xdb, + 0xd7, 0x6e, 0x7a, 0x9c, 0x3e, 0x82, 0xc8, 0x5f, + 0x4e, 0xc6, 0xd2, 0x97, 0x64, 0xe6, 0xd9, 0x50, + 0x89, 0xcb, 0x64, 0x33, 0x28, 0x9c, 0x14, 0xf9, + 0x41, 0x33, 0x99, 0x0c, 0x87, 0x6f, 0x00, 0x3f, + 0x00, 0x6f, 0xae, 0xe9, 0x20, 0xc2, 0xcd, 0xb8, + 0x7a, 0x58, 0xde, 0x57, 0x34, 0xda, 0x63, 0xa1, + 0x0b, 0x55, 0xfc, 0x54, 0x2a, 0xed, 0xc0, 0xbc, + 0x29, 0x5f, 0x88, 0x7d, 0x37, 0x3b, 0x48, 0x86, + 0x3f, 0x88, 0xa2, 0xef, 0x55, 0xe6, 0xc4, 0xf8, + 0xb8, 0x11, 0x9e, 0x3a, 0x45, 0x79, 0xac, 0x85, + 0xb2, 0x70, 0x40, 0xd0, 0x66, 0xe7, 0x66, 0xc8, + 0x8e, 0x8f, 0xde, 0xde, 0xf8, 0x50, 0x79, 0x9e, + 0x37, 0x04, 0x07, 0x83, 0x5b, 0xe0, 0x68, 0x5b, + 0x32, 0xbc, 0x6e, 0x50, 0x05, 0xca, 0xf8, 0x3b, + 0xec, 0x15, 0x13, 0xf8, 0x9a, 0xa2, 0x58, 0x98, + 0x03, 0x29, 0x83, 0x7f, 0x11, 0xb4, 0x98, 0x41, + 0xc1, 0xd9, 0x02, 0x6e, 0x2c, 0x45, 0x55, 0xab, + 0xff, 0xcf, 0x23, 0x80, 0xf0, 0x82, 0x73, 0xe9, + 0xe6, 0x8f, 0x1a, 0xd9, 0x70, 0xd6, 0x46, 0x1f, + 0xa8, 0xf8, 
0xbd, 0x14, 0xd9, 0x50, 0x59, 0x8e, + 0x46, 0xbf, 0xe2, 0x8a, 0x8e, 0xce, 0xe7, 0x81, + 0xf4, 0x3a, 0xd9, 0x07, 0xd8, 0x1d, 0x29, 0x19, + 0xc1, 0x9d, 0xac, 0x6f, 0xfb, 0xce, 0x95, 0x03, + 0x29, 0xce, 0x4a, 0x60, 0x34, 0x6a, 0x88, 0xc7, + 0x5e, 0x8c, 0x71, 0x29, 0x81, 0x64, 0x2f, 0xfb, + 0xb4, 0x20, 0x08, 0x57, 0xba, 0x50, 0x75, 0x7b, + 0x1e, 0xfa, 0xcc, 0x60, 0xe7, 0x09, 0xab, 0x4e, + 0x46, 0x64, 0xfe, 0x17, 0x00, 0x84, 0x8b, 0xca, + 0xa8, 0xcb, 0x18, 0x5b, 0xa2, 0x04, 0x13, 0x68, + 0x99, 0x02, 0xaf, 0xcb, 0x75, 0xcb, 0x46, 0x61, + 0x66, 0x05, 0xd9, 0x5c, 0x6d, 0x8c, 0xf9, 0x8a, + 0x57, 0xde, 0xf4, 0xb9, 0x5d, 0x51, 0x17, 0x4a, + 0x8c, 0x42, 0xca, 0x0d, 0x7f, 0x92, 0x69, 0x0d, + 0x88, 0x2b, 0xc6, 0xee, 0xbd, 0x5a, 0x32, 0x17, + 0x84, 0xef, 0xf9, 0xd9, 0x51, 0x33, 0x57, 0x2f, + 0x87, 0xf8, 0xda, 0x3c, 0x3c, 0x14, 0xa9, 0x26, + 0xad, 0x19, 0xfd, 0x14, 0x5e, 0x33, 0x92, 0xb1, + 0xe1, 0xd7, 0xfb, 0x1e, 0x55, 0x40, 0xe5, 0x80, + 0x9b, 0x8e, 0x4b, 0x88, 0x58, 0x77, 0xa9, 0xd2, + 0xbf, 0x40, 0x90, 0xbe, 0x8f, 0x1f, 0xa7, 0x8a, + 0xaf, 0x8e, 0x03, 0x93, 0x4d, 0x8a, 0x73, 0x8e, + 0x76, 0x67, 0x43, 0x37, 0xc1, 0x76, 0x87, 0x50, + 0x37, 0xc4, 0x02, 0x4a, 0x53, 0x1a, 0x5b, 0xe8, + 0x5f, 0xc8, 0x28, 0xad, 0xd3, 0x8a, 0x97, 0x53, + 0xa3, 0xf6, 0x48, 0xba, 0x05, 0x18, 0x56, 0x90, + 0xa9, 0x95, 0xd8, 0xac, 0xe9, 0xd5, 0x6c, 0xe3, + 0x1f, 0xd8, 0xfc, 0xc5, 0x27, 0x19, 0xab, 0x4a, + 0xc4, 0x36, 0xc9, 0xe9, 0xaa, 0x30, 0xef, 0x8e, + 0x9e, 0x01, 0x18, 0x68, 0xe9, 0x06, 0xf8, 0x54, + 0xe5, 0xe2, 0xec, 0xde, 0x52, 0xfc, 0x3b, 0xdd, + 0xe9, 0xc7, 0xc8, 0x2b, 0x93, 0xd4, 0xdb, 0x28, + 0x72, 0x06, 0x07, 0xd1, 0xba, 0x05, 0x23, 0xa6, + 0x41, 0x42, 0x55, 0x6a, 0x6e, 0x6f, 0x6c, 0x40, + 0x6a, 0x19, 0xa4, 0xd5, 0xa2, 0x11, 0xb5, 0x2b, + 0x16, 0x4a, 0xe3, 0x41, 0xf3, 0xaf, 0x93, 0xbd, + 0xc8, 0xd9, 0x26, 0x43, 0x71, 0x56, 0xd2, 0x5e, + 0xf5, 0xa8, 0x3c, 0x64, 0x83, 0x04, 0x89, 0x62, + 0x20, 0xd3, 0xe9, 0x8e, 0x60, 0xcd, 0xec, 0xd9, + 0xce, 0x89, 0xf0, 0x5c, 0xf2, 0x26, 0x72, 0x51, + 0xd5, 0x16, 
0x7b, 0xef, 0x19, 0x10, 0xb4, 0xce, + 0x60, 0x47, 0xab, 0x98, 0x86, 0xbd, 0x39, 0xb7, + 0xc9, 0x29, 0x38, 0x1a, 0xc1, 0x5c, 0xab, 0x77, + 0xea, 0xe9, 0xf4, 0x7f, 0x6a, 0x06, 0xf7, 0xc0, + 0x0b, 0x17, 0x1f, 0x2f, 0xce, 0x07, 0x1b, 0x33, + 0x68, 0x4d, 0x64, 0x6a, 0x28, 0x6d, 0x1d, 0xc6, + 0x54, 0x5c, 0xa2, 0x69, 0xf9, 0xb4, 0x62, 0xc9, + 0x71, 0xf5, 0xd1, 0xb7, 0x7b, 0x02, 0x81, 0x6d, + 0x4b, 0x1f, 0x62, 0xc5, 0xce, 0x2e, 0xc6, 0x2a, + 0x1d, 0x6f, 0xc7, 0xc1, 0x99, 0x48, 0x7b, 0xc7, + 0xf3, 0x53, 0xb7, 0x02, 0x7f, 0x82, 0xda, 0xfa, + 0xce, 0xd3, 0x54, 0xf8, 0x9b, 0x30, 0x6f, 0xed, + 0x6c, 0xec, 0x1c, 0x21, 0x49, 0x04, 0x51, 0xae, + 0xd0, 0x3f, 0xb1, 0xfb, 0x78, 0x1a, 0x6f, 0x35, + 0xc8, 0x3f, 0x4c, 0x43, 0x71, 0xe9, 0xb8, 0xd7, + 0x74, 0xca, 0x46, 0x68, 0xeb, 0xd9, 0xa3, 0x94, + 0x6e, 0x9d, 0xea, 0x57, 0x22, 0x1e, 0x15, 0x27, + 0x40, 0xd4, 0x0c, 0x32, 0x40, 0xc0, 0x40, 0x8a, + 0x1e, 0x2e, 0x1a, 0x58, 0x84, 0xa0, 0xc3, 0x68, + 0x96, 0xfe, 0xb0, 0x96, 0x6c, 0x04, 0x61, 0x35, + 0x4a, 0x78, 0xc5, 0xeb, 0x50, 0xca, 0xcb, 0x22, + 0x7b, 0x53, 0x02, 0xfa, 0x63, 0x28, 0x10, 0x68, + 0x77, 0xab, 0xda, 0x7d, 0xd1, 0xc2, 0x3f, 0x95, + 0xa6, 0x5a, 0x92, 0x56, 0xb3, 0xb0, 0x29, 0x7e, + 0x0c, 0xb3, 0xc9, 0x39, 0x0f, 0x1f, 0x51, 0x9d + }; + + WC_ALLOC_VAR(plain, byte, CHACHA_LEN, NULL); + WC_ALLOC_VAR(cipher, byte, CHACHA_LEN, NULL); + + XMEMSET(plain, 0xa5, CHACHA_LEN); + for (i = 0; i < (int)sizeof(key); i++) { + key[i] = (byte)i; + } + for (i = 0; i < (int)sizeof(iv); i++) { + iv[i] = (byte)(i + 0x40); + } + + for (sz = 1; sz < CHACHA_LEN; sz++) { + ExpectIntEQ(wc_Chacha_SetKey(&enc, key, (word32)sizeof(key)), 0); + ExpectIntEQ(wc_Chacha_SetIV(&enc, iv, 0), 0); + + for (cnt = 0; cnt + sz <= CHACHA_LEN; cnt += sz) { + ExpectIntEQ(wc_Chacha_Process(&enc, cipher + cnt, plain + cnt, sz), + 0); + } + if (cnt < CHACHA_LEN) { + ExpectIntEQ(wc_Chacha_Process(&enc, cipher + cnt, plain + cnt, + CHACHA_LEN - cnt), 0); + } + ExpectBufEQ(cipher, expected, (int)sizeof(expected)); + } + + 
WC_FREE_VAR(plain, NULL); + WC_FREE_VAR(cipher, NULL); +#endif + return EXPECT_RESULT(); +} /* END test_wc_Chacha_Process */ + + diff --git a/tests/api/test_chacha.h b/tests/api/test_chacha.h index d9146775e..8403d25c0 100644 --- a/tests/api/test_chacha.h +++ b/tests/api/test_chacha.h @@ -26,9 +26,11 @@ int test_wc_Chacha_SetKey(void); int test_wc_Chacha_Process(void); +int test_wc_Chacha_Process_Chunking(void); -#define TEST_CHACHA_DECLS \ - TEST_DECL_GROUP("chacha", test_wc_Chacha_SetKey), \ - TEST_DECL_GROUP("chacha", test_wc_Chacha_Process) +#define TEST_CHACHA_DECLS \ + TEST_DECL_GROUP("chacha", test_wc_Chacha_SetKey), \ + TEST_DECL_GROUP("chacha", test_wc_Chacha_Process), \ + TEST_DECL_GROUP("chacha", test_wc_Chacha_Process_Chunking) #endif /* WOLFCRYPT_TEST_CHACHA_H */ diff --git a/tests/api/test_dsa.c b/tests/api/test_dsa.c index 5599100dd..37f437f9e 100644 --- a/tests/api/test_dsa.c +++ b/tests/api/test_dsa.c @@ -117,7 +117,7 @@ int test_wc_DsaSignVerify(void) ExpectIntEQ(wc_DsaVerify(hash, signature, NULL, &answer), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wc_DsaVerify(hash, signature, &key, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); -#if !defined(HAVE_FIPS) && defined(WOLFSSL_PUBLIC_MP) +#if !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) && defined(WOLFSSL_PUBLIC_MP) /* hard set q to 0 and test fail case */ mp_free(&key.q); ExpectIntEQ(mp_init(&key.q), 0); diff --git a/tests/api/test_dtls.c b/tests/api/test_dtls.c index 78e83d02c..a1f592c70 100644 --- a/tests/api/test_dtls.c +++ b/tests/api/test_dtls.c @@ -2489,3 +2489,81 @@ int test_dtls_mtu_split_messages(void) return TEST_SKIPPED; #endif } + +/* Test DTLS 1.3 minimum retransmission interval. This test calls + * wolfSSL_dtls_got_timeout() to simulate timeouts and verify that + * retransmissions are spaced at least DTLS13_MIN_RTX_INTERVAL apart. + * This tests relies on timing of the retransmission logic so it may be + * flaky on very slow systems. 
+ */ +int test_dtls13_min_rtx_interval(void) +{ + EXPECT_DECLS; +#if defined(HAVE_MANUAL_MEMIO_TESTS_DEPENDENCIES) && \ + defined(WOLFSSL_DTLS13) && !defined(DTLS13_MIN_RTX_INTERVAL) && \ + !defined(NO_ASN_TIME) + /* We don't want to test when DTLS13_MIN_RTX_INTERVAL is defined because + * it may be too low to trigger reliably in a test. The default value is + * 1 second which is sufficient for testing here. */ + WOLFSSL_CTX *ctx_c = NULL, *ctx_s = NULL; + WOLFSSL *ssl_c = NULL, *ssl_s = NULL; + struct test_memio_ctx test_ctx; + int c_msg_count = 0; + + XMEMSET(&test_ctx, 0, sizeof(test_ctx)); + + /* Setup DTLS 1.3 contexts */ + ExpectIntEQ(test_memio_setup(&test_ctx, &ctx_c, &ctx_s, &ssl_c, &ssl_s, + wolfDTLSv1_3_client_method, wolfDTLSv1_3_server_method), 0); + + /* CH0 */ + ExpectIntEQ(wolfSSL_connect(ssl_c), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_c, -1), SSL_ERROR_WANT_READ); + + /* HRR */ + ExpectIntEQ(wolfSSL_accept(ssl_s), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_s, -1), SSL_ERROR_WANT_READ); + + /* CH1 */ + ExpectIntEQ(wolfSSL_connect(ssl_c), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_c, -1), SSL_ERROR_WANT_READ); + + /* SH ... FINISHED */ + ExpectIntEQ(wolfSSL_accept(ssl_s), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_s, -1), SSL_ERROR_WANT_READ); + + /* We should have SH ... FINISHED messages in the buffer */ + ExpectIntGE(test_ctx.c_msg_count, 2); + + /* Drop everything */ + test_memio_clear_buffer(&test_ctx, 1); + + /* First timeout. This one should trigger a retransmission */ + if (wolfSSL_dtls13_use_quick_timeout(ssl_s)) + ExpectIntEQ(wolfSSL_dtls_got_timeout(ssl_s), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_dtls_got_timeout(ssl_s), WOLFSSL_SUCCESS); + /* Save the message count to make sure no new messages are sent */ + ExpectIntGE(test_ctx.c_msg_count, 2); + c_msg_count = test_ctx.c_msg_count; + + /* Second timeout. 
This one should not trigger a retransmission */ + if (wolfSSL_dtls13_use_quick_timeout(ssl_s)) + ExpectIntEQ(wolfSSL_dtls_got_timeout(ssl_s), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_dtls_got_timeout(ssl_s), WOLFSSL_SUCCESS); + /* This is the critical check. The message count should not increase + * after the second timeout. DTLS13_MIN_RTX_INTERVAL should have blocked + * retransmission here. */ + ExpectIntEQ(c_msg_count, test_ctx.c_msg_count); + + /* Now complete the handshake. We didn't clear the first retransmission + * so the handshake should proceed without issues. */ + ExpectIntEQ(test_memio_do_handshake(ssl_c, ssl_s, 10, NULL), 0); + + /* Cleanup */ + wolfSSL_free(ssl_c); + wolfSSL_CTX_free(ctx_c); + wolfSSL_free(ssl_s); + wolfSSL_CTX_free(ctx_s); +#endif + return EXPECT_RESULT(); +} diff --git a/tests/api/test_dtls.h b/tests/api/test_dtls.h index 429536bd5..c9c5600ef 100644 --- a/tests/api/test_dtls.h +++ b/tests/api/test_dtls.h @@ -49,6 +49,7 @@ int test_dtls_memio_wolfio(void); int test_dtls_memio_wolfio_stateless(void); int test_dtls_mtu_fragment_headroom(void); int test_dtls_mtu_split_messages(void); +int test_dtls13_min_rtx_interval(void); #define TEST_DTLS_DECLS \ TEST_DECL_GROUP("dtls", test_dtls12_basic_connection_id), \ @@ -77,5 +78,6 @@ int test_dtls_mtu_split_messages(void); TEST_DECL_GROUP("dtls", test_dtls_memio_wolfio), \ TEST_DECL_GROUP("dtls", test_dtls_mtu_fragment_headroom), \ TEST_DECL_GROUP("dtls", test_dtls_mtu_split_messages), \ - TEST_DECL_GROUP("dtls", test_dtls_memio_wolfio_stateless) + TEST_DECL_GROUP("dtls", test_dtls_memio_wolfio_stateless), \ + TEST_DECL_GROUP("dtls", test_dtls13_min_rtx_interval) #endif /* TESTS_API_DTLS_H */ diff --git a/tests/api/test_ecc.c b/tests/api/test_ecc.c index 0142f0c11..69fd8b1e9 100644 --- a/tests/api/test_ecc.c +++ b/tests/api/test_ecc.c @@ -676,7 +676,8 @@ int test_wc_ecc_export_x963_ex(void) WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wc_ecc_export_x963_ex(&key, out, NULL, COMP), 
WC_NO_ERR_TRACE(BAD_FUNC_ARG)); -#if defined(HAVE_FIPS) && (!defined(FIPS_VERSION_LT) || FIPS_VERSION_LT(5,3)) +#if (defined(HAVE_FIPS) && (!defined(FIPS_VERSION_LT) || FIPS_VERSION_LT(5,3)))\ + || defined(HAVE_SELFTEST) ExpectIntEQ(wc_ecc_export_x963_ex(&key, out, &badOutLen, COMP), WC_NO_ERR_TRACE(BUFFER_E)); #else diff --git a/tests/api/test_evp.c b/tests/api/test_evp.c index 43166aab2..8c768eb87 100644 --- a/tests/api/test_evp.c +++ b/tests/api/test_evp.c @@ -26,75 +26,546 @@ #include #include -/* Test for NULL_CIPHER_TYPE in wolfSSL_EVP_CipherUpdate() */ -int test_wolfSSL_EVP_CipherUpdate_Null(void) +/* Test functions for base64 encode/decode */ +int test_wolfSSL_EVP_ENCODE_CTX_new(void) { EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - WOLFSSL_EVP_CIPHER_CTX* ctx; - const char* testData = "Test NULL cipher data"; - unsigned char output[100]; - int outputLen = 0; - int testDataLen = (int)XSTRLEN(testData); +#if defined(OPENSSL_EXTRA) && \ +( defined(WOLFSSL_BASE64_ENCODE) || defined(WOLFSSL_BASE64_DECODE)) + EVP_ENCODE_CTX* ctx = NULL; - /* Create and initialize the cipher context */ - ctx = wolfSSL_EVP_CIPHER_CTX_new(); - ExpectNotNull(ctx); - - /* Initialize with NULL cipher */ - ExpectIntEQ(wolfSSL_EVP_CipherInit_ex(ctx, wolfSSL_EVP_enc_null(), - NULL, NULL, NULL, 1), WOLFSSL_SUCCESS); - - /* Test encryption (which should just copy the data) */ - ExpectIntEQ(wolfSSL_EVP_CipherUpdate(ctx, output, &outputLen, - (const unsigned char*)testData, - testDataLen), WOLFSSL_SUCCESS); - - /* Verify output length matches input length */ - ExpectIntEQ(outputLen, testDataLen); - - /* Verify output data matches input data (no encryption occurred) */ - ExpectIntEQ(XMEMCMP(output, testData, testDataLen), 0); - - /* Clean up */ - wolfSSL_EVP_CIPHER_CTX_free(ctx); -#endif /* OPENSSL_EXTRA */ + ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); + ExpectIntEQ(ctx->remaining,0); + ExpectIntEQ(ctx->data[0],0); + ExpectIntEQ(ctx->data[sizeof(ctx->data) -1],0); + EVP_ENCODE_CTX_free(ctx); +#endif /* 
OPENSSL_EXTRA && (WOLFSSL_BASE64_ENCODE || WOLFSSL_BASE64_DECODE) */ + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_ENCODE_CTX_free(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && \ +( defined(WOLFSSL_BASE64_ENCODE) || defined(WOLFSSL_BASE64_DECODE)) + EVP_ENCODE_CTX* ctx = NULL; + ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); + EVP_ENCODE_CTX_free(ctx); +#endif /* OPENSSL_EXTRA && (WOLFSSL_BASE64_ENCODE || WOLFSSL_BASE64_DECODE) */ return EXPECT_RESULT(); } -/* Test for wolfSSL_EVP_CIPHER_type_string() */ -int test_wolfSSL_EVP_CIPHER_type_string(void) +int test_wolfSSL_EVP_EncodeInit(void) { EXPECT_DECLS; -#ifdef OPENSSL_EXTRA - const char* cipherStr; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) + EVP_ENCODE_CTX* ctx = NULL; - /* Test with valid cipher types */ -#ifdef HAVE_AES_CBC - #ifdef WOLFSSL_AES_128 - cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_AES_128_CBC_TYPE); - ExpectNotNull(cipherStr); - ExpectStrEQ(cipherStr, "AES-128-CBC"); - #endif -#endif + ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); + ExpectIntEQ(ctx->remaining, 0); + ExpectIntEQ(ctx->data[0], 0); + ExpectIntEQ(ctx->data[sizeof(ctx->data) -1], 0); -#ifndef NO_DES3 - cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_DES_CBC_TYPE); - ExpectNotNull(cipherStr); - ExpectStrEQ(cipherStr, "DES-CBC"); -#endif + if (ctx != NULL) { + /* make ctx dirty */ + ctx->remaining = 10; + XMEMSET(ctx->data, 0x77, sizeof(ctx->data)); + } - /* Test with NULL cipher type */ - cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_NULL_CIPHER_TYPE); - ExpectNotNull(cipherStr); - ExpectStrEQ(cipherStr, "NULL"); + EVP_EncodeInit(ctx); - /* Test with invalid cipher type */ - cipherStr = wolfSSL_EVP_CIPHER_type_string(0xFFFF); - ExpectNull(cipherStr); -#endif /* OPENSSL_EXTRA */ + ExpectIntEQ(ctx->remaining, 0); + ExpectIntEQ(ctx->data[0], 0); + ExpectIntEQ(ctx->data[sizeof(ctx->data) -1], 0); + EVP_ENCODE_CTX_free(ctx); +#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ return EXPECT_RESULT(); } +int 
test_wolfSSL_EVP_EncodeUpdate(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) + int outl; + int total; + + const unsigned char plain0[] = {"Th"}; + const unsigned char plain1[] = {"This is a base64 encodeing test."}; + const unsigned char plain2[] = {"This is additional data."}; + + const unsigned char encBlock0[] = {"VGg="}; + const unsigned char enc0[] = {"VGg=\n"}; + /* expected encoded result for the first output 64 chars plus trailing LF*/ + const unsigned char enc1[] = { + "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVpbmcgdGVzdC5UaGlzIGlzIGFkZGl0aW9u\n" + }; + + const unsigned char enc2[] = { + "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVpbmcgdGVzdC5UaGlzIGlzIGFkZGl0aW9u\n" + "YWwgZGF0YS4=\n" + }; + + unsigned char encOutBuff[300]; + + EVP_ENCODE_CTX* ctx = NULL; + + ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); + + EVP_EncodeInit(ctx); + + /* illegal parameter test */ + ExpectIntEQ( + EVP_EncodeUpdate( + NULL, /* pass NULL as ctx */ + encOutBuff, + &outl, + plain1, + sizeof(plain1)-1), + 0 /* expected result code 0: fail */ + ); + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + NULL, /* pass NULL as out buff */ + &outl, + plain1, + sizeof(plain1)-1), + 0 /* expected result code 0: fail */ + ); + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff, + NULL, /* pass NULL as outl */ + plain1, + sizeof(plain1)-1), + 0 /* expected result code 0: fail */ + ); + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff, + &outl, + NULL, /* pass NULL as in */ + sizeof(plain1)-1), + 0 /* expected result code 0: fail */ + ); + + ExpectIntEQ(EVP_EncodeBlock(NULL, NULL, 0), -1); + + /* meaningless parameter test */ + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff, + &outl, + plain1, + 0), /* pass zero input */ + 1 /* expected result code 1: success */ + ); + + /* very small data encoding test */ + + EVP_EncodeInit(ctx); + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff, + &outl, + plain0, + sizeof(plain0)-1), + 1 /* expected result code 1: success 
*/ + ); + ExpectIntEQ(outl,0); + + if (EXPECT_SUCCESS()) { + EVP_EncodeFinal( + ctx, + encOutBuff + outl, + &outl); + } + + ExpectIntEQ( outl, sizeof(enc0)-1); + ExpectIntEQ( + XSTRNCMP( + (const char*)encOutBuff, + (const char*)enc0,sizeof(enc0) ), + 0); + + XMEMSET( encOutBuff,0, sizeof(encOutBuff)); + ExpectIntEQ(EVP_EncodeBlock(encOutBuff, plain0, sizeof(plain0)-1), + sizeof(encBlock0)-1); + ExpectStrEQ(encOutBuff, encBlock0); + + /* pass small size( < 48bytes ) input, then make sure they are not + * encoded and just stored in ctx + */ + + EVP_EncodeInit(ctx); + + total = 0; + outl = 0; + XMEMSET( encOutBuff,0, sizeof(encOutBuff)); + + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff, /* buffer for output */ + &outl, /* size of output */ + plain1, /* input */ + sizeof(plain1)-1), /* size of input */ + 1); /* expected result code 1:success */ + + total += outl; + + ExpectIntEQ(outl, 0); /* no output expected */ + ExpectIntEQ(ctx->remaining, sizeof(plain1) -1); + ExpectTrue( + XSTRNCMP((const char*)(ctx->data), + (const char*)plain1, + ctx->remaining) ==0 ); + ExpectTrue(encOutBuff[0] == 0); + + /* call wolfSSL_EVP_EncodeUpdate again to make it encode + * the stored data and the new input together + */ + ExpectIntEQ( + EVP_EncodeUpdate( + ctx, + encOutBuff + outl, /* buffer for output */ + &outl, /* size of output */ + plain2, /* additional input */ + sizeof(plain2) -1), /* size of additional input */ + 1); /* expected result code 1:success */ + + total += outl; + + ExpectIntNE(outl, 0); /* some output is expected this time*/ + ExpectIntEQ(outl, BASE64_ENCODE_RESULT_BLOCK_SIZE +1); /* 64 bytes and LF */ + ExpectIntEQ( + XSTRNCMP((const char*)encOutBuff,(const char*)enc1,sizeof(enc1) ),0); + + /* call wolfSSL_EVP_EncodeFinal to flush all the unprocessed input */ + EVP_EncodeFinal( + ctx, + encOutBuff + outl, + &outl); + + total += outl; + + ExpectIntNE(total,0); + ExpectIntNE(outl,0); + ExpectIntEQ(XSTRNCMP( + (const char*)encOutBuff,(const 
char*)enc2,sizeof(enc2) ),0); + + /* test with illeagal parameters */ + outl = 1; + EVP_EncodeFinal(NULL, encOutBuff + outl, &outl); + ExpectIntEQ(outl, 0); + outl = 1; + EVP_EncodeFinal(ctx, NULL, &outl); + ExpectIntEQ(outl, 0); + EVP_EncodeFinal(ctx, encOutBuff + outl, NULL); + EVP_EncodeFinal(NULL, NULL, NULL); + + EVP_ENCODE_CTX_free(ctx); +#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_EncodeFinal(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_ENCODE) + /* tests for wolfSSL_EVP_EncodeFinal are included in + * test_wolfSSL_EVP_EncodeUpdate + */ + res = TEST_SUCCESS; +#endif /* OPENSSL_EXTRA && WOLFSSL_BASE64_ENCODE*/ + return res; +} + + +int test_wolfSSL_EVP_DecodeInit(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) + EVP_ENCODE_CTX* ctx = NULL; + + ExpectNotNull( ctx = EVP_ENCODE_CTX_new()); + ExpectIntEQ( ctx->remaining,0); + ExpectIntEQ( ctx->data[0],0); + ExpectIntEQ( ctx->data[sizeof(ctx->data) -1],0); + + if (ctx != NULL) { + /* make ctx dirty */ + ctx->remaining = 10; + XMEMSET( ctx->data, 0x77, sizeof(ctx->data)); + } + + EVP_DecodeInit(ctx); + + ExpectIntEQ( ctx->remaining,0); + ExpectIntEQ( ctx->data[0],0); + ExpectIntEQ( ctx->data[sizeof(ctx->data) -1],0); + + EVP_ENCODE_CTX_free(ctx); +#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_DecodeUpdate(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) + int outl; + unsigned char decOutBuff[300]; + + EVP_ENCODE_CTX* ctx = NULL; + + static const unsigned char enc1[] = + {"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg==\n"}; +/* const unsigned char plain1[] = + {"This is a base64 decoding test."} */ + + ExpectNotNull(ctx = EVP_ENCODE_CTX_new()); + + EVP_DecodeInit(ctx); + + /* illegal parameter tests */ + + /* pass NULL as ctx */ + ExpectIntEQ( + EVP_DecodeUpdate( + NULL, /* pass NULL as ctx */ 
+ decOutBuff, + &outl, + enc1, + sizeof(enc1)-1), + -1 /* expected result code -1: fail */ + ); + ExpectIntEQ( outl, 0); + + /* pass NULL as output */ + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + NULL, /* pass NULL as out buff */ + &outl, + enc1, + sizeof(enc1)-1), + -1 /* expected result code -1: fail */ + ); + ExpectIntEQ( outl, 0); + + /* pass NULL as outl */ + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + NULL, /* pass NULL as outl */ + enc1, + sizeof(enc1)-1), + -1 /* expected result code -1: fail */ + ); + + /* pass NULL as input */ + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + NULL, /* pass NULL as in */ + sizeof(enc1)-1), + -1 /* expected result code -1: fail */ + ); + ExpectIntEQ( outl, 0); + + ExpectIntEQ(EVP_DecodeBlock(NULL, NULL, 0), -1); + + /* pass zero length input */ + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + enc1, + 0), /* pass zero as input len */ + 1 /* expected result code 1: success */ + ); + + /* decode correct base64 string */ + + { + static const unsigned char enc2[] = + {"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg==\n"}; + static const unsigned char plain2[] = + {"This is a base64 decoding test."}; + + EVP_EncodeInit(ctx); + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + enc2, + sizeof(enc2)-1), + 0 /* expected result code 0: success */ + ); + + ExpectIntEQ(outl,sizeof(plain2) -1); + + ExpectIntEQ( + EVP_DecodeFinal( + ctx, + decOutBuff + outl, + &outl), + 1 /* expected result code 1: success */ + ); + ExpectIntEQ(outl, 0); /* expected DecodeFinal output no data */ + + ExpectIntEQ(XSTRNCMP( (const char*)plain2,(const char*)decOutBuff, + sizeof(plain2) -1 ),0); + ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc2, sizeof(enc2)), + sizeof(plain2)-1); + ExpectIntEQ(XSTRNCMP( (const char*)plain2,(const char*)decOutBuff, + sizeof(plain2) -1 ),0); + } + + /* decode correct base64 string which does not have '\n' in its last*/ + + { + static const unsigned char enc3[] = + 
{"VGhpcyBpcyBhIGJhc2U2NCBkZWNvZGluZyB0ZXN0Lg=="}; /* 44 chars */ + static const unsigned char plain3[] = + {"This is a base64 decoding test."}; /* 31 chars */ + + EVP_EncodeInit(ctx); + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + enc3, + sizeof(enc3)-1), + 0 /* expected result code 0: success */ + ); + + ExpectIntEQ(outl,sizeof(plain3)-1); /* 31 chars should be output */ + + ExpectIntEQ(XSTRNCMP( (const char*)plain3,(const char*)decOutBuff, + sizeof(plain3) -1 ),0); + + ExpectIntEQ( + EVP_DecodeFinal( + ctx, + decOutBuff + outl, + &outl), + 1 /* expected result code 1: success */ + ); + + ExpectIntEQ(outl,0 ); + + ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc3, sizeof(enc3)-1), + sizeof(plain3)-1); + ExpectIntEQ(XSTRNCMP( (const char*)plain3,(const char*)decOutBuff, + sizeof(plain3) -1 ),0); + } + + /* decode string which has a padding char ('=') in the illegal position*/ + + { + static const unsigned char enc4[] = + {"VGhpcyBpcyBhIGJhc2U2N=CBkZWNvZGluZyB0ZXN0Lg==\n"}; + + EVP_EncodeInit(ctx); + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + enc4, + sizeof(enc4)-1), + -1 /* expected result code -1: error */ + ); + ExpectIntEQ(outl,0); + ExpectIntEQ(EVP_DecodeBlock(decOutBuff, enc4, sizeof(enc4)-1), -1); + } + + /* small data decode test */ + + { + static const unsigned char enc00[] = {"VG"}; + static const unsigned char enc01[] = {"g=\n"}; + static const unsigned char plain4[] = {"Th"}; + + EVP_EncodeInit(ctx); + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff, + &outl, + enc00, + sizeof(enc00)-1), + 1 /* expected result code 1: success */ + ); + ExpectIntEQ(outl,0); + + ExpectIntEQ( + EVP_DecodeUpdate( + ctx, + decOutBuff + outl, + &outl, + enc01, + sizeof(enc01)-1), + 0 /* expected result code 0: success */ + ); + + ExpectIntEQ(outl,sizeof(plain4)-1); + + /* test with illegal parameters */ + ExpectIntEQ(EVP_DecodeFinal(NULL,decOutBuff + outl,&outl), -1); + ExpectIntEQ(EVP_DecodeFinal(ctx,NULL,&outl), -1); + 
ExpectIntEQ(EVP_DecodeFinal(ctx,decOutBuff + outl, NULL), -1); + ExpectIntEQ(EVP_DecodeFinal(NULL,NULL, NULL), -1); + + if (EXPECT_SUCCESS()) { + EVP_DecodeFinal( + ctx, + decOutBuff + outl, + &outl); + } + + ExpectIntEQ( outl, 0); + ExpectIntEQ( + XSTRNCMP( + (const char*)decOutBuff, + (const char*)plain4,sizeof(plain4)-1 ), + 0); + } + + EVP_ENCODE_CTX_free(ctx); +#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_DecodeFinal(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_BASE64_DECODE) + /* tests for wolfSSL_EVP_DecodeFinal are included in + * test_wolfSSL_EVP_DecodeUpdate + */ + res = TEST_SUCCESS; +#endif /* OPENSSL && WOLFSSL_BASE_DECODE */ + return res; +} + diff --git a/tests/api/test_evp.h b/tests/api/test_evp.h index 013ac50aa..a24609903 100644 --- a/tests/api/test_evp.h +++ b/tests/api/test_evp.h @@ -22,7 +22,25 @@ #ifndef WOLFSSL_TEST_EVP_H #define WOLFSSL_TEST_EVP_H -int test_wolfSSL_EVP_CipherUpdate_Null(void); -int test_wolfSSL_EVP_CIPHER_type_string(void); +#include + +int test_wolfSSL_EVP_ENCODE_CTX_new(void); +int test_wolfSSL_EVP_ENCODE_CTX_free(void); +int test_wolfSSL_EVP_EncodeInit(void); +int test_wolfSSL_EVP_EncodeUpdate(void); +int test_wolfSSL_EVP_EncodeFinal(void); +int test_wolfSSL_EVP_DecodeInit(void); +int test_wolfSSL_EVP_DecodeUpdate(void); +int test_wolfSSL_EVP_DecodeFinal(void); + +#define TEST_EVP_ENC_DECLS \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_ENCODE_CTX_new), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_ENCODE_CTX_free), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_EncodeInit), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_EncodeUpdate), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_EncodeFinal), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_DecodeInit), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_DecodeUpdate), \ + TEST_DECL_GROUP("evp_enc", test_wolfSSL_EVP_DecodeFinal) #endif /* WOLFSSL_TEST_EVP_H */ diff --git 
a/tests/api/test_evp_cipher.c b/tests/api/test_evp_cipher.c new file mode 100644 index 000000000..99fae5a65 --- /dev/null +++ b/tests/api/test_evp_cipher.c @@ -0,0 +1,2704 @@ +/* test_evp_cipher.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include +#if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION == 2) + #include +#endif + + +int test_wolfSSL_EVP_CIPHER_CTX(void) +{ + EXPECT_DECLS; +#if !defined(NO_AES) && defined(HAVE_AES_CBC) && defined(WOLFSSL_AES_128) && \ + defined(OPENSSL_EXTRA) + EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new(); + const EVP_CIPHER *init = EVP_aes_128_cbc(); + const EVP_CIPHER *test; + byte key[AES_BLOCK_SIZE] = {0}; + byte iv[AES_BLOCK_SIZE] = {0}; + + ExpectNotNull(ctx); + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + test = EVP_CIPHER_CTX_cipher(ctx); + ExpectTrue(init == test); + ExpectIntEQ(EVP_CIPHER_nid(test), NID_aes_128_cbc); + + ExpectIntEQ(EVP_CIPHER_CTX_reset(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_CIPHER_CTX_reset(NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + EVP_CIPHER_CTX_free(ctx); + /* test 
EVP_CIPHER_CTX_cleanup with NULL */ + ExpectIntEQ(EVP_CIPHER_CTX_cleanup(NULL), WOLFSSL_SUCCESS); +#endif /* !NO_AES && HAVE_AES_CBC && WOLFSSL_AES_128 && OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_CIPHER_CTX_iv_length(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + /* This is large enough to be used for all key sizes */ + byte key[AES_256_KEY_SIZE] = {0}; + byte iv[AES_BLOCK_SIZE] = {0}; + int i; + int nids[] = { + #ifdef HAVE_AES_CBC + NID_aes_128_cbc, + #endif + #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + NID_aes_128_gcm, + #endif + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + NID_aes_128_ctr, + #endif + #ifndef NO_DES3 + NID_des_cbc, + NID_des_ede3_cbc, + #endif + }; + int iv_lengths[] = { + #ifdef HAVE_AES_CBC + AES_BLOCK_SIZE, + #endif + #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + GCM_NONCE_MID_SZ, + #endif + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + AES_BLOCK_SIZE, + #endif + #ifndef NO_DES3 + DES_BLOCK_SIZE, + DES_BLOCK_SIZE, + #endif + }; + int nidsLen = (sizeof(nids)/sizeof(int)); + + for (i = 0; i < nidsLen; i++) { + const EVP_CIPHER* init = wolfSSL_EVP_get_cipherbynid(nids[i]); + EVP_CIPHER_CTX* ctx = EVP_CIPHER_CTX_new(); + wolfSSL_EVP_CIPHER_CTX_init(ctx); + + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_iv_length(ctx), iv_lengths[i]); + + EVP_CIPHER_CTX_free(ctx); + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_CIPHER_CTX_key_length(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + byte key[AES_256_KEY_SIZE] = {0}; + byte iv[AES_BLOCK_SIZE] = {0}; + int i; + int nids[] = { + #ifdef HAVE_AES_CBC + NID_aes_128_cbc, + #ifdef WOLFSSL_AES_256 + NID_aes_256_cbc, 
+ #endif + #endif + #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + NID_aes_128_gcm, + #ifdef WOLFSSL_AES_256 + NID_aes_256_gcm, + #endif + #endif + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + NID_aes_128_ctr, + #ifdef WOLFSSL_AES_256 + NID_aes_256_ctr, + #endif + #endif + #ifndef NO_DES3 + NID_des_cbc, + NID_des_ede3_cbc, + #endif + }; + int key_lengths[] = { + #ifdef HAVE_AES_CBC + AES_128_KEY_SIZE, + #ifdef WOLFSSL_AES_256 + AES_256_KEY_SIZE, + #endif + #endif + #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + AES_128_KEY_SIZE, + #ifdef WOLFSSL_AES_256 + AES_256_KEY_SIZE, + #endif + #endif + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + AES_128_KEY_SIZE, + #ifdef WOLFSSL_AES_256 + AES_256_KEY_SIZE, + #endif + #endif + #ifndef NO_DES3 + DES_KEY_SIZE, + DES3_KEY_SIZE, + #endif + }; + int nidsLen = (sizeof(nids)/sizeof(int)); + + for (i = 0; i < nidsLen; i++) { + const EVP_CIPHER *init = wolfSSL_EVP_get_cipherbynid(nids[i]); + EVP_CIPHER_CTX* ctx = EVP_CIPHER_CTX_new(); + wolfSSL_EVP_CIPHER_CTX_init(ctx); + + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_key_length(ctx), key_lengths[i]); + + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_key_length(ctx, key_lengths[i]), + WOLFSSL_SUCCESS); + + EVP_CIPHER_CTX_free(ctx); + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_CIPHER_CTX_set_iv(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESGCM) && !defined(NO_DES3) && defined(OPENSSL_ALL) + int ivLen, keyLen; + EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new(); +#ifdef HAVE_AESGCM + byte key[AES_128_KEY_SIZE] = {0}; + byte iv[AES_BLOCK_SIZE] = {0}; + const EVP_CIPHER *init = EVP_aes_128_gcm(); +#else + byte key[DES3_KEY_SIZE] = 
{0}; + byte iv[DES_BLOCK_SIZE] = {0}; + const EVP_CIPHER *init = EVP_des_ede3_cbc(); +#endif + + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + + ivLen = wolfSSL_EVP_CIPHER_CTX_iv_length(ctx); + keyLen = wolfSSL_EVP_CIPHER_CTX_key_length(ctx); + + /* Bad cases */ + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(NULL, iv, ivLen), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, NULL, ivLen), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, 0), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(NULL, NULL, 0), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, keyLen), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Good case */ + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_set_iv(ctx, iv, ivLen), 1); + + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_get_cipherbynid(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA +#ifndef NO_AES + const WOLFSSL_EVP_CIPHER* c; + + c = wolfSSL_EVP_get_cipherbynid(419); + #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ + defined(WOLFSSL_AES_128) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_128_CBC", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(423); + #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ + defined(WOLFSSL_AES_192) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_192_CBC", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(427); + #if (defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)) && \ + defined(WOLFSSL_AES_256) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_256_CBC", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(904); + #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_128) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_128_CTR", c)); + #else + 
ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(905); + #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_192) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_192_CTR", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(906); + #if defined(WOLFSSL_AES_COUNTER) && defined(WOLFSSL_AES_256) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_256_CTR", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(418); + #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_128) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_128_ECB", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(422); + #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_192) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_192_ECB", c)); + #else + ExpectNull(c); + #endif + + c = wolfSSL_EVP_get_cipherbynid(426); + #if defined(HAVE_AES_ECB) && defined(WOLFSSL_AES_256) + ExpectNotNull(c); + ExpectNotNull(XSTRCMP("EVP_AES_256_ECB", c)); + #else + ExpectNull(c); + #endif +#endif /* !NO_AES */ + +#ifndef NO_DES3 + ExpectNotNull(XSTRCMP("EVP_DES_CBC", wolfSSL_EVP_get_cipherbynid(31))); +#ifdef WOLFSSL_DES_ECB + ExpectNotNull(XSTRCMP("EVP_DES_ECB", wolfSSL_EVP_get_cipherbynid(29))); +#endif + ExpectNotNull(XSTRCMP("EVP_DES_EDE3_CBC", wolfSSL_EVP_get_cipherbynid(44))); +#ifdef WOLFSSL_DES_ECB + ExpectNotNull(XSTRCMP("EVP_DES_EDE3_ECB", wolfSSL_EVP_get_cipherbynid(33))); +#endif +#endif /* !NO_DES3 */ + +#if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) + ExpectNotNull(XSTRCMP("EVP_CHACHA20_POLY13O5", EVP_get_cipherbynid(1018))); +#endif + + /* test for nid is out of range */ + ExpectNull(wolfSSL_EVP_get_cipherbynid(1)); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_CIPHER_block_size(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL +#ifdef HAVE_AES_CBC + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_cbc()), AES_BLOCK_SIZE); + #endif + #ifdef WOLFSSL_AES_192 + 
ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_cbc()), AES_BLOCK_SIZE); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_cbc()), AES_BLOCK_SIZE); + #endif +#endif + +#ifdef HAVE_AESGCM + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_gcm()), 1); + #endif + #ifdef WOLFSSL_AES_192 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_gcm()), 1); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_gcm()), 1); + #endif +#endif + +#ifdef HAVE_AESCCM + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ccm()), 1); + #endif + #ifdef WOLFSSL_AES_192 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ccm()), 1); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ccm()), 1); + #endif +#endif + +#ifdef WOLFSSL_AES_COUNTER + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ctr()), 1); + #endif + #ifdef WOLFSSL_AES_192 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ctr()), 1); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ctr()), 1); + #endif +#endif + +#ifdef HAVE_AES_ECB + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ecb()), AES_BLOCK_SIZE); + #endif + #ifdef WOLFSSL_AES_192 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ecb()), AES_BLOCK_SIZE); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ecb()), AES_BLOCK_SIZE); + #endif +#endif + +#ifdef WOLFSSL_AES_OFB + #ifdef WOLFSSL_AES_128 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_128_ofb()), 1); + #endif + #ifdef WOLFSSL_AES_192 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_192_ofb()), 1); + #endif + #ifdef WOLFSSL_AES_256 + ExpectIntEQ(EVP_CIPHER_block_size(EVP_aes_256_ofb()), 1); + #endif +#endif + +#ifndef NO_RC4 + ExpectIntEQ(EVP_CIPHER_block_size(wolfSSL_EVP_rc4()), 1); +#endif + +#if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) + 
ExpectIntEQ(EVP_CIPHER_block_size(wolfSSL_EVP_chacha20_poly1305()), 1); +#endif + +#ifdef WOLFSSL_SM4_ECB + ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ecb()), SM4_BLOCK_SIZE); +#endif +#ifdef WOLFSSL_SM4_CBC + ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_cbc()), SM4_BLOCK_SIZE); +#endif +#ifdef WOLFSSL_SM4_CTR + ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ctr()), 1); +#endif +#ifdef WOLFSSL_SM4_GCM + ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_gcm()), 1); +#endif +#ifdef WOLFSSL_SM4_CCM + ExpectIntEQ(EVP_CIPHER_block_size(EVP_sm4_ccm()), 1); +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_CIPHER_iv_length(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + int nids[] = { + #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) + #ifdef WOLFSSL_AES_128 + NID_aes_128_cbc, + #endif + #ifdef WOLFSSL_AES_192 + NID_aes_192_cbc, + #endif + #ifdef WOLFSSL_AES_256 + NID_aes_256_cbc, + #endif + #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ + #if (!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + #ifdef WOLFSSL_AES_128 + NID_aes_128_gcm, + #endif + #ifdef WOLFSSL_AES_192 + NID_aes_192_gcm, + #endif + #ifdef WOLFSSL_AES_256 + NID_aes_256_gcm, + #endif + #endif /* HAVE_AESGCM */ + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + #ifdef WOLFSSL_AES_128 + NID_aes_128_ctr, + #endif + #ifdef WOLFSSL_AES_192 + NID_aes_192_ctr, + #endif + #ifdef WOLFSSL_AES_256 + NID_aes_256_ctr, + #endif + #endif + #ifndef NO_DES3 + NID_des_cbc, + NID_des_ede3_cbc, + #endif + #if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) + NID_chacha20_poly1305, + #endif + }; + int iv_lengths[] = { + #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) + #ifdef WOLFSSL_AES_128 + AES_BLOCK_SIZE, + #endif + #ifdef WOLFSSL_AES_192 + AES_BLOCK_SIZE, + #endif + #ifdef WOLFSSL_AES_256 + AES_BLOCK_SIZE, + #endif + #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ + #if 
(!defined(HAVE_FIPS) && !defined(HAVE_SELFTEST)) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2)) + #ifdef HAVE_AESGCM + #ifdef WOLFSSL_AES_128 + GCM_NONCE_MID_SZ, + #endif + #ifdef WOLFSSL_AES_192 + GCM_NONCE_MID_SZ, + #endif + #ifdef WOLFSSL_AES_256 + GCM_NONCE_MID_SZ, + #endif + #endif /* HAVE_AESGCM */ + #endif /* (HAVE_FIPS && !HAVE_SELFTEST) || HAVE_FIPS_VERSION > 2 */ + #ifdef WOLFSSL_AES_COUNTER + #ifdef WOLFSSL_AES_128 + AES_BLOCK_SIZE, + #endif + #ifdef WOLFSSL_AES_192 + AES_BLOCK_SIZE, + #endif + #ifdef WOLFSSL_AES_256 + AES_BLOCK_SIZE, + #endif + #endif + #ifndef NO_DES3 + DES_BLOCK_SIZE, + DES_BLOCK_SIZE, + #endif + #if defined(HAVE_CHACHA) && defined(HAVE_POLY1305) + CHACHA20_POLY1305_AEAD_IV_SIZE, + #endif + }; + int i; + int nidsLen = (sizeof(nids)/sizeof(int)); + + for (i = 0; i < nidsLen; i++) { + const EVP_CIPHER *c = EVP_get_cipherbynid(nids[i]); + ExpectIntEQ(EVP_CIPHER_iv_length(c), iv_lengths[i]); + } +#endif + return EXPECT_RESULT(); +} + +/* Test for NULL_CIPHER_TYPE in wolfSSL_EVP_CipherUpdate() */ +int test_wolfSSL_EVP_CipherUpdate_Null(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + WOLFSSL_EVP_CIPHER_CTX* ctx; + const char* testData = "Test NULL cipher data"; + unsigned char output[100]; + int outputLen = 0; + int testDataLen = (int)XSTRLEN(testData); + + /* Create and initialize the cipher context */ + ctx = wolfSSL_EVP_CIPHER_CTX_new(); + ExpectNotNull(ctx); + + /* Initialize with NULL cipher */ + ExpectIntEQ(wolfSSL_EVP_CipherInit_ex(ctx, wolfSSL_EVP_enc_null(), + NULL, NULL, NULL, 1), WOLFSSL_SUCCESS); + + /* Test encryption (which should just copy the data) */ + ExpectIntEQ(wolfSSL_EVP_CipherUpdate(ctx, output, &outputLen, + (const unsigned char*)testData, + testDataLen), WOLFSSL_SUCCESS); + + /* Verify output length matches input length */ + ExpectIntEQ(outputLen, testDataLen); + + /* Verify output data matches input data (no encryption occurred) */ + ExpectIntEQ(XMEMCMP(output, testData, testDataLen), 0); + + /* 
Clean up */ + wolfSSL_EVP_CIPHER_CTX_free(ctx); +#endif /* OPENSSL_EXTRA */ + + return EXPECT_RESULT(); +} + +/* Test for wolfSSL_EVP_CIPHER_type_string() */ +int test_wolfSSL_EVP_CIPHER_type_string(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + const char* cipherStr; + + /* Test with valid cipher types */ +#ifdef HAVE_AES_CBC + #ifdef WOLFSSL_AES_128 + cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_AES_128_CBC_TYPE); + ExpectNotNull(cipherStr); + ExpectStrEQ(cipherStr, "AES-128-CBC"); + #endif +#endif + +#ifndef NO_DES3 + cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_DES_CBC_TYPE); + ExpectNotNull(cipherStr); + ExpectStrEQ(cipherStr, "DES-CBC"); +#endif + + /* Test with NULL cipher type */ + cipherStr = wolfSSL_EVP_CIPHER_type_string(WC_NULL_CIPHER_TYPE); + ExpectNotNull(cipherStr); + ExpectStrEQ(cipherStr, "NULL"); + + /* Test with invalid cipher type */ + cipherStr = wolfSSL_EVP_CIPHER_type_string(0xFFFF); + ExpectNull(cipherStr); +#endif /* OPENSSL_EXTRA */ + + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_BytesToKey(void) +{ + EXPECT_DECLS; +#if !defined(NO_AES) && defined(HAVE_AES_CBC) && defined(OPENSSL_ALL) + byte key[AES_BLOCK_SIZE] = {0}; + byte iv[AES_BLOCK_SIZE] = {0}; + int count = 0; + const EVP_MD* md = EVP_sha256(); + const EVP_CIPHER *type; + const unsigned char *salt = (unsigned char *)"salt1234"; + int sz = 5; + const byte data[] = { + 0x48,0x65,0x6c,0x6c,0x6f,0x20,0x57,0x6f, + 0x72,0x6c,0x64 + }; + + type = wolfSSL_EVP_get_cipherbynid(NID_aes_128_cbc); + + /* Bad cases */ + ExpectIntEQ(EVP_BytesToKey(NULL, md, salt, data, sz, count, key, iv), + 0); + ExpectIntEQ(EVP_BytesToKey(type, md, salt, NULL, sz, count, key, iv), + 16); + md = "2"; + ExpectIntEQ(EVP_BytesToKey(type, md, salt, data, sz, count, key, iv), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Good case */ + md = EVP_sha256(); + ExpectIntEQ(EVP_BytesToKey(type, md, salt, data, sz, count, key, iv), + 16); +#endif + return EXPECT_RESULT(); +} + +#if (defined(OPENSSL_EXTRA) || 
defined(OPENSSL_ALL)) &&\ + (!defined(NO_AES) && defined(HAVE_AES_CBC) && defined(WOLFSSL_AES_128)) +static void binary_dump(void *ptr, int size) +{ + #ifdef WOLFSSL_EVP_PRINT + int i = 0; + unsigned char *p = (unsigned char *) ptr; + + fprintf(stderr, "{"); + while ((p != NULL) && (i < size)) { + if ((i % 8) == 0) { + fprintf(stderr, "\n"); + fprintf(stderr, " "); + } + fprintf(stderr, "0x%02x, ", p[i]); + i++; + } + fprintf(stderr, "\n};\n"); + #else + (void) ptr; + (void) size; + #endif +} + +static int last_val = 0x0f; + +static int check_result(unsigned char *data, int len) +{ + int i; + + for ( ; len; ) { + last_val = (last_val + 1) % 16; + for (i = 0; i < 16; len--, i++, data++) + if (*data != last_val) { + return -1; + } + } + return 0; +} + +static int r_offset; +static int w_offset; + +static void init_offset(void) +{ + r_offset = 0; + w_offset = 0; +} + +static void get_record(unsigned char *data, unsigned char *buf, int len) +{ + XMEMCPY(buf, data+r_offset, len); + r_offset += len; +} + +static void set_record(unsigned char *data, unsigned char *buf, int len) +{ + XMEMCPY(data+w_offset, buf, len); + w_offset += len; +} + +static void set_plain(unsigned char *plain, int rec) +{ + int i, j; + unsigned char *p = plain; + + #define BLOCKSZ 16 + + for (i=0; i<(rec/BLOCKSZ); i++) { + for (j=0; j 0 && keylen != klen) { + ExpectIntNE(EVP_CIPHER_CTX_set_key_length(evp, keylen), 0); + } + ilen = EVP_CIPHER_CTX_iv_length(evp); + if (ilen > 0 && ivlen != ilen) { + ExpectIntNE(EVP_CIPHER_CTX_set_iv_length(evp, ivlen), 0); + } + + ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 1)), 0); + + for (j = 0; j 0) + set_record(cipher, outb, outl); + } + + for (i = 0; test_drive[i]; i++) { + last_val = 0x0f; + + ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 0)), 0); + + init_offset(); + + for (j = 0; test_drive[i][j]; j++) { + inl = test_drive[i][j]; + get_record(cipher, inb, inl); + + ExpectIntNE((ret = EVP_DecryptUpdate(evp, outb, &outl, inb, inl)), + 0); + + 
binary_dump(outb, outl); + ExpectIntEQ((ret = check_result(outb, outl)), 0); + ExpectFalse(outl > ((inl/16+1)*16) && outl > 16); + } + + ret = EVP_CipherFinal(evp, outb, &outl); + + binary_dump(outb, outl); + + ret = (((test_drive_len[i] % 16) != 0) && (ret == 0)) || + (((test_drive_len[i] % 16) == 0) && (ret == 1)); + ExpectTrue(ret); + } + + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(evp), WOLFSSL_SUCCESS); + + EVP_CIPHER_CTX_free(evp); + evp = NULL; + + /* Do an extra test to verify correct behavior with empty input. */ + + ExpectNotNull(evp = EVP_CIPHER_CTX_new()); + ExpectIntNE((ret = EVP_CipherInit(evp, type, NULL, iv, 0)), 0); + + ExpectIntEQ(EVP_CIPHER_CTX_nid(evp), NID_aes_128_cbc); + + klen = EVP_CIPHER_CTX_key_length(evp); + if (klen > 0 && keylen != klen) { + ExpectIntNE(EVP_CIPHER_CTX_set_key_length(evp, keylen), 0); + } + ilen = EVP_CIPHER_CTX_iv_length(evp); + if (ilen > 0 && ivlen != ilen) { + ExpectIntNE(EVP_CIPHER_CTX_set_iv_length(evp, ivlen), 0); + } + + ExpectIntNE((ret = EVP_CipherInit(evp, NULL, key, iv, 1)), 0); + + /* outl should be set to 0 after passing NULL, 0 for input args. 
*/ + outl = -1; + ExpectIntNE((ret = EVP_CipherUpdate(evp, outb, &outl, NULL, 0)), 0); + ExpectIntEQ(outl, 0); + + EVP_CIPHER_CTX_free(evp); +#endif /* test_EVP_Cipher */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_X_STATE(void) +{ + EXPECT_DECLS; +#if !defined(NO_DES3) && !defined(NO_RC4) && defined(OPENSSL_ALL) + byte key[DES3_KEY_SIZE] = {0}; + byte iv[DES_IV_SIZE] = {0}; + EVP_CIPHER_CTX *ctx = NULL; + const EVP_CIPHER *init = NULL; + + /* Bad test cases */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + ExpectNotNull(init = EVP_des_ede3_cbc()); + + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + + ExpectNull(wolfSSL_EVP_X_STATE(NULL)); + ExpectNull(wolfSSL_EVP_X_STATE(ctx)); + EVP_CIPHER_CTX_free(ctx); + ctx = NULL; + + /* Good test case */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + ExpectNotNull(init = wolfSSL_EVP_rc4()); + + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + + ExpectNotNull(wolfSSL_EVP_X_STATE(ctx)); + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_X_STATE_LEN(void) +{ + EXPECT_DECLS; +#if !defined(NO_DES3) && !defined(NO_RC4) && defined(OPENSSL_ALL) + byte key[DES3_KEY_SIZE] = {0}; + byte iv[DES_IV_SIZE] = {0}; + EVP_CIPHER_CTX *ctx = NULL; + const EVP_CIPHER *init = NULL; + + /* Bad test cases */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + ExpectNotNull(init = EVP_des_ede3_cbc()); + + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(NULL), 0); + ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(ctx), 0); + EVP_CIPHER_CTX_free(ctx); + ctx = NULL; + + /* Good test case */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + ExpectNotNull(init = wolfSSL_EVP_rc4()); + + wolfSSL_EVP_CIPHER_CTX_init(ctx); + ExpectIntEQ(EVP_CipherInit(ctx, init, key, iv, 1), WOLFSSL_SUCCESS); + + 
ExpectIntEQ(wolfSSL_EVP_X_STATE_LEN(ctx), sizeof(Arc4)); + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + + +int test_wolfSSL_EVP_aes_256_gcm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESGCM) && defined(WOLFSSL_AES_256) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_256_gcm()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_aes_192_gcm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESGCM) && defined(WOLFSSL_AES_192) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_192_gcm()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_aes_128_gcm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESGCM) && defined(WOLFSSL_AES_128) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_128_gcm()); +#endif + return EXPECT_RESULT(); +} + +int test_evp_cipher_aes_gcm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESGCM) && defined(OPENSSL_ALL) && ((!defined(HAVE_FIPS) && \ + !defined(HAVE_SELFTEST)) || (defined(HAVE_FIPS_VERSION) && \ + (HAVE_FIPS_VERSION >= 2))) && defined(WOLFSSL_AES_256) + /* + * This test checks data at various points in the encrypt/decrypt process + * against known values produced using the same test with OpenSSL. This + * interop testing is critical for verifying the correctness of our + * EVP_Cipher implementation with AES-GCM. Specifically, this test exercises + * a flow supported by OpenSSL that uses the control command + * EVP_CTRL_GCM_IV_GEN to increment the IV between cipher operations without + * the need to call EVP_CipherInit. OpenSSH uses this flow, for example. We + * had a bug with OpenSSH where wolfSSL OpenSSH servers could only talk to + * wolfSSL OpenSSH clients because there was a bug in this flow that + * happened to "cancel out" if both sides of the connection had the bug. 
+ */ + enum { + NUM_ENCRYPTIONS = 3, + AAD_SIZE = 4 + }; + static const byte plainText1[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, + 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23 + }; + static const byte plainText2[] = { + 0x42, 0x49, 0x3b, 0x27, 0x03, 0x35, 0x59, 0x14, 0x41, 0x47, 0x37, 0x14, + 0x0e, 0x34, 0x0d, 0x28, 0x63, 0x09, 0x0a, 0x5b, 0x22, 0x57, 0x42, 0x22, + 0x0f, 0x5c, 0x1e, 0x53, 0x45, 0x15, 0x62, 0x08, 0x60, 0x43, 0x50, 0x2c + }; + static const byte plainText3[] = { + 0x36, 0x0d, 0x2b, 0x09, 0x4a, 0x56, 0x3b, 0x4c, 0x21, 0x22, 0x58, 0x0e, + 0x5b, 0x57, 0x10 + }; + static const byte* plainTexts[NUM_ENCRYPTIONS] = { + plainText1, + plainText2, + plainText3 + }; + static const int plainTextSzs[NUM_ENCRYPTIONS] = { + sizeof(plainText1), + sizeof(plainText2), + sizeof(plainText3) + }; + static const byte aad1[AAD_SIZE] = { + 0x00, 0x00, 0x00, 0x01 + }; + static const byte aad2[AAD_SIZE] = { + 0x00, 0x00, 0x00, 0x10 + }; + static const byte aad3[AAD_SIZE] = { + 0x00, 0x00, 0x01, 0x00 + }; + static const byte* aads[NUM_ENCRYPTIONS] = { + aad1, + aad2, + aad3 + }; + const byte iv[GCM_NONCE_MID_SZ] = { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF + }; + byte currentIv[GCM_NONCE_MID_SZ]; + const byte key[] = { + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, + 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f + }; + const byte expIvs[NUM_ENCRYPTIONS][GCM_NONCE_MID_SZ] = { + { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, + 0xEF + }, + { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, + 0xF0 + }, + { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, + 0xF1 + } + }; + const byte expTags[NUM_ENCRYPTIONS][AES_BLOCK_SIZE] = { + { + 0x65, 0x4F, 0xF7, 
0xA0, 0xBB, 0x7B, 0x90, 0xB7, 0x9C, 0xC8, 0x14, + 0x3D, 0x32, 0x18, 0x34, 0xA9 + }, + { + 0x50, 0x3A, 0x13, 0x8D, 0x91, 0x1D, 0xEC, 0xBB, 0xBA, 0x5B, 0x57, + 0xA2, 0xFD, 0x2D, 0x6B, 0x7F + }, + { + 0x3B, 0xED, 0x18, 0x9C, 0xB3, 0xE3, 0x61, 0x1E, 0x11, 0xEB, 0x13, + 0x5B, 0xEC, 0x52, 0x49, 0x32, + } + }; + static const byte expCipherText1[] = { + 0xCB, 0x93, 0x4F, 0xC8, 0x22, 0xE2, 0xC0, 0x35, 0xAA, 0x6B, 0x41, 0x15, + 0x17, 0x30, 0x2F, 0x97, 0x20, 0x74, 0x39, 0x28, 0xF8, 0xEB, 0xC5, 0x51, + 0x7B, 0xD9, 0x8A, 0x36, 0xB8, 0xDA, 0x24, 0x80, 0xE7, 0x9E, 0x09, 0xDE + }; + static const byte expCipherText2[] = { + 0xF9, 0x32, 0xE1, 0x87, 0x37, 0x0F, 0x04, 0xC1, 0xB5, 0x59, 0xF0, 0x45, + 0x3A, 0x0D, 0xA0, 0x26, 0xFF, 0xA6, 0x8D, 0x38, 0xFE, 0xB8, 0xE5, 0xC2, + 0x2A, 0x98, 0x4A, 0x54, 0x8F, 0x1F, 0xD6, 0x13, 0x03, 0xB2, 0x1B, 0xC0 + }; + static const byte expCipherText3[] = { + 0xD0, 0x37, 0x59, 0x1C, 0x2F, 0x85, 0x39, 0x4D, 0xED, 0xC2, 0x32, 0x5B, + 0x80, 0x5E, 0x6B, + }; + static const byte* expCipherTexts[NUM_ENCRYPTIONS] = { + expCipherText1, + expCipherText2, + expCipherText3 + }; + byte* cipherText = NULL; + byte* calcPlainText = NULL; + byte tag[AES_BLOCK_SIZE]; + EVP_CIPHER_CTX* encCtx = NULL; + EVP_CIPHER_CTX* decCtx = NULL; + int i, j, outl; + + /****************************************************/ + for (i = 0; i < 3; ++i) { + ExpectNotNull(encCtx = EVP_CIPHER_CTX_new()); + ExpectNotNull(decCtx = EVP_CIPHER_CTX_new()); + + /* First iteration, set key before IV. */ + if (i == 0) { + ExpectIntEQ(EVP_CipherInit(encCtx, EVP_aes_256_gcm(), key, NULL, 1), + SSL_SUCCESS); + + /* + * The call to EVP_CipherInit below (with NULL key) should clear the + * authIvGenEnable flag set by EVP_CTRL_GCM_SET_IV_FIXED. As such, a + * subsequent EVP_CTRL_GCM_IV_GEN should fail. This matches OpenSSL + * behavior. 
+ */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_SET_IV_FIXED, + -1, (void*)iv), SSL_SUCCESS); + ExpectIntEQ(EVP_CipherInit(encCtx, NULL, NULL, iv, 1), + SSL_SUCCESS); + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, + currentIv), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + ExpectIntEQ(EVP_CipherInit(decCtx, EVP_aes_256_gcm(), key, NULL, 0), + SSL_SUCCESS); + ExpectIntEQ(EVP_CipherInit(decCtx, NULL, NULL, iv, 0), + SSL_SUCCESS); + } + /* Second iteration, IV before key. */ + else { + ExpectIntEQ(EVP_CipherInit(encCtx, EVP_aes_256_gcm(), NULL, iv, 1), + SSL_SUCCESS); + ExpectIntEQ(EVP_CipherInit(encCtx, NULL, key, NULL, 1), + SSL_SUCCESS); + ExpectIntEQ(EVP_CipherInit(decCtx, EVP_aes_256_gcm(), NULL, iv, 0), + SSL_SUCCESS); + ExpectIntEQ(EVP_CipherInit(decCtx, NULL, key, NULL, 0), + SSL_SUCCESS); + } + + /* + * EVP_CTRL_GCM_IV_GEN should fail if EVP_CTRL_GCM_SET_IV_FIXED hasn't + * been issued first. + */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, + currentIv), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_SET_IV_FIXED, -1, + (void*)iv), SSL_SUCCESS); + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_SET_IV_FIXED, -1, + (void*)iv), SSL_SUCCESS); + + for (j = 0; j < NUM_ENCRYPTIONS; ++j) { + /*************** Encrypt ***************/ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_IV_GEN, -1, + currentIv), SSL_SUCCESS); + /* Check current IV against expected. */ + ExpectIntEQ(XMEMCMP(currentIv, expIvs[j], GCM_NONCE_MID_SZ), 0); + + /* Add AAD. */ + if (i == 2) { + /* Test streaming API. */ + ExpectIntEQ(EVP_CipherUpdate(encCtx, NULL, &outl, aads[j], + AAD_SIZE), SSL_SUCCESS); + } + else { + ExpectIntEQ(EVP_Cipher(encCtx, NULL, (byte *)aads[j], AAD_SIZE), + AAD_SIZE); + } + + ExpectNotNull(cipherText = (byte*)XMALLOC(plainTextSzs[j], NULL, + DYNAMIC_TYPE_TMP_BUFFER)); + + /* Encrypt plaintext. 
*/ + if (i == 2) { + ExpectIntEQ(EVP_CipherUpdate(encCtx, cipherText, &outl, + plainTexts[j], plainTextSzs[j]), + SSL_SUCCESS); + } + else { + ExpectIntEQ(EVP_Cipher(encCtx, cipherText, + (byte *)plainTexts[j], plainTextSzs[j]), + plainTextSzs[j]); + } + + if (i == 2) { + ExpectIntEQ(EVP_CipherFinal(encCtx, cipherText, &outl), + SSL_SUCCESS); + } + else { + /* + * Calling EVP_Cipher with NULL input and output for AES-GCM is + * akin to calling EVP_CipherFinal. + */ + ExpectIntGE(EVP_Cipher(encCtx, NULL, NULL, 0), 0); + } + + /* Check ciphertext against expected. */ + ExpectIntEQ(XMEMCMP(cipherText, expCipherTexts[j], plainTextSzs[j]), + 0); + + /* Get and check tag against expected. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(encCtx, EVP_CTRL_GCM_GET_TAG, + sizeof(tag), tag), SSL_SUCCESS); + ExpectIntEQ(XMEMCMP(tag, expTags[j], sizeof(tag)), 0); + + /*************** Decrypt ***************/ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_IV_GEN, -1, + currentIv), SSL_SUCCESS); + /* Check current IV against expected. */ + ExpectIntEQ(XMEMCMP(currentIv, expIvs[j], GCM_NONCE_MID_SZ), 0); + + /* Add AAD. */ + if (i == 2) { + /* Test streaming API. */ + ExpectIntEQ(EVP_CipherUpdate(decCtx, NULL, &outl, aads[j], + AAD_SIZE), SSL_SUCCESS); + } + else { + ExpectIntEQ(EVP_Cipher(decCtx, NULL, (byte *)aads[j], AAD_SIZE), + AAD_SIZE); + } + + /* Set expected tag. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(decCtx, EVP_CTRL_GCM_SET_TAG, + sizeof(tag), tag), SSL_SUCCESS); + + /* Decrypt ciphertext. */ + ExpectNotNull(calcPlainText = (byte*)XMALLOC(plainTextSzs[j], NULL, + DYNAMIC_TYPE_TMP_BUFFER)); + if (i == 2) { + ExpectIntEQ(EVP_CipherUpdate(decCtx, calcPlainText, &outl, + cipherText, plainTextSzs[j]), + SSL_SUCCESS); + } + else { + /* This first EVP_Cipher call will check the tag, too. 
*/ + ExpectIntEQ(EVP_Cipher(decCtx, calcPlainText, cipherText, + plainTextSzs[j]), plainTextSzs[j]); + } + + if (i == 2) { + ExpectIntEQ(EVP_CipherFinal(decCtx, calcPlainText, &outl), + SSL_SUCCESS); + } + else { + ExpectIntGE(EVP_Cipher(decCtx, NULL, NULL, 0), 0); + } + + /* Check plaintext against expected. */ + ExpectIntEQ(XMEMCMP(calcPlainText, plainTexts[j], plainTextSzs[j]), + 0); + + XFREE(cipherText, NULL, DYNAMIC_TYPE_TMP_BUFFER); + cipherText = NULL; + XFREE(calcPlainText, NULL, DYNAMIC_TYPE_TMP_BUFFER); + calcPlainText = NULL; + } + + EVP_CIPHER_CTX_free(encCtx); + encCtx = NULL; + EVP_CIPHER_CTX_free(decCtx); + decCtx = NULL; + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aes_gcm(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) + /* A 256 bit key, AES_128 will use the first 128 bit*/ + byte *key = (byte*)"01234567890123456789012345678901"; + /* A 128 bit IV */ + byte *iv = (byte*)"0123456789012345"; + int ivSz = AES_BLOCK_SIZE; + /* Message to be encrypted */ + byte *plaintxt = (byte*)"for things to change you have to change"; + /* Additional non-confidential data */ + byte *aad = (byte*)"Don't spend major time on minor things."; + + unsigned char tag[AES_BLOCK_SIZE] = {0}; + int plaintxtSz = (int)XSTRLEN((char*)plaintxt); + int aadSz = (int)XSTRLEN((char*)aad); + byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + int i = 0; + EVP_CIPHER_CTX en[2]; + EVP_CIPHER_CTX de[2]; + + for (i = 0; i < 2; i++) { + EVP_CIPHER_CTX_init(&en[i]); + if (i == 0) { + /* Default uses 96-bits IV length */ +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_256) 
+ ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_gcm(), NULL, + key, iv)); +#endif + } + else { +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_gcm(), NULL, + NULL, NULL)); +#endif + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + } + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, + plaintxtSz)); + ciphertxtSz = len; + ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, + AES_BLOCK_SIZE, tag)); + wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]); + + EVP_CIPHER_CTX_init(&de[i]); + if (i == 0) { + /* Default uses 96-bits IV length */ +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, + key, iv)); +#endif + } + else { +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, + NULL, NULL)); +#endif + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, + ivSz, NULL)); + 
ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + + } + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + AES_BLOCK_SIZE, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(ciphertxtSz, decryptedtxtSz); + ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); + + /* modify tag*/ + if (i == 0) { + /* Default uses 96-bits IV length */ +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, + key, iv)); +#endif + } + else { +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_gcm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_gcm(), NULL, + NULL, NULL)); +#endif + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + + } + tag[AES_BLOCK_SIZE-1]+=0xBB; + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + AES_BLOCK_SIZE, tag)); + /* fail due to wrong tag */ + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + ExpectIntEQ(0, len); + + wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]); + } +#endif 
/* OPENSSL_EXTRA && !NO_AES && HAVE_AESGCM */ + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aes_gcm_AAD_2_parts(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) + const byte iv[12] = { 0 }; + const byte key[16] = { 0 }; + const byte cleartext[16] = { 0 }; + const byte aad[] = { + 0x01, 0x10, 0x00, 0x2a, 0x08, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, + 0x00, 0x00, 0xdc, 0x4d, 0xad, 0x6b, 0x06, 0x93, + 0x4f + }; + byte out1Part[16]; + byte outTag1Part[16]; + byte out2Part[16]; + byte outTag2Part[16]; + byte decryptBuf[16]; + int len = 0; + int tlen; + EVP_CIPHER_CTX* ctx = NULL; + + /* ENCRYPT */ + /* Send AAD and data in 1 part */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + tlen = 0; + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), + 1); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), 1); + ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad, sizeof(aad)), 1); + ExpectIntEQ(EVP_EncryptUpdate(ctx, out1Part, &len, cleartext, + sizeof(cleartext)), 1); + tlen += len; + ExpectIntEQ(EVP_EncryptFinal_ex(ctx, out1Part, &len), 1); + tlen += len; + ExpectIntEQ(tlen, sizeof(cleartext)); + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, + outTag1Part), 1); + EVP_CIPHER_CTX_free(ctx); + ctx = NULL; + + /* DECRYPT */ + /* Send AAD and data in 1 part */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + tlen = 0; + ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), + 1); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), 1); + ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad, sizeof(aad)), 1); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf, &len, out1Part, + sizeof(cleartext)), 1); + tlen += len; + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, + outTag1Part), 1); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptBuf, &len), 1); + tlen += len; + 
ExpectIntEQ(tlen, sizeof(cleartext)); + EVP_CIPHER_CTX_free(ctx); + ctx = NULL; + + ExpectIntEQ(XMEMCMP(decryptBuf, cleartext, len), 0); + + /* ENCRYPT */ + /* Send AAD and data in 2 parts */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + tlen = 0; + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), + 1); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), 1); + ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad, 1), 1); + ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &len, aad + 1, sizeof(aad) - 1), + 1); + ExpectIntEQ(EVP_EncryptUpdate(ctx, out2Part, &len, cleartext, 1), 1); + tlen += len; + ExpectIntEQ(EVP_EncryptUpdate(ctx, out2Part + tlen, &len, cleartext + 1, + sizeof(cleartext) - 1), 1); + tlen += len; + ExpectIntEQ(EVP_EncryptFinal_ex(ctx, out2Part + tlen, &len), 1); + tlen += len; + ExpectIntEQ(tlen, sizeof(cleartext)); + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, + outTag2Part), 1); + + ExpectIntEQ(XMEMCMP(out1Part, out2Part, sizeof(out1Part)), 0); + ExpectIntEQ(XMEMCMP(outTag1Part, outTag2Part, sizeof(outTag1Part)), 0); + EVP_CIPHER_CTX_free(ctx); + ctx = NULL; + + /* DECRYPT */ + /* Send AAD and data in 2 parts */ + ExpectNotNull(ctx = EVP_CIPHER_CTX_new()); + tlen = 0; + ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL), + 1); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), 1); + ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad, 1), 1); + ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &len, aad + 1, sizeof(aad) - 1), + 1); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf, &len, out1Part, 1), 1); + tlen += len; + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptBuf + tlen, &len, out1Part + 1, + sizeof(cleartext) - 1), 1); + tlen += len; + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, + outTag1Part), 1); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptBuf + tlen, &len), 1); + tlen += len; + ExpectIntEQ(tlen, sizeof(cleartext)); + + ExpectIntEQ(XMEMCMP(decryptBuf, 
cleartext, len), 0); + + /* Test AAD reuse */ + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aes_gcm_zeroLen(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESGCM) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) && defined(WOLFSSL_AES_256) + /* Zero length plain text */ + byte key[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte iv[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte plaintxt[1]; + int ivSz = 12; + int plaintxtSz = 0; + unsigned char tag[16]; + unsigned char tag_kat[] = { + 0x53,0x0f,0x8a,0xfb,0xc7,0x45,0x36,0xb9, + 0xa9,0x63,0xb4,0xf1,0xc4,0xcb,0x73,0x8b + }; + + byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + + EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); + EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); + + ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_aes_256_gcm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, + plaintxtSz)); + ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_GET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); + + ExpectIntEQ(0, ciphertxtSz); + ExpectIntEQ(0, XMEMCMP(tag, tag_kat, sizeof(tag))); + + EVP_CIPHER_CTX_init(de); + ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_aes_256_gcm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_TAG, 16, 
tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(0, decryptedtxtSz); + + EVP_CIPHER_CTX_free(en); + EVP_CIPHER_CTX_free(de); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_aes_256_ccm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESCCM) && defined(WOLFSSL_AES_256) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_256_ccm()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_aes_192_ccm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESCCM) && defined(WOLFSSL_AES_192) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_192_ccm()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_aes_128_ccm(void) +{ + EXPECT_DECLS; +#if defined(HAVE_AESCCM) && defined(WOLFSSL_AES_128) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_aes_128_ccm()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aes_ccm(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESCCM) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) + /* A 256 bit key, AES_128 will use the first 128 bit*/ + byte *key = (byte*)"01234567890123456789012345678901"; + /* A 128 bit IV */ + byte *iv = (byte*)"0123456789012"; + int ivSz = (int)XSTRLEN((char*)iv); + /* Message to be encrypted */ + byte *plaintxt = (byte*)"for things to change you have to change"; + /* Additional non-confidential data */ + byte *aad = (byte*)"Don't spend major time on minor things."; + + unsigned char tag[AES_BLOCK_SIZE] = {0}; + int plaintxtSz = (int)XSTRLEN((char*)plaintxt); + int aadSz = (int)XSTRLEN((char*)aad); + byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + int i = 0; + int ret; + EVP_CIPHER_CTX en[2]; + EVP_CIPHER_CTX de[2]; + + for (i = 0; i < 2; i++) { + EVP_CIPHER_CTX_init(&en[i]); + + if (i == 0) { + /* Default uses 96-bits IV length */ +#ifdef WOLFSSL_AES_128 + 
ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_ccm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_ccm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_ccm(), NULL, + key, iv)); +#endif + } + else { +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_128_ccm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_192_ccm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aes_256_ccm(), NULL, + NULL, NULL)); +#endif + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + } + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, + plaintxtSz)); + ciphertxtSz = len; + ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_GET_TAG, + AES_BLOCK_SIZE, tag)); + ret = wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]); + ExpectIntEQ(ret, 1); + + EVP_CIPHER_CTX_init(&de[i]); + if (i == 0) { + /* Default uses 96-bits IV length */ +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_ccm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_ccm(), NULL, + key, iv)); +#elif defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_ccm(), NULL, + key, iv)); +#endif + } + else { +#ifdef WOLFSSL_AES_128 + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_128_ccm(), NULL, + NULL, NULL)); +#elif defined(WOLFSSL_AES_192) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_192_ccm(), NULL, + NULL, NULL)); +#elif 
defined(WOLFSSL_AES_256) + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aes_256_ccm(), NULL, + NULL, NULL)); +#endif + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + + } + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, + AES_BLOCK_SIZE, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(ciphertxtSz, decryptedtxtSz); + ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); + + /* modify tag*/ + tag[AES_BLOCK_SIZE-1]+=0xBB; + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, + AES_BLOCK_SIZE, tag)); + /* fail due to wrong tag */ + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + ExpectIntEQ(0, len); + ret = wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]); + ExpectIntEQ(ret, 1); + } +#endif /* OPENSSL_EXTRA && !NO_AES && HAVE_AESCCM */ + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aes_ccm_zeroLen(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_AES) && defined(HAVE_AESCCM) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) && defined(WOLFSSL_AES_256) + /* Zero length plain text */ + byte key[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte iv[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte plaintxt[1]; + int ivSz = 12; + int plaintxtSz = 0; + 
unsigned char tag[16]; + + byte ciphertxt[AES_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[AES_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + + EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); + EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); + + ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_aes_256_ccm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, + plaintxtSz)); + ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_GET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); + + ExpectIntEQ(0, ciphertxtSz); + + EVP_CIPHER_CTX_init(de); + ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_aes_256_ccm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(0, decryptedtxtSz); + + EVP_CIPHER_CTX_free(en); + EVP_CIPHER_CTX_free(de); +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_chacha20(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_CHACHA) + byte key[CHACHA_MAX_KEY_SZ]; + byte iv [WOLFSSL_EVP_CHACHA_IV_BYTES]; + byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; + byte cipherText[sizeof(plainText)]; + byte decryptedText[sizeof(plainText)]; + EVP_CIPHER_CTX* ctx = NULL; + int outSz; + + XMEMSET(key, 0, sizeof(key)); + XMEMSET(iv, 0, sizeof(iv)); + /* Encrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_chacha20(), NULL, NULL, + NULL), WOLFSSL_SUCCESS); + /* Any tag length must fail - not an AEAD cipher. 
*/
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG,
+        16, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE));
+    ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText,
+        sizeof(plainText)), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(plainText));
+    ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, 0);
+    EVP_CIPHER_CTX_free(ctx);
+    ctx = NULL;
+
+    /* Decrypt. */
+    ExpectNotNull((ctx = EVP_CIPHER_CTX_new()));
+    ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_chacha20(), NULL, NULL,
+        NULL), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText,
+        sizeof(cipherText)), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(cipherText));
+    ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz),
+        WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, 0);
+    EVP_CIPHER_CTX_free(ctx);
+    ctx = NULL;
+
+    /* Test partial Inits. CipherInit() allows setting of key and iv
+     * in separate calls.
*/ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_chacha20(), + key, NULL, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(cipherText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_chacha20_poly1305(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_CHACHA) && defined(HAVE_POLY1305) + byte key[CHACHA20_POLY1305_AEAD_KEYSIZE]; + byte iv [CHACHA20_POLY1305_AEAD_IV_SIZE]; + byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; + byte aad[] = {0xAA, 0XBB, 0xCC, 0xDD, 0xEE, 0xFF}; + byte cipherText[sizeof(plainText)]; + byte decryptedText[sizeof(plainText)]; + byte tag[CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE]; + EVP_CIPHER_CTX* ctx = NULL; + int outSz; + + XMEMSET(key, 0, sizeof(key)); + XMEMSET(iv, 0, sizeof(iv)); + + /* Encrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_chacha20_poly1305(), NULL, NULL, + NULL), WOLFSSL_SUCCESS); + /* Invalid IV length. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, + CHACHA20_POLY1305_AEAD_IV_SIZE-1, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Valid IV length. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, + CHACHA20_POLY1305_AEAD_IV_SIZE, NULL), WOLFSSL_SUCCESS); + /* Invalid tag length. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, + CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE-1, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Valid tag length. 
*/
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG,
+        CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, NULL), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_EncryptUpdate(ctx, NULL, &outSz, aad, sizeof(aad)),
+        WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(aad));
+    ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText,
+        sizeof(plainText)), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(plainText));
+    ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, 0);
+    /* Invalid tag length. */
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG,
+        CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE-1, tag),
+        WC_NO_ERR_TRACE(WOLFSSL_FAILURE));
+    /* Valid tag length. */
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG,
+        CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, tag), WOLFSSL_SUCCESS);
+    EVP_CIPHER_CTX_free(ctx);
+    ctx = NULL;
+
+    /* Decrypt. */
+    ExpectNotNull((ctx = EVP_CIPHER_CTX_new()));
+    ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_chacha20_poly1305(), NULL, NULL,
+        NULL), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN,
+        CHACHA20_POLY1305_AEAD_IV_SIZE, NULL), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG,
+        CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE, tag), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS);
+    ExpectIntEQ(EVP_DecryptUpdate(ctx, NULL, &outSz, aad, sizeof(aad)),
+        WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(aad));
+    ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText,
+        sizeof(cipherText)), WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, sizeof(cipherText));
+    ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz),
+        WOLFSSL_SUCCESS);
+    ExpectIntEQ(outSz, 0);
+    EVP_CIPHER_CTX_free(ctx);
+    ctx = NULL;
+
+    /* Test partial Inits. CipherInit() allows setting of key and iv
+     * in separate calls.
*/ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_chacha20_poly1305(), + key, NULL, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CipherUpdate(ctx, NULL, &outSz, + aad, sizeof(aad)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(aad)); + /* AAD consumed - now process the cipher text. */ + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(cipherText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + EVP_CIPHER_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + +int test_wolfssl_EVP_aria_gcm(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(HAVE_ARIA) && \ + !defined(HAVE_SELFTEST) && !defined(HAVE_FIPS) + + /* A 256 bit key, ARIA_128 will use the first 128 bit*/ + byte *key = (byte*)"01234567890123456789012345678901"; + /* A 128 bit IV */ + byte *iv = (byte*)"0123456789012345"; + int ivSz = ARIA_BLOCK_SIZE; + /* Message to be encrypted */ + const int plaintxtSz = 40; + byte plaintxt[WC_ARIA_GCM_GET_CIPHERTEXT_SIZE(plaintxtSz)]; + XMEMCPY(plaintxt,"for things to change you have to change",plaintxtSz); + /* Additional non-confidential data */ + byte *aad = (byte*)"Don't spend major time on minor things."; + + unsigned char tag[ARIA_BLOCK_SIZE] = {0}; + int aadSz = (int)XSTRLEN((char*)aad); + byte ciphertxt[WC_ARIA_GCM_GET_CIPHERTEXT_SIZE(plaintxtSz)]; + byte decryptedtxt[plaintxtSz]; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + int i = 0; + #define TEST_ARIA_GCM_COUNT 6 + EVP_CIPHER_CTX en[TEST_ARIA_GCM_COUNT]; + EVP_CIPHER_CTX de[TEST_ARIA_GCM_COUNT]; + + for (i = 0; i < TEST_ARIA_GCM_COUNT; i++) { + + EVP_CIPHER_CTX_init(&en[i]); + switch (i) { + case 0: + /* Default uses 96-bits IV length */ + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_128_gcm(), + 
NULL, key, iv)); + break; + case 1: + /* Default uses 96-bits IV length */ + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_192_gcm(), + NULL, key, iv)); + break; + case 2: + /* Default uses 96-bits IV length */ + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_256_gcm(), + NULL, key, iv)); + break; + case 3: + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_128_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + break; + case 4: + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_192_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + break; + case 5: + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_aria_256_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + break; + } + XMEMSET(ciphertxt,0,sizeof(ciphertxt)); + AssertIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); + AssertIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, + plaintxtSz)); + ciphertxtSz = len; + AssertIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); + AssertIntNE(0, XMEMCMP(plaintxt, ciphertxt, plaintxtSz)); + ciphertxtSz += len; + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, + ARIA_BLOCK_SIZE, tag)); + AssertIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); + + EVP_CIPHER_CTX_init(&de[i]); + switch (i) { + case 0: + /* Default uses 96-bits IV length */ + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_128_gcm(), + NULL, key, iv)); + break; + case 1: + /* Default uses 96-bits IV length */ + 
AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_192_gcm(), + NULL, key, iv)); + break; + case 2: + /* Default uses 96-bits IV length */ + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_256_gcm(), + NULL, key, iv)); + break; + case 3: + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_128_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + break; + case 4: + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_192_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + break; + case 5: + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_aria_256_gcm(), + NULL, NULL, NULL)); + /* non-default must to set the IV length first */ + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], + EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + AssertIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + break; + } + XMEMSET(decryptedtxt,0,sizeof(decryptedtxt)); + AssertIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + AssertIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + decryptedtxtSz = len; + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + ARIA_BLOCK_SIZE, tag)); + AssertIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + decryptedtxtSz += len; + AssertIntEQ(plaintxtSz, decryptedtxtSz); + AssertIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); + + XMEMSET(decryptedtxt,0,sizeof(decryptedtxt)); + /* modify tag*/ + tag[ARIA_BLOCK_SIZE-1]+=0xBB; + AssertIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + AssertIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + ARIA_BLOCK_SIZE, tag)); + /* fail due to wrong tag */ + AssertIntEQ(1, 
EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + AssertIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + AssertIntEQ(0, len); + AssertIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); + } + + res = TEST_RES_CHECK(1); +#endif /* OPENSSL_EXTRA && HAVE_ARIA && !HAVE_SELFTEST && !HAVE_FIPS */ + return res; +} + +int test_wolfssl_EVP_sm4_ecb(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_ECB) + EXPECT_DECLS; + byte key[SM4_KEY_SIZE]; + byte plainText[SM4_BLOCK_SIZE] = { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF + }; + byte cipherText[sizeof(plainText) + SM4_BLOCK_SIZE]; + byte decryptedText[sizeof(plainText) + SM4_BLOCK_SIZE]; + EVP_CIPHER_CTX* ctx; + int outSz; + + XMEMSET(key, 0, sizeof(key)); + + /* Encrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_ecb(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + /* Any tag length must fail - not an AEAD cipher. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, + sizeof(plainText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText + outSz, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, SM4_BLOCK_SIZE); + ExpectBufNE(cipherText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + /* Decrypt. 
*/ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_ecb(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + res = EXPECT_RESULT(); +#endif + return res; +} + +int test_wolfssl_EVP_sm4_cbc(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CBC) + EXPECT_DECLS; + byte key[SM4_KEY_SIZE]; + byte iv[SM4_BLOCK_SIZE]; + byte plainText[SM4_BLOCK_SIZE] = { + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF, + 0xDE, 0xAD, 0xBE, 0xEF, 0xDE, 0xAD, 0xBE, 0xEF + }; + byte cipherText[sizeof(plainText) + SM4_BLOCK_SIZE]; + byte decryptedText[sizeof(plainText) + SM4_BLOCK_SIZE]; + EVP_CIPHER_CTX* ctx; + int outSz; + + XMEMSET(key, 0, sizeof(key)); + XMEMSET(iv, 0, sizeof(iv)); + + /* Encrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_cbc(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + /* Any tag length must fail - not an AEAD cipher. */ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, + sizeof(plainText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText + outSz, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, SM4_BLOCK_SIZE); + ExpectBufNE(cipherText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + /* Decrypt. 
*/ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_cbc(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + /* Test partial Inits. CipherInit() allow setting of key and iv + * in separate calls. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_sm4_cbc(), key, NULL, 0), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 0), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText + outSz, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + res = EXPECT_RESULT(); +#endif + return res; +} + +int test_wolfssl_EVP_sm4_ctr(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CTR) + EXPECT_DECLS; + byte key[SM4_KEY_SIZE]; + byte iv[SM4_BLOCK_SIZE]; + byte plainText[] = {0xDE, 0xAD, 0xBE, 0xEF}; + byte cipherText[sizeof(plainText)]; + byte decryptedText[sizeof(plainText)]; + EVP_CIPHER_CTX* ctx; + int outSz; + + XMEMSET(key, 0, sizeof(key)); + XMEMSET(iv, 0, sizeof(iv)); + + /* Encrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, EVP_sm4_ctr(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + /* Any tag length must fail - not an AEAD cipher. 
*/ + ExpectIntEQ(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_EncryptUpdate(ctx, cipherText, &outSz, plainText, + sizeof(plainText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(plainText)); + ExpectIntEQ(EVP_EncryptFinal_ex(ctx, cipherText, &outSz), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufNE(cipherText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + /* Decrypt. */ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, EVP_sm4_ctr(), NULL, NULL, NULL), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(cipherText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + /* Test partial Inits. CipherInit() allow setting of key and iv + * in separate calls. 
*/ + ExpectNotNull((ctx = EVP_CIPHER_CTX_new())); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, EVP_sm4_ctr(), key, NULL, 1), + WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_CipherInit(ctx, NULL, NULL, iv, 1), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DecryptUpdate(ctx, decryptedText, &outSz, cipherText, + sizeof(cipherText)), WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, sizeof(cipherText)); + ExpectIntEQ(EVP_DecryptFinal_ex(ctx, decryptedText, &outSz), + WOLFSSL_SUCCESS); + ExpectIntEQ(outSz, 0); + ExpectBufEQ(decryptedText, plainText, sizeof(plainText)); + EVP_CIPHER_CTX_free(ctx); + + res = EXPECT_RESULT(); +#endif + return res; +} + +int test_wolfssl_EVP_sm4_gcm_zeroLen(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_GCM) + /* Zero length plain text */ + EXPECT_DECLS; + byte key[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte iv[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte plaintxt[1]; + int ivSz = 12; + int plaintxtSz = 0; + unsigned char tag[16]; + unsigned char tag_kat[16] = { + 0x23,0x2f,0x0c,0xfe,0x30,0x8b,0x49,0xea, + 0x6f,0xc8,0x82,0x29,0xb5,0xdc,0x85,0x8d + }; + + byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + + EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); + EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); + + ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_sm4_gcm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, + plaintxtSz)); + ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_GCM_GET_TAG, 16, tag)); + ExpectIntEQ(1, 
EVP_CIPHER_CTX_cleanup(en)); + + ExpectIntEQ(0, ciphertxtSz); + ExpectIntEQ(0, XMEMCMP(tag, tag_kat, sizeof(tag))); + + EVP_CIPHER_CTX_init(de); + ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_sm4_gcm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_GCM_SET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(0, decryptedtxtSz); + + EVP_CIPHER_CTX_free(en); + EVP_CIPHER_CTX_free(de); + + res = EXPECT_RESULT(); +#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_GCM */ + return res; +} + +int test_wolfssl_EVP_sm4_gcm(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_GCM) + EXPECT_DECLS; + byte *key = (byte*)"0123456789012345"; + /* A 128 bit IV */ + byte *iv = (byte*)"0123456789012345"; + int ivSz = SM4_BLOCK_SIZE; + /* Message to be encrypted */ + byte *plaintxt = (byte*)"for things to change you have to change"; + /* Additional non-confidential data */ + byte *aad = (byte*)"Don't spend major time on minor things."; + + unsigned char tag[SM4_BLOCK_SIZE] = {0}; + int plaintxtSz = (int)XSTRLEN((char*)plaintxt); + int aadSz = (int)XSTRLEN((char*)aad); + byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + int i = 0; + EVP_CIPHER_CTX en[2]; + EVP_CIPHER_CTX de[2]; + + for (i = 0; i < 2; i++) { + EVP_CIPHER_CTX_init(&en[i]); + + if (i == 0) { + /* Default uses 96-bits IV length */ + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_gcm(), NULL, key, + iv)); + } + else { + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_gcm(), NULL, NULL, + NULL)); + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_SET_IVLEN, + ivSz, NULL)); + 
ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + } + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, + plaintxtSz)); + ciphertxtSz = len; + ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_GCM_GET_TAG, + SM4_BLOCK_SIZE, tag)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); + + EVP_CIPHER_CTX_init(&de[i]); + if (i == 0) { + /* Default uses 96-bits IV length */ + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_gcm(), NULL, key, + iv)); + } + else { + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_gcm(), NULL, NULL, + NULL)); + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + + } + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + SM4_BLOCK_SIZE, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(ciphertxtSz, decryptedtxtSz); + ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); + + /* modify tag*/ + tag[SM4_BLOCK_SIZE-1]+=0xBB; + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_GCM_SET_TAG, + SM4_BLOCK_SIZE, tag)); + /* fail due to wrong tag */ + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + ExpectIntEQ(0, len); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); + } + + res = EXPECT_RESULT(); +#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_GCM */ + return 
res; +} + +int test_wolfssl_EVP_sm4_ccm_zeroLen(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CCM) + /* Zero length plain text */ + EXPECT_DECLS; + byte key[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte iv[] = { + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 + }; /* align */ + byte plaintxt[1]; + int ivSz = 12; + int plaintxtSz = 0; + unsigned char tag[16]; + + byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + + EVP_CIPHER_CTX *en = EVP_CIPHER_CTX_new(); + EVP_CIPHER_CTX *de = EVP_CIPHER_CTX_new(); + + ExpectIntEQ(1, EVP_EncryptInit_ex(en, EVP_sm4_ccm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptUpdate(en, ciphertxt, &ciphertxtSz , plaintxt, + plaintxtSz)); + ExpectIntEQ(1, EVP_EncryptFinal_ex(en, ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(en, EVP_CTRL_CCM_GET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_CIPHER_CTX_cleanup(en)); + + ExpectIntEQ(0, ciphertxtSz); + + EVP_CIPHER_CTX_init(de); + ExpectIntEQ(1, EVP_DecryptInit_ex(de, EVP_sm4_ccm(), NULL, key, iv)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_IVLEN, ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptUpdate(de, NULL, &len, ciphertxt, len)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(de, EVP_CTRL_CCM_SET_TAG, 16, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(de, decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(0, decryptedtxtSz); + + EVP_CIPHER_CTX_free(en); + EVP_CIPHER_CTX_free(de); + + res = EXPECT_RESULT(); +#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_CCM */ + return res; +} + +int test_wolfssl_EVP_sm4_ccm(void) +{ + int res = TEST_SKIPPED; +#if 
defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM4_CCM) + EXPECT_DECLS; + byte *key = (byte*)"0123456789012345"; + byte *iv = (byte*)"0123456789012"; + int ivSz = (int)XSTRLEN((char*)iv); + /* Message to be encrypted */ + byte *plaintxt = (byte*)"for things to change you have to change"; + /* Additional non-confidential data */ + byte *aad = (byte*)"Don't spend major time on minor things."; + + unsigned char tag[SM4_BLOCK_SIZE] = {0}; + int plaintxtSz = (int)XSTRLEN((char*)plaintxt); + int aadSz = (int)XSTRLEN((char*)aad); + byte ciphertxt[SM4_BLOCK_SIZE * 4] = {0}; + byte decryptedtxt[SM4_BLOCK_SIZE * 4] = {0}; + int ciphertxtSz = 0; + int decryptedtxtSz = 0; + int len = 0; + int i = 0; + EVP_CIPHER_CTX en[2]; + EVP_CIPHER_CTX de[2]; + + for (i = 0; i < 2; i++) { + EVP_CIPHER_CTX_init(&en[i]); + + if (i == 0) { + /* Default uses 96-bits IV length */ + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_ccm(), NULL, key, + iv)); + } + else { + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], EVP_sm4_ccm(), NULL, NULL, + NULL)); + /* non-default must to set the IV length first */ + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_EncryptInit_ex(&en[i], NULL, NULL, key, iv)); + } + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_EncryptUpdate(&en[i], ciphertxt, &len, plaintxt, + plaintxtSz)); + ciphertxtSz = len; + ExpectIntEQ(1, EVP_EncryptFinal_ex(&en[i], ciphertxt, &len)); + ciphertxtSz += len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&en[i], EVP_CTRL_CCM_GET_TAG, + SM4_BLOCK_SIZE, tag)); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&en[i]), 1); + + EVP_CIPHER_CTX_init(&de[i]); + if (i == 0) { + /* Default uses 96-bits IV length */ + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_ccm(), NULL, key, + iv)); + } + else { + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], EVP_sm4_ccm(), NULL, NULL, + NULL)); + /* non-default must to set the IV length first */ + ExpectIntEQ(1, 
EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_IVLEN, + ivSz, NULL)); + ExpectIntEQ(1, EVP_DecryptInit_ex(&de[i], NULL, NULL, key, iv)); + + } + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + decryptedtxtSz = len; + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, + SM4_BLOCK_SIZE, tag)); + ExpectIntEQ(1, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + decryptedtxtSz += len; + ExpectIntEQ(ciphertxtSz, decryptedtxtSz); + ExpectIntEQ(0, XMEMCMP(plaintxt, decryptedtxt, decryptedtxtSz)); + + /* modify tag*/ + tag[SM4_BLOCK_SIZE-1]+=0xBB; + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], NULL, &len, aad, aadSz)); + ExpectIntEQ(1, EVP_CIPHER_CTX_ctrl(&de[i], EVP_CTRL_CCM_SET_TAG, + SM4_BLOCK_SIZE, tag)); + /* fail due to wrong tag */ + ExpectIntEQ(1, EVP_DecryptUpdate(&de[i], decryptedtxt, &len, ciphertxt, + ciphertxtSz)); + ExpectIntEQ(0, EVP_DecryptFinal_ex(&de[i], decryptedtxt, &len)); + ExpectIntEQ(0, len); + ExpectIntEQ(wolfSSL_EVP_CIPHER_CTX_cleanup(&de[i]), 1); + } + + res = EXPECT_RESULT(); +#endif /* OPENSSL_EXTRA && WOLFSSL_SM4_CCM */ + return res; +} + + +int test_wolfSSL_EVP_rc4(void) +{ + EXPECT_DECLS; +#if !defined(NO_RC4) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_rc4()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_enc_null(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + ExpectNotNull(wolfSSL_EVP_enc_null()); +#endif + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_rc2_cbc(void) + +{ + EXPECT_DECLS; +#if defined(WOLFSSL_QT) && !defined(NO_WOLFSSL_STUB) + ExpectNull(wolfSSL_EVP_rc2_cbc()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_mdc2(void) +{ + EXPECT_DECLS; +#if !defined(NO_WOLFSSL_STUB) && defined(OPENSSL_ALL) + ExpectNull(wolfSSL_EVP_mdc2()); +#endif + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_evp_cipher.h b/tests/api/test_evp_cipher.h new file mode 100644 index 
000000000..eef58ed12 --- /dev/null +++ b/tests/api/test_evp_cipher.h @@ -0,0 +1,108 @@ +/* test_evp_cipher.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_EVP_CIPHER_H +#define WOLFCRYPT_TEST_EVP_CIPHER_H + +#include + +int test_wolfSSL_EVP_CIPHER_CTX(void); +int test_wolfSSL_EVP_CIPHER_CTX_iv_length(void); +int test_wolfSSL_EVP_CIPHER_CTX_key_length(void); +int test_wolfSSL_EVP_CIPHER_CTX_set_iv(void); +int test_wolfSSL_EVP_get_cipherbynid(void); +int test_wolfSSL_EVP_CIPHER_block_size(void); +int test_wolfSSL_EVP_CIPHER_iv_length(void); +int test_wolfSSL_EVP_CipherUpdate_Null(void); +int test_wolfSSL_EVP_CIPHER_type_string(void); +int test_wolfSSL_EVP_BytesToKey(void); +int test_wolfSSL_EVP_Cipher_extra(void); +int test_wolfSSL_EVP_X_STATE(void); +int test_wolfSSL_EVP_X_STATE_LEN(void); +int test_wolfSSL_EVP_aes_256_gcm(void); +int test_wolfSSL_EVP_aes_192_gcm(void); +int test_wolfSSL_EVP_aes_128_gcm(void); +int test_evp_cipher_aes_gcm(void); +int test_wolfssl_EVP_aes_gcm(void); +int test_wolfssl_EVP_aes_gcm_AAD_2_parts(void); +int test_wolfssl_EVP_aes_gcm_zeroLen(void); +int test_wolfSSL_EVP_aes_256_ccm(void); +int test_wolfSSL_EVP_aes_192_ccm(void); +int 
test_wolfSSL_EVP_aes_128_ccm(void); +int test_wolfssl_EVP_aes_ccm(void); +int test_wolfssl_EVP_aes_ccm_zeroLen(void); +int test_wolfssl_EVP_chacha20(void); +int test_wolfssl_EVP_chacha20_poly1305(void); +int test_wolfssl_EVP_aria_gcm(void); +int test_wolfssl_EVP_sm4_ecb(void); +int test_wolfssl_EVP_sm4_cbc(void); +int test_wolfssl_EVP_sm4_ctr(void); +int test_wolfssl_EVP_sm4_gcm_zeroLen(void); +int test_wolfssl_EVP_sm4_gcm(void); +int test_wolfssl_EVP_sm4_ccm_zeroLen(void); +int test_wolfssl_EVP_sm4_ccm(void); +int test_wolfSSL_EVP_rc4(void); +int test_wolfSSL_EVP_enc_null(void); +int test_wolfSSL_EVP_rc2_cbc(void); +int test_wolfSSL_EVP_mdc2(void); + +#define TEST_EVP_CIPHER_DECLS \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_CTX), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_CTX_iv_length), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_CTX_key_length), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_CTX_set_iv), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_get_cipherbynid), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_block_size), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_iv_length), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CipherUpdate_Null), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_CIPHER_type_string), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_BytesToKey), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_Cipher_extra), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_X_STATE), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_X_STATE_LEN), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_256_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_192_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_128_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_evp_cipher_aes_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aes_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aes_gcm_AAD_2_parts), \ + 
TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aes_gcm_zeroLen), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_256_ccm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_192_ccm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_aes_128_ccm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aes_ccm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aes_ccm_zeroLen), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_chacha20), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_chacha20_poly1305), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_aria_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_ecb), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_cbc), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_ctr), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_gcm_zeroLen), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_gcm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_ccm_zeroLen), \ + TEST_DECL_GROUP("evp_cipher", test_wolfssl_EVP_sm4_ccm), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_rc4), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_enc_null), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_rc2_cbc), \ + TEST_DECL_GROUP("evp_cipher", test_wolfSSL_EVP_mdc2) + +#endif /* WOLFCRYPT_TEST_EVP_CIPHER_H */ diff --git a/tests/api/test_evp_digest.c b/tests/api/test_evp_digest.c new file mode 100644 index 000000000..56bcc41b0 --- /dev/null +++ b/tests/api/test_evp_digest.c @@ -0,0 +1,588 @@ +/* test_evp_digest.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include + +int test_wolfSSL_EVP_shake128(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SHA3) && \ + defined(WOLFSSL_SHAKE128) + const EVP_MD* md = NULL; + + ExpectNotNull(md = EVP_shake128()); + ExpectIntEQ(XSTRNCMP(md, "SHAKE128", XSTRLEN("SHAKE128")), 0); +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_shake256(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SHA3) && \ + defined(WOLFSSL_SHAKE256) + const EVP_MD* md = NULL; + + ExpectNotNull(md = EVP_shake256()); + ExpectIntEQ(XSTRNCMP(md, "SHAKE256", XSTRLEN("SHAKE256")), 0); +#endif + + return EXPECT_RESULT(); +} + +/* + * Testing EVP digest API with SM3 + */ +int test_wolfSSL_EVP_sm3(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_SM3) + EXPECT_DECLS; + const EVP_MD* md = NULL; + EVP_MD_CTX* mdCtx = NULL; + byte data[WC_SM3_BLOCK_SIZE * 4]; + byte hash[WC_SM3_DIGEST_SIZE]; + byte calcHash[WC_SM3_DIGEST_SIZE]; + byte expHash[WC_SM3_DIGEST_SIZE] = { + 0x38, 0x48, 0x15, 0xa7, 0x0e, 0xae, 0x0b, 0x27, + 0x5c, 0xde, 0x9d, 0xa5, 0xd1, 0xa4, 0x30, 0xa1, + 0xca, 0xd4, 0x54, 0x58, 0x44, 0xa2, 0x96, 0x1b, + 0xd7, 0x14, 0x80, 0x3f, 0x80, 0x1a, 0x07, 0xb6 + }; + word32 chunk; + word32 i; + unsigned int sz; + int ret; + + XMEMSET(data, 0, sizeof(data)); + + md = EVP_sm3(); + ExpectTrue(md != NULL); + ExpectIntEQ(XSTRNCMP(md, "SM3", 
XSTRLEN("SM3")), 0); + mdCtx = EVP_MD_CTX_new(); + ExpectTrue(mdCtx != NULL); + + /* Invalid Parameters */ + ExpectIntEQ(EVP_DigestInit(NULL, md), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Valid Parameters */ + ExpectIntEQ(EVP_DigestInit(mdCtx, md), WOLFSSL_SUCCESS); + + ExpectIntEQ(EVP_DigestUpdate(NULL, NULL, 1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, NULL, 1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestUpdate(NULL, data, 1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Valid Parameters */ + ExpectIntEQ(EVP_DigestUpdate(mdCtx, NULL, 0), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE - 2), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_BLOCK_SIZE * 2), + WOLFSSL_SUCCESS); + /* Ensure too many bytes for lengths. */ + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, WC_SM3_PAD_SIZE), + WOLFSSL_SUCCESS); + + /* Invalid Parameters */ + ExpectIntEQ(EVP_DigestFinal(NULL, NULL, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestFinal(mdCtx, NULL, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestFinal(NULL, hash, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestFinal(NULL, hash, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_DigestFinal(mdCtx, NULL, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Valid Parameters */ + ExpectIntEQ(EVP_DigestFinal(mdCtx, hash, NULL), WOLFSSL_SUCCESS); + ExpectBufEQ(hash, expHash, WC_SM3_DIGEST_SIZE); + + /* Chunk tests. 
*/ + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data, sizeof(data)), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestFinal(mdCtx, calcHash, &sz), WOLFSSL_SUCCESS); + ExpectIntEQ(sz, WC_SM3_DIGEST_SIZE); + for (chunk = 1; chunk <= WC_SM3_BLOCK_SIZE + 1; chunk++) { + for (i = 0; i + chunk <= (word32)sizeof(data); i += chunk) { + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data + i, chunk), + WOLFSSL_SUCCESS); + } + if (i < (word32)sizeof(data)) { + ExpectIntEQ(EVP_DigestUpdate(mdCtx, data + i, + (word32)sizeof(data) - i), WOLFSSL_SUCCESS); + } + ExpectIntEQ(EVP_DigestFinal(mdCtx, hash, NULL), WOLFSSL_SUCCESS); + ExpectBufEQ(hash, calcHash, WC_SM3_DIGEST_SIZE); + } + + /* Not testing when the low 32-bit length overflows. */ + + ret = EVP_MD_CTX_cleanup(mdCtx); + ExpectIntEQ(ret, WOLFSSL_SUCCESS); + wolfSSL_EVP_MD_CTX_free(mdCtx); + + res = EXPECT_RESULT(); +#endif + return res; +} /* END test_EVP_sm3 */ + +int test_EVP_blake2(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && (defined(HAVE_BLAKE2) || defined(HAVE_BLAKE2S)) + const EVP_MD* md = NULL; + (void)md; + +#if defined(HAVE_BLAKE2) + ExpectNotNull(md = EVP_blake2b512()); + ExpectIntEQ(XSTRNCMP(md, "BLAKE2b512", XSTRLEN("BLAKE2b512")), 0); +#endif + +#if defined(HAVE_BLAKE2S) + ExpectNotNull(md = EVP_blake2s256()); + ExpectIntEQ(XSTRNCMP(md, "BLAKE2s256", XSTRLEN("BLAKE2s256")), 0); +#endif +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_md4(void) +{ + EXPECT_DECLS; +#if !defined(NO_MD4) && defined(OPENSSL_ALL) + ExpectNotNull(wolfSSL_EVP_md4()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_ripemd160(void) +{ + EXPECT_DECLS; +#if !defined(NO_WOLFSSL_STUB) && defined(OPENSSL_ALL) + ExpectNull(wolfSSL_EVP_ripemd160()); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_get_digestbynid(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL +#ifndef NO_MD5 + ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_md5)); +#endif +#ifndef NO_SHA + ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_sha1)); +#endif 
+#ifndef NO_SHA256 + ExpectNotNull(wolfSSL_EVP_get_digestbynid(NID_sha256)); +#endif + ExpectNull(wolfSSL_EVP_get_digestbynid(0)); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_Digest(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) && !defined(NO_PWDBASED) + const char* in = "abc"; + int inLen = (int)XSTRLEN(in); + byte out[WC_SHA256_DIGEST_SIZE]; + unsigned int outLen; + const char* expOut = + "\xBA\x78\x16\xBF\x8F\x01\xCF\xEA\x41\x41\x40\xDE\x5D\xAE\x22" + "\x23\xB0\x03\x61\xA3\x96\x17\x7A\x9C\xB4\x10\xFF\x61\xF2\x00" + "\x15\xAD"; + + ExpectIntEQ(wolfSSL_EVP_Digest((unsigned char*)in, inLen, out, &outLen, + "SHA256", NULL), 1); + ExpectIntEQ(outLen, WC_SHA256_DIGEST_SIZE); + ExpectIntEQ(XMEMCMP(out, expOut, WC_SHA256_DIGEST_SIZE), 0); +#endif /* OPEN_EXTRA && ! NO_SHA256 */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_Digest_all(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + const char* digests[] = { +#ifndef NO_MD5 + "MD5", +#endif +#ifndef NO_SHA + "SHA", +#endif +#ifdef WOLFSSL_SHA224 + "SHA224", +#endif +#ifndef NO_SHA256 + "SHA256", +#endif +#ifdef WOLFSSL_SHA384 + "SHA384", +#endif +#ifdef WOLFSSL_SHA512 + "SHA512", +#endif +#if defined(WOLFSSL_SHA512) && !defined(WOLFSSL_NOSHA512_224) + "SHA512-224", +#endif +#if defined(WOLFSSL_SHA512) && !defined(WOLFSSL_NOSHA512_256) + "SHA512-256", +#endif +#ifdef WOLFSSL_SHA3 +#ifndef WOLFSSL_NOSHA3_224 + "SHA3-224", +#endif +#ifndef WOLFSSL_NOSHA3_256 + "SHA3-256", +#endif + "SHA3-384", +#ifndef WOLFSSL_NOSHA3_512 + "SHA3-512", +#endif +#endif /* WOLFSSL_SHA3 */ + NULL + }; + const char** d; + const unsigned char in[] = "abc"; + int inLen = XSTR_SIZEOF(in); + byte out[WC_MAX_DIGEST_SIZE]; + unsigned int outLen; + + for (d = digests; *d != NULL; d++) { + ExpectIntEQ(EVP_Digest(in, inLen, out, &outLen, *d, NULL), 1); + ExpectIntGT(outLen, 0); + ExpectIntEQ(EVP_MD_size(*d), outLen); + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_DigestFinal_ex(void) 
+{ + EXPECT_DECLS; +#if !defined(NO_SHA256) && defined(OPENSSL_ALL) + WOLFSSL_EVP_MD_CTX mdCtx; + unsigned int s = 0; + unsigned char md[WC_SHA256_DIGEST_SIZE]; + unsigned char md2[WC_SHA256_DIGEST_SIZE]; + + /* Bad Case */ +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && \ + (HAVE_FIPS_VERSION > 2)) + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md, &s), 0); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#else + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md, &s), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); + +#endif + + /* Good Case */ + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, EVP_sha256()), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_DigestFinal_ex(&mdCtx, md2, &s), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_DigestFinalXOF(void) +{ + EXPECT_DECLS; +#if defined(WOLFSSL_SHA3) && defined(WOLFSSL_SHAKE256) && defined(OPENSSL_ALL) + WOLFSSL_EVP_MD_CTX mdCtx; + unsigned char shake[256]; + unsigned char zeros[10]; + unsigned char data[] = "Test data"; + unsigned int sz; + + XMEMSET(zeros, 0, sizeof(zeros)); + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake256()), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_MD_flags(EVP_shake256()), EVP_MD_FLAG_XOF); + ExpectIntEQ(EVP_MD_flags(EVP_sha3_256()), 0); + ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), WOLFSSL_SUCCESS); + XMEMSET(shake, 0, sizeof(shake)); + ExpectIntEQ(EVP_DigestFinalXOF(&mdCtx, shake, 10), WOLFSSL_SUCCESS); + + /* make sure was only size of 10 */ + ExpectIntEQ(XMEMCMP(&shake[11], zeros, 10), 0); + ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake256()), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), 
WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestFinal(&mdCtx, shake, &sz), WOLFSSL_SUCCESS); + ExpectIntEQ(sz, 32); + ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); + + #if defined(WOLFSSL_SHAKE128) + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(EVP_DigestInit(&mdCtx, EVP_shake128()), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestUpdate(&mdCtx, data, 1), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_DigestFinal(&mdCtx, shake, &sz), WOLFSSL_SUCCESS); + ExpectIntEQ(sz, 16); + ExpectIntEQ(EVP_MD_CTX_cleanup(&mdCtx), WOLFSSL_SUCCESS); + #endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_MD_nid(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL +#ifndef NO_MD5 + ExpectIntEQ(EVP_MD_nid(EVP_md5()), NID_md5); +#endif +#ifndef NO_SHA + ExpectIntEQ(EVP_MD_nid(EVP_sha1()), NID_sha1); +#endif +#ifndef NO_SHA256 + ExpectIntEQ(EVP_MD_nid(EVP_sha256()), NID_sha256); +#endif + ExpectIntEQ(EVP_MD_nid(NULL), NID_undef); +#endif + return EXPECT_RESULT(); +} + +#if defined(OPENSSL_EXTRA) +static void list_md_fn(const EVP_MD* m, const char* from, + const char* to, void* arg) +{ + const char* mn; + BIO *bio; + + (void) from; + (void) to; + (void) arg; + (void) mn; + (void) bio; + + if (!m) { + /* alias */ + AssertNull(m); + AssertNotNull(to); + } + else { + AssertNotNull(m); + AssertNull(to); + } + + AssertNotNull(from); + +#if !defined(NO_FILESYSTEM) && defined(DEBUG_WOLFSSL_VERBOSE) + mn = EVP_get_digestbyname(from); + /* print to stderr */ + AssertNotNull(arg); + + bio = BIO_new(BIO_s_file()); + BIO_set_fp(bio, arg, BIO_NOCLOSE); + BIO_printf(bio, "Use %s message digest algorithm\n", mn); + BIO_free(bio); +#endif +} +#endif + +int test_EVP_MD_do_all(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) + EVP_MD_do_all(NULL, stderr); + + EVP_MD_do_all(list_md_fn, stderr); + + res = TEST_SUCCESS; +#endif + + return res; +} + +int test_wolfSSL_EVP_MD_size(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + WOLFSSL_EVP_MD_CTX mdCtx; + +#ifdef WOLFSSL_SHA3 +#ifndef 
WOLFSSL_NOSHA3_224 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-224"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_224_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_224_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif +#ifndef WOLFSSL_NOSHA3_256 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-256"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_256_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_256_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-384"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_384_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_384_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#ifndef WOLFSSL_NOSHA3_512 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA3-512"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA3_512_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA3_512_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif +#endif /* WOLFSSL_SHA3 */ + +#ifndef NO_SHA256 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA256"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA256_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA256_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA256_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA256_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#endif + +#ifndef NO_MD5 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "MD5"), 1); + 
ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_MD5_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_MD5_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_MD5_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_MD5_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#endif + +#ifdef WOLFSSL_SHA224 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA224"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA224_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA224_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA224_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA224_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#endif + +#ifdef WOLFSSL_SHA384 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA384"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA384_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA384_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA384_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA384_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#endif + +#ifdef WOLFSSL_SHA512 + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA512"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA512_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA512_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA512_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA512_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + +#endif + +#ifndef NO_SHA + 
wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, "SHA1"), 1); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_block_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), + WC_SHA_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_size(&mdCtx), WC_SHA_DIGEST_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), WC_SHA_BLOCK_SIZE); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif + /* error case */ + wolfSSL_EVP_MD_CTX_init(&mdCtx); + + ExpectIntEQ(wolfSSL_EVP_DigestInit(&mdCtx, ""), 0); + ExpectIntEQ(wolfSSL_EVP_MD_size(wolfSSL_EVP_MD_CTX_md(&mdCtx)), 0); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_block_size(&mdCtx), 0); + /* Cleanup is valid on uninit'ed struct */ + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif /* OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_evp_digest.h b/tests/api/test_evp_digest.h new file mode 100644 index 000000000..e6989139e --- /dev/null +++ b/tests/api/test_evp_digest.h @@ -0,0 +1,58 @@ +/* test_evp_digest.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_EVP_DIGEST_H +#define WOLFCRYPT_TEST_EVP_DIGEST_H + +#include + +int test_wolfSSL_EVP_shake128(void); +int test_wolfSSL_EVP_shake256(void); +int test_wolfSSL_EVP_sm3(void); +int test_EVP_blake2(void); +int test_wolfSSL_EVP_md4(void); +int test_wolfSSL_EVP_ripemd160(void); +int test_wolfSSL_EVP_get_digestbynid(void); +int test_wolfSSL_EVP_Digest(void); +int test_wolfSSL_EVP_Digest_all(void); +int test_wolfSSL_EVP_DigestFinal_ex(void); +int test_wolfSSL_EVP_DigestFinalXOF(void); +int test_wolfSSL_EVP_MD_nid(void); +int test_EVP_MD_do_all(void); +int test_wolfSSL_EVP_MD_size(void); + +#define TEST_EVP_DIGEST_DECLS \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_shake128), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_shake256), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_sm3), \ + TEST_DECL_GROUP("evp_digest", test_EVP_blake2), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_md4), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_ripemd160), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_get_digestbynid), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_Digest), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_Digest_all), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_DigestFinal_ex), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_DigestFinalXOF), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_MD_nid), \ + TEST_DECL_GROUP("evp_digest", test_EVP_MD_do_all), \ + TEST_DECL_GROUP("evp_digest", test_wolfSSL_EVP_MD_size) + +#endif /* 
WOLFCRYPT_TEST_EVP_DIGEST_H */ diff --git a/tests/api/test_evp_pkey.c b/tests/api/test_evp_pkey.c new file mode 100644 index 000000000..35f1c720b --- /dev/null +++ b/tests/api/test_evp_pkey.c @@ -0,0 +1,2359 @@ +/* test_evp_pkey.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include +#include + + +int test_wolfSSL_EVP_PKEY_CTX_new_id(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_ENGINE* e = NULL; + int id = 0; + EVP_PKEY_CTX *ctx = NULL; + + ExpectNotNull(ctx = wolfSSL_EVP_PKEY_CTX_new_id(id, e)); + + EVP_PKEY_CTX_free(ctx); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_PKEY* pkey = NULL; + EVP_PKEY_CTX* ctx = NULL; + int bits = 2048; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + + ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, bits), + WOLFSSL_SUCCESS); + + EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_QT_EVP_PKEY_CTX_free(void) +{ + EXPECT_DECLS; +#if 
defined(OPENSSL_EXTRA) && defined(OPENSSL_ALL) + EVP_PKEY* pkey = NULL; + EVP_PKEY_CTX* ctx = NULL; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + +#if defined(OPENSSL_VERSION_NUMBER) && OPENSSL_VERSION_NUMBER >= 0x10100000L + /* void */ + EVP_PKEY_CTX_free(ctx); +#else + /* int */ + ExpectIntEQ(EVP_PKEY_CTX_free(ctx), WOLFSSL_SUCCESS); +#endif + + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_up_ref(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) + EVP_PKEY* pkey; + + pkey = EVP_PKEY_new(); + ExpectNotNull(pkey); + ExpectIntEQ(EVP_PKEY_up_ref(NULL), 0); + ExpectIntEQ(EVP_PKEY_up_ref(pkey), 1); + EVP_PKEY_free(pkey); + ExpectIntEQ(EVP_PKEY_up_ref(pkey), 1); + EVP_PKEY_free(pkey); + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_base_id(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + ExpectIntEQ(wolfSSL_EVP_PKEY_base_id(NULL), NID_undef); + + ExpectIntEQ(wolfSSL_EVP_PKEY_base_id(pkey), EVP_PKEY_RSA); + + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_PKEY_id(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + ExpectIntEQ(wolfSSL_EVP_PKEY_id(NULL), 0); + + ExpectIntEQ(wolfSSL_EVP_PKEY_id(pkey), EVP_PKEY_RSA); + + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_MD_pkey_type(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + const WOLFSSL_EVP_MD* md; + +#ifndef NO_MD5 + ExpectNotNull(md = EVP_md5()); + ExpectIntEQ(EVP_MD_pkey_type(md), NID_md5WithRSAEncryption); +#endif +#ifndef NO_SHA + ExpectNotNull(md = EVP_sha1()); + ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha1WithRSAEncryption); +#endif +#ifdef WOLFSSL_SHA224 + ExpectNotNull(md = EVP_sha224()); + ExpectIntEQ(EVP_MD_pkey_type(md), 
NID_sha224WithRSAEncryption); +#endif + ExpectNotNull(md = EVP_sha256()); + ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha256WithRSAEncryption); +#ifdef WOLFSSL_SHA384 + ExpectNotNull(md = EVP_sha384()); + ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha384WithRSAEncryption); +#endif +#ifdef WOLFSSL_SHA512 + ExpectNotNull(md = EVP_sha512()); + ExpectIntEQ(EVP_MD_pkey_type(md), NID_sha512WithRSAEncryption); +#endif +#endif + return EXPECT_RESULT(); +} + +#ifdef OPENSSL_EXTRA +static int test_hmac_signing(const WOLFSSL_EVP_MD *type, const byte* testKey, + size_t testKeySz, const char* testData, size_t testDataSz, + const byte* testResult, size_t testResultSz) +{ + EXPECT_DECLS; + unsigned char check[WC_MAX_DIGEST_SIZE]; + size_t checkSz = 0; + WOLFSSL_EVP_PKEY* key = NULL; + WOLFSSL_EVP_MD_CTX mdCtx; + + ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, + testKey, (int)testKeySz)); + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, type, NULL, key), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, + (unsigned int)testDataSz), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + ExpectIntEQ((int)checkSz, (int)testResultSz); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz,(int)testResultSz); + ExpectIntEQ(XMEMCMP(testResult, check, testResultSz), 0); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, type, NULL, key), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, + (unsigned int)testDataSz), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, testResult, checkSz), 1); + + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, type, NULL, key), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 
4), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + ExpectIntEQ((int)checkSz, (int)testResultSz); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz,(int)testResultSz); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, + (unsigned int)testDataSz - 4), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz,(int)testResultSz); + ExpectIntEQ(XMEMCMP(testResult, check, testResultSz), 0); + + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, type, NULL, key), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, + (unsigned int)testDataSz - 4), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, testResult, checkSz), 1); + + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); + + wolfSSL_EVP_PKEY_free(key); + + return EXPECT_RESULT(); +} +#endif + +int test_wolfSSL_EVP_MD_hmac_signing(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + static const unsigned char testKey[] = + { + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b + }; + static const char testData[] = "Hi There"; +#ifdef WOLFSSL_SHA224 + static const unsigned char testResultSha224[] = + { + 0x89, 0x6f, 0xb1, 0x12, 0x8a, 0xbb, 0xdf, 0x19, + 0x68, 0x32, 0x10, 0x7c, 0xd4, 0x9d, 0xf3, 0x3f, + 0x47, 0xb4, 0xb1, 0x16, 0x99, 0x12, 0xba, 0x4f, + 0x53, 0x68, 0x4b, 0x22 + }; +#endif +#ifndef NO_SHA256 + static const unsigned char testResultSha256[] = + { + 0xb0, 0x34, 0x4c, 0x61, 0xd8, 0xdb, 0x38, 0x53, + 0x5c, 0xa8, 0xaf, 0xce, 0xaf, 0x0b, 0xf1, 0x2b, + 0x88, 0x1d, 0xc2, 0x00, 0xc9, 0x83, 0x3d, 0xa7, + 0x26, 0xe9, 0x37, 0x6c, 0x2e, 0x32, 0xcf, 0xf7 + }; +#endif +#ifdef 
WOLFSSL_SHA384 + static const unsigned char testResultSha384[] = + { + 0xaf, 0xd0, 0x39, 0x44, 0xd8, 0x48, 0x95, 0x62, + 0x6b, 0x08, 0x25, 0xf4, 0xab, 0x46, 0x90, 0x7f, + 0x15, 0xf9, 0xda, 0xdb, 0xe4, 0x10, 0x1e, 0xc6, + 0x82, 0xaa, 0x03, 0x4c, 0x7c, 0xeb, 0xc5, 0x9c, + 0xfa, 0xea, 0x9e, 0xa9, 0x07, 0x6e, 0xde, 0x7f, + 0x4a, 0xf1, 0x52, 0xe8, 0xb2, 0xfa, 0x9c, 0xb6 + }; +#endif +#ifdef WOLFSSL_SHA512 + static const unsigned char testResultSha512[] = + { + 0x87, 0xaa, 0x7c, 0xde, 0xa5, 0xef, 0x61, 0x9d, + 0x4f, 0xf0, 0xb4, 0x24, 0x1a, 0x1d, 0x6c, 0xb0, + 0x23, 0x79, 0xf4, 0xe2, 0xce, 0x4e, 0xc2, 0x78, + 0x7a, 0xd0, 0xb3, 0x05, 0x45, 0xe1, 0x7c, 0xde, + 0xda, 0xa8, 0x33, 0xb7, 0xd6, 0xb8, 0xa7, 0x02, + 0x03, 0x8b, 0x27, 0x4e, 0xae, 0xa3, 0xf4, 0xe4, + 0xbe, 0x9d, 0x91, 0x4e, 0xeb, 0x61, 0xf1, 0x70, + 0x2e, 0x69, 0x6c, 0x20, 0x3a, 0x12, 0x68, 0x54 + }; +#endif +#ifdef WOLFSSL_SHA3 + #ifndef WOLFSSL_NOSHA3_224 + static const unsigned char testResultSha3_224[] = + { + 0x3b, 0x16, 0x54, 0x6b, 0xbc, 0x7b, 0xe2, 0x70, + 0x6a, 0x03, 0x1d, 0xca, 0xfd, 0x56, 0x37, 0x3d, + 0x98, 0x84, 0x36, 0x76, 0x41, 0xd8, 0xc5, 0x9a, + 0xf3, 0xc8, 0x60, 0xf7 + }; + #endif + #ifndef WOLFSSL_NOSHA3_256 + static const unsigned char testResultSha3_256[] = + { + 0xba, 0x85, 0x19, 0x23, 0x10, 0xdf, 0xfa, 0x96, + 0xe2, 0xa3, 0xa4, 0x0e, 0x69, 0x77, 0x43, 0x51, + 0x14, 0x0b, 0xb7, 0x18, 0x5e, 0x12, 0x02, 0xcd, + 0xcc, 0x91, 0x75, 0x89, 0xf9, 0x5e, 0x16, 0xbb + }; + #endif + #ifndef WOLFSSL_NOSHA3_384 + static const unsigned char testResultSha3_384[] = + { + 0x68, 0xd2, 0xdc, 0xf7, 0xfd, 0x4d, 0xdd, 0x0a, + 0x22, 0x40, 0xc8, 0xa4, 0x37, 0x30, 0x5f, 0x61, + 0xfb, 0x73, 0x34, 0xcf, 0xb5, 0xd0, 0x22, 0x6e, + 0x1b, 0xc2, 0x7d, 0xc1, 0x0a, 0x2e, 0x72, 0x3a, + 0x20, 0xd3, 0x70, 0xb4, 0x77, 0x43, 0x13, 0x0e, + 0x26, 0xac, 0x7e, 0x3d, 0x53, 0x28, 0x86, 0xbd + }; + #endif + #ifndef WOLFSSL_NOSHA3_512 + static const unsigned char testResultSha3_512[] = + { + 0xeb, 0x3f, 0xbd, 0x4b, 0x2e, 0xaa, 0xb8, 0xf5, + 
0xc5, 0x04, 0xbd, 0x3a, 0x41, 0x46, 0x5a, 0xac, + 0xec, 0x15, 0x77, 0x0a, 0x7c, 0xab, 0xac, 0x53, + 0x1e, 0x48, 0x2f, 0x86, 0x0b, 0x5e, 0xc7, 0xba, + 0x47, 0xcc, 0xb2, 0xc6, 0xf2, 0xaf, 0xce, 0x8f, + 0x88, 0xd2, 0x2b, 0x6d, 0xc6, 0x13, 0x80, 0xf2, + 0x3a, 0x66, 0x8f, 0xd3, 0x88, 0x8b, 0xb8, 0x05, + 0x37, 0xc0, 0xa0, 0xb8, 0x64, 0x07, 0x68, 0x9e + }; + #endif +#endif + +#ifndef NO_SHA256 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha256(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha256, + sizeof(testResultSha256)), TEST_SUCCESS); +#endif +#ifdef WOLFSSL_SHA224 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha224(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha224, + sizeof(testResultSha224)), TEST_SUCCESS); +#endif +#ifdef WOLFSSL_SHA384 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha384(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha384, + sizeof(testResultSha384)), TEST_SUCCESS); +#endif +#ifdef WOLFSSL_SHA512 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha512(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha512, + sizeof(testResultSha512)), TEST_SUCCESS); +#endif +#ifdef WOLFSSL_SHA3 + #ifndef WOLFSSL_NOSHA3_224 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_224(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_224, + sizeof(testResultSha3_224)), TEST_SUCCESS); + #endif + #ifndef WOLFSSL_NOSHA3_256 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_256(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_256, + sizeof(testResultSha3_256)), TEST_SUCCESS); + #endif + #ifndef WOLFSSL_NOSHA3_384 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_384(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), testResultSha3_384, + sizeof(testResultSha3_384)), TEST_SUCCESS); + #endif + #ifndef WOLFSSL_NOSHA3_512 + ExpectIntEQ(test_hmac_signing(wolfSSL_EVP_sha3_512(), testKey, + sizeof(testKey), testData, XSTRLEN(testData), 
testResultSha3_512, + sizeof(testResultSha3_512)), TEST_SUCCESS); + #endif +#endif +#endif /* OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_new_mac_key(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_EXTRA + static const unsigned char pw[] = "password"; + static const int pwSz = sizeof(pw) - 1; + size_t checkPwSz = 0; + const unsigned char* checkPw = NULL; + WOLFSSL_EVP_PKEY* key = NULL; + + ExpectNull(key = wolfSSL_EVP_PKEY_new_mac_key(0, NULL, pw, pwSz)); + ExpectNull(key = wolfSSL_EVP_PKEY_new_mac_key(0, NULL, NULL, pwSz)); + + ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, pw, + pwSz)); + if (key != NULL) { + ExpectIntEQ(key->type, EVP_PKEY_HMAC); + ExpectIntEQ(key->save_type, EVP_PKEY_HMAC); + ExpectIntEQ(key->pkey_sz, pwSz); + ExpectIntEQ(XMEMCMP(key->pkey.ptr, pw, pwSz), 0); + } + ExpectNotNull(checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz)); + ExpectIntEQ((int)checkPwSz, pwSz); + ExpectIntEQ(XMEMCMP(checkPw, pw, pwSz), 0); + wolfSSL_EVP_PKEY_free(key); + key = NULL; + + ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, pw, + 0)); + ExpectIntEQ(key->pkey_sz, 0); + if (EXPECT_SUCCESS()) { + /* Allocation for key->pkey.ptr may fail - OK key len is 0 */ + checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz); + } + ExpectTrue((checkPwSz == 0) || (checkPw != NULL)); + ExpectIntEQ((int)checkPwSz, 0); + wolfSSL_EVP_PKEY_free(key); + key = NULL; + + ExpectNotNull(key = wolfSSL_EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, NULL, + 0)); + ExpectIntEQ(key->pkey_sz, 0); + if (EXPECT_SUCCESS()) { + /* Allocation for key->pkey.ptr may fail - OK key len is 0 */ + checkPw = wolfSSL_EVP_PKEY_get0_hmac(key, &checkPwSz); + } + ExpectTrue((checkPwSz == 0) || (checkPw != NULL)); + ExpectIntEQ((int)checkPwSz, 0); + wolfSSL_EVP_PKEY_free(key); + key = NULL; +#endif /* OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_hkdf(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && 
defined(HAVE_HKDF) + EVP_PKEY_CTX* ctx = NULL; + byte salt[] = {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F}; + byte key[] = {0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F}; + byte info[] = {0X01, 0x02, 0x03, 0x04, 0x05}; + byte info2[] = {0X06, 0x07, 0x08, 0x09, 0x0A}; + byte outKey[34]; + size_t outKeySz = sizeof(outKey); + /* These expected outputs were gathered by running the same test below using + * OpenSSL. */ + const byte extractAndExpand[] = { + 0x8B, 0xEB, 0x90, 0xA9, 0x04, 0xFF, 0x05, 0x10, 0xE4, 0xB5, 0xB1, 0x10, + 0x31, 0x34, 0xFF, 0x07, 0x5B, 0xE3, 0xC6, 0x93, 0xD4, 0xF8, 0xC7, 0xEE, + 0x96, 0xDA, 0x78, 0x7A, 0xE2, 0x9A, 0x2D, 0x05, 0x4B, 0xF6 + }; + const byte extractOnly[] = { + 0xE7, 0x6B, 0x9E, 0x0F, 0xE4, 0x02, 0x1D, 0x62, 0xEA, 0x97, 0x74, 0x5E, + 0xF4, 0x3C, 0x65, 0x4D, 0xC1, 0x46, 0x98, 0xAA, 0x79, 0x9A, 0xCB, 0x9C, + 0xCC, 0x3E, 0x7F, 0x2A, 0x2B, 0x41, 0xA1, 0x9E + }; + const byte expandOnly[] = { + 0xFF, 0x29, 0x29, 0x56, 0x9E, 0xA7, 0x66, 0x02, 0xDB, 0x4F, 0xDB, 0x53, + 0x7D, 0x21, 0x67, 0x52, 0xC3, 0x0E, 0xF3, 0xFC, 0x71, 0xCE, 0x67, 0x2B, + 0xEA, 0x3B, 0xE9, 0xFC, 0xDD, 0xC8, 0xCC, 0xB7, 0x42, 0x74 + }; + const byte extractAndExpandAddInfo[] = { + 0x5A, 0x74, 0x79, 0x83, 0xA3, 0xA4, 0x2E, 0xB7, 0xD4, 0x08, 0xC2, 0x6A, + 0x2F, 0xA5, 0xE3, 0x4E, 0xF1, 0xF4, 0x87, 0x3E, 0xA6, 0xC7, 0x88, 0x45, + 0xD7, 0xE2, 0x15, 0xBC, 0xB8, 0x10, 0xEF, 0x6C, 0x4D, 0x7A + }; + + ExpectNotNull((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_HKDF, NULL))); + ExpectIntEQ(EVP_PKEY_derive_init(ctx), WOLFSSL_SUCCESS); + /* NULL ctx. */ + ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(NULL, EVP_sha256()), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* NULL md. */ + ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(ctx, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_CTX_set_hkdf_md(ctx, EVP_sha256()), WOLFSSL_SUCCESS); + /* NULL ctx. 
*/ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(NULL, salt, sizeof(salt)), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* NULL salt is ok. */ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, NULL, sizeof(salt)), + WOLFSSL_SUCCESS); + /* Salt length <= 0. */ + /* Length 0 salt is ok. */ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, 0), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, -1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_salt(ctx, salt, sizeof(salt)), + WOLFSSL_SUCCESS); + /* NULL ctx. */ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(NULL, key, sizeof(key)), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* NULL key. */ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, NULL, sizeof(key)), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Key length <= 0 */ + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, 0), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, -1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_CTX_set1_hkdf_key(ctx, key, sizeof(key)), + WOLFSSL_SUCCESS); + /* NULL ctx. */ + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(NULL, info, sizeof(info)), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* NULL info is ok. */ + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, NULL, sizeof(info)), + WOLFSSL_SUCCESS); + /* Info length <= 0 */ + /* Length 0 info is ok. */ + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, 0), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, -1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info, sizeof(info)), + WOLFSSL_SUCCESS); + /* NULL ctx. */ + ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(NULL, EVP_PKEY_HKDEF_MODE_EXTRACT_ONLY), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Extract and expand (default). */ + ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); + ExpectIntEQ(outKeySz, sizeof(extractAndExpand)); + ExpectIntEQ(XMEMCMP(outKey, extractAndExpand, outKeySz), 0); + /* Extract only. 
*/ + ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, EVP_PKEY_HKDEF_MODE_EXTRACT_ONLY), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); + ExpectIntEQ(outKeySz, sizeof(extractOnly)); + ExpectIntEQ(XMEMCMP(outKey, extractOnly, outKeySz), 0); + outKeySz = sizeof(outKey); + /* Expand only. */ + ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, EVP_PKEY_HKDEF_MODE_EXPAND_ONLY), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); + ExpectIntEQ(outKeySz, sizeof(expandOnly)); + ExpectIntEQ(XMEMCMP(outKey, expandOnly, outKeySz), 0); + outKeySz = sizeof(outKey); + /* Extract and expand with appended additional info. */ + ExpectIntEQ(EVP_PKEY_CTX_add1_hkdf_info(ctx, info2, sizeof(info2)), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_hkdf_mode(ctx, + EVP_PKEY_HKDEF_MODE_EXTRACT_AND_EXPAND), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_derive(ctx, outKey, &outKeySz), WOLFSSL_SUCCESS); + ExpectIntEQ(outKeySz, sizeof(extractAndExpandAddInfo)); + ExpectIntEQ(XMEMCMP(outKey, extractAndExpandAddInfo, outKeySz), 0); + + EVP_PKEY_CTX_free(ctx); +#endif /* OPENSSL_EXTRA && HAVE_HKDF */ + return EXPECT_RESULT(); +} + + +int test_wolfSSL_EVP_PBE_scrypt(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_SCRYPT) && defined(HAVE_PBKDF2) && \ + (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 5)) +#if !defined(NO_PWDBASED) && !defined(NO_SHA256) + int ret; + + const char pwd[] = {'p','a','s','s','w','o','r','d'}; + int pwdlen = sizeof(pwd); + const byte salt[] = {'N','a','C','l'}; + int saltlen = sizeof(salt); + byte key[80]; + word64 numOvr32 = (word64)INT32_MAX + 1; + + /* expected derived key for N:16, r:1, p:1 */ + const byte expectedKey[] = { + 0xAE, 0xC6, 0xB7, 0x48, 0x3E, 0xD2, 0x6E, 0x08, 0x80, 0x2B, + 0x41, 0xF4, 0x03, 0x20, 0x86, 0xA0, 0xE8, 0x86, 0xBE, 0x7A, + 0xC4, 0x8F, 0xCF, 0xD9, 0x2F, 0xF0, 0xCE, 0xF8, 0x10, 0x97, + 0x52, 0xF4, 0xAC, 0x74, 0xB0, 0x77, 0x26, 0x32, 0x56, 0xA6, + 0x5A, 0x99, 
0x70, 0x1B, 0x7A, 0x30, 0x4D, 0x46, 0x61, 0x1C,
+ 0x8A, 0xA3, 0x91, 0xE7, 0x99, 0xCE, 0x10, 0xA2, 0x77, 0x53,
+ 0xE7, 0xE9, 0xC0, 0x9A};
+
+ /* N r p mx key keylen */
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 0, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* N must be greater than 1 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 3, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* N must be power of 2 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 0, 1, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* r must be greater than 0 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 0, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* p must be greater than 0 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, key, 0);
+ ExpectIntEQ(ret, 0); /* keylen must be greater than 0 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 9, 1, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* r must be smaller than 9 */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, NULL, 64);
+ ExpectIntEQ(ret, 1); /* should succeed if key is NULL */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 1); /* should succeed */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, numOvr32, 1, 0,
+ key, 64);
+ ExpectIntEQ(ret, 0); /* should fail since r is greater than INT32_MAX */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 2, 1, numOvr32, 0,
+ key, 64);
+ ExpectIntEQ(ret, 0); /* should fail since p is greater than INT32_MAX */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, NULL, 0, 2, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 1); /* should succeed even if salt is NULL */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, NULL, 4, 2, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 0); /* if salt is NULL, saltlen must be 0, otherwise fail*/
+
+ ret = EVP_PBE_scrypt(NULL, 0, salt, saltlen, 2, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 1); /* should succeed if pwd is NULL and pwdlen is 0*/
+
+ ret = EVP_PBE_scrypt(NULL, 4, salt, saltlen, 2, 1, 1, 0,
key, 64);
+ ExpectIntEQ(ret, 0); /* if pwd is NULL, pwdlen must be 0 */
+
+ ret = EVP_PBE_scrypt(NULL, 0, NULL, 0, 2, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 1); /* should succeed even if both pwd and salt are NULL */
+
+ ret = EVP_PBE_scrypt(pwd, pwdlen, salt, saltlen, 16, 1, 1, 0, key, 64);
+ ExpectIntEQ(ret, 1);
+
+ ret = XMEMCMP(expectedKey, key, sizeof(expectedKey));
+ ExpectIntEQ(ret, 0); /* derived key must be the same as expected key */
+#endif /* !NO_PWDBASED && !NO_SHA256 */
+#endif /* OPENSSL_EXTRA && HAVE_SCRYPT && HAVE_PBKDF2 */
+ return EXPECT_RESULT();
+}
+
+int test_EVP_PKEY_cmp(void)
+{
+ EXPECT_DECLS;
+#if defined(OPENSSL_EXTRA)
+ EVP_PKEY *a = NULL;
+ EVP_PKEY *b = NULL;
+ const unsigned char *in;
+
+#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048)
+ in = client_key_der_2048;
+ ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL,
+ &in, (long)sizeof_client_key_der_2048));
+ in = client_key_der_2048;
+ ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL,
+ &in, (long)sizeof_client_key_der_2048));
+
+ /* Test success case RSA */
+#if defined(WOLFSSL_ERROR_CODE_OPENSSL)
+ ExpectIntEQ(EVP_PKEY_cmp(a, b), 1);
+#else
+ ExpectIntEQ(EVP_PKEY_cmp(a, b), 0);
+#endif /* WOLFSSL_ERROR_CODE_OPENSSL */
+
+ EVP_PKEY_free(b);
+ b = NULL;
+ EVP_PKEY_free(a);
+ a = NULL;
+#endif
+
+#if defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256)
+ in = ecc_clikey_der_256;
+ ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL,
+ &in, (long)sizeof_ecc_clikey_der_256));
+ in = ecc_clikey_der_256;
+ ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL,
+ &in, (long)sizeof_ecc_clikey_der_256));
+
+ /* Test success case ECC */
+#if defined(WOLFSSL_ERROR_CODE_OPENSSL)
+ ExpectIntEQ(EVP_PKEY_cmp(a, b), 1);
+#else
+ ExpectIntEQ(EVP_PKEY_cmp(a, b), 0);
+#endif /* WOLFSSL_ERROR_CODE_OPENSSL */
+
+ EVP_PKEY_free(b);
+ b = NULL;
+ EVP_PKEY_free(a);
+ a = NULL;
+#endif
+
+ /* Test failure cases */
+#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048) && \
+ defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) + + in = client_key_der_2048; + ExpectNotNull(a = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, + &in, (long)sizeof_client_key_der_2048)); + in = ecc_clikey_der_256; + ExpectNotNull(b = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, + &in, (long)sizeof_ecc_clikey_der_256)); + +#if defined(WOLFSSL_ERROR_CODE_OPENSSL) + ExpectIntEQ(EVP_PKEY_cmp(a, b), -1); +#else + ExpectIntNE(EVP_PKEY_cmp(a, b), 0); +#endif /* WOLFSSL_ERROR_CODE_OPENSSL */ + EVP_PKEY_free(b); + b = NULL; + EVP_PKEY_free(a); + a = NULL; +#endif + + /* invalid or empty failure cases */ + a = EVP_PKEY_new(); + b = EVP_PKEY_new(); +#if defined(WOLFSSL_ERROR_CODE_OPENSSL) + ExpectIntEQ(EVP_PKEY_cmp(NULL, NULL), 0); + ExpectIntEQ(EVP_PKEY_cmp(a, NULL), 0); + ExpectIntEQ(EVP_PKEY_cmp(NULL, b), 0); +#ifdef NO_RSA + /* Type check will fail since RSA is the default EVP key type */ + ExpectIntEQ(EVP_PKEY_cmp(a, b), -2); +#else + ExpectIntEQ(EVP_PKEY_cmp(a, b), 0); +#endif +#else + ExpectIntNE(EVP_PKEY_cmp(NULL, NULL), 0); + ExpectIntNE(EVP_PKEY_cmp(a, NULL), 0); + ExpectIntNE(EVP_PKEY_cmp(NULL, b), 0); + ExpectIntNE(EVP_PKEY_cmp(a, b), 0); +#endif + EVP_PKEY_free(b); + EVP_PKEY_free(a); + + (void)in; +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_set1_get1_DSA(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && !defined (NO_DSA) && !defined(HAVE_SELFTEST) && \ + defined(WOLFSSL_KEY_GEN) + DSA *dsa = NULL; + DSA *setDsa = NULL; + EVP_PKEY *pkey = NULL; + EVP_PKEY *set1Pkey = NULL; + + SHA_CTX sha; + byte signature[DSA_SIG_SIZE]; + byte hash[WC_SHA_DIGEST_SIZE]; + word32 bytes; + int answer; +#ifdef USE_CERT_BUFFERS_1024 + const unsigned char* dsaKeyDer = dsa_key_der_1024; + int dsaKeySz = sizeof_dsa_key_der_1024; + byte tmp[ONEK_BUF]; + + XMEMSET(tmp, 0, sizeof(tmp)); + XMEMCPY(tmp, dsaKeyDer , dsaKeySz); + bytes = dsaKeySz; +#elif defined(USE_CERT_BUFFERS_2048) + const unsigned char* dsaKeyDer = dsa_key_der_2048; + int dsaKeySz = 
sizeof_dsa_key_der_2048; + byte tmp[TWOK_BUF]; + + XMEMSET(tmp, 0, sizeof(tmp)); + XMEMCPY(tmp, dsaKeyDer , dsaKeySz); + bytes = (word32)dsaKeySz; +#else + byte tmp[TWOK_BUF]; + const unsigned char* dsaKeyDer = (const unsigned char*)tmp; + int dsaKeySz; + XFILE fp = XBADFILE; + + XMEMSET(tmp, 0, sizeof(tmp)); + ExpectTrue((fp = XFOPEN("./certs/dsa2048.der", "rb")) != XBADFILE); + ExpectIntGT(dsaKeySz = bytes = (word32) XFREAD(tmp, 1, sizeof(tmp), fp), 0); + if (fp != XBADFILE) + XFCLOSE(fp); +#endif /* END USE_CERT_BUFFERS_1024 */ + + /* Create hash to later Sign and Verify */ + ExpectIntEQ(SHA1_Init(&sha), WOLFSSL_SUCCESS); + ExpectIntEQ(SHA1_Update(&sha, tmp, bytes), WOLFSSL_SUCCESS); + ExpectIntEQ(SHA1_Final(hash,&sha), WOLFSSL_SUCCESS); + + /* Initialize pkey with der format dsa key */ + ExpectNotNull(d2i_PrivateKey(EVP_PKEY_DSA, &pkey, &dsaKeyDer, + (long)dsaKeySz)); + + /* Test wolfSSL_EVP_PKEY_get1_DSA */ + /* Should Fail: NULL argument */ + ExpectNull(dsa = EVP_PKEY_get0_DSA(NULL)); + ExpectNull(dsa = EVP_PKEY_get1_DSA(NULL)); + /* Should Pass: Initialized pkey argument */ + ExpectNotNull(dsa = EVP_PKEY_get0_DSA(pkey)); + ExpectNotNull(dsa = EVP_PKEY_get1_DSA(pkey)); + +#ifdef USE_CERT_BUFFERS_1024 + ExpectIntEQ(DSA_bits(dsa), 1024); +#else + ExpectIntEQ(DSA_bits(dsa), 2048); +#endif + + /* Sign */ + ExpectIntEQ(wolfSSL_DSA_do_sign(hash, signature, dsa), WOLFSSL_SUCCESS); + /* Verify. 
*/ + ExpectIntEQ(wolfSSL_DSA_do_verify(hash, signature, dsa, &answer), + WOLFSSL_SUCCESS); + + /* Test wolfSSL_EVP_PKEY_set1_DSA */ + /* Should Fail: set1Pkey not initialized */ + ExpectIntNE(EVP_PKEY_set1_DSA(set1Pkey, dsa), WOLFSSL_SUCCESS); + + /* Initialize set1Pkey */ + set1Pkey = EVP_PKEY_new(); + + /* Should Fail Verify: setDsa not initialized from set1Pkey */ + ExpectIntNE(wolfSSL_DSA_do_verify(hash,signature,setDsa,&answer), + WOLFSSL_SUCCESS); + + /* Should Pass: set dsa into set1Pkey */ + ExpectIntEQ(EVP_PKEY_set1_DSA(set1Pkey, dsa), WOLFSSL_SUCCESS); + + DSA_free(dsa); + DSA_free(setDsa); + EVP_PKEY_free(pkey); + EVP_PKEY_free(set1Pkey); +#endif /* OPENSSL_ALL && !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ + return EXPECT_RESULT(); +} /* END test_EVP_PKEY_set1_get1_DSA */ + +int test_wolfSSL_EVP_PKEY_set1_get1_EC_KEY(void) +{ + EXPECT_DECLS; +#if defined(HAVE_ECC) && defined(OPENSSL_ALL) + WOLFSSL_EC_KEY* ecKey = NULL; + WOLFSSL_EC_KEY* ecGet1 = NULL; + EVP_PKEY* pkey = NULL; + + ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + /* Test wolfSSL_EVP_PKEY_set1_EC_KEY */ + ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(NULL, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Should fail since ecKey is empty */ + ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); + ExpectIntEQ(wolfSSL_EVP_PKEY_set1_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); + + /* Test wolfSSL_EVP_PKEY_get1_EC_KEY */ + ExpectNull(wolfSSL_EVP_PKEY_get1_EC_KEY(NULL)); + ExpectNotNull(ecGet1 = wolfSSL_EVP_PKEY_get1_EC_KEY(pkey)); + + wolfSSL_EC_KEY_free(ecKey); + wolfSSL_EC_KEY_free(ecGet1); + EVP_PKEY_free(pkey); +#endif /* HAVE_ECC && OPENSSL_ALL */ + return EXPECT_RESULT(); +} /* END test_EVP_PKEY_set1_get1_EC_KEY */ + +int test_wolfSSL_EVP_PKEY_get0_EC_KEY(void) +{ 
+ EXPECT_DECLS; +#if defined(HAVE_ECC) && defined(OPENSSL_ALL) + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNull(EVP_PKEY_get0_EC_KEY(NULL)); + + ExpectNotNull(pkey = EVP_PKEY_new()); + ExpectNull(EVP_PKEY_get0_EC_KEY(pkey)); + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_set1_get1_DH(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) +#if !defined(NO_DH) && defined(WOLFSSL_DH_EXTRA) && !defined(NO_FILESYSTEM) + DH *dh = NULL; + DH *setDh = NULL; + EVP_PKEY *pkey = NULL; + + XFILE f = XBADFILE; + unsigned char buf[4096]; + const unsigned char* pt = buf; + const char* dh2048 = "./certs/dh2048.der"; + long len = 0; + int code = -1; + + XMEMSET(buf, 0, sizeof(buf)); + + ExpectTrue((f = XFOPEN(dh2048, "rb")) != XBADFILE); + ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); + if (f != XBADFILE) + XFCLOSE(f); + + /* Load dh2048.der into DH with internal format */ + ExpectNotNull(setDh = wolfSSL_d2i_DHparams(NULL, &pt, len)); + + ExpectIntEQ(wolfSSL_DH_check(setDh, &code), WOLFSSL_SUCCESS); + ExpectIntEQ(code, 0); + code = -1; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + /* Set DH into PKEY */ + ExpectIntEQ(wolfSSL_EVP_PKEY_set1_DH(pkey, setDh), WOLFSSL_SUCCESS); + + /* Get DH from PKEY */ + ExpectNotNull(dh = wolfSSL_EVP_PKEY_get1_DH(pkey)); + + ExpectIntEQ(wolfSSL_DH_check(dh, &code), WOLFSSL_SUCCESS); + ExpectIntEQ(code, 0); + + EVP_PKEY_free(pkey); + DH_free(setDh); + setDh = NULL; + DH_free(dh); + dh = NULL; +#endif /* !NO_DH && WOLFSSL_DH_EXTRA && !NO_FILESYSTEM */ +#endif /* !HAVE_FIPS || HAVE_FIPS_VERSION > 2 */ +#endif /* OPENSSL_ALL || WOLFSSL_QT || WOLFSSL_OPENSSH */ + return EXPECT_RESULT(); +} /* END test_EVP_PKEY_set1_get1_DH */ + +int test_wolfSSL_EVP_PKEY_assign(void) +{ + EXPECT_DECLS; +#if (!defined(NO_RSA) || !defined(NO_DSA) || defined(HAVE_ECC)) && \ + defined(OPENSSL_ALL) + int type; + 
WOLFSSL_EVP_PKEY* pkey = NULL; +#ifndef NO_RSA + WOLFSSL_RSA* rsa = NULL; +#endif +#ifndef NO_DSA + WOLFSSL_DSA* dsa = NULL; +#endif +#ifdef HAVE_ECC + WOLFSSL_EC_KEY* ecKey = NULL; +#endif + +#ifndef NO_RSA + type = EVP_PKEY_RSA; + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(rsa = wolfSSL_RSA_new()); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, rsa), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, rsa), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, rsa), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_RSA_free(rsa); + } + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; +#endif /* NO_RSA */ + +#ifndef NO_DSA + type = EVP_PKEY_DSA; + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(dsa = wolfSSL_DSA_new()); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, dsa), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, dsa), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, dsa), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_DSA_free(dsa); + } + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; +#endif /* NO_DSA */ + +#ifdef HAVE_ECC + type = EVP_PKEY_EC; + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(NULL, type, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, -1, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); + 
ExpectIntEQ(wolfSSL_EVP_PKEY_assign(pkey, type, ecKey), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_EC_KEY_free(ecKey); + } + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; +#endif /* HAVE_ECC */ +#endif /* (!NO_RSA || !NO_DSA || HAVE_ECC) && OPENSSL_ALL */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_assign_DH(void) +{ + EXPECT_DECLS; +#if !defined(NO_DH) && defined(OPENSSL_ALL) && (!defined(HAVE_FIPS) || \ + (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION > 2))) + XFILE f = XBADFILE; + unsigned char buf[4096]; + const unsigned char* pt = buf; + const char* params1 = "./certs/dh2048.der"; + long len = 0; + WOLFSSL_DH* dh = NULL; + WOLFSSL_EVP_PKEY* pkey = NULL; + XMEMSET(buf, 0, sizeof(buf)); + + /* Load DH parameters DER. */ + ExpectTrue((f = XFOPEN(params1, "rb")) != XBADFILE); + ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); + if (f != XBADFILE) + XFCLOSE(f); + + ExpectNotNull(dh = wolfSSL_d2i_DHparams(NULL, &pt, len)); + ExpectIntEQ(DH_generate_key(dh), WOLFSSL_SUCCESS); + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + /* Bad cases */ + ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(NULL, dh), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(pkey, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(NULL, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Good case */ + ExpectIntEQ(wolfSSL_EVP_PKEY_assign_DH(pkey, dh), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_DH_free(dh); + } + + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_EVP_PKEY_rsa(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) + WOLFSSL_RSA* rsa = NULL; + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNotNull(rsa = wolfSSL_RSA_new()); + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectIntEQ(EVP_PKEY_assign_RSA(NULL, rsa), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + 
ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_RSA_free(rsa); + } + ExpectPtrEq(EVP_PKEY_get0_RSA(pkey), rsa); + wolfSSL_EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_EVP_PKEY_ec(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + WOLFSSL_EC_KEY* ecKey = NULL; + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNotNull(ecKey = wolfSSL_EC_KEY_new()); + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectIntEQ(EVP_PKEY_assign_EC_KEY(NULL, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + /* Should fail since ecKey is empty */ + ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, ecKey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_generate_key(ecKey), 1); + ExpectIntEQ(EVP_PKEY_assign_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + wolfSSL_EC_KEY_free(ecKey); + } + wolfSSL_EVP_PKEY_free(pkey); +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_missing_parameters(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && !defined(NO_WOLFSSL_STUB) + WOLFSSL_EVP_PKEY* pkey = NULL; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + + ExpectIntEQ(wolfSSL_EVP_PKEY_missing_parameters(pkey), 0); + ExpectIntEQ(wolfSSL_EVP_PKEY_missing_parameters(NULL), 0); + + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_copy_parameters(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_DH) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) && defined(WOLFSSL_DH_EXTRA) && \ + (defined(OPENSSL_ALL) || defined(WOLFSSL_QT)) && !defined(NO_FILESYSTEM) + WOLFSSL_EVP_PKEY* params = NULL; + WOLFSSL_EVP_PKEY* copy = NULL; + DH* dh = NULL; + BIGNUM* p1; + BIGNUM* g1; + BIGNUM* q1; + BIGNUM* p2; + BIGNUM* g2; + BIGNUM* q2; + + /* create DH 
with DH_get_2048_256 params */ + ExpectNotNull(params = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(dh = DH_get_2048_256()); + ExpectIntEQ(EVP_PKEY_set1_DH(params, dh), WOLFSSL_SUCCESS); + DH_get0_pqg(dh, (const BIGNUM**)&p1, + (const BIGNUM**)&q1, + (const BIGNUM**)&g1); + DH_free(dh); + dh = NULL; + + /* create DH with random generated DH params */ + ExpectNotNull(copy = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(dh = DH_generate_parameters(2048, 2, NULL, NULL)); + ExpectIntEQ(EVP_PKEY_set1_DH(copy, dh), WOLFSSL_SUCCESS); + DH_free(dh); + dh = NULL; + + ExpectIntEQ(EVP_PKEY_copy_parameters(copy, params), WOLFSSL_SUCCESS); + ExpectNotNull(dh = EVP_PKEY_get1_DH(copy)); + ExpectNotNull(dh->p); + ExpectNotNull(dh->g); + ExpectNotNull(dh->q); + DH_get0_pqg(dh, (const BIGNUM**)&p2, + (const BIGNUM**)&q2, + (const BIGNUM**)&g2); + + ExpectIntEQ(BN_cmp(p1, p2), 0); + ExpectIntEQ(BN_cmp(q1, q2), 0); + ExpectIntEQ(BN_cmp(g1, g2), 0); + + DH_free(dh); + dh = NULL; + EVP_PKEY_free(copy); + EVP_PKEY_free(params); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_paramgen(void) +{ + EXPECT_DECLS; + /* ECC check taken from ecc.c. It is the condition that defines ECC256 */ +#if defined(OPENSSL_ALL) && !defined(NO_ECC_SECP) && \ + ((!defined(NO_ECC256) || defined(HAVE_ALL_CURVES)) && \ + ECC_MIN_KEY_SZ <= 256) + EVP_PKEY_CTX* ctx = NULL; + EVP_PKEY* pkey = NULL; + + /* Test error conditions. */ + ExpectIntEQ(EVP_PKEY_paramgen(NULL, &pkey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)); + ExpectIntEQ(EVP_PKEY_paramgen(ctx, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + +#ifndef NO_RSA + EVP_PKEY_CTX_free(ctx); + /* Parameter generation for RSA not supported yet. 
*/ + ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)); + ExpectIntEQ(EVP_PKEY_paramgen(ctx, &pkey), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); +#endif + +#ifdef HAVE_ECC + EVP_PKEY_CTX_free(ctx); + ExpectNotNull(ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)); + ExpectIntEQ(EVP_PKEY_paramgen_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_ec_paramgen_curve_nid(ctx, + NID_X9_62_prime256v1), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_paramgen(ctx, &pkey), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_ec_param_enc(ctx, OPENSSL_EC_NAMED_CURVE), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_keygen_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_keygen(ctx, &pkey), WOLFSSL_SUCCESS); +#endif + + EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_param_check(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) +#if !defined(NO_DH) && defined(WOLFSSL_DH_EXTRA) && !defined(NO_FILESYSTEM) + + DH *dh = NULL; + DH *setDh = NULL; + EVP_PKEY *pkey = NULL; + EVP_PKEY_CTX* ctx = NULL; + + FILE* f = NULL; + unsigned char buf[512]; + const unsigned char* pt = buf; + const char* dh2048 = "./certs/dh2048.der"; + long len = 0; + int code = -1; + + XMEMSET(buf, 0, sizeof(buf)); + + ExpectTrue((f = XFOPEN(dh2048, "rb")) != XBADFILE); + ExpectTrue((len = (long)XFREAD(buf, 1, sizeof(buf), f)) > 0); + if (f != XBADFILE) + XFCLOSE(f); + + /* Load dh2048.der into DH with internal format */ + ExpectNotNull(setDh = d2i_DHparams(NULL, &pt, len)); + ExpectIntEQ(DH_check(setDh, &code), WOLFSSL_SUCCESS); + ExpectIntEQ(code, 0); + code = -1; + + pkey = wolfSSL_EVP_PKEY_new(); + /* Set DH into PKEY */ + ExpectIntEQ(EVP_PKEY_set1_DH(pkey, setDh), WOLFSSL_SUCCESS); + /* create ctx from pkey */ + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_param_check(ctx), 1/* valid */); + + /* TODO: more invalid cases */ + ExpectIntEQ(EVP_PKEY_param_check(NULL), 0); + + 
EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(pkey); + DH_free(setDh); + setDh = NULL; + DH_free(dh); + dh = NULL; +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_keygen_init(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_PKEY* pkey = NULL; + EVP_PKEY_CTX *ctx = NULL; + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen_init(NULL), WOLFSSL_SUCCESS); + + EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(pkey); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_keygen(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_PKEY* pkey = NULL; + EVP_PKEY_CTX* ctx = NULL; +#if !defined(NO_DH) && (!defined(HAVE_FIPS) || FIPS_VERSION_GT(2,0)) + WOLFSSL_EVP_PKEY* params = NULL; + DH* dh = NULL; + const BIGNUM* pubkey = NULL; + const BIGNUM* privkey = NULL; + ASN1_INTEGER* asn1int = NULL; + unsigned int length = 0; + byte* derBuffer = NULL; +#endif + + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + + /* Bad cases */ + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(NULL, &pkey), 0); + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(ctx, NULL), 0); + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(NULL, NULL), 0); + + /* Good case */ + ExpectIntEQ(wolfSSL_EVP_PKEY_keygen(ctx, &pkey), 0); + + EVP_PKEY_CTX_free(ctx); + ctx = NULL; + EVP_PKEY_free(pkey); + pkey = NULL; + +#if !defined(NO_DH) && (!defined(HAVE_FIPS) || FIPS_VERSION_GT(2,0)) + /* Test DH keygen */ + { + ExpectNotNull(params = wolfSSL_EVP_PKEY_new()); + ExpectNotNull(dh = DH_get_2048_256()); + ExpectIntEQ(EVP_PKEY_set1_DH(params, dh), WOLFSSL_SUCCESS); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(params, NULL)); + ExpectIntEQ(EVP_PKEY_keygen_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_keygen(ctx, &pkey), WOLFSSL_SUCCESS); + + DH_free(dh); + dh = NULL; + EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(params); 
+ + /* try exporting generated key to DER, to verify */ + ExpectNotNull(dh = EVP_PKEY_get1_DH(pkey)); + DH_get0_key(dh, &pubkey, &privkey); + ExpectNotNull(pubkey); + ExpectNotNull(privkey); + ExpectNotNull(asn1int = BN_to_ASN1_INTEGER(pubkey, NULL)); + ExpectIntGT((length = i2d_ASN1_INTEGER(asn1int, &derBuffer)), 0); + + ASN1_INTEGER_free(asn1int); + DH_free(dh); + dh = NULL; + XFREE(derBuffer, NULL, DYNAMIC_TYPE_TMP_BUFFER); + + EVP_PKEY_free(pkey); + } +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_SignInit_ex(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + WOLFSSL_EVP_MD_CTX mdCtx; + WOLFSSL_ENGINE* e = 0; + const EVP_MD* md = EVP_sha256(); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_SignInit_ex(&mdCtx, md, e), WOLFSSL_SUCCESS); + + ExpectIntEQ(wolfSSL_EVP_MD_CTX_cleanup(&mdCtx), 1); +#endif + return EXPECT_RESULT(); +} + +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #endif +#endif +#endif +#if defined(OPENSSL_EXTRA) +#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) + #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #endif +#endif +#endif +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + #ifndef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #define TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY + #endif +#endif +#endif + +#ifdef TEST_WOLFSSL_EVP_PKEY_SIGN_VERIFY +static int test_wolfSSL_EVP_PKEY_sign_verify(int keyType) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + WOLFSSL_RSA* 
rsa = NULL; +#endif +#endif +#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) + WOLFSSL_DSA* dsa = NULL; +#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + WOLFSSL_EC_KEY* ecKey = NULL; +#endif +#endif + WOLFSSL_EVP_PKEY* pkey = NULL; + WOLFSSL_EVP_PKEY_CTX* ctx = NULL; + WOLFSSL_EVP_PKEY_CTX* ctx_verify = NULL; + const char* in = "What is easy to do is easy not to do."; + size_t inlen = XSTRLEN(in); + byte hash[SHA256_DIGEST_LENGTH] = {0}; + byte zero[SHA256_DIGEST_LENGTH] = {0}; + SHA256_CTX c; + byte* sig = NULL; + byte* sigVerify = NULL; + size_t siglen; + size_t siglenOnlyLen; + size_t keySz = 2048/8; /* Bytes */ + + ExpectNotNull(sig = + (byte*)XMALLOC(keySz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); + ExpectNotNull(sigVerify = + (byte*)XMALLOC(keySz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER)); + + siglen = keySz; + ExpectNotNull(XMEMSET(sig, 0, keySz)); + ExpectNotNull(XMEMSET(sigVerify, 0, keySz)); + + /* Generate hash */ + SHA256_Init(&c); + SHA256_Update(&c, in, inlen); + SHA256_Final(hash, &c); +#ifdef WOLFSSL_SMALL_STACK_CACHE + /* workaround for small stack cache case */ + wc_Sha256Free((wc_Sha256*)&c); +#endif + + /* Generate key */ + ExpectNotNull(pkey = EVP_PKEY_new()); + switch (keyType) { + case EVP_PKEY_RSA: +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + { + ExpectNotNull(rsa = RSA_generate_key(2048, 3, NULL, NULL)); + ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); + } +#endif +#endif + break; + case EVP_PKEY_DSA: +#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) + ExpectNotNull(dsa = DSA_new()); + ExpectIntEQ(DSA_generate_parameters_ex(dsa, 2048, + NULL, 0, NULL, NULL, NULL), 1); + 
ExpectIntEQ(DSA_generate_key(dsa), 1); + ExpectIntEQ(EVP_PKEY_set1_DSA(pkey, dsa), WOLFSSL_SUCCESS); +#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ + break; + case EVP_PKEY_EC: +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + { + ExpectNotNull(ecKey = EC_KEY_new()); + ExpectIntEQ(EC_KEY_generate_key(ecKey), 1); + ExpectIntEQ( + EVP_PKEY_assign_EC_KEY(pkey, ecKey), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + EC_KEY_free(ecKey); + } + } +#endif +#endif + break; + } + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + if (keyType == EVP_PKEY_RSA) + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING), + WOLFSSL_SUCCESS); +#endif +#endif + + /* Check returning only length */ + ExpectIntEQ(EVP_PKEY_sign(ctx, NULL, &siglenOnlyLen, hash, + SHA256_DIGEST_LENGTH), WOLFSSL_SUCCESS); + ExpectIntGT(siglenOnlyLen, 0); + /* Sign data */ + ExpectIntEQ(EVP_PKEY_sign(ctx, sig, &siglen, hash, + SHA256_DIGEST_LENGTH), WOLFSSL_SUCCESS); + ExpectIntGE(siglenOnlyLen, siglen); + + /* Verify signature */ + ExpectNotNull(ctx_verify = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + if (keyType == EVP_PKEY_RSA) + ExpectIntEQ( + EVP_PKEY_CTX_set_rsa_padding(ctx_verify, RSA_PKCS1_PADDING), + WOLFSSL_SUCCESS); +#endif +#endif + ExpectIntEQ(EVP_PKEY_verify( + ctx_verify, sig, siglen, hash, SHA256_DIGEST_LENGTH), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_verify( + ctx_verify, sig, siglen, zero, 
SHA256_DIGEST_LENGTH), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + if (keyType == EVP_PKEY_RSA) { + #if defined(WC_RSA_NO_PADDING) || defined(WC_RSA_DIRECT) + /* Try RSA sign/verify with no padding. */ + ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_NO_PADDING), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_sign(ctx, sigVerify, &siglen, sig, + siglen), WOLFSSL_SUCCESS); + ExpectIntGE(siglenOnlyLen, siglen); + ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, + RSA_NO_PADDING), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_verify(ctx_verify, sigVerify, siglen, sig, + siglen), WOLFSSL_SUCCESS); + #endif + + /* Wrong padding schemes. */ + ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, + RSA_PKCS1_OAEP_PADDING), WOLFSSL_SUCCESS); + ExpectIntNE(EVP_PKEY_sign(ctx, sigVerify, &siglen, sig, + siglen), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_verify_init(ctx_verify), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, + RSA_PKCS1_OAEP_PADDING), WOLFSSL_SUCCESS); + ExpectIntNE(EVP_PKEY_verify(ctx_verify, sigVerify, siglen, sig, + siglen), WOLFSSL_SUCCESS); + + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx_verify, + RSA_PKCS1_PADDING), WOLFSSL_SUCCESS); + } +#endif +#endif + + /* error cases */ + siglen = keySz; /* Reset because sig size may vary slightly */ + ExpectIntNE(EVP_PKEY_sign_init(NULL), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_sign_init(ctx), WOLFSSL_SUCCESS); + ExpectIntNE(EVP_PKEY_sign(NULL, sig, &siglen, (byte*)in, inlen), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_sign(ctx, sig, &siglen, (byte*)in, 
inlen), + WOLFSSL_SUCCESS); + + EVP_PKEY_free(pkey); + pkey = NULL; +#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) + DSA_free(dsa); + dsa = NULL; +#endif /* !NO_DSA && !HAVE_SELFTEST && WOLFSSL_KEY_GEN */ + EVP_PKEY_CTX_free(ctx_verify); + ctx_verify = NULL; + EVP_PKEY_CTX_free(ctx); + ctx = NULL; + + XFREE(sig, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(sigVerify, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); +#endif /* OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} +#endif + +int test_wolfSSL_EVP_PKEY_sign_verify_rsa(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) && \ + !defined(HAVE_SELFTEST) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_RSA), TEST_SUCCESS); +#endif +#endif + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_PKEY_sign_verify_dsa(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) +#if !defined (NO_DSA) && !defined(HAVE_SELFTEST) && defined(WOLFSSL_KEY_GEN) + ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_DSA), TEST_SUCCESS); +#endif +#endif + return EXPECT_RESULT(); +} +int test_wolfSSL_EVP_PKEY_sign_verify_ec(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + ExpectIntEQ(test_wolfSSL_EVP_PKEY_sign_verify(EVP_PKEY_EC), TEST_SUCCESS); +#endif +#endif + return EXPECT_RESULT(); +} + + +int test_wolfSSL_EVP_MD_rsa_signing(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(USE_CERT_BUFFERS_2048) + WOLFSSL_EVP_PKEY* privKey = NULL; + WOLFSSL_EVP_PKEY* pubKey = NULL; + WOLFSSL_EVP_PKEY_CTX* keyCtx = NULL; + const char testData[] = "Hi There"; + WOLFSSL_EVP_MD_CTX mdCtx; + WOLFSSL_EVP_MD_CTX mdCtxCopy; + int ret; + size_t checkSz = -1; + int sz = 2048 / 8; + const unsigned char* cp; + const unsigned char* p; + unsigned char check[2048/8]; + 
size_t i; + int paddings[] = { + RSA_PKCS1_PADDING, +#if !defined(HAVE_FIPS) && !defined(HAVE_SELFTEST) && defined(WC_RSA_PSS) + RSA_PKCS1_PSS_PADDING, +#endif + }; + + + cp = client_key_der_2048; + ExpectNotNull((privKey = wolfSSL_d2i_PrivateKey(EVP_PKEY_RSA, NULL, &cp, + sizeof_client_key_der_2048))); + p = client_keypub_der_2048; + ExpectNotNull((pubKey = wolfSSL_d2i_PUBKEY(NULL, &p, + sizeof_client_keypub_der_2048))); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + wolfSSL_EVP_MD_CTX_init(&mdCtxCopy); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, privKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + ExpectIntEQ((int)checkSz, sz); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz,sz); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_copy_ex(&mdCtxCopy, &mdCtx), 1); + ExpectIntEQ(wolfSSL_EVP_MD_CTX_copy_ex(&mdCtxCopy, &mdCtx), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtxCopy); + ExpectIntEQ(ret, 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, pubKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), + 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, privKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 4), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + ExpectIntEQ((int)checkSz, sz); + checkSz = sizeof(check); + 
ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz, sz); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, + (unsigned int)XSTRLEN(testData) - 4), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz, sz); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, pubKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, + (unsigned int)XSTRLEN(testData) - 4), + 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + /* Check all signing padding types */ + for (i = 0; i < sizeof(paddings)/sizeof(int); i++) { + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, &keyCtx, + wolfSSL_EVP_sha256(), NULL, privKey), 1); + ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_padding(keyCtx, + paddings[i]), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + ExpectIntEQ((int)checkSz, sz); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ((int)checkSz,sz); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, &keyCtx, + wolfSSL_EVP_sha256(), NULL, pubKey), 1); + ExpectIntEQ(wolfSSL_EVP_PKEY_CTX_set_rsa_padding(keyCtx, + paddings[i]), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, 
checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + } + + wolfSSL_EVP_PKEY_free(pubKey); + wolfSSL_EVP_PKEY_free(privKey); +#endif + return EXPECT_RESULT(); +} + +/* Test RSA-PSS digital signature creation and verification */ +int test_wc_RsaPSS_DigitalSignVerify(void) +{ + EXPECT_DECLS; + + /* Early FIPS did not support PSS. */ +#if (!defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && \ + (HAVE_FIPS_VERSION > 2))) && \ + (!defined(HAVE_SELFTEST) || (defined(HAVE_SELFTEST_VERSION) && \ + (HAVE_SELFTEST_VERSION > 2))) && \ + !defined(NO_RSA) && defined(WC_RSA_PSS) && defined(OPENSSL_EXTRA) && \ + defined(WOLFSSL_KEY_GEN) && defined(WC_RSA_NO_PADDING) && \ + !defined(NO_SHA256) + + /* Test digest */ + const unsigned char test_digest[32] = { + 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, + 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, + 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, + 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09 + }; + const unsigned int digest_len = sizeof(test_digest); + + /* Variables for RSA key generation and signature operations */ + EVP_PKEY_CTX *pkctx = NULL; + EVP_PKEY *pkey = NULL; + EVP_PKEY_CTX *sign_ctx = NULL; + EVP_PKEY_CTX *verify_ctx = NULL; + unsigned char signature[256+MAX_DER_DIGEST_ASN_SZ] = {0}; + size_t signature_len = sizeof(signature); + int modulus_bits = 2048; + + /* Generate RSA key pair to avoid file dependencies */ + ExpectNotNull(pkctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)); + ExpectIntEQ(EVP_PKEY_keygen_init(pkctx), 1); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_keygen_bits(pkctx, modulus_bits), 1); + ExpectIntEQ(EVP_PKEY_keygen(pkctx, &pkey), 1); + + /* Create signing context */ + ExpectNotNull(sign_ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_sign_init(sign_ctx), 1); + + /* Configure RSA-PSS parameters for signing. 
*/ + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(sign_ctx, RSA_PKCS1_PSS_PADDING), + 1); + /* Default salt length matched hash so use 32 for SHA256 */ + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_pss_saltlen(sign_ctx, 32), 1); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_mgf1_md(sign_ctx, EVP_sha256()), 1); + ExpectIntEQ(EVP_PKEY_CTX_set_signature_md(sign_ctx, EVP_sha256()), 1); + + /* Create the digital signature */ + ExpectIntEQ(EVP_PKEY_sign(sign_ctx, signature, &signature_len, test_digest, + digest_len), 1); + ExpectIntGT((int)signature_len, 0); + + /* Create verification context */ + ExpectNotNull(verify_ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_verify_init(verify_ctx), 1); + + /* Configure RSA-PSS parameters for verification */ + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(verify_ctx, RSA_PKCS1_PSS_PADDING), + 1); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_pss_saltlen(verify_ctx, 32), 1); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_mgf1_md(verify_ctx, EVP_sha256()), 1); + ExpectIntEQ(EVP_PKEY_CTX_set_signature_md(verify_ctx, EVP_sha256()), 1); + + /* Verify the digital signature */ + ExpectIntEQ(EVP_PKEY_verify(verify_ctx, signature, signature_len, + test_digest, digest_len), 1); + + /* Test with wrong digest to ensure verification fails (negative test) */ + { + const unsigned char wrong_digest[32] = { + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, + 0x07, 0x08, 0x09, 0x00, 0x01, 0x02, 0x03, 0x04, + 0x05, 0x06, 0x07, 0x08, 0x09, 0x00, 0x01, 0x02 + }; + ExpectIntNE(EVP_PKEY_verify(verify_ctx, signature, signature_len, + wrong_digest, digest_len), 1); + } + + /* Clean up */ + if (verify_ctx) + EVP_PKEY_CTX_free(verify_ctx); + if (sign_ctx) + EVP_PKEY_CTX_free(sign_ctx); + if (pkey) + EVP_PKEY_free(pkey); + if (pkctx) + EVP_PKEY_CTX_free(pkctx); + +#endif + + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_MD_ecc_signing(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) + 
WOLFSSL_EVP_PKEY* privKey = NULL; + WOLFSSL_EVP_PKEY* pubKey = NULL; + const char testData[] = "Hi There"; + WOLFSSL_EVP_MD_CTX mdCtx; + int ret; + const unsigned char* cp; + const unsigned char* p; + unsigned char check[2048/8]; + size_t checkSz = sizeof(check); + + XMEMSET(check, 0, sizeof(check)); + + cp = ecc_clikey_der_256; + ExpectNotNull(privKey = wolfSSL_d2i_PrivateKey(EVP_PKEY_EC, NULL, &cp, + sizeof_ecc_clikey_der_256)); + p = ecc_clikeypub_der_256; + ExpectNotNull((pubKey = wolfSSL_d2i_PUBKEY(NULL, &p, + sizeof_ecc_clikeypub_der_256))); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, privKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, pubKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, + (unsigned int)XSTRLEN(testData)), + 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestSignInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, privKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData, 4), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, NULL, &checkSz), 1); + checkSz = sizeof(check); + ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ExpectIntEQ(wolfSSL_EVP_DigestSignUpdate(&mdCtx, testData + 4, + (unsigned int)XSTRLEN(testData) - 4), 1); + checkSz = sizeof(check); + 
ExpectIntEQ(wolfSSL_EVP_DigestSignFinal(&mdCtx, check, &checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_MD_CTX_init(&mdCtx); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyInit(&mdCtx, NULL, wolfSSL_EVP_sha256(), + NULL, pubKey), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData, 4), 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyUpdate(&mdCtx, testData + 4, + (unsigned int)XSTRLEN(testData) - 4), + 1); + ExpectIntEQ(wolfSSL_EVP_DigestVerifyFinal(&mdCtx, check, checkSz), 1); + ret = wolfSSL_EVP_MD_CTX_cleanup(&mdCtx); + ExpectIntEQ(ret, 1); + + wolfSSL_EVP_PKEY_free(pubKey); + wolfSSL_EVP_PKEY_free(privKey); +#endif + return EXPECT_RESULT(); +} + + +int test_wolfSSL_EVP_PKEY_encrypt(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_RSA) && defined(WOLFSSL_KEY_GEN) + WOLFSSL_RSA* rsa = NULL; + WOLFSSL_EVP_PKEY* pkey = NULL; + WOLFSSL_EVP_PKEY_CTX* ctx = NULL; + const char* in = "What is easy to do is easy not to do."; + size_t inlen = XSTRLEN(in); + size_t outEncLen = 0; + byte* outEnc = NULL; + byte* outDec = NULL; + size_t outDecLen = 0; + size_t rsaKeySz = 2048/8; /* Bytes */ +#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) + byte* inTmp = NULL; + byte* outEncTmp = NULL; + byte* outDecTmp = NULL; +#endif + + ExpectNotNull(outEnc = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + if (outEnc != NULL) { + XMEMSET(outEnc, 0, rsaKeySz); + } + ExpectNotNull(outDec = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + if (outDec != NULL) { + XMEMSET(outDec, 0, rsaKeySz); + } + + ExpectNotNull(rsa = RSA_generate_key(2048, 3, NULL, NULL)); + ExpectNotNull(pkey = wolfSSL_EVP_PKEY_new()); + ExpectIntEQ(EVP_PKEY_assign_RSA(pkey, rsa), WOLFSSL_SUCCESS); + if (EXPECT_FAIL()) { + RSA_free(rsa); + } + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_encrypt_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, 
RSA_PKCS1_PADDING), + WOLFSSL_SUCCESS); + + /* Test pkey references count is decremented. pkey shouldn't be destroyed + since ctx uses it.*/ + ExpectIntEQ(pkey->ref.count, 2); + EVP_PKEY_free(pkey); + ExpectIntEQ(pkey->ref.count, 1); + + /* Encrypt data */ + /* Check that we can get the required output buffer length by passing in a + * NULL output buffer. */ + ExpectIntEQ(EVP_PKEY_encrypt(ctx, NULL, &outEncLen, + (const unsigned char*)in, inlen), WOLFSSL_SUCCESS); + ExpectIntEQ(rsaKeySz, outEncLen); + /* Now do the actual encryption. */ + ExpectIntEQ(EVP_PKEY_encrypt(ctx, outEnc, &outEncLen, + (const unsigned char*)in, inlen), WOLFSSL_SUCCESS); + + /* Decrypt data */ + ExpectIntEQ(EVP_PKEY_decrypt_init(ctx), WOLFSSL_SUCCESS); + /* Check that we can get the required output buffer length by passing in a + * NULL output buffer. */ + ExpectIntEQ(EVP_PKEY_decrypt(ctx, NULL, &outDecLen, outEnc, outEncLen), + WOLFSSL_SUCCESS); + ExpectIntEQ(rsaKeySz, outDecLen); + /* Now do the actual decryption. 
*/ + ExpectIntEQ(EVP_PKEY_decrypt(ctx, outDec, &outDecLen, outEnc, outEncLen), + WOLFSSL_SUCCESS); + + ExpectIntEQ(XMEMCMP(in, outDec, outDecLen), 0); + +#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) + /* The input length must be the same size as the RSA key.*/ + ExpectNotNull(inTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + if (inTmp != NULL) { + XMEMSET(inTmp, 9, rsaKeySz); + } + ExpectNotNull(outEncTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + if (outEncTmp != NULL) { + XMEMSET(outEncTmp, 0, rsaKeySz); + } + ExpectNotNull(outDecTmp = (byte*)XMALLOC(rsaKeySz, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + if (outDecTmp != NULL) { + XMEMSET(outDecTmp, 0, rsaKeySz); + } + ExpectIntEQ(EVP_PKEY_encrypt_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_NO_PADDING), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_encrypt(ctx, outEncTmp, &outEncLen, inTmp, rsaKeySz), + WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_decrypt_init(ctx), WOLFSSL_SUCCESS); + ExpectIntEQ(EVP_PKEY_decrypt(ctx, outDecTmp, &outDecLen, outEncTmp, + outEncLen), WOLFSSL_SUCCESS); + ExpectIntEQ(XMEMCMP(inTmp, outDecTmp, outDecLen), 0); +#endif + EVP_PKEY_CTX_free(ctx); + XFREE(outEnc, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(outDec, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); +#if !defined(HAVE_FIPS) && defined(WC_RSA_NO_PADDING) + XFREE(inTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(outEncTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(outDecTmp, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_derive(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) || defined(WOLFSSL_QT) || defined(WOLFSSL_OPENSSH) +#if (!defined(NO_DH) && defined(WOLFSSL_DH_EXTRA)) || defined(HAVE_ECC) + EVP_PKEY_CTX *ctx = NULL; + unsigned char *skey = NULL; + size_t skeylen; + EVP_PKEY *pkey = NULL; + EVP_PKEY *peerkey = NULL; + const unsigned char* key; + +#if !defined(NO_DH) && 
defined(WOLFSSL_DH_EXTRA) + /* DH */ + key = dh_key_der_2048; + ExpectNotNull((pkey = d2i_PrivateKey(EVP_PKEY_DH, NULL, &key, + sizeof_dh_key_der_2048))); + ExpectIntEQ(DH_generate_key(EVP_PKEY_get0_DH(pkey)), 1); + key = dh_key_der_2048; + ExpectNotNull((peerkey = d2i_PrivateKey(EVP_PKEY_DH, NULL, &key, + sizeof_dh_key_der_2048))); + ExpectIntEQ(DH_generate_key(EVP_PKEY_get0_DH(peerkey)), 1); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_derive_init(ctx), 1); + ExpectIntEQ(EVP_PKEY_derive_set_peer(ctx, peerkey), 1); + ExpectIntEQ(EVP_PKEY_derive(ctx, NULL, &skeylen), 1); + ExpectNotNull(skey = (unsigned char*)XMALLOC(skeylen, NULL, + DYNAMIC_TYPE_OPENSSL)); + ExpectIntEQ(EVP_PKEY_derive(ctx, skey, &skeylen), 1); + + EVP_PKEY_CTX_free(ctx); + ctx = NULL; + EVP_PKEY_free(peerkey); + peerkey = NULL; + EVP_PKEY_free(pkey); + pkey = NULL; + XFREE(skey, NULL, DYNAMIC_TYPE_OPENSSL); + skey = NULL; +#endif + +#ifdef HAVE_ECC + /* ECDH */ + key = ecc_clikey_der_256; + ExpectNotNull((pkey = d2i_PrivateKey(EVP_PKEY_EC, NULL, &key, + sizeof_ecc_clikey_der_256))); + key = ecc_clikeypub_der_256; + ExpectNotNull((peerkey = d2i_PUBKEY(NULL, &key, + sizeof_ecc_clikeypub_der_256))); + ExpectNotNull(ctx = EVP_PKEY_CTX_new(pkey, NULL)); + ExpectIntEQ(EVP_PKEY_derive_init(ctx), 1); + ExpectIntEQ(EVP_PKEY_derive_set_peer(ctx, peerkey), 1); + ExpectIntEQ(EVP_PKEY_derive(ctx, NULL, &skeylen), 1); + ExpectNotNull(skey = (unsigned char*)XMALLOC(skeylen, NULL, + DYNAMIC_TYPE_OPENSSL)); + ExpectIntEQ(EVP_PKEY_derive(ctx, skey, &skeylen), 1); + + EVP_PKEY_CTX_free(ctx); + EVP_PKEY_free(peerkey); + EVP_PKEY_free(pkey); + XFREE(skey, NULL, DYNAMIC_TYPE_OPENSSL); +#endif /* HAVE_ECC */ +#endif /* (!NO_DH && WOLFSSL_DH_EXTRA) || HAVE_ECC */ +#endif /* OPENSSL_ALL || WOLFSSL_QT || WOLFSSL_OPENSSH */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_EVP_PKEY_print_public(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_BIO) + WOLFSSL_BIO* rbio = NULL; 
+ WOLFSSL_BIO* wbio = NULL; + WOLFSSL_EVP_PKEY* pkey = NULL; + char line[256] = { 0 }; + char line1[256] = { 0 }; + int i = 0; + + /* test error cases */ + ExpectIntEQ( EVP_PKEY_print_public(NULL,NULL,0,NULL),0L); + + /* + * test RSA public key print + * in this test, pass '3' for indent + */ +#if !defined(NO_RSA) && defined(USE_CERT_BUFFERS_1024) + + ExpectNotNull(rbio = BIO_new_mem_buf( client_keypub_der_1024, + sizeof_client_keypub_der_1024)); + + ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); + + ExpectNotNull(wbio = BIO_new(BIO_s_mem())); + + ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,3,NULL),1); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, " RSA Public-Key: (1024 bit)\n"); + ExpectIntEQ(XSTRNCMP(line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, " Modulus:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, " 00:bc:73:0e:a8:49:f3:74:a2:a9:ef:18:a5:da:55:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of modulus element*/ + for (i = 0; i < 8 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, " Exponent: 65537 (0x010001)\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + + /* should reach EOF */ + ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); + + EVP_PKEY_free(pkey); + pkey = NULL; + BIO_free(rbio); + BIO_free(wbio); + rbio = NULL; + wbio = NULL; + +#endif /* !NO_RSA && USE_CERT_BUFFERS_1024*/ + + /* + * test DSA public key print + */ +#if !defined(NO_DSA) && defined(USE_CERT_BUFFERS_2048) + ExpectNotNull(rbio = BIO_new_mem_buf( dsa_pub_key_der_2048, + sizeof_dsa_pub_key_der_2048)); + + ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); + + ExpectNotNull(wbio = BIO_new(BIO_s_mem())); + + ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL),1); + 
+ ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "DSA Public-Key: (2048 bit)\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "pub:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, + " 00:C2:35:2D:EC:83:83:6C:73:13:9E:52:7C:74:C8:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of pub element*/ + for (i = 0; i < 17 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "P:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of P element*/ + for (i = 0; i < 18 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "Q:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of Q element*/ + for (i = 0; i < 3 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "G:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of G element*/ + for (i = 0; i < 18 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + /* should reach EOF */ + ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); + + EVP_PKEY_free(pkey); + pkey = NULL; + BIO_free(rbio); + BIO_free(wbio); + rbio = NULL; + wbio = NULL; + +#endif /* !NO_DSA && USE_CERT_BUFFERS_2048 */ + + /* + * test ECC public key print + */ +#if defined(HAVE_ECC) && defined(USE_CERT_BUFFERS_256) + + ExpectNotNull(rbio = BIO_new_mem_buf( ecc_clikeypub_der_256, + sizeof_ecc_clikeypub_der_256)); + + ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); + + ExpectNotNull(wbio = BIO_new(BIO_s_mem())); + + ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL),1); 
+ + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + ExpectStrEQ(line, "Public-Key: (256 bit)\n"); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "pub:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, + " 04:55:BF:F4:0F:44:50:9A:3D:CE:9B:B7:F0:C5:4D:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of pub element*/ + for (i = 0; i < 4 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "ASN1 OID: prime256v1\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "NIST CURVE: P-256\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + + /* should reach EOF */ + ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); + + EVP_PKEY_free(pkey); + pkey = NULL; + BIO_free(rbio); + BIO_free(wbio); + rbio = NULL; + wbio = NULL; + +#endif /* HAVE_ECC && USE_CERT_BUFFERS_256 */ + + /* + * test DH public key print + */ +#if defined(WOLFSSL_DH_EXTRA) && defined(USE_CERT_BUFFERS_2048) + + ExpectNotNull(rbio = BIO_new_mem_buf( dh_pub_key_der_2048, + sizeof_dh_pub_key_der_2048)); + + ExpectNotNull(wolfSSL_d2i_PUBKEY_bio(rbio, &pkey)); + + ExpectNotNull(wbio = BIO_new(BIO_s_mem())); + + ExpectIntEQ(EVP_PKEY_print_public(wbio, pkey,0,NULL), 1); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "DH Public-Key: (2048 bit)\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "public-key:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, + " 34:41:BF:E9:F2:11:BF:05:DB:B2:72:A8:29:CC:BD:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of 
public-key element*/ + for (i = 0; i < 17 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "prime:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, + " 00:D3:B2:99:84:5C:0A:4C:E7:37:CC:FC:18:37:01:\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* skip to the end of prime element*/ + for (i = 0; i < 17 ;i++) { + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + } + + ExpectIntGT(BIO_gets(wbio, line, sizeof(line)), 0); + strcpy(line1, "generator: 2 (0x02)\n"); + ExpectIntEQ(XSTRNCMP( line, line1, XSTRLEN(line1)), 0); + + /* should reach EOF */ + ExpectIntLE(BIO_gets(wbio, line, sizeof(line)), 0); + + EVP_PKEY_free(pkey); + pkey = NULL; + BIO_free(rbio); + BIO_free(wbio); + rbio = NULL; + wbio = NULL; + +#endif /* WOLFSSL_DH_EXTRA && USE_CERT_BUFFERS_2048 */ + + /* to prevent "unused variable" warning */ + (void)pkey; + (void)wbio; + (void)rbio; + (void)line; + (void)line1; + (void)i; +#endif /* OPENSSL_EXTRA */ + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_evp_pkey.h b/tests/api/test_evp_pkey.h new file mode 100644 index 000000000..222a19ea5 --- /dev/null +++ b/tests/api/test_evp_pkey.h @@ -0,0 +1,102 @@ +/* test_evp_pkey.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_EVP_PKEY_H +#define WOLFCRYPT_TEST_EVP_PKEY_H + +#include + +int test_wolfSSL_EVP_PKEY_CTX_new_id(void); +int test_wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits(void); +int test_wolfSSL_QT_EVP_PKEY_CTX_free(void); +int test_wolfSSL_EVP_PKEY_up_ref(void); +int test_wolfSSL_EVP_PKEY_base_id(void); +int test_wolfSSL_EVP_PKEY_id(void); +int test_wolfSSL_EVP_MD_pkey_type(void); +int test_wolfSSL_EVP_MD_hmac_signing(void); +int test_wolfSSL_EVP_PKEY_new_mac_key(void); +int test_wolfSSL_EVP_PKEY_hkdf(void); +int test_wolfSSL_EVP_PBE_scrypt(void); +int test_EVP_PKEY_cmp(void); +int test_wolfSSL_EVP_PKEY_set1_get1_DSA(void); +int test_wolfSSL_EVP_PKEY_set1_get1_EC_KEY (void); +int test_wolfSSL_EVP_PKEY_get0_EC_KEY(void); +int test_wolfSSL_EVP_PKEY_set1_get1_DH (void); +int test_wolfSSL_EVP_PKEY_assign(void); +int test_wolfSSL_EVP_PKEY_assign_DH(void); +int test_EVP_PKEY_rsa(void); +int test_EVP_PKEY_ec(void); +int test_wolfSSL_EVP_PKEY_missing_parameters(void); +int test_wolfSSL_EVP_PKEY_copy_parameters(void); +int test_wolfSSL_EVP_PKEY_paramgen(void); +int test_wolfSSL_EVP_PKEY_param_check(void); +int test_wolfSSL_EVP_PKEY_keygen_init(void); +int test_wolfSSL_EVP_PKEY_keygen(void); +int test_wolfSSL_EVP_SignInit_ex(void); +int test_wolfSSL_EVP_PKEY_sign_verify_rsa(void); +int test_wolfSSL_EVP_PKEY_sign_verify_dsa(void); +int test_wolfSSL_EVP_PKEY_sign_verify_ec(void); +int test_wolfSSL_EVP_MD_rsa_signing(void); +int test_wc_RsaPSS_DigitalSignVerify(void); +int test_wolfSSL_EVP_MD_ecc_signing(void); +int test_wolfSSL_EVP_PKEY_encrypt(void); +int test_wolfSSL_EVP_PKEY_derive(void); +int test_wolfSSL_EVP_PKEY_print_public(void); + +#define TEST_EVP_PKEY_DECLS \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_CTX_new_id), \ + 
TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_CTX_set_rsa_keygen_bits),\ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_QT_EVP_PKEY_CTX_free), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_up_ref), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_base_id), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_id), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_MD_pkey_type), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_MD_hmac_signing), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_new_mac_key), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_hkdf), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PBE_scrypt), \ + TEST_DECL_GROUP("evp_pkey", test_EVP_PKEY_cmp), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_set1_get1_DSA), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_set1_get1_EC_KEY), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_get0_EC_KEY), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_set1_get1_DH), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_assign), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_assign_DH), \ + TEST_DECL_GROUP("evp_pkey", test_EVP_PKEY_rsa), \ + TEST_DECL_GROUP("evp_pkey", test_EVP_PKEY_ec), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_missing_parameters), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_copy_parameters), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_paramgen), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_param_check), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_keygen_init), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_keygen), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_SignInit_ex), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_sign_verify_rsa), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_sign_verify_dsa), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_sign_verify_ec), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_MD_rsa_signing), \ + 
TEST_DECL_GROUP("evp_pkey", test_wc_RsaPSS_DigitalSignVerify), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_MD_ecc_signing), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_encrypt), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_derive), \ + TEST_DECL_GROUP("evp_pkey", test_wolfSSL_EVP_PKEY_print_public) + +#endif /* WOLFCRYPT_TEST_EVP_PKEY_H */ diff --git a/tests/api/test_ossl_asn1.c b/tests/api/test_ossl_asn1.c index b046fd4c3..f91cd0c6a 100644 --- a/tests/api/test_ossl_asn1.c +++ b/tests/api/test_ossl_asn1.c @@ -1195,7 +1195,8 @@ int test_wolfSSL_ASN1_STRING_to_UTF8(void) ExpectNotNull(e = wolfSSL_X509_NAME_get_entry(subject, idx)); ExpectNotNull(a = wolfSSL_X509_NAME_ENTRY_get_data(e)); ExpectIntEQ((len = wolfSSL_ASN1_STRING_to_UTF8(&actual_output, a)), 15); - ExpectIntEQ(strncmp((const char*)actual_output, targetOutput, (size_t)len), 0); + ExpectIntEQ(strncmp((const char*)actual_output, targetOutput, (size_t)len), + 0); a = NULL; /* wolfSSL_ASN1_STRING_to_UTF8(NULL, valid) */ @@ -1269,9 +1270,12 @@ int test_wolfSSL_ASN1_STRING_canon(void) ExpectNotNull(canon = ASN1_STRING_new()); /* Invalid parameter testing. */ - ExpectIntEQ(wolfSSL_ASN1_STRING_canon(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_ASN1_STRING_canon(canon, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_ASN1_STRING_canon(NULL, orig), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_STRING_canon(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_STRING_canon(canon, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_STRING_canon(NULL, orig), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wolfSSL_ASN1_STRING_canon(canon, orig), 1); ExpectIntEQ(ASN1_STRING_cmp(orig, canon), 0); @@ -1622,9 +1626,12 @@ int test_wolfSSL_ASN1_GENERALIZEDTIME_print(void) ExpectIntEQ(wolfSSL_ASN1_TIME_set_string(gtime, "20180504123500Z"), 1); /* Invalid parameters testing. 
*/ - ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(bio, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(NULL, gtime), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(bio, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(NULL, gtime), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wolfSSL_ASN1_GENERALIZEDTIME_print(bio, gtime), 1); ExpectIntEQ(BIO_read(bio, buf, sizeof(buf)), 20); diff --git a/tests/api/test_ossl_bio.c b/tests/api/test_ossl_bio.c index cedf9f918..2fdc792cc 100644 --- a/tests/api/test_ossl_bio.c +++ b/tests/api/test_ossl_bio.c @@ -58,7 +58,8 @@ int test_wolfSSL_BIO_gets(void) /* try with bad args */ ExpectNull(bio = BIO_new_mem_buf(NULL, sizeof(msg))); #ifdef OPENSSL_ALL - ExpectIntEQ(BIO_set_mem_buf(bio, NULL, BIO_NOCLOSE), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(BIO_set_mem_buf(bio, NULL, BIO_NOCLOSE), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); #endif /* try with real msg */ @@ -594,7 +595,8 @@ int test_wolfSSL_BIO_tls(void) int test_wolfSSL_BIO_datagram(void) { EXPECT_DECLS; -#if !defined(NO_BIO) && defined(WOLFSSL_DTLS) && defined(WOLFSSL_HAVE_BIO_ADDR) && defined(OPENSSL_EXTRA) +#if !defined(NO_BIO) && defined(WOLFSSL_DTLS) && \ + defined(WOLFSSL_HAVE_BIO_ADDR) && defined(OPENSSL_EXTRA) int ret; SOCKET_T fd1 = SOCKET_INVALID, fd2 = SOCKET_INVALID; WOLFSSL_BIO *bio1 = NULL, *bio2 = NULL; @@ -636,7 +638,8 @@ int test_wolfSSL_BIO_datagram(void) sin1.sin_port = 0; slen = (socklen_t)sizeof(sin1); ExpectIntEQ(bind(fd1, (const struct sockaddr *)&sin1, slen), 0); - ExpectIntEQ(setsockopt(fd1, SOL_SOCKET, SO_RCVTIMEO, (const char *)&timeout, sizeof(timeout)), 0); + ExpectIntEQ(setsockopt(fd1, SOL_SOCKET, SO_RCVTIMEO, + (const char *)&timeout, 
sizeof(timeout)), 0); ExpectIntEQ(getsockname(fd1, (struct sockaddr *)&sin1, &slen), 0); } @@ -646,7 +649,8 @@ int test_wolfSSL_BIO_datagram(void) sin2.sin_port = 0; slen = (socklen_t)sizeof(sin2); ExpectIntEQ(bind(fd2, (const struct sockaddr *)&sin2, slen), 0); - ExpectIntEQ(setsockopt(fd2, SOL_SOCKET, SO_RCVTIMEO, (const char *)&timeout, sizeof(timeout)), 0); + ExpectIntEQ(setsockopt(fd2, SOL_SOCKET, SO_RCVTIMEO, + (const char *)&timeout, sizeof(timeout)), 0); ExpectIntEQ(getsockname(fd2, (struct sockaddr *)&sin2, &slen), 0); } @@ -661,15 +665,19 @@ int test_wolfSSL_BIO_datagram(void) } if (EXPECT_SUCCESS()) { - /* for OpenSSL compatibility, direct copying of sockaddrs into BIO_ADDRs must work right. */ + /* for OpenSSL compatibility, direct copying of sockaddrs into BIO_ADDRs + * must work right. */ XMEMCPY(&bio_addr2->sa_in, &sin2, sizeof(sin2)); - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_PEER, 0, bio_addr2), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_PEER, 0, + bio_addr2), WOLFSSL_SUCCESS); wolfSSL_BIO_ADDR_clear(bio_addr2); } test_msg_recvd[0] = 0; - ExpectIntEQ(wolfSSL_BIO_write(bio1, test_msg, sizeof(test_msg)), (int)sizeof(test_msg)); - ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_write(bio1, test_msg, sizeof(test_msg)), + (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), + (int)sizeof(test_msg)); ExpectIntEQ(XMEMCMP(test_msg_recvd, test_msg, sizeof(test_msg)), 0); #ifdef WOLFSSL_BIO_HAVE_FLOW_STATS @@ -682,58 +690,76 @@ int test_wolfSSL_BIO_datagram(void) */ test_msg_recvd[0] = 0; - ExpectIntEQ(wolfSSL_BIO_write(bio2, test_msg, sizeof(test_msg)), (int)sizeof(test_msg)); - ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_write(bio2, test_msg, sizeof(test_msg)), + (int)sizeof(test_msg)); + 
ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), + (int)sizeof(test_msg)); ExpectIntEQ(XMEMCMP(test_msg_recvd, test_msg, sizeof(test_msg)), 0); - ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), WOLFSSL_BIO_ERROR); + ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), + WOLFSSL_BIO_ERROR); ExpectIntNE(BIO_should_retry(bio1), 0); - ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), WOLFSSL_BIO_ERROR); + ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), + WOLFSSL_BIO_ERROR); ExpectIntNE(BIO_should_retry(bio2), 0); /* now "connect" the sockets. */ - ExpectIntEQ(connect(fd1, (const struct sockaddr *)&sin2, (socklen_t)sizeof(sin2)), 0); - ExpectIntEQ(connect(fd2, (const struct sockaddr *)&sin1, (socklen_t)sizeof(sin1)), 0); + ExpectIntEQ(connect(fd1, (const struct sockaddr *)&sin2, + (socklen_t)sizeof(sin2)), 0); + ExpectIntEQ(connect(fd2, (const struct sockaddr *)&sin1, + (socklen_t)sizeof(sin1)), 0); if (EXPECT_SUCCESS()) { XMEMCPY(&bio_addr2->sa_in, &sin2, sizeof(sin2)); - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_CONNECTED, 0, bio_addr2), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_CONNECTED, 0, + bio_addr2), WOLFSSL_SUCCESS); wolfSSL_BIO_ADDR_clear(bio_addr2); } if (EXPECT_SUCCESS()) { XMEMCPY(&bio_addr1->sa_in, &sin1, sizeof(sin1)); - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio2, BIO_CTRL_DGRAM_SET_CONNECTED, 0, bio_addr1), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio2, BIO_CTRL_DGRAM_SET_CONNECTED, 0, + bio_addr1), WOLFSSL_SUCCESS); wolfSSL_BIO_ADDR_clear(bio_addr1); } test_msg_recvd[0] = 0; - ExpectIntEQ(wolfSSL_BIO_write(bio2, test_msg, sizeof(test_msg)), (int)sizeof(test_msg)); - ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_write(bio2, test_msg, sizeof(test_msg)), + (int)sizeof(test_msg)); + 
ExpectIntEQ(wolfSSL_BIO_read(bio1, test_msg_recvd, sizeof(test_msg_recvd)), + (int)sizeof(test_msg)); ExpectIntEQ(XMEMCMP(test_msg_recvd, test_msg, sizeof(test_msg)), 0); test_msg_recvd[0] = 0; - ExpectIntEQ(wolfSSL_BIO_write(bio1, test_msg, sizeof(test_msg)), (int)sizeof(test_msg)); - ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_write(bio1, test_msg, sizeof(test_msg)), + (int)sizeof(test_msg)); + ExpectIntEQ(wolfSSL_BIO_read(bio2, test_msg_recvd, sizeof(test_msg_recvd)), + (int)sizeof(test_msg)); ExpectIntEQ(XMEMCMP(test_msg_recvd, test_msg, sizeof(test_msg)), 0); #ifdef __linux__ /* now "disconnect" the sockets and attempt transmits expected to fail. */ sin1.sin_family = AF_UNSPEC; - ExpectIntEQ(connect(fd1, (const struct sockaddr *)&sin1, (socklen_t)sizeof(sin1)), 0); - ExpectIntEQ(connect(fd2, (const struct sockaddr *)&sin1, (socklen_t)sizeof(sin1)), 0); + ExpectIntEQ(connect(fd1, (const struct sockaddr *)&sin1, + (socklen_t)sizeof(sin1)), 0); + ExpectIntEQ(connect(fd2, (const struct sockaddr *)&sin1, + (socklen_t)sizeof(sin1)), 0); sin1.sin_family = AF_INET; - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_CONNECTED, 0, NULL), WOLFSSL_SUCCESS); - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio2, BIO_CTRL_DGRAM_SET_CONNECTED, 0, NULL), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_CONNECTED, 0, + NULL), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio2, BIO_CTRL_DGRAM_SET_CONNECTED, 0, + NULL), WOLFSSL_SUCCESS); if (EXPECT_SUCCESS()) { - sin2.sin_addr.s_addr = htonl(0xc0a8c0a8); /* 192.168.192.168 -- invalid for loopback interface. */ + /* 192.168.192.168 -- invalid for loopback interface. 
*/ + sin2.sin_addr.s_addr = htonl(0xc0a8c0a8); XMEMCPY(&bio_addr2->sa_in, &sin2, sizeof(sin2)); - ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_PEER, 0, bio_addr2), WOLFSSL_SUCCESS); + ExpectIntEQ((int)wolfSSL_BIO_ctrl(bio1, BIO_CTRL_DGRAM_SET_PEER, 0, + bio_addr2), WOLFSSL_SUCCESS); wolfSSL_BIO_ADDR_clear(bio_addr2); } @@ -796,7 +822,8 @@ static THREAD_RETURN WOLFSSL_THREAD test_wolfSSL_BIO_accept_client(void* args) (void)args; - AssertIntGT(snprintf(connectAddr, sizeof(connectAddr), "%s:%d", wolfSSLIP, wolfSSLPort), 0); + AssertIntGT(snprintf(connectAddr, sizeof(connectAddr), "%s:%d", wolfSSLIP, + wolfSSLPort), 0); clientBio = BIO_new_connect(connectAddr); AssertNotNull(clientBio); AssertIntEQ(BIO_do_connect(clientBio), 1); @@ -804,7 +831,8 @@ static THREAD_RETURN WOLFSSL_THREAD test_wolfSSL_BIO_accept_client(void* args) AssertNotNull(ctx); sslClient = SSL_new(ctx); AssertNotNull(sslClient); - AssertIntEQ(wolfSSL_CTX_load_verify_locations(ctx, caCertFile, 0), WOLFSSL_SUCCESS); + AssertIntEQ(wolfSSL_CTX_load_verify_locations(ctx, caCertFile, 0), + WOLFSSL_SUCCESS); SSL_set_bio(sslClient, clientBio, clientBio); AssertIntEQ(SSL_connect(sslClient), 1); @@ -1156,5 +1184,287 @@ int test_wolfSSL_BIO_get_len(void) return EXPECT_RESULT(); } +#if defined(OPENSSL_EXTRA) && !defined(NO_BIO) +static long bioCallback(BIO *bio, int cmd, const char* argp, int argi, + long argl, long ret) +{ + (void)bio; + (void)cmd; + (void)argp; + (void)argi; + (void)argl; + return ret; +} +#endif + +int test_wolfSSL_BIO(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_BIO) + const unsigned char* p = NULL; + byte buff[20]; + BIO* bio1 = NULL; + BIO* bio2 = NULL; + BIO* bio3 = NULL; + char* bufPt = NULL; + int i; + + for (i = 0; i < 20; i++) { + buff[i] = i; + } + /* test BIO_free with NULL */ + ExpectIntEQ(BIO_free(NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + + /* Creating and testing type BIO_s_bio */ + ExpectNotNull(bio1 = BIO_new(BIO_s_bio())); + 
ExpectNotNull(bio2 = BIO_new(BIO_s_bio())); + ExpectNotNull(bio3 = BIO_new(BIO_s_bio())); + + /* read/write before set up */ + ExpectIntEQ(BIO_read(bio1, buff, 2), WOLFSSL_BIO_UNSET); + ExpectIntEQ(BIO_write(bio1, buff, 2), WOLFSSL_BIO_UNSET); + + ExpectIntEQ(BIO_set_nbio(bio1, 1), 1); + ExpectIntEQ(BIO_set_write_buf_size(bio1, 20), WOLFSSL_SUCCESS); + ExpectIntEQ(BIO_set_write_buf_size(bio2, 8), WOLFSSL_SUCCESS); + ExpectIntEQ(BIO_make_bio_pair(bio1, bio2), WOLFSSL_SUCCESS); + + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 10), 10); + ExpectNotNull(XMEMCPY(bufPt, buff, 10)); + ExpectIntEQ(BIO_write(bio1, buff + 10, 10), 10); + /* write buffer full */ + ExpectIntEQ(BIO_write(bio1, buff, 10), WOLFSSL_BIO_ERROR); + ExpectIntEQ(BIO_flush(bio1), WOLFSSL_SUCCESS); + ExpectIntEQ((int)BIO_ctrl_pending(bio1), 0); + + /* write the other direction with pair */ + ExpectIntEQ((int)BIO_nwrite(bio2, &bufPt, 10), 8); + ExpectNotNull(XMEMCPY(bufPt, buff, 8)); + ExpectIntEQ(BIO_write(bio2, buff, 10), WOLFSSL_BIO_ERROR); + + /* try read */ + ExpectIntEQ((int)BIO_ctrl_pending(bio1), 8); + ExpectIntEQ((int)BIO_ctrl_pending(bio2), 20); + + /* try read using ctrl function */ + ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_WPENDING, 0, NULL), 8); + ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_PENDING, 0, NULL), 8); + ExpectIntEQ((int)BIO_ctrl(bio2, BIO_CTRL_WPENDING, 0, NULL), 20); + ExpectIntEQ((int)BIO_ctrl(bio2, BIO_CTRL_PENDING, 0, NULL), 20); + + ExpectIntEQ(BIO_nread(bio2, &bufPt, (int)BIO_ctrl_pending(bio2)), 20); + for (i = 0; i < 20; i++) { + ExpectIntEQ((int)bufPt[i], i); + } + ExpectIntEQ(BIO_nread(bio2, &bufPt, 1), 0); + ExpectIntEQ(BIO_nread(bio1, &bufPt, (int)BIO_ctrl_pending(bio1)), 8); + for (i = 0; i < 8; i++) { + ExpectIntEQ((int)bufPt[i], i); + } + ExpectIntEQ(BIO_nread(bio1, &bufPt, 1), 0); + ExpectIntEQ(BIO_ctrl_reset_read_request(bio1), 1); + + /* new pair */ + ExpectIntEQ(BIO_make_bio_pair(bio1, bio3), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + BIO_free(bio2); /* free bio2 and 
automatically remove from pair */ + bio2 = NULL; + ExpectIntEQ(BIO_make_bio_pair(bio1, bio3), WOLFSSL_SUCCESS); + ExpectIntEQ((int)BIO_ctrl_pending(bio3), 0); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 10), 0); + + /* test wrap around... */ + ExpectIntEQ(BIO_reset(bio1), 1); + ExpectIntEQ(BIO_reset(bio3), 1); + + /* fill write buffer, read only small amount then write again */ + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); + ExpectNotNull(XMEMCPY(bufPt, buff, 20)); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 4), 4); + for (i = 0; i < 4; i++) { + ExpectIntEQ(bufPt[i], i); + } + + /* try writing over read index */ + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 5), 4); + ExpectNotNull(XMEMSET(bufPt, 0, 4)); + ExpectIntEQ((int)BIO_ctrl_pending(bio3), 20); + + /* read and write 0 bytes */ + ExpectIntEQ(BIO_nread(bio3, &bufPt, 0), 0); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 0), 0); + + /* should read only to end of write buffer then need to read again */ + ExpectIntEQ(BIO_nread(bio3, &bufPt, 20), 16); + for (i = 0; i < 16; i++) { + ExpectIntEQ(bufPt[i], buff[4 + i]); + } + + ExpectIntEQ(BIO_nread(bio3, NULL, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(BIO_nread0(bio3, &bufPt), 4); + for (i = 0; i < 4; i++) { + ExpectIntEQ(bufPt[i], 0); + } + + /* read index should not have advanced with nread0 */ + ExpectIntEQ(BIO_nread(bio3, &bufPt, 5), 4); + for (i = 0; i < 4; i++) { + ExpectIntEQ(bufPt[i], 0); + } + + /* write and fill up buffer checking reset of index state */ + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); + ExpectNotNull(XMEMCPY(bufPt, buff, 20)); + + /* test reset on data in bio1 write buffer */ + ExpectIntEQ(BIO_reset(bio1), 1); + ExpectIntEQ((int)BIO_ctrl_pending(bio3), 0); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 3), 0); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 20), 20); + ExpectIntEQ((int)BIO_ctrl(bio1, BIO_CTRL_INFO, 0, &p), 20); + ExpectNotNull(p); + ExpectNotNull(XMEMCPY(bufPt, buff, 20)); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 6), 6); + for (i = 0; i < 6; i++) { + 
ExpectIntEQ(bufPt[i], i); + } + + /* test case of writing twice with offset read index */ + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 3), 3); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), 3); /* try overwriting */ + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 0), 0); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); + ExpectIntEQ(BIO_nread(bio3, &bufPt, 1), 1); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), 1); + ExpectIntEQ(BIO_nwrite(bio1, &bufPt, 4), WOLFSSL_BIO_ERROR); + + BIO_free(bio1); + bio1 = NULL; + BIO_free(bio3); + bio3 = NULL; + + #if defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) + { + BIO* bioA = NULL; + BIO* bioB = NULL; + ExpectIntEQ(BIO_new_bio_pair(NULL, 256, NULL, 256), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(BIO_new_bio_pair(&bioA, 256, &bioB, 256), WOLFSSL_SUCCESS); + BIO_free(bioA); + bioA = NULL; + BIO_free(bioB); + bioB = NULL; + } + #endif /* OPENSSL_ALL || WOLFSSL_ASIO */ + + /* BIOs with file pointers */ + #if !defined(NO_FILESYSTEM) + { + XFILE f1 = XBADFILE; + XFILE f2 = XBADFILE; + BIO* f_bio1 = NULL; + BIO* f_bio2 = NULL; + unsigned char cert[300]; + char testFile[] = "tests/bio_write_test.txt"; + char msg[] = "bio_write_test.txt contains the first 300 bytes of " + "certs/server-cert.pem\n" + "created by tests/unit.test\n\n"; + + ExpectNotNull(f_bio1 = BIO_new(BIO_s_file())); + ExpectNotNull(f_bio2 = BIO_new(BIO_s_file())); + + /* Failure due to wrong BIO type */ + ExpectIntEQ((int)BIO_set_mem_eof_return(f_bio1, -1), 0); + ExpectIntEQ((int)BIO_set_mem_eof_return(NULL, -1), 0); + + ExpectTrue((f1 = XFOPEN(svrCertFile, "rb+")) != XBADFILE); + ExpectIntEQ((int)BIO_set_fp(f_bio1, f1, BIO_CLOSE), WOLFSSL_SUCCESS); + ExpectIntEQ(BIO_write_filename(f_bio2, testFile), + WOLFSSL_SUCCESS); + + ExpectIntEQ(BIO_read(f_bio1, cert, sizeof(cert)), sizeof(cert)); + ExpectIntEQ(BIO_tell(f_bio1),sizeof(cert)); + ExpectIntEQ(BIO_write(f_bio2, msg, sizeof(msg)), sizeof(msg)); + 
ExpectIntEQ(BIO_tell(f_bio2),sizeof(msg)); + ExpectIntEQ(BIO_write(f_bio2, cert, sizeof(cert)), sizeof(cert)); + ExpectIntEQ(BIO_tell(f_bio2),sizeof(cert) + sizeof(msg)); + + ExpectIntEQ((int)BIO_get_fp(f_bio2, &f2), WOLFSSL_SUCCESS); + ExpectIntEQ(BIO_reset(f_bio2), 1); + ExpectIntEQ(BIO_tell(NULL),-1); + ExpectIntEQ(BIO_tell(f_bio2),0); + ExpectIntEQ(BIO_seek(f_bio2, 4), 0); + ExpectIntEQ(BIO_tell(f_bio2),4); + + BIO_free(f_bio1); + f_bio1 = NULL; + BIO_free(f_bio2); + f_bio2 = NULL; + + ExpectNotNull(f_bio1 = BIO_new_file(svrCertFile, "rb+")); + ExpectIntEQ((int)BIO_set_mem_eof_return(f_bio1, -1), 0); + ExpectIntEQ(BIO_read(f_bio1, cert, sizeof(cert)), sizeof(cert)); + BIO_free(f_bio1); + f_bio1 = NULL; + } + #endif /* !defined(NO_FILESYSTEM) */ + + /* BIO info callback */ + { + const char* testArg = "test"; + BIO* cb_bio = NULL; + ExpectNotNull(cb_bio = BIO_new(BIO_s_mem())); + + BIO_set_callback(cb_bio, bioCallback); + ExpectNotNull(BIO_get_callback(cb_bio)); + BIO_set_callback(cb_bio, NULL); + ExpectNull(BIO_get_callback(cb_bio)); + + BIO_set_callback_arg(cb_bio, (char*)testArg); + ExpectStrEQ(BIO_get_callback_arg(cb_bio), testArg); + ExpectNull(BIO_get_callback_arg(NULL)); + + BIO_free(cb_bio); + cb_bio = NULL; + } + + /* BIO_vfree */ + ExpectNotNull(bio1 = BIO_new(BIO_s_bio())); + BIO_vfree(NULL); + BIO_vfree(bio1); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_BIO_BIO_ring_read(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && !defined(NO_BIO) + BIO* bio1 = NULL; + BIO* bio2 = NULL; + byte data[50]; + byte tmp[50]; + + XMEMSET(data, 42, sizeof(data)); + + + ExpectIntEQ(BIO_new_bio_pair(&bio1, sizeof(data), &bio2, sizeof(data)), + SSL_SUCCESS); + + ExpectIntEQ(BIO_write(bio1, data, 40), 40); + ExpectIntEQ(BIO_read(bio1, tmp, 20), -1); + ExpectIntEQ(BIO_read(bio2, tmp, 20), 20); + ExpectBufEQ(tmp, data, 20); + ExpectIntEQ(BIO_write(bio1, data, 20), 20); + ExpectIntEQ(BIO_read(bio2, tmp, 40), 40); + ExpectBufEQ(tmp, data, 40); + + 
BIO_free(bio1); + BIO_free(bio2); +#endif + return EXPECT_RESULT(); +} + #endif /* !NO_BIO */ diff --git a/tests/api/test_ossl_bio.h b/tests/api/test_ossl_bio.h index 8bcbc743f..aff38a9bb 100644 --- a/tests/api/test_ossl_bio.h +++ b/tests/api/test_ossl_bio.h @@ -40,6 +40,8 @@ int test_wolfSSL_BIO_f_md(void); int test_wolfSSL_BIO_up_ref(void); int test_wolfSSL_BIO_reset(void); int test_wolfSSL_BIO_get_len(void); +int test_wolfSSL_BIO(void); +int test_wolfSSL_BIO_BIO_ring_read(void); #define TEST_OSSL_BIO_DECLS \ TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_gets), \ @@ -52,7 +54,9 @@ int test_wolfSSL_BIO_get_len(void); TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_f_md), \ TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_up_ref), \ TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_reset), \ - TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_get_len) + TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_get_len), \ + TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO), \ + TEST_DECL_GROUP("ossl_bio", test_wolfSSL_BIO_BIO_ring_read) #define TEST_OSSL_BIO_TLS_DECLS \ TEST_DECL_GROUP("ossl_bio_tls", test_wolfSSL_BIO_connect), \ diff --git a/tests/api/test_ossl_bn.c b/tests/api/test_ossl_bn.c index 176772eec..be0841a39 100644 --- a/tests/api/test_ossl_bn.c +++ b/tests/api/test_ossl_bn.c @@ -217,14 +217,16 @@ int test_wolfSSL_BN_init(void) ExpectIntEQ(BN_set_word(&cv, 5), SSL_SUCCESS); /* a^b mod c = */ - ExpectIntEQ(BN_mod_exp(&dv, NULL, &bv, &cv, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(BN_mod_exp(&dv, NULL, &bv, &cv, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); ExpectIntEQ(BN_mod_exp(&dv, ap, &bv, &cv, NULL), WOLFSSL_SUCCESS); /* check result 3^2 mod 5 */ ExpectIntEQ(BN_get_word(&dv), 4); /* a*b mod c = */ - ExpectIntEQ(BN_mod_mul(&dv, NULL, &bv, &cv, NULL), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(BN_mod_mul(&dv, NULL, &bv, &cv, NULL), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); ExpectIntEQ(BN_mod_mul(&dv, ap, &bv, &cv, NULL), SSL_SUCCESS); /* check result 3*2 mod 5 */ @@ -1027,7 
+1029,8 @@ int test_wolfSSL_BN_prime(void) EXPECT_DECLS; #if defined(OPENSSL_EXTRA) && !defined(NO_ASN) && \ !defined(OPENSSL_EXTRA_NO_BN) && !defined(WOLFSSL_SP_MATH) -#if defined(WOLFSSL_KEY_GEN) && (!defined(NO_RSA) || !defined(NO_DH) || !defined(NO_DSA)) +#if defined(WOLFSSL_KEY_GEN) && (!defined(NO_RSA) || !defined(NO_DH) || \ + !defined(NO_DSA)) BIGNUM* a = NULL; BIGNUM* add = NULL; BIGNUM* rem = NULL; diff --git a/tests/api/test_ossl_dgst.c b/tests/api/test_ossl_dgst.c index 8bc6c467e..bce1f6229 100644 --- a/tests/api/test_ossl_dgst.c +++ b/tests/api/test_ossl_dgst.c @@ -161,8 +161,10 @@ int test_wolfSSL_MD5_Transform(void) ExpectIntEQ(MD5_Transform(NULL, (const byte*)&input1), 0); ExpectIntEQ(MD5_Transform(&md5.compat, NULL), 0); ExpectIntEQ(wc_Md5Transform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Md5Transform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Md5Transform(&md5.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Md5Transform(NULL, (const byte*)&input1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Md5Transform(&md5.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init MD5 CTX */ ExpectIntEQ(wolfSSL_MD5_Init(&md5.compat), 1); @@ -359,8 +361,10 @@ int test_wolfSSL_SHA_Transform(void) ExpectIntEQ(SHA1_Transform(NULL, (const byte*)&input1), 0); ExpectIntEQ(SHA1_Transform(&sha.compat, NULL), 0); ExpectIntEQ(wc_ShaTransform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_ShaTransform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_ShaTransform(&sha.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_ShaTransform(NULL, (const byte*)&input1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_ShaTransform(&sha.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init SHA CTX */ ExpectIntEQ(SHA_Init(&sha.compat), 1); @@ -500,8 +504,10 @@ int test_wolfSSL_SHA256_Transform(void) ExpectIntEQ(SHA256_Transform(NULL, (const 
byte*)&input1), 0); ExpectIntEQ(SHA256_Transform(&sha256.compat, NULL), 0); ExpectIntEQ(wc_Sha256Transform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha256Transform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha256Transform(&sha256.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha256Transform(NULL, (const byte*)&input1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha256Transform(&sha256.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init SHA256 CTX */ ExpectIntEQ(SHA256_Init(&sha256.compat), 1); @@ -574,8 +580,10 @@ int test_wolfSSL_SHA512_Transform(void) ExpectIntEQ(SHA512_Transform(NULL, (const byte*)&input1), 0); ExpectIntEQ(SHA512_Transform(&sha512.compat, NULL), 0); ExpectIntEQ(wc_Sha512Transform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha512Transform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha512Transform(&sha512.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512Transform(NULL, (const byte*)&input1), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512Transform(&sha512.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init SHA512 CTX */ ExpectIntEQ(wolfSSL_SHA512_Init(&sha512.compat), 1); @@ -584,8 +592,8 @@ int test_wolfSSL_SHA512_Transform(void) sLen = (word32)XSTRLEN((char*)input1); XMEMCPY(local, input1, sLen); ExpectIntEQ(SHA512_Transform(&sha512.compat, (const byte*)&local[0]), 1); - ExpectIntEQ(XMEMCMP(sha512.native.digest, output1, - WC_SHA512_DIGEST_SIZE), 0); + ExpectIntEQ(XMEMCMP(sha512.native.digest, output1, WC_SHA512_DIGEST_SIZE), + 0); ExpectIntEQ(SHA512_Final(local, &sha512.compat), 1); /* frees resources */ /* Init SHA512 CTX */ @@ -594,8 +602,8 @@ int test_wolfSSL_SHA512_Transform(void) XMEMSET(local, 0, WC_SHA512_BLOCK_SIZE); XMEMCPY(local, input2, sLen); ExpectIntEQ(SHA512_Transform(&sha512.compat, (const byte*)&local[0]), 1); - ExpectIntEQ(XMEMCMP(sha512.native.digest, 
output2, - WC_SHA512_DIGEST_SIZE), 0); + ExpectIntEQ(XMEMCMP(sha512.native.digest, output2, WC_SHA512_DIGEST_SIZE), + 0); ExpectIntEQ(SHA512_Final(local, &sha512.compat), 1); /* frees resources */ (void)input1; @@ -643,10 +651,12 @@ int test_wolfSSL_SHA512_224_Transform(void) ExpectIntEQ(SHA512_224_Transform(NULL, NULL), 0); ExpectIntEQ(SHA512_224_Transform(NULL, (const byte*)&input1), 0); ExpectIntEQ(SHA512_224_Transform(&sha512.compat, NULL), 0); - ExpectIntEQ(wc_Sha512_224Transform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512_224Transform(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wc_Sha512_224Transform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha512_224Transform(&sha512.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512_224Transform(&sha512.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init SHA512 CTX */ ExpectIntEQ(wolfSSL_SHA512_224_Init(&sha512.compat), 1); @@ -716,10 +726,12 @@ int test_wolfSSL_SHA512_256_Transform(void) ExpectIntEQ(SHA512_256_Transform(NULL, NULL), 0); ExpectIntEQ(SHA512_256_Transform(NULL, (const byte*)&input1), 0); ExpectIntEQ(SHA512_256_Transform(&sha512.compat, NULL), 0); - ExpectIntEQ(wc_Sha512_256Transform(NULL, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512_256Transform(NULL, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wc_Sha512_256Transform(NULL, (const byte*)&input1), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wc_Sha512_256Transform(&sha512.native, NULL), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wc_Sha512_256Transform(&sha512.native, NULL), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); /* Init SHA512 CTX */ ExpectIntEQ(wolfSSL_SHA512_256_Init(&sha512.compat), 1); diff --git a/tests/api/test_ossl_ec.c b/tests/api/test_ossl_ec.c index dd5bf28cb..33c4678a2 100644 --- a/tests/api/test_ossl_ec.c +++ b/tests/api/test_ossl_ec.c @@ -610,7 +610,7 @@ int test_wolfSSL_EC_POINT(void) hexStr = EC_POINT_point2hex(group, 
Gxy, POINT_CONVERSION_COMPRESSED, ctx); ExpectNotNull(hexStr); ExpectStrEQ(hexStr, compG); - #ifdef HAVE_COMP_KEY + #if defined(HAVE_COMP_KEY) && !defined(HAVE_SELFTEST) ExpectNotNull(get_point = EC_POINT_hex2point (group, hexStr, get_point, ctx)); ExpectIntEQ(EC_POINT_cmp(group, Gxy, get_point, ctx), 0); @@ -1345,12 +1345,15 @@ int test_wolfSSL_EC_KEY_print_fp(void) EC_KEY* key = NULL; /* Bad file pointer. */ - ExpectIntEQ(wolfSSL_EC_KEY_print_fp(NULL, key, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_print_fp(NULL, key, 0), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); /* NULL key. */ - ExpectIntEQ(wolfSSL_EC_KEY_print_fp(stderr, NULL, 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_print_fp(stderr, NULL, 0), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); ExpectNotNull((key = wolfSSL_EC_KEY_new_by_curve_name(NID_secp224r1))); /* Negative indent. */ - ExpectIntEQ(wolfSSL_EC_KEY_print_fp(stderr, key, -1), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectIntEQ(wolfSSL_EC_KEY_print_fp(stderr, key, -1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); ExpectIntEQ(wolfSSL_EC_KEY_print_fp(stderr, key, 4), WOLFSSL_SUCCESS); ExpectIntEQ(wolfSSL_EC_KEY_generate_key(key), WOLFSSL_SUCCESS); diff --git a/tests/api/test_ossl_obj.c b/tests/api/test_ossl_obj.c new file mode 100644 index 000000000..e6dc3f025 --- /dev/null +++ b/tests/api/test_ossl_obj.c @@ -0,0 +1,465 @@ +/* test_ossl_obj.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include +#include + +#if defined(OPENSSL_EXTRA) +static void obj_name_t(const OBJ_NAME* nm, void* arg) +{ + (void)arg; + (void)nm; + + AssertIntGT(nm->type, OBJ_NAME_TYPE_UNDEF); + +#if !defined(NO_FILESYSTEM) && defined(DEBUG_WOLFSSL_VERBOSE) + /* print to stderr */ + AssertNotNull(arg); + + BIO *bio = BIO_new(BIO_s_file()); + BIO_set_fp(bio, arg, BIO_NOCLOSE); + BIO_printf(bio, "%s\n", nm); + BIO_free(bio); +#endif +} + +#endif +int test_OBJ_NAME_do_all(void) +{ + int res = TEST_SKIPPED; +#if defined(OPENSSL_EXTRA) + + OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, NULL, NULL); + + OBJ_NAME_do_all(OBJ_NAME_TYPE_CIPHER_METH, NULL, stderr); + + OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, obj_name_t, stderr); + OBJ_NAME_do_all(OBJ_NAME_TYPE_PKEY_METH, obj_name_t, stderr); + OBJ_NAME_do_all(OBJ_NAME_TYPE_COMP_METH, obj_name_t, stderr); + OBJ_NAME_do_all(OBJ_NAME_TYPE_NUM, obj_name_t, stderr); + OBJ_NAME_do_all(OBJ_NAME_TYPE_UNDEF, obj_name_t, stderr); + OBJ_NAME_do_all(OBJ_NAME_TYPE_CIPHER_METH, obj_name_t, stderr); + OBJ_NAME_do_all(-1, obj_name_t, stderr); + + res = TEST_SUCCESS; +#endif + + return res; +} + +int test_wolfSSL_OBJ(void) +{ +/* Password "wolfSSL test" is only 12 (96-bit) too short for testing in FIPS + * mode + */ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) && !defined(NO_ASN) && \ + !defined(HAVE_FIPS) && !defined(NO_SHA) && defined(WOLFSSL_CERT_EXT) && \ + defined(WOLFSSL_CERT_GEN) && !defined(NO_BIO) && \ + !defined(NO_FILESYSTEM) && !defined(NO_STDIO_FILESYSTEM) + ASN1_OBJECT *obj = NULL; + ASN1_OBJECT *obj2 = NULL; + char buf[50]; + + XFILE fp = XBADFILE; + X509 *x509 = NULL; + X509_NAME 
*x509Name = NULL; + X509_NAME_ENTRY *x509NameEntry = NULL; + ASN1_OBJECT *asn1Name = NULL; + int numNames = 0; + BIO *bio = NULL; + int nid; + int i, j; + const char *f[] = { + #ifndef NO_RSA + "./certs/ca-cert.der", + #endif + #ifdef HAVE_ECC + "./certs/ca-ecc-cert.der", + "./certs/ca-ecc384-cert.der", + #endif + NULL}; + ASN1_OBJECT *field_name_obj = NULL; + int lastpos = -1; + int tmp = -1; + ASN1_STRING *asn1 = NULL; + unsigned char *buf_dyn = NULL; + + ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), + WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + ExpectNotNull(obj = OBJ_nid2obj(NID_any_policy)); + ExpectIntEQ(OBJ_obj2nid(obj), NID_any_policy); + ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), 11); + ExpectIntGT(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + + ExpectNotNull(obj = OBJ_nid2obj(NID_sha256)); + ExpectIntEQ(OBJ_obj2nid(obj), NID_sha256); + ExpectIntEQ(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1), 22); +#ifdef WOLFSSL_CERT_EXT + ExpectIntEQ(OBJ_txt2nid(buf), NID_sha256); +#endif + ExpectIntGT(OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0), 0); + ExpectNotNull(obj2 = OBJ_dup(obj)); + ExpectIntEQ(OBJ_cmp(obj, obj2), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + ASN1_OBJECT_free(obj2); + obj2 = NULL; + + for (i = 0; f[i] != NULL; i++) + { + ExpectTrue((fp = XFOPEN(f[i], "rb")) != XBADFILE); + ExpectNotNull(x509 = d2i_X509_fp(fp, NULL)); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + ExpectNotNull(x509Name = X509_get_issuer_name(x509)); + ExpectIntNE((numNames = X509_NAME_entry_count(x509Name)), 0); + + /* Get the Common Name by using OBJ_txt2obj */ + ExpectNotNull(field_name_obj = OBJ_txt2obj("CN", 0)); + ExpectIntEQ(X509_NAME_get_index_by_OBJ(NULL, NULL, 99), + WOLFSSL_FATAL_ERROR); + ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, NULL, 99), + WOLFSSL_FATAL_ERROR); + ExpectIntEQ(X509_NAME_get_index_by_OBJ(NULL, field_name_obj, 99), + WOLFSSL_FATAL_ERROR); + 
ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, field_name_obj, 99), + WOLFSSL_FATAL_ERROR); + ExpectIntEQ(X509_NAME_get_index_by_OBJ(x509Name, NULL, 0), + WOLFSSL_FATAL_ERROR); + do + { + lastpos = tmp; + tmp = X509_NAME_get_index_by_OBJ(x509Name, field_name_obj, lastpos); + } while (tmp > -1); + ExpectIntNE(lastpos, -1); + ASN1_OBJECT_free(field_name_obj); + field_name_obj = NULL; + ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, lastpos)); + ExpectNotNull(asn1 = X509_NAME_ENTRY_get_data(x509NameEntry)); + ExpectIntGE(ASN1_STRING_to_UTF8(&buf_dyn, asn1), 0); + /* + * All Common Names should be www.wolfssl.com + * This makes testing easier as we can test for the expected value. + */ + ExpectStrEQ((char*)buf_dyn, "www.wolfssl.com"); + OPENSSL_free(buf_dyn); + buf_dyn = NULL; + bio = BIO_new(BIO_s_mem()); + ExpectTrue(bio != NULL); + for (j = 0; j < numNames; j++) + { + ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, j)); + ExpectNotNull(asn1Name = X509_NAME_ENTRY_get_object(x509NameEntry)); + ExpectTrue((nid = OBJ_obj2nid(asn1Name)) > 0); + } + BIO_free(bio); + bio = NULL; + X509_free(x509); + x509 = NULL; + + } + +#ifdef HAVE_PKCS12 + { + PKCS12 *p12 = NULL; + int boolRet; + EVP_PKEY *pkey = NULL; + const char *p12_f[] = { + /* bundle uses AES-CBC 256 and PKCS7 key uses DES3 */ + #if !defined(NO_DES3) && defined(WOLFSSL_AES_256) && !defined(NO_RSA) + "./certs/test-servercert.p12", + #endif + NULL + }; + + for (i = 0; p12_f[i] != NULL; i++) + { + ExpectTrue((fp = XFOPEN(p12_f[i], "rb")) != XBADFILE); + ExpectNotNull(p12 = d2i_PKCS12_fp(fp, NULL)); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + ExpectTrue((boolRet = PKCS12_parse(p12, "wolfSSL test", + &pkey, &x509, NULL)) > 0); + wc_PKCS12_free(p12); + p12 = NULL; + EVP_PKEY_free(pkey); + x509Name = X509_get_issuer_name(x509); + ExpectNotNull(x509Name); + ExpectIntNE((numNames = X509_NAME_entry_count(x509Name)), 0); + ExpectTrue((bio = BIO_new(BIO_s_mem())) != NULL); + for (j 
= 0; j < numNames; j++) + { + ExpectNotNull(x509NameEntry = X509_NAME_get_entry(x509Name, j)); + ExpectNotNull(asn1Name = + X509_NAME_ENTRY_get_object(x509NameEntry)); + ExpectTrue((nid = OBJ_obj2nid(asn1Name)) > 0); + } + BIO_free(bio); + bio = NULL; + X509_free(x509); + x509 = NULL; + } + } +#endif /* HAVE_PKCS12 */ +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_OBJ_cmp(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(NO_SHA256) + ASN1_OBJECT *obj = NULL; + ASN1_OBJECT *obj2 = NULL; + + ExpectNotNull(obj = OBJ_nid2obj(NID_any_policy)); + ExpectNotNull(obj2 = OBJ_nid2obj(NID_sha256)); + + ExpectIntEQ(OBJ_cmp(NULL, NULL), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(OBJ_cmp(obj, NULL), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(OBJ_cmp(NULL, obj2), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(OBJ_cmp(obj, obj2), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + ExpectIntEQ(OBJ_cmp(obj, obj), 0); + ExpectIntEQ(OBJ_cmp(obj2, obj2), 0); + + ASN1_OBJECT_free(obj); + ASN1_OBJECT_free(obj2); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_OBJ_txt2nid(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) || \ + defined(WOLFSSL_APACHE_HTTPD) + int i; + static const struct { + const char* sn; + const char* ln; + const char* oid; + int nid; + } testVals[] = { +#ifdef WOLFSSL_APACHE_HTTPD + { "tlsfeature", "TLS Feature", "1.3.6.1.5.5.7.1.24", NID_tlsfeature }, + { "id-on-dnsSRV", "SRVName", "1.3.6.1.5.5.7.8.7", + NID_id_on_dnsSRV }, + { "msUPN", "Microsoft User Principal Name", + "1.3.6.1.4.1.311.20.2.3", NID_ms_upn }, +#endif + { NULL, NULL, NULL, NID_undef } + }; + + /* Invalid cases */ + ExpectIntEQ(OBJ_txt2nid(NULL), NID_undef); + ExpectIntEQ(OBJ_txt2nid("Bad name"), NID_undef); + + /* Valid cases */ + for (i = 0; testVals[i].sn != NULL; i++) { + ExpectIntEQ(OBJ_txt2nid(testVals[i].sn), testVals[i].nid); + ExpectIntEQ(OBJ_txt2nid(testVals[i].ln), testVals[i].nid); + 
ExpectIntEQ(OBJ_txt2nid(testVals[i].oid), testVals[i].nid); + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_OBJ_txt2obj(void) +{ + EXPECT_DECLS; +#if defined(WOLFSSL_APACHE_HTTPD) || (defined(OPENSSL_EXTRA) && \ + defined(WOLFSSL_CERT_EXT) && defined(WOLFSSL_CERT_GEN)) + int i; + char buf[50]; + ASN1_OBJECT* obj = NULL; + static const struct { + const char* oidStr; + const char* sn; + const char* ln; + } objs_list[] = { + #if defined(WOLFSSL_APACHE_HTTPD) + { "1.3.6.1.5.5.7.1.24", "tlsfeature", "TLS Feature" }, + { "1.3.6.1.5.5.7.8.7", "id-on-dnsSRV", "SRVName" }, + #endif + { "2.5.29.19", "basicConstraints", "X509v3 Basic Constraints"}, + { NULL, NULL, NULL } + }; + static const struct { + const char* numeric; + const char* name; + } objs_named[] = { + /* In dictionary but not in normal list. */ + { "1.3.6.1.5.5.7.3.8", "Time Stamping" }, + /* Made up OID. */ + { "1.3.5.7", "1.3.5.7" }, + { NULL, NULL } + }; + + ExpectNull(obj = OBJ_txt2obj("Bad name", 0)); + ASN1_OBJECT_free(obj); + obj = NULL; + ExpectNull(obj = OBJ_txt2obj(NULL, 0)); + ASN1_OBJECT_free(obj); + obj = NULL; + + for (i = 0; objs_list[i].oidStr != NULL; i++) { + /* Test numerical value of oid (oidStr) */ + ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].oidStr, 1)); + /* Convert object back to text to confirm oid is correct */ + wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); + ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + XMEMSET(buf, 0, sizeof(buf)); + + /* Test short name (sn) */ + ExpectNull(obj = OBJ_txt2obj(objs_list[i].sn, 1)); + ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].sn, 0)); + /* Convert object back to text to confirm oid is correct */ + wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); + ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + XMEMSET(buf, 0, sizeof(buf)); + + /* Test long name (ln) - should fail when no_name = 1 */ + 
ExpectNull(obj = OBJ_txt2obj(objs_list[i].ln, 1)); + ExpectNotNull(obj = OBJ_txt2obj(objs_list[i].ln, 0)); + /* Convert object back to text to confirm oid is correct */ + wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); + ExpectIntEQ(XSTRNCMP(buf, objs_list[i].oidStr, (int)XSTRLEN(buf)), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + XMEMSET(buf, 0, sizeof(buf)); + } + + for (i = 0; objs_named[i].numeric != NULL; i++) { + ExpectNotNull(obj = OBJ_txt2obj(objs_named[i].numeric, 1)); + wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 0); + ExpectIntEQ(XSTRNCMP(buf, objs_named[i].name, (int)XSTRLEN(buf)), 0); + wolfSSL_OBJ_obj2txt(buf, (int)sizeof(buf), obj, 1); + ExpectIntEQ(XSTRNCMP(buf, objs_named[i].numeric, (int)XSTRLEN(buf)), 0); + ASN1_OBJECT_free(obj); + obj = NULL; + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_OBJ_ln(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + const int nid_set[] = { + NID_commonName, + NID_serialNumber, + NID_countryName, + NID_localityName, + NID_stateOrProvinceName, + NID_organizationName, + NID_organizationalUnitName, + NID_domainComponent, + NID_businessCategory, + NID_jurisdictionCountryName, + NID_jurisdictionStateOrProvinceName, + NID_emailAddress + }; + const char* ln_set[] = { + "commonName", + "serialNumber", + "countryName", + "localityName", + "stateOrProvinceName", + "organizationName", + "organizationalUnitName", + "domainComponent", + "businessCategory", + "jurisdictionCountryName", + "jurisdictionStateOrProvinceName", + "emailAddress", + }; + size_t i = 0, maxIdx = sizeof(ln_set)/sizeof(char*); + + ExpectIntEQ(OBJ_ln2nid(NULL), NID_undef); + +#ifdef HAVE_ECC +#if !defined(HAVE_FIPS) || (defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION>2)) + { + EC_builtin_curve r[27]; + size_t nCurves = sizeof(r) / sizeof(r[0]); + nCurves = EC_get_builtin_curves(r, nCurves); + + for (i = 0; i < nCurves; i++) { + /* skip ECC_CURVE_INVALID */ + if (r[i].nid != ECC_CURVE_INVALID) { + ExpectIntEQ(OBJ_ln2nid(r[i].comment), 
r[i].nid); + ExpectStrEQ(OBJ_nid2ln(r[i].nid), r[i].comment); + } + } + } +#endif +#endif + + for (i = 0; i < maxIdx; i++) { + ExpectIntEQ(OBJ_ln2nid(ln_set[i]), nid_set[i]); + ExpectStrEQ(OBJ_nid2ln(nid_set[i]), ln_set[i]); + } +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_OBJ_sn(void) +{ + EXPECT_DECLS; +#ifdef OPENSSL_ALL + int i = 0, maxIdx = 7; + const int nid_set[] = {NID_commonName,NID_countryName,NID_localityName, + NID_stateOrProvinceName,NID_organizationName, + NID_organizationalUnitName,NID_emailAddress}; + const char* sn_open_set[] = {"CN","C","L","ST","O","OU","emailAddress"}; + + ExpectIntEQ(wolfSSL_OBJ_sn2nid(NULL), NID_undef); + for (i = 0; i < maxIdx; i++) { + ExpectIntEQ(wolfSSL_OBJ_sn2nid(sn_open_set[i]), nid_set[i]); + ExpectStrEQ(wolfSSL_OBJ_nid2sn(nid_set[i]), sn_open_set[i]); + } +#endif + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_ossl_obj.h b/tests/api/test_ossl_obj.h new file mode 100644 index 000000000..c780e664d --- /dev/null +++ b/tests/api/test_ossl_obj.h @@ -0,0 +1,45 @@ +/* test_ossl_obj.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_OSSL_OBJ_H +#define WOLFCRYPT_TEST_OSSL_OBJ_H + +#include + +int test_OBJ_NAME_do_all(void); +int test_wolfSSL_OBJ(void); +int test_wolfSSL_OBJ_cmp(void); +int test_wolfSSL_OBJ_txt2nid(void); +int test_wolfSSL_OBJ_txt2obj(void); +int test_wolfSSL_OBJ_ln(void); +int test_wolfSSL_OBJ_sn(void); + +#define TEST_OSSL_OBJ_DECLS \ + TEST_DECL_GROUP("ossl_obj", test_OBJ_NAME_do_all), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ_cmp), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ_txt2nid), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ_txt2obj), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ_ln), \ + TEST_DECL_GROUP("ossl_obj", test_wolfSSL_OBJ_sn) + +#endif /* WOLFCRYPT_TEST_OSSL_OBJ_H */ + diff --git a/tests/api/test_ossl_p7p12.c b/tests/api/test_ossl_p7p12.c new file mode 100644 index 000000000..6611cf831 --- /dev/null +++ b/tests/api/test_ossl_p7p12.c @@ -0,0 +1,1321 @@ +/* test_ossl_p7p12.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#include +#include +#include +#include +#include + +int test_wolfssl_PKCS7(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_BIO) && \ + !defined(NO_RSA) + PKCS7* pkcs7 = NULL; + byte data[FOURK_BUF]; + word32 len = sizeof(data); + const byte* p = data; + byte content[] = "Test data to encode."; +#if !defined(NO_RSA) & defined(USE_CERT_BUFFERS_2048) + BIO* bio = NULL; + byte key[sizeof(client_key_der_2048)]; + word32 keySz = (word32)sizeof(key); + byte* out = NULL; +#endif + + ExpectIntGT((len = (word32)CreatePKCS7SignedData(data, (int)len, content, + (word32)sizeof(content), 0, 0, 0, RSA_TYPE)), 0); + + ExpectNull(pkcs7 = d2i_PKCS7(NULL, NULL, (int)len)); + ExpectNull(pkcs7 = d2i_PKCS7(NULL, &p, 0)); + ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); + ExpectIntEQ(wolfSSL_PKCS7_verify(NULL, NULL, NULL, NULL, NULL, + PKCS7_NOVERIFY), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* fail case, without PKCS7_NOVERIFY */ + p = data; + ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, NULL, NULL, + 0), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* success case, with PKCS7_NOVERIFY */ + p = data; + ExpectNotNull(pkcs7 = d2i_PKCS7(NULL, &p, (int)len)); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, NULL, NULL, + PKCS7_NOVERIFY), WOLFSSL_SUCCESS); + +#if !defined(NO_RSA) & defined(USE_CERT_BUFFERS_2048) + /* test i2d */ + XMEMCPY(key, client_key_der_2048, keySz); + if (pkcs7 != NULL) { + pkcs7->privateKey = key; + pkcs7->privateKeySz = (word32)sizeof(key); + 
pkcs7->encryptOID = RSAk; + #ifdef NO_SHA + pkcs7->hashOID = SHA256h; + #else + pkcs7->hashOID = SHAh; + #endif + } + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + ExpectIntEQ(i2d_PKCS7_bio(bio, pkcs7), 1); +#ifndef NO_ASN_TIME + ExpectIntEQ(i2d_PKCS7(pkcs7, &out), 655); +#else + ExpectIntEQ(i2d_PKCS7(pkcs7, &out), 625); +#endif + XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); + BIO_free(bio); +#endif + + PKCS7_free(NULL); + PKCS7_free(pkcs7); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_PKCS7_certs(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && !defined(NO_CERTS) && !defined(NO_BIO) && \ + !defined(NO_FILESYSTEM) && !defined(NO_RSA) && defined(HAVE_PKCS7) + STACK_OF(X509)* sk = NULL; + STACK_OF(X509_INFO)* info_sk = NULL; + PKCS7 *p7 = NULL; + BIO* bio = NULL; + const byte* p = NULL; + int buflen = 0; + int i; + + /* Test twice. Once with d2i and once without to test + * that everything is free'd correctly. */ + for (i = 0; i < 2; i++) { + ExpectNotNull(p7 = PKCS7_new()); + if (p7 != NULL) { + p7->version = 1; + #ifdef NO_SHA + p7->hashOID = SHA256h; + #else + p7->hashOID = SHAh; + #endif + } + ExpectNotNull(bio = BIO_new(BIO_s_file())); + ExpectIntGT(BIO_read_filename(bio, svrCertFile), 0); + ExpectNotNull(info_sk = PEM_X509_INFO_read_bio(bio, NULL, NULL, NULL)); + ExpectIntEQ(sk_X509_INFO_num(info_sk), 2); + ExpectNotNull(sk = sk_X509_new_null()); + while (EXPECT_SUCCESS() && (sk_X509_INFO_num(info_sk) > 0)) { + X509_INFO* info = NULL; + ExpectNotNull(info = sk_X509_INFO_shift(info_sk)); + if (EXPECT_SUCCESS() && info != NULL) { + ExpectIntGT(sk_X509_push(sk, info->x509), 0); + info->x509 = NULL; + } + X509_INFO_free(info); + } + sk_X509_INFO_pop_free(info_sk, X509_INFO_free); + info_sk = NULL; + BIO_free(bio); + bio = NULL; + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + ExpectIntEQ(wolfSSL_PKCS7_encode_certs(p7, sk, bio), 1); + if ((sk != NULL) && ((p7 == NULL) || (bio == NULL))) { + sk_X509_pop_free(sk, X509_free); + } + sk = NULL; + 
ExpectIntGT((buflen = BIO_get_mem_data(bio, &p)), 0); + + if (i == 0) { + PKCS7_free(p7); + p7 = NULL; + ExpectNotNull(d2i_PKCS7(&p7, &p, buflen)); + if (p7 != NULL) { + /* Reset certs to force wolfSSL_PKCS7_to_stack to regenerate + * them */ + ((WOLFSSL_PKCS7*)p7)->certs = NULL; + } + /* PKCS7_free free's the certs */ + ExpectNotNull(wolfSSL_PKCS7_to_stack(p7)); + } + + BIO_free(bio); + bio = NULL; + PKCS7_free(p7); + p7 = NULL; + } +#endif /* defined(OPENSSL_ALL) && !defined(NO_CERTS) && \ + !defined(NO_FILESYSTEM) && !defined(NO_RSA) && defined(HAVE_PKCS7) */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_PKCS7_sign(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_BIO) && \ + !defined(NO_FILESYSTEM) && !defined(NO_RSA) + + PKCS7* p7 = NULL; + PKCS7* p7Ver = NULL; + byte* out = NULL; + byte* tmpPtr = NULL; + int outLen = 0; + int flags = 0; + byte data[] = "Test data to encode."; + + const char* cert = "./certs/server-cert.pem"; + const char* key = "./certs/server-key.pem"; + const char* ca = "./certs/ca-cert.pem"; + + WOLFSSL_BIO* certBio = NULL; + WOLFSSL_BIO* keyBio = NULL; + WOLFSSL_BIO* caBio = NULL; + WOLFSSL_BIO* inBio = NULL; + X509* signCert = NULL; + EVP_PKEY* signKey = NULL; + X509* caCert = NULL; + X509_STORE* store = NULL; +#ifndef NO_PKCS7_STREAM + int z; + int ret; +#endif /* !NO_PKCS7_STREAM */ + + /* read signer cert/key into BIO */ + ExpectNotNull(certBio = BIO_new_file(cert, "r")); + ExpectNotNull(keyBio = BIO_new_file(key, "r")); + ExpectNotNull(signCert = PEM_read_bio_X509(certBio, NULL, 0, NULL)); + ExpectNotNull(signKey = PEM_read_bio_PrivateKey(keyBio, NULL, 0, NULL)); + + /* read CA cert into store (for verify) */ + ExpectNotNull(caBio = BIO_new_file(ca, "r")); + ExpectNotNull(caCert = PEM_read_bio_X509(caBio, NULL, 0, NULL)); + ExpectNotNull(store = X509_STORE_new()); + ExpectIntEQ(X509_STORE_add_cert(store, caCert), 1); + + /* data to be signed into BIO */ + ExpectNotNull(inBio = 
BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + /* PKCS7_sign, bad args: signer NULL */ + ExpectNull(p7 = PKCS7_sign(NULL, signKey, NULL, inBio, 0)); + /* PKCS7_sign, bad args: signer key NULL */ + ExpectNull(p7 = PKCS7_sign(signCert, NULL, NULL, inBio, 0)); + /* PKCS7_sign, bad args: in data NULL without PKCS7_STREAM */ + ExpectNull(p7 = PKCS7_sign(signCert, signKey, NULL, NULL, 0)); + /* PKCS7_sign, bad args: PKCS7_NOCERTS flag not supported */ + ExpectNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, PKCS7_NOCERTS)); + /* PKCS7_sign, bad args: PKCS7_PARTIAL flag not supported */ + ExpectNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, PKCS7_PARTIAL)); + + /* TEST SUCCESS: Not detached, not streaming, not MIME */ + { + flags = PKCS7_BINARY; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); + + /* verify with d2i_PKCS7 */ + tmpPtr = out; + ExpectNotNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); + PKCS7_free(p7Ver); + p7Ver = NULL; + + /* verify with wc_PKCS7_VerifySignedData */ + ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); + ExpectIntEQ(wc_PKCS7_Init(p7Ver, HEAP_HINT, INVALID_DEVID), 0); + ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); + + #ifndef NO_PKCS7_STREAM + /* verify with wc_PKCS7_VerifySignedData streaming */ + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); + ExpectIntEQ(wc_PKCS7_Init(p7Ver, HEAP_HINT, INVALID_DEVID), 0); + /* test for streaming */ + ret = -1; + for (z = 0; z < outLen && ret != 0; z++) { + ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); + if (ret < 0){ + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); + } + } + ExpectIntEQ(ret, 0); + #endif /* !NO_PKCS7_STREAM */ + + /* compare the signer found to expected signer */ + 
ExpectIntNE(p7Ver->verifyCertSz, 0); + tmpPtr = NULL; + ExpectIntEQ(i2d_X509(signCert, &tmpPtr), p7Ver->verifyCertSz); + ExpectIntEQ(XMEMCMP(tmpPtr, p7Ver->verifyCert, p7Ver->verifyCertSz), 0); + XFREE(tmpPtr, NULL, DYNAMIC_TYPE_OPENSSL); + tmpPtr = NULL; + + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + + ExpectNotNull(out); + XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); + out = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* TEST SUCCESS: Not detached, streaming, not MIME. Also bad arg + * tests for PKCS7_final() while we have a PKCS7 pointer to use */ + { + /* re-populate input BIO, may have been consumed */ + BIO_free(inBio); + inBio = NULL; + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_BINARY | PKCS7_STREAM; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectIntEQ(PKCS7_final(p7, inBio, flags), 1); + ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); + + /* PKCS7_final, bad args: PKCS7 null */ + ExpectIntEQ(PKCS7_final(NULL, inBio, 0), 0); + /* PKCS7_final, bad args: PKCS7 null */ + ExpectIntEQ(PKCS7_final(p7, NULL, 0), 0); + + tmpPtr = out; + ExpectNotNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); + PKCS7_free(p7Ver); + p7Ver = NULL; + + ExpectNotNull(out); + XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); + out = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* TEST SUCCESS: Detached, not streaming, not MIME */ + { + /* re-populate input BIO, may have been consumed */ + BIO_free(inBio); + inBio = NULL; + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_BINARY | PKCS7_DETACHED; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); + ExpectNotNull(out); + + /* verify with wolfCrypt, d2i_PKCS7 does not support detached content */ + ExpectNotNull(p7Ver = 
wc_PKCS7_New(HEAP_HINT, testDevId)); + if (p7Ver != NULL) { + p7Ver->content = data; + p7Ver->contentSz = sizeof(data); + } + ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + + #ifndef NO_PKCS7_STREAM + /* verify with wc_PKCS7_VerifySignedData streaming */ + ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); + if (p7Ver != NULL) { + p7Ver->content = data; + p7Ver->contentSz = sizeof(data); + } + /* test for streaming */ + if (EXPECT_SUCCESS()) { + ret = -1; + for (z = 0; z < outLen && ret != 0; z++) { + ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); + if (ret < 0){ + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); + } + } + ExpectIntEQ(ret, 0); + } + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + #endif /* !NO_PKCS7_STREAM */ + + /* verify expected failure (NULL return) from d2i_PKCS7, it does not + * yet support detached content */ + tmpPtr = out; + ExpectNull(p7Ver = d2i_PKCS7(NULL, (const byte**)&tmpPtr, outLen)); + PKCS7_free(p7Ver); + p7Ver = NULL; + + XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); + out = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* TEST SUCCESS: Detached, streaming, not MIME */ + { + /* re-populate input BIO, may have been consumed */ + BIO_free(inBio); + inBio = NULL; + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_BINARY | PKCS7_DETACHED | PKCS7_STREAM; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectIntEQ(PKCS7_final(p7, inBio, flags), 1); + ExpectIntGT((outLen = i2d_PKCS7(p7, &out)), 0); + + /* verify with wolfCrypt, d2i_PKCS7 does not support detached content */ + ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); + if (p7Ver != NULL) { + p7Ver->content = data; + p7Ver->contentSz = sizeof(data); + } + ExpectIntEQ(wc_PKCS7_VerifySignedData(p7Ver, out, (word32)outLen), 0); + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + + ExpectNotNull(out); + + #ifndef 
NO_PKCS7_STREAM + /* verify with wc_PKCS7_VerifySignedData streaming */ + ExpectNotNull(p7Ver = wc_PKCS7_New(HEAP_HINT, testDevId)); + if (p7Ver != NULL) { + p7Ver->content = data; + p7Ver->contentSz = sizeof(data); + } + /* test for streaming */ + if (EXPECT_SUCCESS()) { + ret = -1; + for (z = 0; z < outLen && ret != 0; z++) { + ret = wc_PKCS7_VerifySignedData(p7Ver, out + z, 1); + if (ret < 0){ + ExpectIntEQ(ret, WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)); + } + } + ExpectIntEQ(ret, 0); + } + wc_PKCS7_Free(p7Ver); + p7Ver = NULL; + #endif /* !NO_PKCS7_STREAM */ + + XFREE(out, NULL, DYNAMIC_TYPE_TMP_BUFFER); + PKCS7_free(p7); + p7 = NULL; + } + + X509_STORE_free(store); + X509_free(caCert); + X509_free(signCert); + EVP_PKEY_free(signKey); + BIO_free(inBio); + BIO_free(keyBio); + BIO_free(certBio); + BIO_free(caBio); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_PKCS7_SIGNED_new(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) + PKCS7_SIGNED* pkcs7 = NULL; + + ExpectNotNull(pkcs7 = PKCS7_SIGNED_new()); + ExpectIntEQ(pkcs7->contentOID, SIGNED_DATA); + + PKCS7_SIGNED_free(pkcs7); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_PEM_write_bio_PKCS7(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_FILESYSTEM) && \ + !defined(NO_BIO) + PKCS7* pkcs7 = NULL; + BIO* bio = NULL; + const byte* cert_buf = NULL; + int ret = 0; + WC_RNG rng; + const byte data[] = { /* Hello World */ + 0x48,0x65,0x6c,0x6c,0x6f,0x20,0x57,0x6f, + 0x72,0x6c,0x64 + }; +#ifndef NO_RSA + #if defined(USE_CERT_BUFFERS_2048) + byte key[sizeof(client_key_der_2048)]; + byte cert[sizeof(client_cert_der_2048)]; + word32 keySz = (word32)sizeof(key); + word32 certSz = (word32)sizeof(cert); + XMEMSET(key, 0, keySz); + XMEMSET(cert, 0, certSz); + XMEMCPY(key, client_key_der_2048, keySz); + XMEMCPY(cert, client_cert_der_2048, certSz); + #elif defined(USE_CERT_BUFFERS_1024) + byte key[sizeof_client_key_der_1024]; + byte 
cert[sizeof_client_cert_der_1024]; + word32 keySz = (word32)sizeof(key); + word32 certSz = (word32)sizeof(cert); + XMEMSET(key, 0, keySz); + XMEMSET(cert, 0, certSz); + XMEMCPY(key, client_key_der_1024, keySz); + XMEMCPY(cert, client_cert_der_1024, certSz); + #else + unsigned char cert[ONEK_BUF]; + unsigned char key[ONEK_BUF]; + XFILE fp = XBADFILE; + int certSz; + int keySz; + + ExpectTrue((fp = XFOPEN("./certs/1024/client-cert.der", "rb")) != + XBADFILE); + ExpectIntGT(certSz = (int)XFREAD(cert, 1, sizeof_client_cert_der_1024, + fp), 0); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + + ExpectTrue((fp = XFOPEN("./certs/1024/client-key.der", "rb")) != + XBADFILE); + ExpectIntGT(keySz = (int)XFREAD(key, 1, sizeof_client_key_der_1024, fp), + 0); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + #endif +#elif defined(HAVE_ECC) + #if defined(USE_CERT_BUFFERS_256) + unsigned char cert[sizeof(cliecc_cert_der_256)]; + unsigned char key[sizeof(ecc_clikey_der_256)]; + int certSz = (int)sizeof(cert); + int keySz = (int)sizeof(key); + XMEMSET(cert, 0, certSz); + XMEMSET(key, 0, keySz); + XMEMCPY(cert, cliecc_cert_der_256, sizeof_cliecc_cert_der_256); + XMEMCPY(key, ecc_clikey_der_256, sizeof_ecc_clikey_der_256); + #else + unsigned char cert[ONEK_BUF]; + unsigned char key[ONEK_BUF]; + XFILE fp = XBADFILE; + int certSz, keySz; + + ExpectTrue((fp = XFOPEN("./certs/client-ecc-cert.der", "rb")) != + XBADFILE); + ExpectIntGT(certSz = (int)XFREAD(cert, 1, sizeof_cliecc_cert_der_256, + fp), 0); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + + ExpectTrue((fp = XFOPEN("./certs/client-ecc-key.der", "rb")) != + XBADFILE); + ExpectIntGT(keySz = (int)XFREAD(key, 1, sizeof_ecc_clikey_der_256, fp), + 0); + if (fp != XBADFILE) { + XFCLOSE(fp); + fp = XBADFILE; + } + #endif +#else + #error PKCS7 requires ECC or RSA +#endif + + ExpectNotNull(pkcs7 = wc_PKCS7_New(HEAP_HINT, testDevId)); + /* initialize with DER encoded cert */ + 
ExpectIntEQ(wc_PKCS7_InitWithCert(pkcs7, (byte*)cert, (word32)certSz), 0); + + /* init rng */ + XMEMSET(&rng, 0, sizeof(WC_RNG)); + ExpectIntEQ(wc_InitRng(&rng), 0); + + if (pkcs7 != NULL) { + pkcs7->rng = &rng; + pkcs7->content = (byte*)data; /* not used for ex */ + pkcs7->contentSz = (word32)sizeof(data); + pkcs7->contentOID = SIGNED_DATA; + pkcs7->privateKey = key; + pkcs7->privateKeySz = (word32)sizeof(key); + pkcs7->encryptOID = RSAk; + #ifdef NO_SHA + pkcs7->hashOID = SHA256h; + #else + pkcs7->hashOID = SHAh; + #endif + pkcs7->signedAttribs = NULL; + pkcs7->signedAttribsSz = 0; + } + + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + /* Write PKCS#7 PEM to BIO, the function converts the DER to PEM cert*/ + ExpectIntEQ(PEM_write_bio_PKCS7(bio, pkcs7), WOLFSSL_SUCCESS); + + /* Read PKCS#7 PEM from BIO */ + ret = wolfSSL_BIO_get_mem_data(bio, &cert_buf); + ExpectIntGE(ret, 0); + + BIO_free(bio); + wc_PKCS7_Free(pkcs7); + wc_FreeRng(&rng); +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_PEM_write_bio_encryptedKey(void) +{ + EXPECT_DECLS; +#if (defined(OPENSSL_EXTRA) || defined(OPENSSL_ALL)) && \ + defined(WOLFSSL_KEY_GEN) && !defined(NO_RSA) && \ + defined(WOLFSSL_ENCRYPTED_KEYS) && \ + (defined(WOLFSSL_PEM_TO_DER) || defined(WOLFSSL_DER_TO_PEM)) && \ + !defined(NO_FILESYSTEM) && !defined(NO_BIO) && !defined(NO_CERTS) && \ + !defined(NO_DES3) + RSA* rsaKey = NULL; + RSA* retKey = NULL; + const EVP_CIPHER *cipher = NULL; + BIO* bio = NULL; + BIO* retbio = NULL; + byte* out; + const char* password = "wolfssl"; + word32 passwordSz =(word32)XSTRLEN((char*)password); + int membufSz = 0; + +#if defined(USE_CERT_BUFFERS_2048) + const byte* key = client_key_der_2048; + word32 keySz = sizeof_client_key_der_2048; +#elif defined(USE_CERT_BUFFERS_1024) + const byte* key = client_key_der_1024; + word32 keySz = sizeof_client_key_der_1024; +#endif + /* Import Rsa Key */ + ExpectNotNull(rsaKey = wolfSSL_RSA_new()); + ExpectIntEQ(wolfSSL_RSA_LoadDer_ex(rsaKey, key, keySz, + 
WOLFSSL_RSA_LOAD_PRIVATE), 1); + + ExpectNotNull(cipher = EVP_des_ede3_cbc()); + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + ExpectIntEQ(PEM_write_bio_RSAPrivateKey(bio, rsaKey, cipher, + (byte*)password, passwordSz, NULL, NULL), 1); + ExpectIntGT((membufSz = BIO_get_mem_data(bio, &out)), 0); + ExpectNotNull(retbio = BIO_new_mem_buf(out, membufSz)); + ExpectNotNull((retKey = PEM_read_bio_RSAPrivateKey(retbio, NULL, + NULL, (void*)password))); + if (bio != NULL) { + BIO_free(bio); + } + if (retbio != NULL) { + BIO_free(retbio); + } + if (retKey != NULL) { + RSA_free(retKey); + } + if (rsaKey != NULL) { + RSA_free(rsaKey); + } +#endif + return EXPECT_RESULT(); +} + +/* // NOLINTBEGIN(clang-analyzer-unix.Stream) */ +int test_wolfSSL_SMIME_read_PKCS7(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_FILESYSTEM) && \ + !defined(NO_RSA) && !defined(NO_BIO) && defined(HAVE_SMIME) + PKCS7* pkcs7 = NULL; + BIO* bio = NULL; + BIO* bcont = NULL; + BIO* out = NULL; + const byte* outBuf = NULL; + int outBufLen = 0; + static const char contTypeText[] = "Content-Type: text/plain\r\n\r\n"; + XFILE smimeTestFile = XBADFILE; + + ExpectTrue((smimeTestFile = XFOPEN("./certs/test/smime-test.p7s", "rb")) != + XBADFILE); + + /* smime-test.p7s */ + bio = wolfSSL_BIO_new(wolfSSL_BIO_s_file()); + ExpectNotNull(bio); + ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); + pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); + ExpectNotNull(pkcs7); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, + PKCS7_NOVERIFY), SSL_SUCCESS); + if (smimeTestFile != XBADFILE) { + XFCLOSE(smimeTestFile); + smimeTestFile = XBADFILE; + } + if (bcont) BIO_free(bcont); + bcont = NULL; + wolfSSL_PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* smime-test-multipart.p7s */ + smimeTestFile = XFOPEN("./certs/test/smime-test-multipart.p7s", "rb"); + ExpectFalse(smimeTestFile == XBADFILE); + ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, 
BIO_CLOSE), SSL_SUCCESS); + pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); + ExpectNotNull(pkcs7); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, + PKCS7_NOVERIFY), SSL_SUCCESS); + if (smimeTestFile != XBADFILE) { + XFCLOSE(smimeTestFile); + smimeTestFile = XBADFILE; + } + if (bcont) BIO_free(bcont); + bcont = NULL; + wolfSSL_PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* smime-test-multipart-badsig.p7s */ + smimeTestFile = XFOPEN("./certs/test/smime-test-multipart-badsig.p7s", + "rb"); + ExpectFalse(smimeTestFile == XBADFILE); + ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); + pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); + ExpectNotNull(pkcs7); /* can read in the unverified smime bundle */ + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, + PKCS7_NOVERIFY), WC_NO_ERR_TRACE(WOLFSSL_FAILURE)); + if (smimeTestFile != XBADFILE) { + XFCLOSE(smimeTestFile); + smimeTestFile = XBADFILE; + } + if (bcont) BIO_free(bcont); + bcont = NULL; + wolfSSL_PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* smime-test-canon.p7s */ + smimeTestFile = XFOPEN("./certs/test/smime-test-canon.p7s", "rb"); + ExpectFalse(smimeTestFile == XBADFILE); + ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); + pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); + ExpectNotNull(pkcs7); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, NULL, + PKCS7_NOVERIFY), SSL_SUCCESS); + if (smimeTestFile != XBADFILE) { + XFCLOSE(smimeTestFile); + smimeTestFile = XBADFILE; + } + if (bcont) BIO_free(bcont); + bcont = NULL; + wolfSSL_PKCS7_free(pkcs7); + pkcs7 = NULL; + + /* Test PKCS7_TEXT, PKCS7_verify() should remove Content-Type: text/plain */ + smimeTestFile = XFOPEN("./certs/test/smime-test-canon.p7s", "rb"); + ExpectFalse(smimeTestFile == XBADFILE); + ExpectIntEQ(wolfSSL_BIO_set_fp(bio, smimeTestFile, BIO_CLOSE), SSL_SUCCESS); + pkcs7 = wolfSSL_SMIME_read_PKCS7(bio, &bcont); + ExpectNotNull(pkcs7); + out = 
wolfSSL_BIO_new(BIO_s_mem()); + ExpectNotNull(out); + ExpectIntEQ(wolfSSL_PKCS7_verify(pkcs7, NULL, NULL, bcont, out, + PKCS7_NOVERIFY | PKCS7_TEXT), SSL_SUCCESS); + ExpectIntGT((outBufLen = BIO_get_mem_data(out, &outBuf)), 0); + /* Content-Type should not show up at beginning of output buffer */ + ExpectIntGT(outBufLen, XSTRLEN(contTypeText)); + ExpectIntGT(XMEMCMP(outBuf, contTypeText, XSTRLEN(contTypeText)), 0); + + BIO_free(out); + BIO_free(bio); + if (bcont) BIO_free(bcont); + wolfSSL_PKCS7_free(pkcs7); +#endif + return EXPECT_RESULT(); +} +/* // NOLINTEND(clang-analyzer-unix.Stream) */ + +int test_wolfSSL_SMIME_write_PKCS7(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_ALL) && defined(HAVE_PKCS7) && !defined(NO_RSA) && \ + !defined(NO_BIO) && defined(HAVE_SMIME) + PKCS7* p7 = NULL; + PKCS7* p7Ver = NULL; + int flags = 0; + byte data[] = "Test data to encode."; + + const char* cert = "./certs/server-cert.pem"; + const char* key = "./certs/server-key.pem"; + const char* ca = "./certs/ca-cert.pem"; + + WOLFSSL_BIO* certBio = NULL; + WOLFSSL_BIO* keyBio = NULL; + WOLFSSL_BIO* caBio = NULL; + WOLFSSL_BIO* inBio = NULL; + WOLFSSL_BIO* outBio = NULL; + WOLFSSL_BIO* content = NULL; + X509* signCert = NULL; + EVP_PKEY* signKey = NULL; + X509* caCert = NULL; + X509_STORE* store = NULL; + + /* read signer cert/key into BIO */ + ExpectNotNull(certBio = BIO_new_file(cert, "r")); + ExpectNotNull(keyBio = BIO_new_file(key, "r")); + ExpectNotNull(signCert = PEM_read_bio_X509(certBio, NULL, 0, NULL)); + ExpectNotNull(signKey = PEM_read_bio_PrivateKey(keyBio, NULL, 0, NULL)); + + /* read CA cert into store (for verify) */ + ExpectNotNull(caBio = BIO_new_file(ca, "r")); + ExpectNotNull(caCert = PEM_read_bio_X509(caBio, NULL, 0, NULL)); + ExpectNotNull(store = X509_STORE_new()); + ExpectIntEQ(X509_STORE_add_cert(store, caCert), 1); + + + /* generate and verify SMIME: not detached */ + { + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, 
sizeof(data)), 0); + + flags = PKCS7_STREAM; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectNotNull(outBio = BIO_new(BIO_s_mem())); + ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); + + /* bad arg: out NULL */ + ExpectIntEQ(SMIME_write_PKCS7(NULL, p7, inBio, flags), 0); + /* bad arg: pkcs7 NULL */ + ExpectIntEQ(SMIME_write_PKCS7(outBio, NULL, inBio, flags), 0); + + ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); + + BIO_free(content); + content = NULL; + BIO_free(inBio); + inBio = NULL; + BIO_free(outBio); + outBio = NULL; + PKCS7_free(p7Ver); + p7Ver = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* generate and verify SMIME: not detached, add Content-Type */ + { + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_STREAM | PKCS7_TEXT; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectNotNull(outBio = BIO_new(BIO_s_mem())); + ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); + + ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, NULL, NULL, flags), 1); + + BIO_free(content); + content = NULL; + BIO_free(inBio); + inBio = NULL; + BIO_free(outBio); + outBio = NULL; + PKCS7_free(p7Ver); + p7Ver = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* generate and verify SMIME: detached */ + { + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_DETACHED | PKCS7_STREAM; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectNotNull(outBio = BIO_new(BIO_s_mem())); + ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); + + ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, content, NULL, flags), 1); + + BIO_free(content); + content = 
NULL; + BIO_free(inBio); + inBio = NULL; + BIO_free(outBio); + outBio = NULL; + PKCS7_free(p7Ver); + p7Ver = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + /* generate and verify SMIME: PKCS7_TEXT to add Content-Type header */ + { + ExpectNotNull(inBio = BIO_new(BIO_s_mem())); + ExpectIntGT(BIO_write(inBio, data, sizeof(data)), 0); + + flags = PKCS7_STREAM | PKCS7_DETACHED | PKCS7_TEXT; + ExpectNotNull(p7 = PKCS7_sign(signCert, signKey, NULL, inBio, flags)); + ExpectNotNull(outBio = BIO_new(BIO_s_mem())); + ExpectIntEQ(SMIME_write_PKCS7(outBio, p7, inBio, flags), 1); + + ExpectNotNull(p7Ver = SMIME_read_PKCS7(outBio, &content)); + ExpectIntEQ(PKCS7_verify(p7Ver, NULL, store, content, NULL, flags), 1); + + BIO_free(content); + content = NULL; + BIO_free(inBio); + inBio = NULL; + BIO_free(outBio); + outBio = NULL; + PKCS7_free(p7Ver); + p7Ver = NULL; + PKCS7_free(p7); + p7 = NULL; + } + + X509_STORE_free(store); + X509_free(caCert); + X509_free(signCert); + EVP_PKEY_free(signKey); + BIO_free(keyBio); + BIO_free(certBio); + BIO_free(caBio); +#endif + return EXPECT_RESULT(); +} + +/* Testing functions dealing with PKCS12 parsing out X509 certs */ +int test_wolfSSL_PKCS12(void) +{ + EXPECT_DECLS; + /* .p12 file is encrypted with DES3 */ +#ifndef HAVE_FIPS /* Password used in cert "wolfSSL test" is only 12-bytes + * (96-bit) FIPS mode requires Minimum of 14-byte (112-bit) + * Password Key + */ +#if defined(OPENSSL_EXTRA) && !defined(NO_DES3) && !defined(NO_FILESYSTEM) && \ + !defined(NO_STDIO_FILESYSTEM) && !defined(NO_TLS) && \ + !defined(NO_ASN) && !defined(NO_PWDBASED) && !defined(NO_RSA) && \ + !defined(NO_SHA) && defined(HAVE_PKCS12) && !defined(NO_BIO) && \ + defined(WOLFSSL_AES_256) + byte buf[6000]; + char file[] = "./certs/test-servercert.p12"; + char order[] = "./certs/ecc-rsa-server.p12"; +#ifdef WC_RC2 + char rc2p12[] = "./certs/test-servercert-rc2.p12"; +#endif + char pass[] = "a password"; + const char goodPsw[] = "wolfSSL test"; + const char badPsw[] = 
"bad"; +#ifdef HAVE_ECC + WOLFSSL_X509_NAME *subject = NULL; + WOLFSSL_X509 *x509 = NULL; +#endif + XFILE f = XBADFILE; + int bytes = 0, ret = 0, goodPswLen = 0, badPswLen = 0; + WOLFSSL_BIO *bio = NULL; + WOLFSSL_EVP_PKEY *pkey = NULL; + WC_PKCS12 *pkcs12 = NULL; + WC_PKCS12 *pkcs12_2 = NULL; + WOLFSSL_X509 *cert = NULL; + WOLFSSL_X509 *tmp = NULL; + WOLF_STACK_OF(WOLFSSL_X509) *ca = NULL; +#if (defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ + || defined(WOLFSSL_NGINX)) && defined(SESSION_CERTS) + WOLFSSL_CTX *ctx = NULL; + WOLFSSL *ssl = NULL; + WOLF_STACK_OF(WOLFSSL_X509) *tmp_ca = NULL; +#endif + + ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); + ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); + if (f != XBADFILE) { + XFCLOSE(f); + f = XBADFILE; + } + + goodPswLen = (int)XSTRLEN(goodPsw); + badPswLen = (int)XSTRLEN(badPsw); + + ExpectNotNull(bio = wolfSSL_BIO_new(wolfSSL_BIO_s_mem())); + + ExpectIntEQ(BIO_write(bio, buf, bytes), bytes); /* d2i consumes BIO */ + ExpectNotNull(d2i_PKCS12_bio(bio, &pkcs12)); + ExpectNotNull(pkcs12); + BIO_free(bio); + bio = NULL; + + /* check verify MAC directly */ + ExpectIntEQ(ret = PKCS12_verify_mac(pkcs12, goodPsw, goodPswLen), 1); + + /* check verify MAC fail case directly */ + ExpectIntEQ(ret = PKCS12_verify_mac(pkcs12, badPsw, badPswLen), 0); + + /* check verify MAC fail case */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, &cert, NULL), 0); + ExpectNull(pkey); + ExpectNull(cert); + + /* check parse with no extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), + 1); + ExpectNotNull(pkey); + ExpectNotNull(cert); + + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; + wolfSSL_X509_free(cert); + cert = NULL; + + /* check parse with extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), + 1); + ExpectNotNull(pkey); + ExpectNotNull(cert); + ExpectNotNull(ca); + +#if (defined(OPENSSL_ALL) || 
defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ + || defined(WOLFSSL_NGINX)) && defined(SESSION_CERTS) + + /* Check that SSL_CTX_set0_chain correctly sets the certChain buffer */ +#if !defined(NO_WOLFSSL_CLIENT) || !defined(NO_WOLFSSL_SERVER) +#if !defined(NO_WOLFSSL_CLIENT) && defined(SESSION_CERTS) + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_client_method())); +#else + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_server_method())); +#endif + /* Copy stack structure */ + ExpectNotNull(tmp_ca = X509_chain_up_ref(ca)); + ExpectIntEQ(SSL_CTX_set0_chain(ctx, tmp_ca), 1); + /* CTX now owns the tmp_ca stack structure */ + tmp_ca = NULL; + ExpectIntEQ(wolfSSL_CTX_get_extra_chain_certs(ctx, &tmp_ca), 1); + ExpectNotNull(tmp_ca); + ExpectIntEQ(sk_X509_num(tmp_ca), sk_X509_num(ca)); + /* Check that the main cert is also set */ + ExpectNotNull(SSL_CTX_get0_certificate(ctx)); + ExpectNotNull(ssl = SSL_new(ctx)); + ExpectNotNull(SSL_get_certificate(ssl)); + SSL_free(ssl); + SSL_CTX_free(ctx); + ctx = NULL; +#endif +#endif /* !NO_WOLFSSL_CLIENT || !NO_WOLFSSL_SERVER */ + /* should be 2 other certs on stack */ + ExpectNotNull(tmp = sk_X509_pop(ca)); + X509_free(tmp); + ExpectNotNull(tmp = sk_X509_pop(ca)); + X509_free(tmp); + ExpectNull(sk_X509_pop(ca)); + + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(cert); + cert = NULL; + sk_X509_pop_free(ca, X509_free); + ca = NULL; + + /* check PKCS12_create */ + ExpectNull(PKCS12_create(pass, NULL, NULL, NULL, NULL, -1, -1, -1, -1,0)); + ExpectIntEQ(PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), + SSL_SUCCESS); + ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, ca, + -1, -1, 100, -1, 0))); + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(cert); + cert = NULL; + sk_X509_pop_free(ca, NULL); + ca = NULL; + + ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), + SSL_SUCCESS); + PKCS12_free(pkcs12_2); + pkcs12_2 = NULL; + ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, 
ca, + NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + 2000, 1, 0))); + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(cert); + cert = NULL; + sk_X509_pop_free(ca, NULL); + ca = NULL; + + /* convert to DER then back and parse */ + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + ExpectIntEQ(i2d_PKCS12_bio(bio, pkcs12_2), SSL_SUCCESS); + PKCS12_free(pkcs12_2); + pkcs12_2 = NULL; + + ExpectNotNull(pkcs12_2 = d2i_PKCS12_bio(bio, NULL)); + BIO_free(bio); + bio = NULL; + ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), + SSL_SUCCESS); + + /* should be 2 other certs on stack */ + ExpectNotNull(tmp = sk_X509_pop(ca)); + X509_free(tmp); + ExpectNotNull(tmp = sk_X509_pop(ca)); + X509_free(tmp); + ExpectNull(sk_X509_pop(ca)); + + +#ifndef NO_RC4 + PKCS12_free(pkcs12_2); + pkcs12_2 = NULL; + ExpectNotNull((pkcs12_2 = PKCS12_create(pass, NULL, pkey, cert, NULL, + NID_pbe_WithSHA1And128BitRC4, + NID_pbe_WithSHA1And128BitRC4, + 2000, 1, 0))); + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(cert); + cert = NULL; + sk_X509_pop_free(ca, NULL); + ca = NULL; + + ExpectIntEQ(PKCS12_parse(pkcs12_2, "a password", &pkey, &cert, &ca), + SSL_SUCCESS); + +#endif /* NO_RC4 */ + + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(cert); + cert = NULL; + PKCS12_free(pkcs12); + pkcs12 = NULL; + PKCS12_free(pkcs12_2); + pkcs12_2 = NULL; + sk_X509_pop_free(ca, NULL); + ca = NULL; + +#ifdef HAVE_ECC + /* test order of parsing */ + ExpectTrue((f = XFOPEN(order, "rb")) != XBADFILE); + ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); + if (f != XBADFILE) { + XFCLOSE(f); + f = XBADFILE; + } + + ExpectNotNull(bio = BIO_new_mem_buf((void*)buf, bytes)); + ExpectNotNull(pkcs12 = d2i_PKCS12_bio(bio, NULL)); + ExpectIntEQ((ret = PKCS12_parse(pkcs12, "", &pkey, &cert, &ca)), + WOLFSSL_SUCCESS); + + /* check use of pkey after parse */ +#if (defined(OPENSSL_ALL) || defined(WOLFSSL_ASIO) || defined(WOLFSSL_HAPROXY) \ + || defined(WOLFSSL_NGINX)) && 
defined(SESSION_CERTS) +#if !defined(NO_WOLFSSL_CLIENT) || !defined(NO_WOLFSSL_SERVER) +#if !defined(NO_WOLFSSL_CLIENT) && defined(SESSION_CERTS) + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_client_method())); +#else + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfSSLv23_server_method())); +#endif + ExpectIntEQ(SSL_CTX_use_PrivateKey(ctx, pkey), WOLFSSL_SUCCESS); + SSL_CTX_free(ctx); +#endif /* !NO_WOLFSSL_CLIENT || !NO_WOLFSSL_SERVER */ +#endif + + ExpectNotNull(pkey); + ExpectNotNull(cert); + ExpectNotNull(ca); + + /* compare subject lines of certificates */ + ExpectNotNull(subject = wolfSSL_X509_get_subject_name(cert)); + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccRsaCertFile, + SSL_FILETYPE_PEM)); + ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, + (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); + X509_free(x509); + x509 = NULL; + + /* test expected fail case */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccCertFile, + SSL_FILETYPE_PEM)); + ExpectIntNE(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, + (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); + X509_free(x509); + x509 = NULL; + X509_free(cert); + cert = NULL; + + /* get subject line from ca stack */ + ExpectNotNull(cert = sk_X509_pop(ca)); + ExpectNotNull(subject = wolfSSL_X509_get_subject_name(cert)); + + /* compare subject from certificate in ca to expected */ + ExpectNotNull(x509 = wolfSSL_X509_load_certificate_file(eccCertFile, + SSL_FILETYPE_PEM)); + ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, + (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); + + /* modify case and compare subject from certificate in ca to expected. 
+ * The first bit of the name is: + * /C=US/ST=Washington + * So we'll change subject->name[1] to 'c' (lower case) */ + if (subject != NULL) { + subject->name[1] = 'c'; + ExpectIntEQ(wolfSSL_X509_NAME_cmp((const WOLFSSL_X509_NAME*)subject, + (const WOLFSSL_X509_NAME*)wolfSSL_X509_get_subject_name(x509)), 0); + } + + EVP_PKEY_free(pkey); + pkey = NULL; + X509_free(x509); + x509 = NULL; + X509_free(cert); + cert = NULL; + BIO_free(bio); + bio = NULL; + PKCS12_free(pkcs12); + pkcs12 = NULL; + sk_X509_pop_free(ca, NULL); /* TEST d2i_PKCS12_fp */ + ca = NULL; + + /* test order of parsing */ + ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); + ExpectNotNull(pkcs12 = d2i_PKCS12_fp(f, NULL)); + if (f != XBADFILE) { + XFCLOSE(f); + f = XBADFILE; + } + + /* check verify MAC fail case */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, &cert, NULL), 0); + ExpectNull(pkey); + ExpectNull(cert); + + /* check parse with no extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), + 1); + ExpectNotNull(pkey); + ExpectNotNull(cert); + + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; + wolfSSL_X509_free(cert); + cert = NULL; + + /* check parse with extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), + 1); + ExpectNotNull(pkey); + ExpectNotNull(cert); + ExpectNotNull(ca); + + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; + wolfSSL_X509_free(cert); + cert = NULL; + sk_X509_pop_free(ca, NULL); + ca = NULL; + + PKCS12_free(pkcs12); + pkcs12 = NULL; +#endif /* HAVE_ECC */ + +#ifdef WC_RC2 + /* test PKCS#12 with RC2 encryption */ + ExpectTrue((f = XFOPEN(rc2p12, "rb")) != XBADFILE); + ExpectIntGT(bytes = (int)XFREAD(buf, 1, sizeof(buf), f), 0); + if (f != XBADFILE) { + XFCLOSE(f); + f = XBADFILE; + } + + ExpectNotNull(bio = BIO_new_mem_buf((void*)buf, bytes)); + ExpectNotNull(pkcs12 = d2i_PKCS12_bio(bio, NULL)); + + /* check verify MAC fail case */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "bad", &pkey, 
&cert, NULL), 0); + ExpectNull(pkey); + ExpectNull(cert); + + /* check parse with not extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, NULL), + WOLFSSL_SUCCESS); + ExpectNotNull(pkey); + ExpectNotNull(cert); + + wolfSSL_EVP_PKEY_free(pkey); + pkey = NULL; + wolfSSL_X509_free(cert); + cert = NULL; + + /* check parse with extra certs kept */ + ExpectIntEQ(ret = PKCS12_parse(pkcs12, "wolfSSL test", &pkey, &cert, &ca), + WOLFSSL_SUCCESS); + ExpectNotNull(pkey); + ExpectNotNull(cert); + ExpectNotNull(ca); + + wolfSSL_EVP_PKEY_free(pkey); + wolfSSL_X509_free(cert); + sk_X509_pop_free(ca, NULL); + + BIO_free(bio); + bio = NULL; + PKCS12_free(pkcs12); + pkcs12 = NULL; +#endif /* WC_RC2 */ + + /* Test i2d_PKCS12_bio */ + ExpectTrue((f = XFOPEN(file, "rb")) != XBADFILE); + ExpectNotNull(pkcs12 = d2i_PKCS12_fp(f, NULL)); + if (f != XBADFILE) + XFCLOSE(f); + + ExpectNotNull(bio = BIO_new(BIO_s_mem())); + + ExpectIntEQ(ret = i2d_PKCS12_bio(bio, pkcs12), 1); + + ExpectIntEQ(ret = i2d_PKCS12_bio(NULL, pkcs12), 0); + + ExpectIntEQ(ret = i2d_PKCS12_bio(bio, NULL), 0); + + PKCS12_free(pkcs12); + BIO_free(bio); + + (void)order; +#endif /* OPENSSL_EXTRA */ +#endif /* HAVE_FIPS */ + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_ossl_p7p12.h b/tests/api/test_ossl_p7p12.h new file mode 100644 index 000000000..32863c1ae --- /dev/null +++ b/tests/api/test_ossl_p7p12.h @@ -0,0 +1,54 @@ +/* test_ossl_p7p12.h + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_OSSL_P7P12_H +#define WOLFCRYPT_TEST_OSSL_P7P12_H + +#include + +int test_wolfssl_PKCS7(void); +int test_wolfSSL_PKCS7_certs(void); +int test_wolfSSL_PKCS7_sign(void); +int test_wolfSSL_PKCS7_SIGNED_new(void); +int test_wolfSSL_PEM_write_bio_PKCS7(void); +int test_wolfSSL_PEM_write_bio_encryptedKey(void); +int test_wolfSSL_SMIME_read_PKCS7(void); +int test_wolfSSL_SMIME_write_PKCS7(void); +int test_wolfSSL_PKCS12(void); + +#define TEST_OSSL_PKCS7_DECLS \ + TEST_DECL_GROUP("ossl_p7", test_wolfssl_PKCS7), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_PKCS7_certs), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_PKCS7_sign), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_PKCS7_SIGNED_new), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_PEM_write_bio_PKCS7), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_PEM_write_bio_encryptedKey), \ + TEST_DECL_GROUP("ossl_p7", test_wolfSSL_RAND_poll) + +#define TEST_OSSL_SMIME_DECLS \ + TEST_DECL_GROUP("ossl_smime", test_wolfSSL_SMIME_read_PKCS7), \ + TEST_DECL_GROUP("ossl_smime", test_wolfSSL_SMIME_write_PKCS7) + +#define TEST_OSSL_PKCS12_DECLS \ + TEST_DECL_GROUP("ossl_p12", test_wolfSSL_PKCS12) + +#endif /* WOLFCRYPT_TEST_OSSL_P7P12_H */ + diff --git a/tests/api/test_ossl_rand.c b/tests/api/test_ossl_rand.c new file mode 100644 index 000000000..a5b028145 --- /dev/null +++ b/tests/api/test_ossl_rand.c @@ -0,0 +1,340 @@ +/* test_ossl_rand.c + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. 
+ * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#if defined(__linux__) || defined(__FreeBSD__) +#include +#include +#endif + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#include +#ifdef OPENSSL_EXTRA + #include +#endif +#include +#include + + +#if defined(OPENSSL_EXTRA) && !defined(WOLFSSL_NO_OPENSSL_RAND_CB) +static int stub_rand_seed(const void *buf, int num) +{ + (void)buf; + (void)num; + + return 123; +} + +static int stub_rand_bytes(unsigned char *buf, int num) +{ + (void)buf; + (void)num; + + return 456; +} + +static byte* was_stub_rand_cleanup_called(void) +{ + static byte was_called = 0; + + return &was_called; +} + +static void stub_rand_cleanup(void) +{ + byte* was_called = was_stub_rand_cleanup_called(); + + *was_called = 1; + + return; +} + +static byte* was_stub_rand_add_called(void) +{ + static byte was_called = 0; + + return &was_called; +} + +static int stub_rand_add(const void *buf, int num, double entropy) +{ + byte* was_called = was_stub_rand_add_called(); + + (void)buf; + (void)num; + (void)entropy; + + *was_called = 1; + + return 0; +} + +static int stub_rand_pseudo_bytes(unsigned char *buf, int num) +{ + (void)buf; + (void)num; + + return 9876; +} + +static int stub_rand_status(void) +{ + return 5432; +} +#endif /* 
OPENSSL_EXTRA && !WOLFSSL_NO_OPENSSL_RAND_CB */ + +int test_wolfSSL_RAND_set_rand_method(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) && !defined(WOLFSSL_NO_OPENSSL_RAND_CB) + RAND_METHOD rand_methods = {NULL, NULL, NULL, NULL, NULL, NULL}; + unsigned char* buf = NULL; + int num = 0; + double entropy = 0; + int ret; + byte* was_cleanup_called = was_stub_rand_cleanup_called(); + byte* was_add_called = was_stub_rand_add_called(); + + ExpectNotNull(buf = (byte*)XMALLOC(32 * sizeof(byte), NULL, + DYNAMIC_TYPE_TMP_BUFFER)); + + ExpectIntNE(wolfSSL_RAND_status(), 5432); + ExpectIntEQ(*was_cleanup_called, 0); + RAND_cleanup(); + ExpectIntEQ(*was_cleanup_called, 0); + + + rand_methods.seed = &stub_rand_seed; + rand_methods.bytes = &stub_rand_bytes; + rand_methods.cleanup = &stub_rand_cleanup; + rand_methods.add = &stub_rand_add; + rand_methods.pseudorand = &stub_rand_pseudo_bytes; + rand_methods.status = &stub_rand_status; + + ExpectIntEQ(RAND_set_rand_method(&rand_methods), WOLFSSL_SUCCESS); + ExpectIntEQ(RAND_seed(buf, num), 123); + ExpectIntEQ(RAND_bytes(buf, num), 456); + ExpectIntEQ(RAND_pseudo_bytes(buf, num), 9876); + ExpectIntEQ(RAND_status(), 5432); + + ExpectIntEQ(*was_add_called, 0); + /* The function pointer for RAND_add returns int, but RAND_add itself + * returns void. 
*/ + RAND_add(buf, num, entropy); + ExpectIntEQ(*was_add_called, 1); + was_add_called = 0; + ExpectIntEQ(*was_cleanup_called, 0); + RAND_cleanup(); + ExpectIntEQ(*was_cleanup_called, 1); + *was_cleanup_called = 0; + + + ret = RAND_set_rand_method(NULL); + ExpectIntEQ(ret, WOLFSSL_SUCCESS); + ExpectIntNE(RAND_status(), 5432); + ExpectIntEQ(*was_cleanup_called, 0); + RAND_cleanup(); + ExpectIntEQ(*was_cleanup_called, 0); + + RAND_set_rand_method(NULL); + + XFREE(buf, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif /* OPENSSL_EXTRA && !WOLFSSL_NO_OPENSSL_RAND_CB */ + return EXPECT_RESULT(); +} + +int test_wolfSSL_RAND_bytes(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) + const int size1 = RNG_MAX_BLOCK_LEN; /* in bytes */ + const int size2 = RNG_MAX_BLOCK_LEN + 1; /* in bytes */ + const int size3 = RNG_MAX_BLOCK_LEN * 2; /* in bytes */ + const int size4 = RNG_MAX_BLOCK_LEN * 4; /* in bytes */ + int max_bufsize; + byte *my_buf = NULL; +#if defined(OPENSSL_EXTRA) && defined(HAVE_GETPID) && !defined(__MINGW64__) && \ + !defined(__MINGW32__) + byte seed[16] = {0}; + byte randbuf[8] = {0}; + int pipefds[2] = {0}; + pid_t pid = 0; +#endif + + /* sanity check */ + ExpectIntEQ(RAND_bytes(NULL, 16), 0); + ExpectIntEQ(RAND_bytes(NULL, 0), 0); + + max_bufsize = size4; + + ExpectNotNull(my_buf = (byte*)XMALLOC(max_bufsize * sizeof(byte), HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER)); + + ExpectIntEQ(RAND_bytes(my_buf, 0), 1); + ExpectIntEQ(RAND_bytes(my_buf, -1), 0); + + ExpectNotNull(XMEMSET(my_buf, 0, max_bufsize)); + ExpectIntEQ(RAND_bytes(my_buf, size1), 1); + ExpectIntEQ(RAND_bytes(my_buf, size2), 1); + ExpectIntEQ(RAND_bytes(my_buf, size3), 1); + ExpectIntEQ(RAND_bytes(my_buf, size4), 1); + XFREE(my_buf, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + +#if defined(OPENSSL_EXTRA) && defined(HAVE_GETPID) && !defined(__MINGW64__) && \ + !defined(__MINGW32__) + XMEMSET(seed, 0, sizeof(seed)); + RAND_cleanup(); + + /* No global methods set. 
*/ + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + + ExpectIntEQ(pipe(pipefds), 0); + pid = fork(); + ExpectIntGE(pid, 0); + if (pid == 0) { + ssize_t n_written = 0; + + /* Child process. */ + close(pipefds[0]); + RAND_bytes(randbuf, sizeof(randbuf)); + n_written = write(pipefds[1], randbuf, sizeof(randbuf)); + close(pipefds[1]); + exit(n_written == sizeof(randbuf) ? 0 : 1); + } + else { + /* Parent process. */ + byte childrand[8] = {0}; + int waitstatus = 0; + + close(pipefds[1]); + ExpectIntEQ(RAND_bytes(randbuf, sizeof(randbuf)), 1); + ExpectIntEQ(read(pipefds[0], childrand, sizeof(childrand)), + sizeof(childrand)); + #ifdef WOLFSSL_NO_GETPID + ExpectBufEQ(randbuf, childrand, sizeof(randbuf)); + #else + ExpectBufNE(randbuf, childrand, sizeof(randbuf)); + #endif + close(pipefds[0]); + waitpid(pid, &waitstatus, 0); + } + RAND_cleanup(); +#endif +#endif + return EXPECT_RESULT(); +} + +int test_wolfSSL_RAND(void) +{ + EXPECT_DECLS; +#if defined(OPENSSL_EXTRA) + byte seed[16]; + + XMEMSET(seed, 0, sizeof(seed)); + + /* No global methods set. 
*/ + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + ExpectIntEQ(RAND_poll(), 1); + RAND_cleanup(); + + ExpectIntEQ(RAND_egd(NULL), -1); +#ifndef NO_FILESYSTEM + { + char fname[100]; + + ExpectNotNull(RAND_file_name(fname, (sizeof(fname) - 1))); + ExpectIntEQ(RAND_write_file(NULL), 0); + } +#endif +#endif + return EXPECT_RESULT(); +} + + +#if defined(WC_RNG_SEED_CB) && defined(OPENSSL_EXTRA) +static int wc_DummyGenerateSeed(OS_Seed* os, byte* output, word32 sz) +{ + word32 i; + for (i = 0; i < sz; i++ ) + output[i] = (byte)i; + + (void)os; + + return 0; +} +#endif /* WC_RNG_SEED_CB */ + + +int test_wolfSSL_RAND_poll(void) +{ + EXPECT_DECLS; + +#if defined(OPENSSL_EXTRA) + byte seed[16]; + byte rand1[16]; +#ifdef WC_RNG_SEED_CB + byte rand2[16]; +#endif + + XMEMSET(seed, 0, sizeof(seed)); + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + ExpectIntEQ(RAND_poll(), 1); + ExpectIntEQ(RAND_bytes(rand1, 16), 1); + RAND_cleanup(); + +#ifdef WC_RNG_SEED_CB + /* Test with custom seed and poll */ + wc_SetSeed_Cb(wc_DummyGenerateSeed); + + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + ExpectIntEQ(RAND_bytes(rand1, 16), 1); + RAND_cleanup(); + + /* test that the same value is generated twice with dummy seed function */ + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + ExpectIntEQ(RAND_bytes(rand2, 16), 1); + ExpectIntEQ(XMEMCMP(rand1, rand2, 16), 0); + RAND_cleanup(); + + /* test that doing a poll is reseeding RNG */ + ExpectIntEQ(RAND_seed(seed, sizeof(seed)), 1); + ExpectIntEQ(RAND_poll(), 1); + ExpectIntEQ(RAND_bytes(rand2, 16), 1); + ExpectIntNE(XMEMCMP(rand1, rand2, 16), 0); + + /* reset the seed function used */ + wc_SetSeed_Cb(WC_GENERATE_SEED_DEFAULT); +#endif + RAND_cleanup(); + + ExpectIntEQ(RAND_egd(NULL), -1); +#endif + + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_ossl_rand.h b/tests/api/test_ossl_rand.h new file mode 100644 index 000000000..4f10956aa --- /dev/null +++ b/tests/api/test_ossl_rand.h @@ -0,0 +1,39 @@ +/* test_ossl_rand.h + * + * 
Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef WOLFCRYPT_TEST_OSSL_RAND_H +#define WOLFCRYPT_TEST_OSSL_RAND_H + +#include + +int test_wolfSSL_RAND_set_rand_method(void); +int test_wolfSSL_RAND_bytes(void); +int test_wolfSSL_RAND(void); +int test_wolfSSL_RAND_poll(void); + +#define TEST_OSSL_RAND_DECLS \ + TEST_DECL_GROUP("ossl_rand", test_wolfSSL_RAND_set_rand_method), \ + TEST_DECL_GROUP("ossl_rand", test_wolfSSL_RAND_bytes), \ + TEST_DECL_GROUP("ossl_rand", test_wolfSSL_RAND), \ + TEST_DECL_GROUP("ossl_rand", test_wolfSSL_RAND_poll) + +#endif /* WOLFCRYPT_TEST_OSSL_RAND_H */ + diff --git a/tests/api/test_ossl_rsa.c b/tests/api/test_ossl_rsa.c index 1947ad7ee..7adbe126a 100644 --- a/tests/api/test_ossl_rsa.c +++ b/tests/api/test_ossl_rsa.c @@ -1470,8 +1470,10 @@ int test_wolfSSL_RSA_To_Der(void) rsa = NULL; ExpectNotNull(wolfSSL_d2i_RSAPrivateKey(&rsa, &der, privDerSz)); - ExpectIntEQ(wolfSSL_RSA_To_Der(NULL, &outDer, 0, HEAP_HINT), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 2, HEAP_HINT), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_RSA_To_Der(NULL, &outDer, 0, HEAP_HINT), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 2, HEAP_HINT), 
+ WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, NULL, 0, HEAP_HINT), privDerSz); outDer = out; @@ -1491,14 +1493,17 @@ int test_wolfSSL_RSA_To_Der(void) RSA_free(rsa); ExpectNotNull(rsa = RSA_new()); - ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 0, HEAP_HINT), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); - ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 1, HEAP_HINT), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 0, HEAP_HINT), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 1, HEAP_HINT), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); RSA_free(rsa); der = pubDer; rsa = NULL; ExpectNotNull(wolfSSL_d2i_RSAPublicKey(&rsa, &der, pubDerSz)); - ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 0, HEAP_HINT), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); + ExpectIntEQ(wolfSSL_RSA_To_Der(rsa, &outDer, 0, HEAP_HINT), + WC_NO_ERR_TRACE(BAD_FUNC_ARG)); RSA_free(rsa); #endif #endif diff --git a/tests/api/test_ossl_x509.c b/tests/api/test_ossl_x509.c index b198da11d..a28728800 100644 --- a/tests/api/test_ossl_x509.c +++ b/tests/api/test_ossl_x509.c @@ -528,10 +528,10 @@ int test_wolfSSL_X509(void) ExpectIntEQ(X509_verify_cert(ctx), SSL_SUCCESS); #ifndef NO_WOLFSSL_STUB - ExpectNull(X509_get_default_cert_file_env()); - ExpectNull(X509_get_default_cert_file()); - ExpectNull(X509_get_default_cert_dir_env()); - ExpectNull(X509_get_default_cert_dir()); + ExpectStrEQ(X509_get_default_cert_file_env(), ""); + ExpectStrEQ(X509_get_default_cert_file(), ""); + ExpectStrEQ(X509_get_default_cert_dir_env(), ""); + ExpectStrEQ(X509_get_default_cert_dir(), ""); #endif ExpectNull(wolfSSL_X509_get_der(NULL, NULL)); diff --git a/tests/api/test_ossl_x509_str.c b/tests/api/test_ossl_x509_str.c index 53206b2c3..79f1ce558 100644 --- a/tests/api/test_ossl_x509_str.c +++ b/tests/api/test_ossl_x509_str.c @@ -36,6 +36,31 @@ #include #include +#if defined(OPENSSL_ALL) && \ + !defined(NO_RSA) && !defined(NO_FILESYSTEM) + +static int last_errcodes[10]; +static int 
last_errdepths[10]; +static int err_index = 0; + +static int X509CallbackCount(int ok, X509_STORE_CTX *ctx) +{ + if (!ok) { + if (err_index < 10) { + last_errcodes[err_index] = X509_STORE_CTX_get_error(ctx); + last_errdepths[err_index] = X509_STORE_CTX_get_error_depth(ctx); + err_index++; + } else { + /* Should not happen in test */ + WOLFSSL_MSG("Error index overflow in X509CallbackCount"); + err_index = 0; + } + } + /* Always return OK to allow verification to continue.*/ + return 1; +} +#endif + int test_wolfSSL_X509_STORE_CTX_set_time(void) { EXPECT_DECLS; @@ -161,6 +186,78 @@ int test_wolfSSL_X509_STORE_check_time(void) store = NULL; wolfSSL_X509_free(cert); cert = NULL; + +#if defined(OPENSSL_ALL) && \ + !defined(NO_RSA) && !defined(NO_FILESYSTEM) + + err_index = 0; + + ExpectNotNull(store = X509_STORE_new()); + ExpectNotNull(ctx = X509_STORE_CTX_new()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(caCertFile, + SSL_FILETYPE_PEM)); + ExpectIntEQ(wolfSSL_X509_STORE_add_cert(store, ca), WOLFSSL_SUCCESS); + + X509_STORE_set_verify_cb(store, X509CallbackCount); + + ExpectNotNull(cert = wolfSSL_X509_load_certificate_file(expiredCertFile, + SSL_FILETYPE_PEM)); + + ExpectIntEQ(X509_STORE_CTX_init(ctx, store, cert, NULL), WOLFSSL_SUCCESS); + ExpectIntEQ(X509_verify_cert(ctx), WOLFSSL_SUCCESS); + /* while verifying the certificate, it should have two errors */ + ExpectIntEQ(err_index, 2); + /* self-signed */ + ExpectIntEQ(last_errcodes[err_index - 2], + WOLFSSL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT); + /* expired */ + ExpectIntEQ(last_errcodes[err_index - 1], + WOLFSSL_X509_V_ERR_CERT_HAS_EXPIRED); + + X509_STORE_CTX_free(ctx); + ctx = NULL; + X509_STORE_free(store); + store = NULL; + X509_free(cert); + cert = NULL; + X509_free(ca); + ca = NULL; + + err_index = 0; + + ExpectNotNull(store = X509_STORE_new()); + /* Set NO_CHECK_TIME flag to skip time validation */ + ExpectIntEQ(X509_VERIFY_PARAM_set_flags(store->param, + WOLFSSL_NO_CHECK_TIME), 
WOLFSSL_SUCCESS); + ExpectTrue((store->param->flags & WOLFSSL_NO_CHECK_TIME) == + WOLFSSL_NO_CHECK_TIME); + ExpectNotNull(ctx = X509_STORE_CTX_new()); + ExpectNotNull(ca = wolfSSL_X509_load_certificate_file(caCertFile, + SSL_FILETYPE_PEM)); + ExpectIntEQ(wolfSSL_X509_STORE_add_cert(store, ca), WOLFSSL_SUCCESS); + + X509_STORE_set_verify_cb(store, X509CallbackCount); + + ExpectNotNull(cert = wolfSSL_X509_load_certificate_file(expiredCertFile, + SSL_FILETYPE_PEM)); + + ExpectIntEQ(X509_STORE_CTX_init(ctx, store, cert, NULL), WOLFSSL_SUCCESS); + ExpectIntEQ(X509_verify_cert(ctx), WOLFSSL_SUCCESS); + /* while verifying the certificate, it should have an error */ + ExpectIntEQ(err_index, 1); + /* self-signed */ + ExpectIntEQ(last_errcodes[err_index - 1], + WOLFSSL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT); + /* no expired because of no_check_time */ + X509_STORE_CTX_free(ctx); + ctx = NULL; + X509_STORE_free(store); + store = NULL; + X509_free(cert); + cert = NULL; + X509_free(ca); + ca = NULL; +#endif #endif /* OPENSSL_EXTRA && !NO_FILESYSTEM && !NO_ASN_TIME && !NO_RSA */ return EXPECT_RESULT(); } diff --git a/tests/api/test_tls13.c b/tests/api/test_tls13.c index 2a22d38d9..802ecf2f9 100644 --- a/tests/api/test_tls13.c +++ b/tests/api/test_tls13.c @@ -764,8 +764,9 @@ int test_tls13_apis(void) ExpectIntEQ(wolfSSL_write_early_data(clientTls12Ssl, earlyData, sizeof(earlyData), &outSz), WC_NO_ERR_TRACE(BAD_FUNC_ARG)); #endif + /* invoking without session or psk cbs */ ExpectIntEQ(wolfSSL_write_early_data(clientSsl, earlyData, - sizeof(earlyData), &outSz), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + sizeof(earlyData), &outSz), WC_NO_ERR_TRACE(BAD_STATE_E)); #endif ExpectIntEQ(wolfSSL_read_early_data(NULL, earlyDataBuffer, @@ -2354,7 +2355,9 @@ int test_tls13_same_ch(void) ExpectIntEQ(test_memio_inject_message(&test_ctx, 1, (char*)hrr, sizeof(hrr)), 0); ExpectIntEQ(wolfSSL_connect(ssl_c), -1); - ExpectIntEQ(wolfSSL_get_error(ssl_c, -1), DUPLICATE_MSG_E); + /* issue 9653: use a 
more appropriate error than DUPLICATE_MSG_E. + * Since the cause of this is missing extension, return that. */ + ExpectIntEQ(wolfSSL_get_error(ssl_c, -1), EXT_MISSING); wolfSSL_free(ssl_c); wolfSSL_CTX_free(ctx_c); diff --git a/wolfcrypt/benchmark/benchmark.c b/wolfcrypt/benchmark/benchmark.c index 041a10334..3e1dcf050 100644 --- a/wolfcrypt/benchmark/benchmark.c +++ b/wolfcrypt/benchmark/benchmark.c @@ -10554,35 +10554,39 @@ static void bench_mlkem_encap(int type, const char* name, int keySize, int ret = 0, times, count, pending = 0; double start; const char**desc = bench_desc_words[lng_index]; - byte ct[WC_ML_KEM_MAX_CIPHER_TEXT_SIZE]; - byte ss[WC_ML_KEM_SS_SZ]; - byte pub[WC_ML_KEM_MAX_PUBLIC_KEY_SIZE]; + WC_DECLARE_VAR(ct, byte, WC_ML_KEM_MAX_CIPHER_TEXT_SIZE, HEAP_HINT); + WC_DECLARE_VAR(ss, byte, WC_ML_KEM_SS_SZ, HEAP_HINT); + WC_DECLARE_VAR(pub, byte, WC_ML_KEM_MAX_PUBLIC_KEY_SIZE, HEAP_HINT); word32 pubLen; word32 ctSz; DECLARE_MULTI_VALUE_STATS_VARS() bench_stats_prepare(); + WC_ALLOC_VAR(ct, byte, WC_ML_KEM_MAX_CIPHER_TEXT_SIZE, HEAP_HINT); + WC_ALLOC_VAR(ss, byte, WC_ML_KEM_SS_SZ, HEAP_HINT); + WC_ALLOC_VAR(pub, byte, WC_ML_KEM_MAX_PUBLIC_KEY_SIZE, HEAP_HINT); + ret = wc_KyberKey_PublicKeySize(key1, &pubLen); if (ret != 0) { - return; + goto exit; } ret = wc_KyberKey_EncodePublicKey(key1, pub, pubLen); if (ret != 0) { - return; + goto exit; } ret = wc_KyberKey_Init(type, key2, HEAP_HINT, INVALID_DEVID); if (ret != 0) { - return; + goto exit; } ret = wc_KyberKey_DecodePublicKey(key2, pub, pubLen); if (ret != 0) { - return; + goto exit; } ret = wc_KyberKey_CipherTextSize(key2, &ctSz); if (ret != 0) { - return; + goto exit; } #ifndef WOLFSSL_MLKEM_NO_ENCAPSULATE @@ -10641,7 +10645,19 @@ exit_decap: #ifdef MULTI_VALUE_STATISTICS bench_multi_value_stats(max, min, sum, squareSum, runs); #endif + #endif + +exit: + + WC_FREE_VAR(ct, HEAP_HINT); + WC_FREE_VAR(ss, HEAP_HINT); + WC_FREE_VAR(pub, HEAP_HINT); + + if (ret != 0) + printf("error: bench_mlkem_encap() 
failed with code %d.\n", ret); + + return; } #endif diff --git a/wolfcrypt/src/aes.c b/wolfcrypt/src/aes.c index ddb7e31db..a1d09be4c 100644 --- a/wolfcrypt/src/aes.c +++ b/wolfcrypt/src/aes.c @@ -4101,10 +4101,16 @@ static WARN_UNUSED_RESULT int wc_AesDecrypt( int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir) { + if (aes == NULL || userKey == NULL) { + return BAD_FUNC_ARG; + } + if (keylen > sizeof(aes->key)) { + return BAD_FUNC_ARG; + } + return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1); } - int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir) { @@ -4987,6 +4993,8 @@ static void AesSetKey_C(Aes* aes, const byte* key, word32 keySz, int dir) } #endif /* WOLFSSL_AESNI */ +#ifndef WC_C_DYNAMIC_FALLBACK + #if defined(WOLFSSL_ARMASM) #if !defined(WOLFSSL_ARMASM_NO_HW_CRYPTO) #ifndef __aarch64__ @@ -5117,6 +5125,9 @@ static void AesSetKey_C(Aes* aes, const byte* key, word32 keySz, int dir) #endif return ret; #endif + +#endif /* !WC_C_DYNAMIC_FALLBACK */ + } /* wc_AesSetKeyLocal */ int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, @@ -5277,7 +5288,7 @@ int wc_AesSetIV(Aes* aes, const byte* iv) { int ret; - if (aes == NULL) + if (aes == NULL || out == NULL || in == NULL) return BAD_FUNC_ARG; VECTOR_REGISTERS_PUSH; ret = wc_AesEncrypt(aes, in, out); @@ -10350,7 +10361,8 @@ int WARN_UNUSED_RESULT AES_GCM_decrypt_C( /* now use res as a mask for constant time return of ret, unless tag * mismatch, whereupon AES_GCM_AUTH_E is returned. */ - ret = (ret & ~res) | (res & WC_NO_ERR_TRACE(AES_GCM_AUTH_E)); + ret = (ret & ~res); + ret |= (res & WC_NO_ERR_TRACE(AES_GCM_AUTH_E)); #endif return ret; } @@ -12095,24 +12107,7 @@ int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut, return ret; } -/* Update the AES GCM for encryption with data and/or authentication data. - * - * All the AAD must be passed to update before the plaintext. 
- * Last part of AAD can be passed with first part of plaintext. - * - * Must set key and IV before calling this function. - * Must call wc_AesGcmInit() before calling this function. - * - * @param [in, out] aes AES object. - * @param [out] out Buffer to hold cipher text. - * @param [in] in Buffer holding plaintext. - * @param [in] sz Length of plaintext in bytes. - * @param [in] authIn Buffer holding authentication data. - * @param [in] authInSz Length of authentication data in bytes. - * @return 0 on success. - * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer - * is NULL. - */ +/* Update the AES GCM for encryption with data and/or authentication data. */ int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, const byte* authIn, word32 authInSz) { @@ -12254,24 +12249,7 @@ int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv, return wc_AesGcmInit(aes, key, len, iv, ivSz); } -/* Update the AES GCM for decryption with data and/or authentication data. - * - * All the AAD must be passed to update before the cipher text. - * Last part of AAD can be passed with first part of cipher text. - * - * Must set key and IV before calling this function. - * Must call wc_AesGcmInit() before calling this function. - * - * @param [in, out] aes AES object. - * @param [out] out Buffer to hold plaintext. - * @param [in] in Buffer holding cipher text. - * @param [in] sz Length of cipher text in bytes. - * @param [in] authIn Buffer holding authentication data. - * @param [in] authInSz Length of authentication data in bytes. - * @return 0 on success. - * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer - * is NULL. - */ +/* Update the AES GCM for decryption with data and/or authentication data. 
*/ int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, const byte* authIn, word32 authInSz) { diff --git a/wolfcrypt/src/aes_asm.S b/wolfcrypt/src/aes_asm.S index d96d07ad5..2cc182f65 100644 --- a/wolfcrypt/src/aes_asm.S +++ b/wolfcrypt/src/aes_asm.S @@ -27,6 +27,25 @@ * by Intel Mobility Group, Israel Development Center, Israel Shay Gueron */ +#ifdef WOLFSSL_USER_SETTINGS +#ifdef WOLFSSL_USER_SETTINGS_ASM +/* + * user_settings_asm.h is a file generated by the script user_settings_asm.sh. + * The script takes in a user_settings.h and produces user_settings_asm.h, which + * is a stripped down version of user_settings.h containing only preprocessor + * directives. This makes the header safe to include in assembly (.S) files. + */ +#include "user_settings_asm.h" +#else +/* + * Note: if user_settings.h contains any C code (e.g. a typedef or function + * prototype), including it here in an assembly (.S) file will cause an + * assembler failure. See user_settings_asm.h above. + */ +#include "user_settings.h" +#endif /* WOLFSSL_USER_SETTINGS_ASM */ +#endif /* WOLFSSL_USER_SETTINGS */ + #ifdef WOLFSSL_X86_64_BUILD /* diff --git a/wolfcrypt/src/asn.c b/wolfcrypt/src/asn.c index e64f27892..31466ca84 100644 --- a/wolfcrypt/src/asn.c +++ b/wolfcrypt/src/asn.c @@ -13211,7 +13211,8 @@ static int SetEccPublicKey(byte* output, ecc_key* key, int outLen, if (ret == 0) { /* Calculate the size of the encoded public point. */ PRIVATE_KEY_UNLOCK(); - #if defined(HAVE_COMP_KEY) && defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0) + #if defined(HAVE_COMP_KEY) && \ + (defined(HAVE_SELFTEST) || (defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0))) /* in earlier versions of FIPS the get length functionality is not * available with compressed keys */ pubSz = key->dp ? 
key->dp->size : MAX_ECC_BYTES; @@ -16271,8 +16272,38 @@ static WC_INLINE int GetTime_Long(long* value, const byte* date, int* idx) * Reminder: idx is incremented in each call to GetTime() * Return 0 on failure, 1 for success. */ int ExtractDate(const unsigned char* date, unsigned char format, - struct tm* certTime, int* idx) + struct tm* certTime, int* idx, int len) { + int i = *idx; + + /* Validate date string length based on format. Can not assume null + * terminated strings. Must check for the 'Z'. + * Subtract 2; one for zero indexing and one to exclude null terminator + * built into macro values. */ + if (format == ASN_UTC_TIME) { + /* UTCTime format requires YYMMDDHHMMSSZ (13 chars). */ + /* Bounds check: ensure we have enough data before accessing. */ + if (len < i + ASN_UTC_TIME_SIZE - 1) { + return 0; + } + if (date[i + ASN_UTC_TIME_SIZE - 2] != 'Z') { + return 0; + } + } + else if (format == ASN_GENERALIZED_TIME) { + /* GeneralizedTime format requires YYYYMMDDHHMMSSZ (15 chars). */ + /* Bounds check: ensure we have enough data before accessing. 
*/ + if (len < i + ASN_GENERALIZED_TIME_SIZE - 1) { + return 0; + } + if (date[ i + ASN_GENERALIZED_TIME_SIZE - 2] != 'Z') { + return 0; + } + } + else { + return 0; + } + XMEMSET(certTime, 0, sizeof(struct tm)); /* Get the first two bytes of the year (century) */ @@ -16341,12 +16372,12 @@ int ExtractDate(const unsigned char* date, unsigned char format, #ifdef WOLFSSL_ASN_TIME_STRING -int GetTimeString(byte* date, int format, char* buf, int len) +int GetTimeString(byte* date, int format, char* buf, int len, int dateLen) { struct tm t; int idx = 0; - if (!ExtractDate(date, (unsigned char)format, &t, &idx)) { + if (!ExtractDate(date, (unsigned char)format, &t, &idx, dateLen)) { return 0; } @@ -16576,13 +16607,13 @@ static WC_INLINE int DateLessThan(const struct tm* a, const struct tm* b) /* date = ASN.1 raw */ /* format = ASN_UTC_TIME or ASN_GENERALIZED_TIME */ /* dateType = ASN_AFTER or ASN_BEFORE */ -int wc_ValidateDate(const byte* date, byte format, int dateType) +int wc_ValidateDate(const byte* date, byte format, int dateType, int len) { - return wc_ValidateDateWithTime(date, format, dateType, 0); + return wc_ValidateDateWithTime(date, format, dateType, 0, len); } int wc_ValidateDateWithTime(const byte* date, byte format, int dateType, - time_t checkTime) + time_t checkTime, int len) { time_t ltime; struct tm certTime; @@ -16631,7 +16662,7 @@ int wc_ValidateDateWithTime(const byte* date, byte format, int dateType, } #endif - if (!ExtractDate(date, format, &certTime, &i)) { + if (!ExtractDate(date, format, &certTime, &i, len)) { WOLFSSL_MSG("Error extracting the date"); return 0; } @@ -16853,7 +16884,7 @@ static int GetDate(DecodedCert* cert, int dateType, int verify, int maxIdx) #ifndef NO_ASN_TIME_CHECK if (verify != NO_VERIFY && verify != VERIFY_SKIP_DATE && (! 
AsnSkipDateCheck) && - !XVALIDATE_DATE(date, format, dateType)) { + !XVALIDATE_DATE(date, format, dateType, length)) { if (dateType == ASN_BEFORE) { WOLFSSL_ERROR_VERBOSE(ASN_BEFORE_DATE_E); return ASN_BEFORE_DATE_E; @@ -16911,7 +16942,7 @@ int wc_GetDateAsCalendarTime(const byte* date, int length, byte format, { int idx = 0; (void)length; - if (!ExtractDate(date, format, timearg, &idx)) + if (!ExtractDate(date, format, timearg, &idx, length)) return ASN_TIME_E; return 0; } @@ -19285,8 +19316,8 @@ exit_cs: #ifndef IGNORE_NAME_CONSTRAINTS -static int MatchBaseName(int type, const char* name, int nameSz, - const char* base, int baseSz) +int wolfssl_local_MatchBaseName(int type, const char* name, int nameSz, + const char* base, int baseSz) { if (base == NULL || baseSz <= 0 || name == NULL || nameSz <= 0 || name[0] == '.' || nameSz < baseSz || @@ -19303,6 +19334,8 @@ static int MatchBaseName(int type, const char* name, int nameSz, if (type == ASN_RFC822_TYPE) { const char* p = NULL; int count = 0; + int baseIsEmail = 0; + int atPos = -1; if (base[0] != '.') { p = base; @@ -19314,25 +19347,36 @@ static int MatchBaseName(int type, const char* name, int nameSz, p++; } - /* No '@' in base, reset p to NULL */ - if (count >= baseSz) - p = NULL; + if (count < baseSz) { + /* '@' found in base - validate it's not at start/end and only one */ + if (count == 0 || count == baseSz - 1) + return 0; /* '@' at start or end of base is invalid */ + baseIsEmail = 1; + } } - if (p == NULL) { - /* Base isn't an email address, it is a domain name, - * wind the name forward one character past its '@'. 
*/ - p = name; - count = 0; - while (*p != '@' && count < baseSz) { - count++; - p++; + /* verify that name is a valid email address, store @ position */ + p = name; + count = 0; + while (count < nameSz) { + if (*p == '@') { + if (atPos >= 0) + return 0; /* Multiple '@' in name is invalid */ + atPos = count; } + count++; + p++; + } - if (count < baseSz && *p == '@') { - name = p + 1; - nameSz -= count + 1; - } + /* Validate '@' exists and is not at start or end */ + if (atPos < 0 || atPos == 0 || atPos == nameSz - 1) + return 0; + + if (!baseIsEmail) { + /* Base isn't an email address but a domain or host. + * wind the name forward one character past its '@'. */ + name = name + atPos + 1; + nameSz -= atPos + 1; } } @@ -19340,13 +19384,24 @@ static int MatchBaseName(int type, const char* name, int nameSz, * "...Any DNS name that can be constructed by simply adding zero or more * labels to the left-hand side of the name satisfies the name constraint." * i.e www.host.example.com works for host.example.com name constraint and - * host1.example.com does not. */ + * host1.example.com does not. + * + * Note: For DNS type, RFC 5280 does not allow leading dot in constraint. + * However, we accept it here for backwards compatibility. */ if (type == ASN_DNS_TYPE || (type == ASN_RFC822_TYPE && base[0] == '.')) { int szAdjust = nameSz - baseSz; + /* Check dot boundary: if there's a prefix and base doesn't start with + * '.', the character before the matched suffix must be '.'. + * When base starts with '.', the dot is included in the comparison. */ + if (szAdjust > 0 && base[0] != '.' 
&& name[szAdjust - 1] != '.') + return 0; name += szAdjust; nameSz -= szAdjust; } + if (nameSz != baseSz) + return 0; + while (nameSz > 0) { if (XTOLOWER((unsigned char)*name) != XTOLOWER((unsigned char)*base)) @@ -19378,8 +19433,8 @@ static int PermittedListOk(DNS_entry* name, Base_entry* dnsList, byte nameType) if (current->type == nameType) { need = 1; /* restriction on permitted names is set for this type */ if (name->len >= current->nameSz && - MatchBaseName(nameType, name->name, name->len, - current->name, current->nameSz)) { + wolfssl_local_MatchBaseName(nameType, name->name, name->len, + current->name, current->nameSz)) { match = 1; /* found the current name in the permitted list*/ break; } @@ -19409,8 +19464,8 @@ static int IsInExcludedList(DNS_entry* name, Base_entry* dnsList, byte nameType) while (current != NULL) { if (current->type == nameType) { if (name->len >= current->nameSz && - MatchBaseName(nameType, name->name, name->len, - current->name, current->nameSz)) { + wolfssl_local_MatchBaseName(nameType, name->name, name->len, + current->name, current->nameSz)) { ret = 1; break; } @@ -21360,6 +21415,9 @@ static int DecodeAuthKeyIdInternal(const byte* input, word32 sz, ret = GetHashId(extAuthKeyId, (int)extAuthKeyIdSz, cert->extAuthKeyId, HashIdAlg(cert->signatureOID)); } + else { + cert->extAuthKeyIdSet = 0; + } #ifdef WOLFSSL_AKID_NAME if (ret == 0 && extAuthKeyIdIssuerSz > 0) { @@ -23590,7 +23648,8 @@ static int CheckDate(ASNGetData *dataASN, int dateType) #ifndef NO_ASN_TIME_CHECK /* Check date is a valid string and ASN_BEFORE or ASN_AFTER now. */ if ((ret == 0) && (! 
AsnSkipDateCheck)) { - if (!XVALIDATE_DATE(dataASN->data.ref.data, dataASN->tag, dateType)) { + if (!XVALIDATE_DATE(dataASN->data.ref.data, dataASN->tag, dateType, + (int)dataASN->data.ref.length)) { if (dateType == ASN_BEFORE) { ret = ASN_BEFORE_DATE_E; } @@ -27605,8 +27664,8 @@ static int KeyPemToDerPassCb(char* passwd, int sz, int rw, void* userdata) if (userdata == NULL) return 0; - XSTRNCPY(passwd, (char*)userdata, (size_t)sz); - return (int)min((word32)sz, (word32)XSTRLEN((char*)userdata)); + XSTRLCPY(passwd, (char*)userdata, (size_t)sz); + return (int)min((word32)(sz - 1), (word32)XSTRLEN((char*)userdata)); } #endif @@ -38381,7 +38440,7 @@ static int DecodeSingleResponse(byte* source, word32* ioIndex, word32 size, #ifndef NO_ASN_TIME_CHECK #ifndef WOLFSSL_NO_OCSP_DATE_CHECK if ((! AsnSkipDateCheck) && !XVALIDATE_DATE(single->status->thisDate, - single->status->thisDateFormat, ASN_BEFORE)) + single->status->thisDateFormat, ASN_BEFORE, MAX_DATE_SIZE)) return ASN_BEFORE_DATE_E; #endif #endif @@ -38419,7 +38478,7 @@ static int DecodeSingleResponse(byte* source, word32* ioIndex, word32 size, #ifndef WOLFSSL_NO_OCSP_DATE_CHECK if ((! AsnSkipDateCheck) && !XVALIDATE_DATE(single->status->nextDate, - single->status->nextDateFormat, ASN_AFTER)) + single->status->nextDateFormat, ASN_AFTER, MAX_DATE_SIZE)) return ASN_AFTER_DATE_E; #endif #endif @@ -38493,7 +38552,8 @@ static int DecodeSingleResponse(byte* source, word32* ioIndex, word32 size, #if !defined(NO_ASN_TIME_CHECK) && !defined(WOLFSSL_NO_OCSP_DATE_CHECK) /* Check date is a valid string and ASN_BEFORE now. */ if ((! 
AsnSkipDateCheck) && - !XVALIDATE_DATE(cs->thisDate, ASN_GENERALIZED_TIME, ASN_BEFORE)) + !XVALIDATE_DATE(cs->thisDate, ASN_GENERALIZED_TIME, ASN_BEFORE, + MAX_DATE_SIZE)) { ret = ASN_BEFORE_DATE_E; } @@ -38518,7 +38578,8 @@ static int DecodeSingleResponse(byte* source, word32* ioIndex, word32 size, #if !defined(NO_ASN_TIME_CHECK) && !defined(WOLFSSL_NO_OCSP_DATE_CHECK) /* Check date is a valid string and ASN_AFTER now. */ if ((! AsnSkipDateCheck) && - !XVALIDATE_DATE(cs->nextDate, ASN_GENERALIZED_TIME, ASN_AFTER)) + !XVALIDATE_DATE(cs->nextDate, ASN_GENERALIZED_TIME, ASN_AFTER, + MAX_DATE_SIZE)) { ret = ASN_AFTER_DATE_E; } @@ -40605,7 +40666,8 @@ static int ParseCRL_CertList(RevokedCert* rcert, DecodedCRL* dcrl, #if !defined(NO_ASN_TIME) && !defined(WOLFSSL_NO_CRL_DATE_CHECK) if (verify != NO_VERIFY && (! AsnSkipDateCheck) && - !XVALIDATE_DATE(dcrl->nextDate, dcrl->nextDateFormat, ASN_AFTER)) { + !XVALIDATE_DATE(dcrl->nextDate, dcrl->nextDateFormat, ASN_AFTER, + MAX_DATE_SIZE)) { WOLFSSL_MSG("CRL after date is no longer valid"); WOLFSSL_ERROR_VERBOSE(CRL_CERT_DATE_ERR); return CRL_CERT_DATE_ERR; @@ -40714,6 +40776,7 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, word32* inOutIdx, word32 sz) { int length; + int needed; word32 idx; word32 ext_bound; /* boundary index for the sequence of extensions */ word32 oid; @@ -40799,9 +40862,9 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, return ret; } else { - DECL_MP_INT_SIZE_DYN(m, CRL_MAX_NUM_SZ * CHAR_BIT, - CRL_MAX_NUM_SZ * CHAR_BIT); - NEW_MP_INT_SIZE(m, CRL_MAX_NUM_SZ * CHAR_BIT, NULL, + DECL_MP_INT_SIZE_DYN(m, CRL_MAX_NUM_SZ_BITS, + CRL_MAX_NUM_SZ_BITS); + NEW_MP_INT_SIZE(m, CRL_MAX_NUM_SZ_BITS, NULL, DYNAMIC_TYPE_TMP_BUFFER); #ifdef MP_INT_SIZE_CHECK_NULL if (m == NULL) { @@ -40819,7 +40882,15 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, if (ret != MP_OKAY) ret = BUFFER_E; - + /* Check CRL number size + * if it exceeds CRL_MAX_NUM_SZ(octets) + * and 
CRL_MAX_NUM_HEX_STR_SZ(hex string) + */ + if (((needed = mp_unsigned_bin_size(m)) > CRL_MAX_NUM_SZ) || + ((needed * 2 + 1) > CRL_MAX_NUM_HEX_STR_SZ)) { + WOLFSSL_MSG("CRL number exceeds limitation."); + ret = BUFFER_E; + } if (ret == MP_OKAY && mp_toradix(m, (char*)dcrl->crlNumber, MP_RADIX_HEX) != MP_OKAY) ret = BUFFER_E; @@ -40857,6 +40928,7 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, word32 idx, word32 maxIdx) { DECL_ASNGETDATA(dataASN, certExtASN_Length); + int needed; int ret = 0; /* Track if we've seen these extensions already */ word32 seenAuthKey = 0; @@ -40917,9 +40989,9 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, word32 idx, #endif } else if (oid == CRL_NUMBER_OID) { - DECL_MP_INT_SIZE_DYN(m, CRL_MAX_NUM_SZ * CHAR_BIT, - CRL_MAX_NUM_SZ * CHAR_BIT); - NEW_MP_INT_SIZE(m, CRL_MAX_NUM_SZ * CHAR_BIT, NULL, + DECL_MP_INT_SIZE_DYN(m, CRL_MAX_NUM_SZ_BITS, + CRL_MAX_NUM_SZ_BITS); + NEW_MP_INT_SIZE(m, CRL_MAX_NUM_SZ_BITS, NULL, DYNAMIC_TYPE_TMP_BUFFER); #ifdef MP_INT_SIZE_CHECK_NULL @@ -40936,7 +41008,15 @@ static int ParseCRL_Extensions(DecodedCRL* dcrl, const byte* buf, word32 idx, if (ret == 0) { ret = GetInt(m, buf, &localIdx, maxIdx); } - + /* Check CRL number size + * if it exceeds CRL_MAX_NUM_SZ(octets) + * and CRL_MAX_NUM_HEX_STR_SZ(hex string) + */ + if (((needed = mp_unsigned_bin_size(m)) > CRL_MAX_NUM_SZ) || + ((needed * 2 + 1) > CRL_MAX_NUM_HEX_STR_SZ)) { + WOLFSSL_MSG("CRL number exceeds limitation."); + ret = BUFFER_E; + } if (ret == 0 && mp_toradix(m, (char*)dcrl->crlNumber, MP_RADIX_HEX) != MP_OKAY) ret = BUFFER_E; @@ -41267,7 +41347,8 @@ end: /* Next date was set, so validate it. */ if (verify != NO_VERIFY && (! 
AsnSkipDateCheck) && - !XVALIDATE_DATE(dcrl->nextDate, dcrl->nextDateFormat, ASN_AFTER)) { + !XVALIDATE_DATE(dcrl->nextDate, dcrl->nextDateFormat, ASN_AFTER, + MAX_DATE_SIZE)) { WOLFSSL_MSG("CRL after date is no longer valid"); ret = CRL_CERT_DATE_ERR; WOLFSSL_ERROR_VERBOSE(ret); diff --git a/wolfcrypt/src/blake2b.c b/wolfcrypt/src/blake2b.c index 1f473cdd1..37acb9744 100644 --- a/wolfcrypt/src/blake2b.c +++ b/wolfcrypt/src/blake2b.c @@ -37,6 +37,12 @@ #include #include +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif static const word64 blake2b_IV[8] = { @@ -511,7 +517,115 @@ int wc_Blake2bFinal(Blake2b* b2b, byte* final, word32 requestSz) } -/* end CTaoCrypt API */ +int wc_Blake2bHmacInit(Blake2b* b2b, const byte* key, size_t key_len) +{ + byte x_key[BLAKE2B_BLOCKBYTES]; + int i; + int ret = 0; + + if (key == NULL) + return BAD_FUNC_ARG; + + if (key_len > BLAKE2B_BLOCKBYTES) { + ret = wc_InitBlake2b(b2b, BLAKE2B_OUTBYTES); + if (ret == 0) + ret = wc_Blake2bUpdate(b2b, key, (word32)key_len); + if (ret == 0) + ret = wc_Blake2bFinal(b2b, x_key, 0); + } else { + XMEMCPY(x_key, key, key_len); + if (key_len < BLAKE2B_BLOCKBYTES) { + XMEMSET(x_key + key_len, 0, BLAKE2B_BLOCKBYTES - key_len); + } + } + + if (ret == 0) { + for (i = 0; i < BLAKE2B_BLOCKBYTES; ++i) + x_key[i] ^= 0x36U; + } + + if (ret == 0) + ret = wc_InitBlake2b(b2b, BLAKE2B_OUTBYTES); + if (ret == 0) + ret = wc_Blake2bUpdate(b2b, x_key, BLAKE2B_BLOCKBYTES); + + ForceZero(x_key, sizeof(x_key)); + + return ret; +} + +int wc_Blake2bHmacUpdate(Blake2b* b2b, const byte* in, size_t in_len) +{ + if (in == NULL) + return BAD_FUNC_ARG; + + return wc_Blake2bUpdate(b2b, in, (word32)in_len); +} + +int wc_Blake2bHmacFinal(Blake2b* b2b, const byte* key, size_t key_len, + byte* out, size_t out_len) +{ + byte x_key[BLAKE2B_BLOCKBYTES]; + int i; + int ret = 0; + + if (key == NULL) + return BAD_FUNC_ARG; + + if (out_len != BLAKE2B_OUTBYTES) + return BUFFER_E; + + if (key_len > 
BLAKE2B_BLOCKBYTES) { + ret = wc_InitBlake2b(b2b, BLAKE2B_OUTBYTES); + if (ret == 0) + ret = wc_Blake2bUpdate(b2b, key, (word32)key_len); + if (ret == 0) + ret = wc_Blake2bFinal(b2b, x_key, 0); + } else { + XMEMCPY(x_key, key, key_len); + if (key_len < BLAKE2B_BLOCKBYTES) { + XMEMSET(x_key + key_len, 0, BLAKE2B_BLOCKBYTES - key_len); + } + } + + if (ret == 0) { + for (i = 0; i < BLAKE2B_BLOCKBYTES; ++i) + x_key[i] ^= 0x5CU; + } + + if (ret == 0) + ret = wc_Blake2bFinal(b2b, out, 0); + + if (ret == 0) + ret = wc_InitBlake2b(b2b, BLAKE2B_OUTBYTES); + if (ret == 0) + ret = wc_Blake2bUpdate(b2b, x_key, BLAKE2B_BLOCKBYTES); + if (ret == 0) + ret = wc_Blake2bUpdate(b2b, out, BLAKE2B_OUTBYTES); + if (ret == 0) + ret = wc_Blake2bFinal(b2b, out, 0); + + ForceZero(x_key, sizeof(x_key)); + + return ret; +} + +int wc_Blake2bHmac(const byte* in, size_t in_len, + const byte* key, size_t key_len, + byte* out, size_t out_len) +{ + Blake2b state; + int ret; + + ret = wc_Blake2bHmacInit(&state, key, key_len); + if (ret == 0) + ret = wc_Blake2bHmacUpdate(&state, in, in_len); + if (ret == 0) + ret = wc_Blake2bHmacFinal(&state, key, key_len, out, out_len); + + return ret; +} + +/* end wolfCrypt API */ #endif /* HAVE_BLAKE2 */ - diff --git a/wolfcrypt/src/blake2s.c b/wolfcrypt/src/blake2s.c index cf5c9f2ff..b38d12a93 100644 --- a/wolfcrypt/src/blake2s.c +++ b/wolfcrypt/src/blake2s.c @@ -37,6 +37,12 @@ #include #include +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif static const word32 blake2s_IV[8] = { @@ -505,7 +511,115 @@ int wc_Blake2sFinal(Blake2s* b2s, byte* final, word32 requestSz) } -/* end CTaoCrypt API */ +int wc_Blake2sHmacInit(Blake2s* b2s, const byte* key, size_t key_len) +{ + byte x_key[BLAKE2S_BLOCKBYTES]; + int i; + int ret = 0; + + if (key == NULL) + return BAD_FUNC_ARG; + + if (key_len > BLAKE2S_BLOCKBYTES) { + ret = wc_InitBlake2s(b2s, BLAKE2S_OUTBYTES); + if (ret == 0) + ret = wc_Blake2sUpdate(b2s, key, (word32)key_len); + if 
(ret == 0) + ret = wc_Blake2sFinal(b2s, x_key, 0); + } else { + XMEMCPY(x_key, key, key_len); + if (key_len < BLAKE2S_BLOCKBYTES) { + XMEMSET(x_key + key_len, 0, BLAKE2S_BLOCKBYTES - key_len); + } + } + + if (ret == 0) { + for (i = 0; i < BLAKE2S_BLOCKBYTES; ++i) + x_key[i] ^= 0x36U; + } + + if (ret == 0) + ret = wc_InitBlake2s(b2s, BLAKE2S_OUTBYTES); + if (ret == 0) + ret = wc_Blake2sUpdate(b2s, x_key, BLAKE2S_BLOCKBYTES); + + ForceZero(x_key, sizeof(x_key)); + + return ret; +} + +int wc_Blake2sHmacUpdate(Blake2s* b2s, const byte* in, size_t in_len) +{ + if (in == NULL) + return BAD_FUNC_ARG; + + return wc_Blake2sUpdate(b2s, in, (word32)in_len); +} + +int wc_Blake2sHmacFinal(Blake2s* b2s, const byte* key, size_t key_len, + byte* out, size_t out_len) +{ + byte x_key[BLAKE2S_BLOCKBYTES]; + int i; + int ret = 0; + + if (key == NULL) + return BAD_FUNC_ARG; + + if (out_len != BLAKE2S_OUTBYTES) + return BUFFER_E; + + if (key_len > BLAKE2S_BLOCKBYTES) { + ret = wc_InitBlake2s(b2s, BLAKE2S_OUTBYTES); + if (ret == 0) + ret = wc_Blake2sUpdate(b2s, key, (word32)key_len); + if (ret == 0) + ret = wc_Blake2sFinal(b2s, x_key, 0); + } else { + XMEMCPY(x_key, key, key_len); + if (key_len < BLAKE2S_BLOCKBYTES) { + XMEMSET(x_key + key_len, 0, BLAKE2S_BLOCKBYTES - key_len); + } + } + + if (ret == 0) { + for (i = 0; i < BLAKE2S_BLOCKBYTES; ++i) + x_key[i] ^= 0x5CU; + } + + if (ret == 0) + ret = wc_Blake2sFinal(b2s, out, 0); + + if (ret == 0) + ret = wc_InitBlake2s(b2s, BLAKE2S_OUTBYTES); + if (ret == 0) + ret = wc_Blake2sUpdate(b2s, x_key, BLAKE2S_BLOCKBYTES); + if (ret == 0) + ret = wc_Blake2sUpdate(b2s, out, BLAKE2S_OUTBYTES); + if (ret == 0) + ret = wc_Blake2sFinal(b2s, out, 0); + + ForceZero(x_key, sizeof(x_key)); + + return ret; +} + +int wc_Blake2sHmac(const byte* in, size_t in_len, + const byte* key, size_t key_len, + byte* out, size_t out_len) +{ + Blake2s state; + int ret; + + ret = wc_Blake2sHmacInit(&state, key, key_len); + if (ret == 0) + ret = wc_Blake2sHmacUpdate(&state, 
in, in_len); + if (ret == 0) + ret = wc_Blake2sHmacFinal(&state, key, key_len, out, out_len); + + return ret; +} + +/* end wolfCrypt API */ #endif /* HAVE_BLAKE2S */ - diff --git a/wolfcrypt/src/chacha20_poly1305.c b/wolfcrypt/src/chacha20_poly1305.c index 94eb5ea89..cadf7ff5b 100644 --- a/wolfcrypt/src/chacha20_poly1305.c +++ b/wolfcrypt/src/chacha20_poly1305.c @@ -313,7 +313,8 @@ int wc_XChaCha20Poly1305_Init( byte authKey[CHACHA20_POLY1305_AEAD_KEYSIZE]; int ret; - if ((ad == NULL) || (nonce == NULL) || (key == NULL)) + if ((aead == NULL) || (ad == NULL && ad_len > 0) || (nonce == NULL) || + (key == NULL)) return BAD_FUNC_ARG; if ((key_len != CHACHA20_POLY1305_AEAD_KEYSIZE) || diff --git a/wolfcrypt/src/cmac.c b/wolfcrypt/src/cmac.c index b378ab331..563113c47 100644 --- a/wolfcrypt/src/cmac.c +++ b/wolfcrypt/src/cmac.c @@ -163,12 +163,19 @@ int wc_InitCmac_ex(Cmac* cmac, const byte* key, word32 keySz, byte l[WC_AES_BLOCK_SIZE]; XMEMSET(l, 0, WC_AES_BLOCK_SIZE); +#ifndef HAVE_SELFTEST ret = wc_AesEncryptDirect(&cmac->aes, l, l); if (ret == 0) { ShiftAndXorRb(cmac->k1, l); ShiftAndXorRb(cmac->k2, cmac->k1); ForceZero(l, WC_AES_BLOCK_SIZE); } +#else + wc_AesEncryptDirect(&cmac->aes, l, l); + ShiftAndXorRb(cmac->k1, l); + ShiftAndXorRb(cmac->k2, cmac->k1); + ForceZero(l, WC_AES_BLOCK_SIZE); +#endif } break; #endif /* !NO_AES && WOLFSSL_AES_DIRECT */ @@ -233,12 +240,19 @@ int wc_CmacUpdate(Cmac* cmac, const byte* in, word32 inSz) if (cmac->totalSz != 0) { xorbuf(cmac->buffer, cmac->digest, WC_AES_BLOCK_SIZE); } +#ifndef HAVE_SELFTEST ret = wc_AesEncryptDirect(&cmac->aes, cmac->digest, cmac->buffer); if (ret == 0) { cmac->totalSz += WC_AES_BLOCK_SIZE; cmac->bufferSz = 0; } +#else + wc_AesEncryptDirect(&cmac->aes, cmac->digest, + cmac->buffer); + cmac->totalSz += WC_AES_BLOCK_SIZE; + cmac->bufferSz = 0; +#endif } } }; break; @@ -332,10 +346,15 @@ int wc_CmacFinalNoFree(Cmac* cmac, byte* out, word32* outSz) } xorbuf(cmac->buffer, cmac->digest, WC_AES_BLOCK_SIZE); 
xorbuf(cmac->buffer, subKey, WC_AES_BLOCK_SIZE); +#ifndef HAVE_SELFTEST ret = wc_AesEncryptDirect(&cmac->aes, cmac->digest, cmac->buffer); if (ret == 0) { XMEMCPY(out, cmac->digest, *outSz); } +#else + wc_AesEncryptDirect(&cmac->aes, cmac->digest, cmac->buffer); + XMEMCPY(out, cmac->digest, *outSz); +#endif }; break; #endif /* !NO_AES && WOLFSSL_AES_DIRECT */ default: diff --git a/wolfcrypt/src/curve25519.c b/wolfcrypt/src/curve25519.c index d0db86b25..6534a69e9 100644 --- a/wolfcrypt/src/curve25519.c +++ b/wolfcrypt/src/curve25519.c @@ -75,8 +75,9 @@ const curve25519_set_type curve25519_sets[] = { } }; -#if !defined(WOLFSSL_CURVE25519_USE_ED25519) || \ - defined(WOLFSSL_CURVE25519_BLINDING) +#if (!defined(WOLFSSL_CURVE25519_USE_ED25519) && \ + !(defined(CURVED25519_X64) || (defined(WOLFSSL_ARMASM) && \ + defined(__aarch64__)))) || defined(WOLFSSL_CURVE25519_BLINDING) static const word32 kCurve25519BasePoint[CURVE25519_KEYSIZE/sizeof(word32)] = { #ifdef BIG_ENDIAN_ORDER 0x09000000 diff --git a/wolfcrypt/src/dilithium.c b/wolfcrypt/src/dilithium.c index 994c70c21..dcb42e59f 100644 --- a/wolfcrypt/src/dilithium.c +++ b/wolfcrypt/src/dilithium.c @@ -1884,17 +1884,17 @@ static void dilithium_decode_gamma1_c(const byte* s, int bits, sword32* z) #endif #else z[i+0] = DILITHIUM_GAMMA1_17 - - ( s[ 0] | ((sword32)(s[ 1] << 8) | + ( (sword32)s[ 0] | (((sword32)s[ 1] << 8) | (sword32)(s[ 2] & 0x03) << 16)); z[i+1] = DILITHIUM_GAMMA1_17 - - ((s[ 2] >> 2) | ((sword32)(s[ 3] << 6) | + (((sword32)s[ 2] >> 2) | (((sword32)s[ 3] << 6) | (sword32)(s[ 4] & 0x0f) << 14)); z[i+2] = DILITHIUM_GAMMA1_17 - - ((s[ 4] >> 4) | ((sword32)(s[ 5] << 4) | + (((sword32)s[ 4] >> 4) | (((sword32)s[ 5] << 4) | (sword32)(s[ 6] & 0x3f) << 12)); z[i+3] = DILITHIUM_GAMMA1_17 - - ((s[ 6] >> 6) | ((sword32)(s[ 7] << 2) | - (sword32)(s[ 8] ) << 10)); + (((sword32)s[ 6] >> 6) | (((sword32)s[ 7] << 2) | + ((sword32)s[ 8] ) << 10)); #endif /* Move to next place to decode from. 
*/ s += DILITHIUM_GAMMA1_17_ENC_BITS / 2; @@ -1948,29 +1948,29 @@ static void dilithium_decode_gamma1_c(const byte* s, int bits, sword32* z) #endif #else z[i+0] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ( s[ 0] | ((sword32)(s[ 1] << 8) | - (sword32)(s[ 2] & 0x03) << 16))); + ( (sword32)s[ 0] | (((sword32)s[ 1] << 8) | + ((sword32)s[ 2] & 0x03) << 16))); z[i+1] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[ 2] >> 2) | ((sword32)(s[ 3] << 6) | - (sword32)(s[ 4] & 0x0f) << 14))); + (((sword32)s[ 2] >> 2) | (((sword32)s[ 3] << 6) | + ((sword32)s[ 4] & 0x0f) << 14))); z[i+2] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[ 4] >> 4) | ((sword32)(s[ 5] << 4) | - (sword32)(s[ 6] & 0x3f) << 12))); + (((sword32)s[ 4] >> 4) | (((sword32)s[ 5] << 4) | + ((sword32)s[ 6] & 0x3f) << 12))); z[i+3] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[ 6] >> 6) | ((sword32)(s[ 7] << 2) | - (sword32)(s[ 8] ) << 10))); + (((sword32)s[ 6] >> 6) | (((sword32)s[ 7] << 2) | + ((sword32)s[ 8] ) << 10))); z[i+4] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ( s[ 9] | ((sword32)(s[10] << 8) | - (sword32)(s[11] & 0x03) << 16))); + ( (sword32)s[ 9] | (((sword32)s[10] << 8) | + ((sword32)s[11] & 0x03) << 16))); z[i+5] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[11] >> 2) | ((sword32)(s[12] << 6) | - (sword32)(s[13] & 0x0f) << 14))); + (((sword32)s[11] >> 2) | (((sword32)s[12] << 6) | + ((sword32)s[13] & 0x0f) << 14))); z[i+6] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[13] >> 4) | ((sword32)(s[14] << 4) | - (sword32)(s[15] & 0x3f) << 12))); + (((sword32)s[13] >> 4) | (((sword32)s[14] << 4) | + ((sword32)s[15] & 0x3f) << 12))); z[i+7] = (sword32)((word32)DILITHIUM_GAMMA1_17 - - ((s[15] >> 6) | ((sword32)(s[16] << 2) | - (sword32)(s[17] ) << 10))); + (((sword32)s[15] >> 6) | (((sword32)s[16] << 2) | + ((sword32)s[17] ) << 10))); #endif /* Move to next place to decode from. 
*/ s += DILITHIUM_GAMMA1_17_ENC_BITS; @@ -2005,14 +2005,18 @@ static void dilithium_decode_gamma1_c(const byte* s, int bits, sword32* z) ((sword32)s16_0 << 4)); #endif #else - z[i+0] = DILITHIUM_GAMMA1_19 - ( s[0] | ((sword32)s[1] << 8) | - ((sword32)(s[2] & 0x0f) << 16)); - z[i+1] = DILITHIUM_GAMMA1_19 - ((s[2] >> 4) | ((sword32)s[3] << 4) | - ((sword32)(s[4] ) << 12)); - z[i+2] = DILITHIUM_GAMMA1_19 - ( s[5] | ((sword32)s[6] << 8) | - ((sword32)(s[7] & 0x0f) << 16)); - z[i+3] = DILITHIUM_GAMMA1_19 - ((s[7] >> 4) | ((sword32)s[8] << 4) | - ((sword32)(s[9] ) << 12)); + z[i+0] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[0] | ((sword32)s[1] << 8) | + (((sword32)s[2] & 0x0f) << 16)); + z[i+1] = DILITHIUM_GAMMA1_19 - + (((sword32)s[2] >> 4) | ((sword32)s[3] << 4) | + (((sword32)s[4] ) << 12)); + z[i+2] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[5] | ((sword32)s[6] << 8) | + (((sword32)s[7] & 0x0f) << 16)); + z[i+3] = DILITHIUM_GAMMA1_19 - + (((sword32)s[7] >> 4) | ((sword32)s[8] << 4) | + (((sword32)s[9] ) << 12)); #endif /* Move to next place to decode from. 
*/ s += DILITHIUM_GAMMA1_19_ENC_BITS / 2; @@ -2065,30 +2069,38 @@ static void dilithium_decode_gamma1_c(const byte* s, int bits, sword32* z) ((sword32)s16_1 << 4)); #endif #else - z[i+0] = DILITHIUM_GAMMA1_19 - ( s[ 0] | - ((sword32)s[ 1] << 8) | - ((sword32)(s[ 2] & 0x0f) << 16)); - z[i+1] = DILITHIUM_GAMMA1_19 - ((s[ 2] >> 4) | - ((sword32) s[ 3] << 4) | - ((sword32)(s[ 4] ) << 12)); - z[i+2] = DILITHIUM_GAMMA1_19 - ( s[ 5] | - ((sword32) s[ 6] << 8) | - ((sword32)(s[ 7] & 0x0f) << 16)); - z[i+3] = DILITHIUM_GAMMA1_19 - ((s[ 7] >> 4) | - ((sword32) s[ 8] << 4) | - ((sword32)(s[ 9] ) << 12)); - z[i+4] = DILITHIUM_GAMMA1_19 - ( s[10] | - ((sword32) s[11] << 8) | - ((sword32)(s[12] & 0x0f) << 16)); - z[i+5] = DILITHIUM_GAMMA1_19 - ((s[12] >> 4) | - ((sword32) s[13] << 4) | - ((sword32)(s[14] ) << 12)); - z[i+6] = DILITHIUM_GAMMA1_19 - ( s[15] | - ((sword32) s[16] << 8) | - ((sword32)(s[17] & 0x0f) << 16)); - z[i+7] = DILITHIUM_GAMMA1_19 - ((s[17] >> 4) | - ((sword32) s[18] << 4) | - ((sword32)(s[19] ) << 12)); + z[i+0] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[ 0] | + ( (sword32)s[ 1] << 8) | + (((sword32)s[ 2] & 0x0f) << 16)); + z[i+1] = DILITHIUM_GAMMA1_19 - + (((sword32)s[ 2] >> 4) | + ( (sword32)s[ 3] << 4) | + (((sword32)s[ 4] ) << 12)); + z[i+2] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[ 5] | + ( (sword32)s[ 6] << 8) | + (((sword32)s[ 7] & 0x0f) << 16)); + z[i+3] = DILITHIUM_GAMMA1_19 - + ( ((sword32)s[ 7] >> 4) | + ( (sword32)s[ 8] << 4) | + (((sword32)s[ 9] ) << 12)); + z[i+4] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[10] | + ( (sword32)s[11] << 8) | + (((sword32)s[12] & 0x0f) << 16)); + z[i+5] = DILITHIUM_GAMMA1_19 - + ( ((sword32)s[12] >> 4) | + ( (sword32)s[13] << 4) | + (((sword32)s[14] ) << 12)); + z[i+6] = DILITHIUM_GAMMA1_19 - + ( (sword32)s[15] | + ( (sword32)s[16] << 8) | + (((sword32)s[17] & 0x0f) << 16)); + z[i+7] = DILITHIUM_GAMMA1_19 - + ( ((sword32)s[17] >> 4) | + ( (sword32)s[18] << 4) | + (((sword32)s[19] ) << 12)); #endif /* Move to next place to 
decode from. */ s += DILITHIUM_GAMMA1_19_ENC_BITS; @@ -2868,7 +2880,7 @@ static int wc_mldsa_gen_matrix_6x5_avx2(sword32* a, byte* seed) } for (l = 0; l < 2; l++) { - state[4*4 + l] = 0x1f0000 + (5 << 8) + (l + 3); + state[4*4 + l] = 0x1f0000 + ((word32)5 << 8) + (l + 3); } sha3_128_blocksx4_seed_avx2(state, seed); @@ -5451,7 +5463,7 @@ static sword32 dilithium_mont_red(sword64 a) */ static sword32 dilithium_red(sword32 a) { - sword32 t = (sword32)((a + (1 << 22)) >> 23); + sword32 t = (sword32)((a + ((sword32)1 << 22)) >> 23); #ifndef DILITHIUM_MUL_Q_SLOW return (sword32)(a - (t * DILITHIUM_Q)); #else @@ -8252,7 +8264,7 @@ static int dilithium_sign_with_seed_mu(dilithium_key* key, params->gamma1_bits, y, params->l); #ifdef WOLFSSL_DILITHIUM_SIGN_CHECK_Y valid = dilithium_vec_check_low(y, params->l, - (1 << params->gamma1_bits) - params->beta); + ((sword32)1 << params->gamma1_bits) - params->beta); if (valid) #endif { @@ -8306,7 +8318,7 @@ static int dilithium_sign_with_seed_mu(dilithium_key* key, valid = dilithium_vec_check_low(w0 + i * DILITHIUM_N, 1, hi); } - hi = (1 << params->gamma1_bits) - params->beta; + hi = ((sword32)1 << params->gamma1_bits) - params->beta; for (i = 0; valid && i < params->l; i++) { /* Step 19: cs1 = NTT-1(c o s1) */ dilithium_mul(z + i * DILITHIUM_N, c, @@ -8515,7 +8527,7 @@ static int dilithium_sign_with_seed_mu(dilithium_key* key, params->gamma1_bits, y, params->l); #ifdef WOLFSSL_DILITHIUM_SIGN_CHECK_Y valid = dilithium_vec_check_low(y, params->l, - (1 << params->gamma1_bits) - params->beta); + ((sword32)1 << params->gamma1_bits) - params->beta); #endif #ifdef WOLFSSL_DILITHIUM_SIGN_SMALL_MEM_PRECALC_A @@ -8743,7 +8755,7 @@ static int dilithium_sign_with_seed_mu(dilithium_key* key, dilithium_add(z, yt); dilithium_poly_red(z); /* Step 23: Check z has low enough values. 
*/ - hi = (1 << params->gamma1_bits) - params->beta; + hi = ((sword32)1 << params->gamma1_bits) - params->beta; valid = dilithium_check_low(z, hi); if (valid) { /* Step 32: Encode z into signature. @@ -9385,7 +9397,7 @@ static int dilithium_verify_mu(dilithium_key* key, const byte* mu, /* Step 2: Decode z from signature. */ dilithium_vec_decode_gamma1(ze, params->l, params->gamma1_bits, z); /* Step 13: Check z is valid - values are low enough. */ - hi = (1 << params->gamma1_bits) - params->beta; + hi = ((sword32)1 << params->gamma1_bits) - params->beta; valid = dilithium_vec_check_low(z, params->l, hi); } if ((ret == 0) && valid) { @@ -9524,7 +9536,7 @@ static int dilithium_verify_mu(dilithium_key* key, const byte* mu, /* Step 2: Decode z from signature. */ dilithium_vec_decode_gamma1(ze, params->l, params->gamma1_bits, z); /* Step 13: Check z is valid - values are low enough. */ - hi = (1 << params->gamma1_bits) - params->beta; + hi = ((sword32)1 << params->gamma1_bits) - params->beta; valid = dilithium_vec_check_low(z, params->l, hi); } if ((ret == 0) && valid) { diff --git a/wolfcrypt/src/ecc.c b/wolfcrypt/src/ecc.c index 738720489..da309eb2a 100644 --- a/wolfcrypt/src/ecc.c +++ b/wolfcrypt/src/ecc.c @@ -2703,236 +2703,239 @@ int ecc_projective_dbl_point(ecc_point *P, ecc_point *R, mp_int* a, */ int ecc_map_ex(ecc_point* P, mp_int* modulus, mp_digit mp, int ct) { - int err = MP_OKAY; -#if !defined(WOLFSSL_SP_MATH) - DECL_MP_INT_SIZE_DYN(t1, mp_bitsused(modulus), MAX_ECC_BITS_USE); - DECL_MP_INT_SIZE_DYN(t2, mp_bitsused(modulus), MAX_ECC_BITS_USE); -#ifdef ALT_ECC_SIZE - DECL_MP_INT_SIZE_DYN(rx, mp_bitsused(modulus), MAX_ECC_BITS_USE); - DECL_MP_INT_SIZE_DYN(ry, mp_bitsused(modulus), MAX_ECC_BITS_USE); - DECL_MP_INT_SIZE_DYN(rz, mp_bitsused(modulus), MAX_ECC_BITS_USE); -#endif - mp_int *x, *y, *z; + int err = MP_OKAY; + (void)ct; - (void)ct; + if (P == NULL || modulus == NULL){ + return ECC_BAD_ARG_E; + } + { + #if !defined(WOLFSSL_SP_MATH) + 
DECL_MP_INT_SIZE_DYN(t1, mp_bitsused(modulus), MAX_ECC_BITS_USE); + DECL_MP_INT_SIZE_DYN(t2, mp_bitsused(modulus), MAX_ECC_BITS_USE); + #ifdef ALT_ECC_SIZE + DECL_MP_INT_SIZE_DYN(rx, mp_bitsused(modulus), MAX_ECC_BITS_USE); + DECL_MP_INT_SIZE_DYN(ry, mp_bitsused(modulus), MAX_ECC_BITS_USE); + DECL_MP_INT_SIZE_DYN(rz, mp_bitsused(modulus), MAX_ECC_BITS_USE); + #endif + mp_int *x, *y, *z; - if (P == NULL || modulus == NULL) - return ECC_BAD_ARG_E; + /* special case for point at infinity */ + if (mp_cmp_d(P->z, 0) == MP_EQ) { + err = mp_set(P->x, 0); + if (err == MP_OKAY) + err = mp_set(P->y, 0); + if (err == MP_OKAY) + err = mp_set(P->z, 1); + return err; + } - /* special case for point at infinity */ - if (mp_cmp_d(P->z, 0) == MP_EQ) { - err = mp_set(P->x, 0); - if (err == MP_OKAY) - err = mp_set(P->y, 0); - if (err == MP_OKAY) - err = mp_set(P->z, 1); - return err; - } + #ifdef WOLFSSL_SMALL_STACK + #ifdef WOLFSSL_SMALL_STACK_CACHE + if (P->key != NULL) { + t1 = P->key->t1; + t2 = P->key->t2; + #ifdef ALT_ECC_SIZE + rx = P->key->x; + ry = P->key->y; + rz = P->key->z; + #endif + } + else + #endif /* WOLFSSL_SMALL_STACK_CACHE */ + #endif + { + NEW_MP_INT_SIZE(t1, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); + NEW_MP_INT_SIZE(t2, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); + #ifdef MP_INT_SIZE_CHECK_NULL + if (t1 == NULL || t2 == NULL) { + FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); + return MEMORY_E; + } + #endif + #ifdef ALT_ECC_SIZE + NEW_MP_INT_SIZE(rx, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); + NEW_MP_INT_SIZE(ry, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); + NEW_MP_INT_SIZE(rz, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); + #ifdef MP_INT_SIZE_CHECK_NULL + if (rx == NULL || ry == NULL || rz == NULL) { + FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); + 
FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); + return MEMORY_E; + } + #endif + #endif + } -#ifdef WOLFSSL_SMALL_STACK -#ifdef WOLFSSL_SMALL_STACK_CACHE - if (P->key != NULL) { - t1 = P->key->t1; - t2 = P->key->t2; - #ifdef ALT_ECC_SIZE - rx = P->key->x; - ry = P->key->y; - rz = P->key->z; - #endif - } - else -#endif /* WOLFSSL_SMALL_STACK_CACHE */ -#endif - { - NEW_MP_INT_SIZE(t1, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); - NEW_MP_INT_SIZE(t2, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); - #ifdef MP_INT_SIZE_CHECK_NULL - if (t1 == NULL || t2 == NULL) { - FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); - return MEMORY_E; - } - #endif - #ifdef ALT_ECC_SIZE - NEW_MP_INT_SIZE(rx, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); - NEW_MP_INT_SIZE(ry, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); - NEW_MP_INT_SIZE(rz, mp_bitsused(modulus), NULL, DYNAMIC_TYPE_ECC); - #ifdef MP_INT_SIZE_CHECK_NULL - if (rx == NULL || ry == NULL || rz == NULL) { - FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); - return MEMORY_E; - } - #endif - #endif - } + err = INIT_MP_INT_SIZE(t1, mp_bitsused(modulus)); + if (err == MP_OKAY) { + err = INIT_MP_INT_SIZE(t2, mp_bitsused(modulus)); + } + if (err != MP_OKAY) { + #ifdef WOLFSSL_SMALL_STACK + #ifdef WOLFSSL_SMALL_STACK_CACHE + if (P->key == NULL) + #endif + #endif + { + #ifdef ALT_ECC_SIZE + FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); + #endif + FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); + } + return MEMORY_E; + } - err = INIT_MP_INT_SIZE(t1, mp_bitsused(modulus)); - if (err == MP_OKAY) { - err = INIT_MP_INT_SIZE(t2, mp_bitsused(modulus)); - } - if (err != 
MP_OKAY) { -#ifdef WOLFSSL_SMALL_STACK - #ifdef WOLFSSL_SMALL_STACK_CACHE - if (P->key == NULL) - #endif -#endif - { - #ifdef ALT_ECC_SIZE - FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); - #endif - FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); - } - return MEMORY_E; - } + #ifdef ALT_ECC_SIZE + /* Use local stack variable */ + x = rx; + y = ry; + z = rz; -#ifdef ALT_ECC_SIZE - /* Use local stack variable */ - x = rx; - y = ry; - z = rz; + err = INIT_MP_INT_SIZE(x, mp_bitsused(modulus)); + if (err == MP_OKAY) { + err = INIT_MP_INT_SIZE(y, mp_bitsused(modulus)); + } + if (err == MP_OKAY) { + err = INIT_MP_INT_SIZE(z, mp_bitsused(modulus)); + } + if (err != MP_OKAY) { + goto done; + } - err = INIT_MP_INT_SIZE(x, mp_bitsused(modulus)); - if (err == MP_OKAY) { - err = INIT_MP_INT_SIZE(y, mp_bitsused(modulus)); - } - if (err == MP_OKAY) { - err = INIT_MP_INT_SIZE(z, mp_bitsused(modulus)); - } - if (err != MP_OKAY) { - goto done; - } + if (err == MP_OKAY) + err = mp_copy(P->x, x); + if (err == MP_OKAY) + err = mp_copy(P->y, y); + if (err == MP_OKAY) + err = mp_copy(P->z, z); - if (err == MP_OKAY) - err = mp_copy(P->x, x); - if (err == MP_OKAY) - err = mp_copy(P->y, y); - if (err == MP_OKAY) - err = mp_copy(P->z, z); + if (err != MP_OKAY) { + goto done; + } + #else + /* Use destination directly */ + x = P->x; + y = P->y; + z = P->z; + #endif - if (err != MP_OKAY) { - goto done; - } -#else - /* Use destination directly */ - x = P->x; - y = P->y; - z = P->z; -#endif + /* get 1/z */ + if (err == MP_OKAY) { + #if defined(ECC_TIMING_RESISTANT) && (defined(USE_FAST_MATH) || \ + defined(WOLFSSL_SP_MATH) || defined(WOLFSSL_SP_MATH_ALL)) + if (ct) { + err = mp_invmod_mont_ct(z, modulus, t1, mp); + if (err == MP_OKAY) + err = mp_montgomery_reduce(t1, modulus, mp); + } + else + #endif + { + /* first map z back to normal */ + err = 
mp_montgomery_reduce(z, modulus, mp); + if (err == MP_OKAY) + err = mp_invmod(z, modulus, t1); + } + } - /* get 1/z */ - if (err == MP_OKAY) { -#if defined(ECC_TIMING_RESISTANT) && (defined(USE_FAST_MATH) || \ - defined(WOLFSSL_SP_MATH) || defined(WOLFSSL_SP_MATH_ALL)) - if (ct) { - err = mp_invmod_mont_ct(z, modulus, t1, mp); - if (err == MP_OKAY) - err = mp_montgomery_reduce(t1, modulus, mp); - } - else -#endif - { - /* first map z back to normal */ - err = mp_montgomery_reduce(z, modulus, mp); - if (err == MP_OKAY) - err = mp_invmod(z, modulus, t1); - } - } + /* get 1/z^2 and 1/z^3 */ + if (err == MP_OKAY) + err = mp_sqr(t1, t2); + if (err == MP_OKAY) + err = mp_mod(t2, modulus, t2); + if (err == MP_OKAY) + err = mp_mul(t1, t2, t1); + if (err == MP_OKAY) + err = mp_mod(t1, modulus, t1); - /* get 1/z^2 and 1/z^3 */ - if (err == MP_OKAY) - err = mp_sqr(t1, t2); - if (err == MP_OKAY) - err = mp_mod(t2, modulus, t2); - if (err == MP_OKAY) - err = mp_mul(t1, t2, t1); - if (err == MP_OKAY) - err = mp_mod(t1, modulus, t1); + /* multiply against x/y */ + if (err == MP_OKAY) + err = mp_mul(x, t2, x); + if (err == MP_OKAY) + err = mp_montgomery_reduce(x, modulus, mp); + if (err == MP_OKAY) + err = mp_mul(y, t1, y); + if (err == MP_OKAY) + err = mp_montgomery_reduce(y, modulus, mp); - /* multiply against x/y */ - if (err == MP_OKAY) - err = mp_mul(x, t2, x); - if (err == MP_OKAY) - err = mp_montgomery_reduce(x, modulus, mp); - if (err == MP_OKAY) - err = mp_mul(y, t1, y); - if (err == MP_OKAY) - err = mp_montgomery_reduce(y, modulus, mp); + if (err == MP_OKAY) + err = mp_set(z, 1); - if (err == MP_OKAY) - err = mp_set(z, 1); + #ifdef ALT_ECC_SIZE + /* return result */ + if (err == MP_OKAY) + err = mp_copy(x, P->x); + if (err == MP_OKAY) + err = mp_copy(y, P->y); + if (err == MP_OKAY) + err = mp_copy(z, P->z); -#ifdef ALT_ECC_SIZE - /* return result */ - if (err == MP_OKAY) - err = mp_copy(x, P->x); - if (err == MP_OKAY) - err = mp_copy(y, P->y); - if (err == MP_OKAY) - err 
= mp_copy(z, P->z); + done: + #endif -done: -#endif + /* clean up */ + mp_clear(t1); + mp_clear(t2); - /* clean up */ - mp_clear(t1); - mp_clear(t2); + #ifdef WOLFSSL_SMALL_STACK + #ifdef WOLFSSL_SMALL_STACK_CACHE + if (P->key == NULL) + #endif + #endif + { + #ifdef ALT_ECC_SIZE + FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); + #endif + FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); + FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); + } -#ifdef WOLFSSL_SMALL_STACK -#ifdef WOLFSSL_SMALL_STACK_CACHE - if (P->key == NULL) -#endif -#endif - { - #ifdef ALT_ECC_SIZE - FREE_MP_INT_SIZE(rz, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(ry, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(rx, NULL, DYNAMIC_TYPE_ECC); - #endif - FREE_MP_INT_SIZE(t2, NULL, DYNAMIC_TYPE_ECC); - FREE_MP_INT_SIZE(t1, NULL, DYNAMIC_TYPE_ECC); - } + return err; + /* end !defined(WOLFSSL_SP_MATH) */ - return err; - /* end !defined(WOLFSSL_SP_MATH) */ + #else + /* begin defined(WOLFSSL_SP_MATH) */ + if (P == NULL || modulus == NULL) + return ECC_BAD_ARG_E; -#else - /* begin defined(WOLFSSL_SP_MATH) */ - if (P == NULL || modulus == NULL) - return ECC_BAD_ARG_E; + (void)mp; + (void)ct; - (void)mp; - (void)ct; + #if defined(WOLFSSL_SM2) && defined(WOLFSSL_SP_SM2) + if ((mp_count_bits(modulus) == 256) && + (!mp_is_bit_set(modulus, 224))) { + err = sp_ecc_map_sm2_256(P->x, P->y, P->z); + } + #elif !defined(WOLFSSL_SP_NO_256) + if (mp_count_bits(modulus) == 256) { + err = sp_ecc_map_256(P->x, P->y, P->z); + } + #elif defined(WOLFSSL_SP_384) + if (mp_count_bits(modulus) == 384) { + err = sp_ecc_map_384(P->x, P->y, P->z); + } + #elif defined(WOLFSSL_SP_521) + if (mp_count_bits(modulus) == 521) { + err = sp_ecc_map_521(P->x, P->y, P->z); + } + #else + err = ECC_BAD_ARG_E; + #endif -#if defined(WOLFSSL_SM2) && defined(WOLFSSL_SP_SM2) - if ((mp_count_bits(modulus) == 256) && (!mp_is_bit_set(modulus, 224))) { - err = 
sp_ecc_map_sm2_256(P->x, P->y, P->z); - } -#elif !defined(WOLFSSL_SP_NO_256) - if (mp_count_bits(modulus) == 256) { - err = sp_ecc_map_256(P->x, P->y, P->z); - } -#elif defined(WOLFSSL_SP_384) - if (mp_count_bits(modulus) == 384) { - err = sp_ecc_map_384(P->x, P->y, P->z); - } -#elif defined(WOLFSSL_SP_521) - if (mp_count_bits(modulus) == 521) { - err = sp_ecc_map_521(P->x, P->y, P->z); - } -#else - err = ECC_BAD_ARG_E; -#endif - - WOLFSSL_LEAVE("ecc_map_ex (SP Math)", err); - return err; + WOLFSSL_LEAVE("ecc_map_ex (SP Math)", err); + return err; #endif /* WOLFSSL_SP_MATH */ + } } #endif /* !FREESCALE_LTC_ECC && !WOLFSSL_STM32_PKA */ diff --git a/wolfcrypt/src/ed25519.c b/wolfcrypt/src/ed25519.c index 3744e1dfe..c8412905b 100644 --- a/wolfcrypt/src/ed25519.c +++ b/wolfcrypt/src/ed25519.c @@ -822,7 +822,7 @@ static int ed25519_verify_msg_final_with_sha(const byte* sig, word32 sigLen, if (ret != 0) return ret; - ge_tobytes(rcheck, &R); + ge_tobytes_nct(rcheck, &R); #endif /* FREESCALE_LTC_ECC */ /* comparison of R created to R in sig */ diff --git a/wolfcrypt/src/error.c b/wolfcrypt/src/error.c index b24a7710d..9e24d9aa4 100644 --- a/wolfcrypt/src/error.c +++ b/wolfcrypt/src/error.c @@ -659,6 +659,12 @@ const char* wc_GetErrorString(int error) case MLKEM_PUB_HASH_E: return "ML-KEM priv key's stored hash doesn't match encoded pub key"; + case BUSY_E: + return "Object is busy"; + + case ALREADY_E: + return "Operation was redundant or preempted"; + case MAX_CODE_E: case WC_SPAN1_MIN_CODE_E: case MIN_CODE_E: diff --git a/wolfcrypt/src/fe_x25519_asm.S b/wolfcrypt/src/fe_x25519_asm.S index 2ac32677a..427a1aadf 100644 --- a/wolfcrypt/src/fe_x25519_asm.S +++ b/wolfcrypt/src/fe_x25519_asm.S @@ -238,7 +238,7 @@ L_fe_init_get_flags: #else movq %rax, _ge_sub_p(%rip) #endif /* __APPLE__ */ -#if !defined(HAVE_ED25519) && !defined(WOLFSSL_CURVE25519_USE_ED25519) +#if defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) #ifndef __APPLE__ movq curve25519_base_avx2@GOTPCREL(%rip), %rax #else 
@@ -250,7 +250,7 @@ L_fe_init_get_flags: #else movq %rax, _curve25519_base_p(%rip) #endif /* __APPLE__ */ -#endif /* !HAVE_ED25519 && !WOLFSSL_CURVE25519_USE_ED25519 */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ #ifdef HAVE_ED25519 #ifndef __APPLE__ movq fe_sq2_avx2@GOTPCREL(%rip), %rax @@ -263,6 +263,17 @@ L_fe_init_get_flags: #else movq %rax, _fe_sq2_p(%rip) #endif /* __APPLE__ */ +#ifndef __APPLE__ + movq fe_invert_nct_avx2@GOTPCREL(%rip), %rax +#else + leaq _fe_invert_nct_avx2(%rip), %rax +#endif /* __APPLE__ */ +#ifndef __APPLE__ + movq fe_invert_nct_p@GOTPCREL(%rip), %rdx + movq %rax, (%rdx) +#else + movq %rax, _fe_invert_nct_p(%rip) +#endif /* __APPLE__ */ #ifndef __APPLE__ movq sc_reduce_avx2@GOTPCREL(%rip), %rax #else @@ -443,7 +454,6 @@ fe_sub: .p2align 4 _fe_sub: #endif /* __APPLE__ */ - pushq %r12 # Sub movq (%rsi), %rax movq 8(%rsi), %rcx @@ -453,13 +463,12 @@ _fe_sub: sbbq 8(%rdx), %rcx sbbq 16(%rdx), %r8 sbbq 24(%rdx), %r9 - sbbq %r11, %r11 - shldq $0x01, %r9, %r11 - movq $0x7fffffffffffffff, %r12 - imulq $-19, %r11 - andq %r12, %r9 + sbbq %r10, %r10 + shldq $0x01, %r9, %r10 + imulq $-19, %r10 + btr $63, %r9 # Add modulus (if underflow) - subq %r11, %rax + subq %r10, %rax sbbq $0x00, %rcx sbbq $0x00, %r8 sbbq $0x00, %r9 @@ -467,7 +476,6 @@ _fe_sub: movq %rcx, 8(%rdi) movq %r8, 16(%rdi) movq %r9, 24(%rdi) - popq %r12 repz retq #ifndef __APPLE__ .size fe_sub,.-fe_sub @@ -484,7 +492,6 @@ fe_add: .p2align 4 _fe_add: #endif /* __APPLE__ */ - pushq %r12 # Add movq (%rsi), %rax movq 8(%rsi), %rcx @@ -494,14 +501,13 @@ _fe_add: movq 24(%rsi), %r9 adcq 16(%rdx), %r8 adcq 24(%rdx), %r9 - movq $0x00, %r11 - adcq $0x00, %r11 - shldq $0x01, %r9, %r11 - movq $0x7fffffffffffffff, %r12 - imulq $19, %r11 - andq %r12, %r9 + movq $0x00, %r10 + adcq $0x00, %r10 + shldq $0x01, %r9, %r10 + imulq $19, %r10 + btr $63, %r9 # Sub modulus (if overflow) - addq %r11, %rax + addq %r10, %rax adcq $0x00, %rcx adcq $0x00, %r8 adcq $0x00, %r9 @@ -509,7 +515,6 @@ _fe_add: movq 
%rcx, 8(%rdi) movq %r8, 16(%rdi) movq %r9, 24(%rdi) - popq %r12 repz retq #ifndef __APPLE__ .size fe_add,.-fe_add @@ -920,6 +925,30 @@ _ge_sub: #ifndef __APPLE__ .size ge_sub,.-ge_sub #endif /* __APPLE__ */ +#if defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) +#if defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) +#ifndef __APPLE__ +.text +.globl curve25519_base +.type curve25519_base,@function +.align 16 +curve25519_base: +#else +.section __TEXT,__text +.globl _curve25519_base +.p2align 4 +_curve25519_base: +#endif /* __APPLE__ */ +#ifndef __APPLE__ + jmpq *curve25519_base_p(%rip) +#else + jmpq *_curve25519_base_p(%rip) +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.size curve25519_base,.-curve25519_base +#endif /* __APPLE__ */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ #ifdef HAVE_ED25519 #ifdef HAVE_ED25519 #ifndef __APPLE__ @@ -946,6 +975,28 @@ _fe_sq2: #ifdef HAVE_ED25519 #ifndef __APPLE__ .text +.globl fe_invert_nct +.type fe_invert_nct,@function +.align 16 +fe_invert_nct: +#else +.section __TEXT,__text +.globl _fe_invert_nct +.p2align 4 +_fe_invert_nct: +#endif /* __APPLE__ */ +#ifndef __APPLE__ + jmpq *fe_invert_nct_p(%rip) +#else + jmpq *_fe_invert_nct_p(%rip) +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.size fe_invert_nct,.-fe_invert_nct +#endif /* __APPLE__ */ +#endif /* HAVE_ED25519 */ +#ifdef HAVE_ED25519 +#ifndef __APPLE__ +.text .globl sc_reduce .type sc_reduce,@function .align 16 @@ -1180,7 +1231,7 @@ ge_sub_p: _ge_sub_p: .quad _ge_sub_x64 #endif /* __APPLE__ */ -#if !defined(HAVE_ED25519) && !defined(WOLFSSL_CURVE25519_USE_ED25519) +#if defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) #ifndef __APPLE__ .data .type curve25519_base_p, @object @@ -1193,7 +1244,7 @@ curve25519_base_p: _curve25519_base_p: .quad _curve25519_base_x64 #endif /* __APPLE__ */ -#endif /* !HAVE_ED25519 && !WOLFSSL_CURVE25519_USE_ED25519 */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ #ifdef HAVE_ED25519 #ifndef __APPLE__ .data @@ 
-1209,6 +1260,18 @@ _fe_sq2_p: #endif /* __APPLE__ */ #ifndef __APPLE__ .data +.type fe_invert_nct_p, @object +.size fe_invert_nct_p,8 +fe_invert_nct_p: + .quad fe_invert_nct_x64 +#else +.section __DATA,__data +.p2align 3 +_fe_invert_nct_p: + .quad _fe_invert_nct_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.data .type sc_reduce_p, @object .size sc_reduce_p,8 sc_reduce_p: @@ -2268,7 +2331,7 @@ _fe_invert_x64: movq 128(%rsp), %rdi addq $0x90, %rsp repz retq -#if !defined(HAVE_ED25519) && !defined(WOLFSSL_CURVE25519_USE_ED25519) +#if defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) #ifndef __APPLE__ .data #else @@ -2342,43 +2405,42 @@ L_curve25519_base_x64_bits: movq 8(%rdi), %r8 movq 16(%rdi), %r9 movq 24(%rdi), %r10 + movq (%rsp), %r11 + movq 8(%rsp), %r12 + movq 16(%rsp), %r13 + movq 24(%rsp), %r14 xorq 64(%rsp), %rcx xorq 72(%rsp), %r8 xorq 80(%rsp), %r9 xorq 88(%rsp), %r10 + xorq 32(%rsp), %r11 + xorq 40(%rsp), %r12 + xorq 48(%rsp), %r13 + xorq 56(%rsp), %r14 andq %r15, %rcx andq %r15, %r8 andq %r15, %r9 andq %r15, %r10 + andq %r15, %r11 + andq %r15, %r12 + andq %r15, %r13 + andq %r15, %r14 xorq %rcx, (%rdi) xorq %r8, 8(%rdi) xorq %r9, 16(%rdi) xorq %r10, 24(%rdi) + xorq %r11, (%rsp) + xorq %r12, 8(%rsp) + xorq %r13, 16(%rsp) + xorq %r14, 24(%rsp) xorq %rcx, 64(%rsp) xorq %r8, 72(%rsp) xorq %r9, 80(%rsp) xorq %r10, 88(%rsp) - # Conditional Swap - movq (%rsp), %rcx - movq 8(%rsp), %r8 - movq 16(%rsp), %r9 - movq 24(%rsp), %r10 - xorq 32(%rsp), %rcx - xorq 40(%rsp), %r8 - xorq 48(%rsp), %r9 - xorq 56(%rsp), %r10 - andq %r15, %rcx - andq %r15, %r8 - andq %r15, %r9 - andq %r15, %r10 - xorq %rcx, (%rsp) - xorq %r8, 8(%rsp) - xorq %r9, 16(%rsp) - xorq %r10, 24(%rsp) - xorq %rcx, 32(%rsp) - xorq %r8, 40(%rsp) - xorq %r9, 48(%rsp) - xorq %r10, 56(%rsp) + xorq %r11, 32(%rsp) + xorq %r12, 40(%rsp) + xorq %r13, 48(%rsp) + xorq %r14, 56(%rsp) movq %rbx, %r15 # Add-Sub # Add @@ -2394,14 +2456,13 @@ L_curve25519_base_x64_bits: adcq 16(%rsp), %r9 movq %r10, %r14 adcq 24(%rsp), 
%r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r10 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r10, %rbx + imulq $19, %rbx + btr $63, %r10 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbx, %rcx adcq $0x00, %r8 adcq $0x00, %r9 adcq $0x00, %r10 @@ -2410,12 +2471,12 @@ L_curve25519_base_x64_bits: sbbq 8(%rsp), %r12 sbbq 16(%rsp), %r13 sbbq 24(%rsp), %r14 - sbbq %rax, %rax - shldq $0x01, %r14, %rax - imulq $-19, %rax - andq %rdx, %r14 + sbbq %rbx, %rbx + shldq $0x01, %r14, %rbx + imulq $-19, %rbx + btr $63, %r14 # Add modulus (if underflow) - subq %rax, %r11 + subq %rbx, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 @@ -2441,14 +2502,13 @@ L_curve25519_base_x64_bits: adcq 48(%rsp), %r9 movq %r10, %r14 adcq 56(%rsp), %r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r10 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r10, %rbx + imulq $19, %rbx + btr $63, %r10 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbx, %rcx adcq $0x00, %r8 adcq $0x00, %r9 adcq $0x00, %r10 @@ -2457,12 +2517,12 @@ L_curve25519_base_x64_bits: sbbq 40(%rsp), %r12 sbbq 48(%rsp), %r13 sbbq 56(%rsp), %r14 - sbbq %rax, %rax - shldq $0x01, %r14, %rax - imulq $-19, %rax - andq %rdx, %r14 + sbbq %rbx, %rbx + shldq $0x01, %r14, %rbx + imulq $-19, %rbx + btr $63, %r14 # Add modulus (if underflow) - subq %rax, %r11 + subq %rbx, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 @@ -2962,14 +3022,13 @@ L_curve25519_base_x64_bits: adcq 48(%rsp), %r9 movq %r10, %r14 adcq 56(%rsp), %r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r10 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r10, %rbx + imulq $19, %rbx + btr $63, %r10 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbx, %rcx adcq $0x00, %r8 adcq 
$0x00, %r9 adcq $0x00, %r10 @@ -2978,12 +3037,12 @@ L_curve25519_base_x64_bits: sbbq 40(%rsp), %r12 sbbq 48(%rsp), %r13 sbbq 56(%rsp), %r14 - sbbq %rax, %rax - shldq $0x01, %r14, %rax - imulq $-19, %rax - andq %rdx, %r14 + sbbq %rbx, %rbx + shldq $0x01, %r14, %rbx + imulq $-19, %rbx + btr $63, %r14 # Add modulus (if underflow) - subq %rax, %r11 + subq %rbx, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 @@ -3136,13 +3195,12 @@ L_curve25519_base_x64_bits: sbbq 104(%rsp), %r8 sbbq 112(%rsp), %r9 sbbq 120(%rsp), %r10 - sbbq %rax, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r10 + sbbq %rbx, %rbx + shldq $0x01, %r10, %rbx + imulq $-19, %rbx + btr $63, %r10 # Add modulus (if underflow) - subq %rax, %rcx + subq %rbx, %rcx sbbq $0x00, %r8 sbbq $0x00, %r9 sbbq $0x00, %r10 @@ -3255,39 +3313,6 @@ L_curve25519_base_x64_bits: movq %r8, 40(%rsp) movq %r9, 48(%rsp) movq %r10, 56(%rsp) - # Multiply by 121666 - movq $0x1db42, %rax - mulq 128(%rsp) - xorq %r9, %r9 - movq %rax, %rcx - movq %rdx, %r8 - movq $0x1db42, %rax - mulq 136(%rsp) - xorq %r10, %r10 - addq %rax, %r8 - adcq %rdx, %r9 - movq $0x1db42, %rax - mulq 144(%rsp) - xorq %r12, %r12 - addq %rax, %r9 - adcq %rdx, %r10 - movq $0x1db42, %rax - mulq 152(%rsp) - movq $0x7fffffffffffffff, %r11 - addq %rax, %r10 - adcq %rdx, %r12 - shldq $0x01, %r10, %r12 - andq %r11, %r10 - movq $19, %rax - mulq %r12 - addq %rax, %rcx - adcq $0x00, %r8 - adcq $0x00, %r9 - adcq $0x00, %r10 - movq %rcx, (%rsp) - movq %r8, 8(%rsp) - movq %r9, 16(%rsp) - movq %r10, 24(%rsp) # Square # A[0] * A[1] movq 64(%rsp), %rax @@ -3393,22 +3418,36 @@ L_curve25519_base_x64_bits: movq %r8, 72(%rsp) movq %r9, 80(%rsp) movq %r10, 88(%rsp) - # Add - movq 96(%rsp), %rcx - movq 104(%rsp), %r8 - addq (%rsp), %rcx - movq 112(%rsp), %r9 - adcq 8(%rsp), %r8 - movq 120(%rsp), %r10 - adcq 16(%rsp), %r9 - adcq 24(%rsp), %r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq 
$0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r10 - # Sub modulus (if overflow) + # Multiply by 121666 + movq $0x1db42, %rax + mulq 128(%rsp) + xorq %r9, %r9 + movq %rax, %rcx + movq %rdx, %r8 + movq $0x1db42, %rax + mulq 136(%rsp) + xorq %r10, %r10 + addq %rax, %r8 + adcq %rdx, %r9 + movq $0x1db42, %rax + mulq 144(%rsp) + xorq %r12, %r12 + addq %rax, %r9 + adcq %rdx, %r10 + movq $0x1db42, %rax + mulq 152(%rsp) + movq $0x7fffffffffffffff, %r11 + addq %rax, %r10 + adcq %rdx, %r12 + addq 96(%rsp), %rcx + adcq 104(%rsp), %r8 + adcq 112(%rsp), %r9 + adcq 120(%rsp), %r10 + adcq $0x00, %r12 + shldq $0x01, %r10, %r12 + andq %r11, %r10 + movq $19, %rax + mulq %r12 addq %rax, %rcx adcq $0x00, %r8 adcq $0x00, %r9 @@ -3591,43 +3630,42 @@ L_curve25519_base_x64_bits: movq 8(%rdi), %r8 movq 16(%rdi), %r9 movq 24(%rdi), %r10 + movq (%rsp), %r11 + movq 8(%rsp), %r12 + movq 16(%rsp), %r13 + movq 24(%rsp), %r14 xorq 64(%rsp), %rcx xorq 72(%rsp), %r8 xorq 80(%rsp), %r9 xorq 88(%rsp), %r10 + xorq 32(%rsp), %r11 + xorq 40(%rsp), %r12 + xorq 48(%rsp), %r13 + xorq 56(%rsp), %r14 andq %r15, %rcx andq %r15, %r8 andq %r15, %r9 andq %r15, %r10 + andq %r15, %r11 + andq %r15, %r12 + andq %r15, %r13 + andq %r15, %r14 xorq %rcx, (%rdi) xorq %r8, 8(%rdi) xorq %r9, 16(%rdi) xorq %r10, 24(%rdi) + xorq %r11, (%rsp) + xorq %r12, 8(%rsp) + xorq %r13, 16(%rsp) + xorq %r14, 24(%rsp) xorq %rcx, 64(%rsp) xorq %r8, 72(%rsp) xorq %r9, 80(%rsp) xorq %r10, 88(%rsp) - # Conditional Swap - movq (%rsp), %rcx - movq 8(%rsp), %r8 - movq 16(%rsp), %r9 - movq 24(%rsp), %r10 - xorq 32(%rsp), %rcx - xorq 40(%rsp), %r8 - xorq 48(%rsp), %r9 - xorq 56(%rsp), %r10 - andq %r15, %rcx - andq %r15, %r8 - andq %r15, %r9 - andq %r15, %r10 - xorq %rcx, (%rsp) - xorq %r8, 8(%rsp) - xorq %r9, 16(%rsp) - xorq %r10, 24(%rsp) - xorq %rcx, 32(%rsp) - xorq %r8, 40(%rsp) - xorq %r9, 48(%rsp) - xorq %r10, 56(%rsp) + xorq %r11, 32(%rsp) + xorq %r12, 40(%rsp) + xorq %r13, 48(%rsp) + xorq %r14, 56(%rsp) L_curve25519_base_x64_3: 
# Add-Sub # Add @@ -3643,14 +3681,13 @@ L_curve25519_base_x64_3: adcq 16(%rsp), %r9 movq %r10, %r14 adcq 24(%rsp), %r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r10 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r10, %rbx + imulq $19, %rbx + btr $63, %r10 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbx, %rcx adcq $0x00, %r8 adcq $0x00, %r9 adcq $0x00, %r10 @@ -3659,12 +3696,12 @@ L_curve25519_base_x64_3: sbbq 8(%rsp), %r12 sbbq 16(%rsp), %r13 sbbq 24(%rsp), %r14 - sbbq %rax, %rax - shldq $0x01, %r14, %rax - imulq $-19, %rax - andq %rdx, %r14 + sbbq %rbx, %rbx + shldq $0x01, %r14, %rbx + imulq $-19, %rbx + btr $63, %r14 # Add modulus (if underflow) - subq %rax, %r11 + subq %rbx, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 @@ -4027,13 +4064,12 @@ L_curve25519_base_x64_3: sbbq 104(%rsp), %r8 sbbq 112(%rsp), %r9 sbbq 120(%rsp), %r10 - sbbq %rax, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r10 + sbbq %rbx, %rbx + shldq $0x01, %r10, %rbx + imulq $-19, %rbx + btr $63, %r10 # Add modulus (if underflow) - subq %rax, %rcx + subq %rbx, %rcx sbbq $0x00, %r8 sbbq $0x00, %r9 sbbq $0x00, %r10 @@ -4062,6 +4098,11 @@ L_curve25519_base_x64_3: movq $0x7fffffffffffffff, %r11 addq %rax, %r10 adcq %rdx, %r12 + addq 96(%rsp), %rcx + adcq 104(%rsp), %r8 + adcq 112(%rsp), %r9 + adcq 120(%rsp), %r10 + adcq $0x00, %r12 shldq $0x01, %r10, %r12 andq %r11, %r10 movq $19, %rax @@ -4070,30 +4111,6 @@ L_curve25519_base_x64_3: adcq $0x00, %r8 adcq $0x00, %r9 adcq $0x00, %r10 - movq %rcx, (%rsp) - movq %r8, 8(%rsp) - movq %r9, 16(%rsp) - movq %r10, 24(%rsp) - # Add - movq 96(%rsp), %rcx - movq 104(%rsp), %r8 - addq (%rsp), %rcx - movq 112(%rsp), %r9 - adcq 8(%rsp), %r8 - movq 120(%rsp), %r10 - adcq 16(%rsp), %r9 - adcq 24(%rsp), %r10 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r10, %rax - movq $0x7fffffffffffffff, %rdx - 
imulq $19, %rax - andq %rdx, %r10 - # Sub modulus (if overflow) - addq %rax, %rcx - adcq $0x00, %r8 - adcq $0x00, %r9 - adcq $0x00, %r10 movq %rcx, 96(%rsp) movq %r8, 104(%rsp) movq %r9, 112(%rsp) @@ -4639,7 +4656,7 @@ L_curve25519_base_x64_3: #ifndef __APPLE__ .size curve25519_base_x64,.-curve25519_base_x64 #endif /* __APPLE__ */ -#endif /* !HAVE_ED25519 && !WOLFSSL_CURVE25519_USE_ED25519 */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ #ifndef __APPLE__ .text .globl curve25519_x64 @@ -4702,43 +4719,42 @@ L_curve25519_x64_bits: movq 8(%rdi), %r9 movq 16(%rdi), %r10 movq 24(%rdi), %r11 + movq (%rsp), %r12 + movq 8(%rsp), %r13 + movq 16(%rsp), %r14 + movq 24(%rsp), %r15 xorq 64(%rsp), %rcx xorq 72(%rsp), %r9 xorq 80(%rsp), %r10 xorq 88(%rsp), %r11 + xorq 32(%rsp), %r12 + xorq 40(%rsp), %r13 + xorq 48(%rsp), %r14 + xorq 56(%rsp), %r15 andq %rbx, %rcx andq %rbx, %r9 andq %rbx, %r10 andq %rbx, %r11 + andq %rbx, %r12 + andq %rbx, %r13 + andq %rbx, %r14 + andq %rbx, %r15 xorq %rcx, (%rdi) xorq %r9, 8(%rdi) xorq %r10, 16(%rdi) xorq %r11, 24(%rdi) + xorq %r12, (%rsp) + xorq %r13, 8(%rsp) + xorq %r14, 16(%rsp) + xorq %r15, 24(%rsp) xorq %rcx, 64(%rsp) xorq %r9, 72(%rsp) xorq %r10, 80(%rsp) xorq %r11, 88(%rsp) - # Conditional Swap - movq (%rsp), %rcx - movq 8(%rsp), %r9 - movq 16(%rsp), %r10 - movq 24(%rsp), %r11 - xorq 32(%rsp), %rcx - xorq 40(%rsp), %r9 - xorq 48(%rsp), %r10 - xorq 56(%rsp), %r11 - andq %rbx, %rcx - andq %rbx, %r9 - andq %rbx, %r10 - andq %rbx, %r11 - xorq %rcx, (%rsp) - xorq %r9, 8(%rsp) - xorq %r10, 16(%rsp) - xorq %r11, 24(%rsp) - xorq %rcx, 32(%rsp) - xorq %r9, 40(%rsp) - xorq %r10, 48(%rsp) - xorq %r11, 56(%rsp) + xorq %r12, 32(%rsp) + xorq %r13, 40(%rsp) + xorq %r14, 48(%rsp) + xorq %r15, 56(%rsp) movq %rbp, %rbx # Add-Sub # Add @@ -4754,14 +4770,13 @@ L_curve25519_x64_bits: adcq 16(%rsp), %r10 movq %r11, %r15 adcq 24(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - 
andq %rdx, %r11 + movq $0x00, %rbp + adcq $0x00, %rbp + shldq $0x01, %r11, %rbp + imulq $19, %rbp + btr $63, %r11 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbp, %rcx adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -4770,12 +4785,12 @@ L_curve25519_x64_bits: sbbq 8(%rsp), %r13 sbbq 16(%rsp), %r14 sbbq 24(%rsp), %r15 - sbbq %rax, %rax - shldq $0x01, %r15, %rax - imulq $-19, %rax - andq %rdx, %r15 + sbbq %rbp, %rbp + shldq $0x01, %r15, %rbp + imulq $-19, %rbp + btr $63, %r15 # Add modulus (if underflow) - subq %rax, %r12 + subq %rbp, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -4801,14 +4816,13 @@ L_curve25519_x64_bits: adcq 48(%rsp), %r10 movq %r11, %r15 adcq 56(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r11 + movq $0x00, %rbp + adcq $0x00, %rbp + shldq $0x01, %r11, %rbp + imulq $19, %rbp + btr $63, %r11 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbp, %rcx adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -4817,12 +4831,12 @@ L_curve25519_x64_bits: sbbq 40(%rsp), %r13 sbbq 48(%rsp), %r14 sbbq 56(%rsp), %r15 - sbbq %rax, %rax - shldq $0x01, %r15, %rax - imulq $-19, %rax - andq %rdx, %r15 + sbbq %rbp, %rbp + shldq $0x01, %r15, %rbp + imulq $-19, %rbp + btr $63, %r15 # Add modulus (if underflow) - subq %rax, %r12 + subq %rbp, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -5322,14 +5336,13 @@ L_curve25519_x64_bits: adcq 48(%rsp), %r10 movq %r11, %r15 adcq 56(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r11 + movq $0x00, %rbp + adcq $0x00, %rbp + shldq $0x01, %r11, %rbp + imulq $19, %rbp + btr $63, %r11 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbp, %rcx adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -5338,12 +5351,12 @@ L_curve25519_x64_bits: sbbq 40(%rsp), %r13 sbbq 48(%rsp), %r14 sbbq 56(%rsp), %r15 - 
sbbq %rax, %rax - shldq $0x01, %r15, %rax - imulq $-19, %rax - andq %rdx, %r15 + sbbq %rbp, %rbp + shldq $0x01, %r15, %rbp + imulq $-19, %rbp + btr $63, %r15 # Add modulus (if underflow) - subq %rax, %r12 + subq %rbp, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -5496,13 +5509,12 @@ L_curve25519_x64_bits: sbbq 104(%rsp), %r9 sbbq 112(%rsp), %r10 sbbq 120(%rsp), %r11 - sbbq %rax, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r11 + sbbq %rbp, %rbp + shldq $0x01, %r11, %rbp + imulq $-19, %rbp + btr $63, %r11 # Add modulus (if underflow) - subq %rax, %rcx + subq %rbp, %rcx sbbq $0x00, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 @@ -5615,39 +5627,6 @@ L_curve25519_x64_bits: movq %r9, 40(%rsp) movq %r10, 48(%rsp) movq %r11, 56(%rsp) - # Multiply by 121666 - movq $0x1db42, %rax - mulq 128(%rsp) - xorq %r10, %r10 - movq %rax, %rcx - movq %rdx, %r9 - movq $0x1db42, %rax - mulq 136(%rsp) - xorq %r11, %r11 - addq %rax, %r9 - adcq %rdx, %r10 - movq $0x1db42, %rax - mulq 144(%rsp) - xorq %r13, %r13 - addq %rax, %r10 - adcq %rdx, %r11 - movq $0x1db42, %rax - mulq 152(%rsp) - movq $0x7fffffffffffffff, %r12 - addq %rax, %r11 - adcq %rdx, %r13 - shldq $0x01, %r11, %r13 - andq %r12, %r11 - movq $19, %rax - mulq %r13 - addq %rax, %rcx - adcq $0x00, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 - movq %rcx, (%rsp) - movq %r9, 8(%rsp) - movq %r10, 16(%rsp) - movq %r11, 24(%rsp) # Square # A[0] * A[1] movq 64(%rsp), %rax @@ -5753,22 +5732,36 @@ L_curve25519_x64_bits: movq %r9, 72(%rsp) movq %r10, 80(%rsp) movq %r11, 88(%rsp) - # Add - movq 96(%rsp), %rcx - movq 104(%rsp), %r9 - addq (%rsp), %rcx - movq 112(%rsp), %r10 - adcq 8(%rsp), %r9 - movq 120(%rsp), %r11 - adcq 16(%rsp), %r10 - adcq 24(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r11 - # Sub modulus (if overflow) + # Multiply by 121666 + movq $0x1db42, %rax + mulq 128(%rsp) + xorq 
%r10, %r10 + movq %rax, %rcx + movq %rdx, %r9 + movq $0x1db42, %rax + mulq 136(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + movq $0x1db42, %rax + mulq 144(%rsp) + xorq %r13, %r13 + addq %rax, %r10 + adcq %rdx, %r11 + movq $0x1db42, %rax + mulq 152(%rsp) + movq $0x7fffffffffffffff, %r12 + addq %rax, %r11 + adcq %rdx, %r13 + addq 96(%rsp), %rcx + adcq 104(%rsp), %r9 + adcq 112(%rsp), %r10 + adcq 120(%rsp), %r11 + adcq $0x00, %r13 + shldq $0x01, %r11, %r13 + andq %r12, %r11 + movq $19, %rax + mulq %r13 addq %rax, %rcx adcq $0x00, %r9 adcq $0x00, %r10 @@ -6045,50 +6038,49 @@ L_curve25519_x64_bits: decq %r9 cmpq $3, %r9 jge L_curve25519_x64_bits - movq %r9, 160(%rsp) + movq $2, 160(%rsp) negq %rbx # Conditional Swap movq (%rdi), %rcx movq 8(%rdi), %r9 movq 16(%rdi), %r10 movq 24(%rdi), %r11 + movq (%rsp), %r12 + movq 8(%rsp), %r13 + movq 16(%rsp), %r14 + movq 24(%rsp), %r15 xorq 64(%rsp), %rcx xorq 72(%rsp), %r9 xorq 80(%rsp), %r10 xorq 88(%rsp), %r11 + xorq 32(%rsp), %r12 + xorq 40(%rsp), %r13 + xorq 48(%rsp), %r14 + xorq 56(%rsp), %r15 andq %rbx, %rcx andq %rbx, %r9 andq %rbx, %r10 andq %rbx, %r11 + andq %rbx, %r12 + andq %rbx, %r13 + andq %rbx, %r14 + andq %rbx, %r15 xorq %rcx, (%rdi) xorq %r9, 8(%rdi) xorq %r10, 16(%rdi) xorq %r11, 24(%rdi) + xorq %r12, (%rsp) + xorq %r13, 8(%rsp) + xorq %r14, 16(%rsp) + xorq %r15, 24(%rsp) xorq %rcx, 64(%rsp) xorq %r9, 72(%rsp) xorq %r10, 80(%rsp) xorq %r11, 88(%rsp) - # Conditional Swap - movq (%rsp), %rcx - movq 8(%rsp), %r9 - movq 16(%rsp), %r10 - movq 24(%rsp), %r11 - xorq 32(%rsp), %rcx - xorq 40(%rsp), %r9 - xorq 48(%rsp), %r10 - xorq 56(%rsp), %r11 - andq %rbx, %rcx - andq %rbx, %r9 - andq %rbx, %r10 - andq %rbx, %r11 - xorq %rcx, (%rsp) - xorq %r9, 8(%rsp) - xorq %r10, 16(%rsp) - xorq %r11, 24(%rsp) - xorq %rcx, 32(%rsp) - xorq %r9, 40(%rsp) - xorq %r10, 48(%rsp) - xorq %r11, 56(%rsp) + xorq %r12, 32(%rsp) + xorq %r13, 40(%rsp) + xorq %r14, 48(%rsp) + xorq %r15, 56(%rsp) L_curve25519_x64_3: # Add-Sub # Add 
@@ -6104,14 +6096,13 @@ L_curve25519_x64_3: adcq 16(%rsp), %r10 movq %r11, %r15 adcq 24(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r11 + movq $0x00, %rbp + adcq $0x00, %rbp + shldq $0x01, %r11, %rbp + imulq $19, %rbp + btr $63, %r11 # Sub modulus (if overflow) - addq %rax, %rcx + addq %rbp, %rcx adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -6120,12 +6111,12 @@ L_curve25519_x64_3: sbbq 8(%rsp), %r13 sbbq 16(%rsp), %r14 sbbq 24(%rsp), %r15 - sbbq %rax, %rax - shldq $0x01, %r15, %rax - imulq $-19, %rax - andq %rdx, %r15 + sbbq %rbp, %rbp + shldq $0x01, %r15, %rbp + imulq $-19, %rbp + btr $63, %r15 # Add modulus (if underflow) - subq %rax, %r12 + subq %rbp, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -6488,13 +6479,12 @@ L_curve25519_x64_3: sbbq 104(%rsp), %r9 sbbq 112(%rsp), %r10 sbbq 120(%rsp), %r11 - sbbq %rax, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r11 + sbbq %rbp, %rbp + shldq $0x01, %r11, %rbp + imulq $-19, %rbp + btr $63, %r11 # Add modulus (if underflow) - subq %rax, %rcx + subq %rbp, %rcx sbbq $0x00, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 @@ -6523,6 +6513,11 @@ L_curve25519_x64_3: movq $0x7fffffffffffffff, %r12 addq %rax, %r11 adcq %rdx, %r13 + addq 96(%rsp), %rcx + adcq 104(%rsp), %r9 + adcq 112(%rsp), %r10 + adcq 120(%rsp), %r11 + adcq $0x00, %r13 shldq $0x01, %r11, %r13 andq %r12, %r11 movq $19, %rax @@ -6531,30 +6526,6 @@ L_curve25519_x64_3: adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 - movq %rcx, (%rsp) - movq %r9, 8(%rsp) - movq %r10, 16(%rsp) - movq %r11, 24(%rsp) - # Add - movq 96(%rsp), %rcx - movq 104(%rsp), %r9 - addq (%rsp), %rcx - movq 112(%rsp), %r10 - adcq 8(%rsp), %r9 - movq 120(%rsp), %r11 - adcq 16(%rsp), %r10 - adcq 24(%rsp), %r11 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r11, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r11 
- # Sub modulus (if overflow) - addq %rax, %rcx - adcq $0x00, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 movq %rcx, 96(%rsp) movq %r9, 104(%rsp) movq %r10, 112(%rsp) @@ -6691,9 +6662,7 @@ L_curve25519_x64_3: movq %r9, 8(%rsp) movq %r10, 16(%rsp) movq %r11, 24(%rsp) - movq 160(%rsp), %r9 - decq %r9 - movq %r9, 160(%rsp) + decq 160(%rsp) jge L_curve25519_x64_3 # Invert leaq 32(%rsp), %rdi @@ -8598,14 +8567,13 @@ _ge_p2_dbl_x64: adcq 16(%rsi), %r11 movq %r12, %rbx adcq 24(%rsi), %r12 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r12, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r12 + movq $0x00, %r8 + adcq $0x00, %r8 + shldq $0x01, %r12, %r8 + imulq $19, %r8 + btr $63, %r12 # Sub modulus (if overflow) - addq %rax, %r9 + addq %r8, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -8614,12 +8582,12 @@ _ge_p2_dbl_x64: sbbq 8(%rsi), %r14 sbbq 16(%rsi), %r15 sbbq 24(%rsi), %rbx - sbbq %rax, %rax - shldq $0x01, %rbx, %rax - imulq $-19, %rax - andq %rdx, %rbx + sbbq %r8, %r8 + shldq $0x01, %rbx, %r8 + imulq $-19, %r8 + btr $63, %rbx # Add modulus (if underflow) - subq %rax, %r13 + subq %r8, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx @@ -8644,14 +8612,13 @@ _ge_p2_dbl_x64: movq 24(%rsi), %r12 adcq 16(%rcx), %r11 adcq 24(%rcx), %r12 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r12, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r12 + movq $0x00, %r8 + adcq $0x00, %r8 + shldq $0x01, %r12, %r8 + imulq $19, %r8 + btr $63, %r12 # Sub modulus (if overflow) - addq %rax, %r9 + addq %r8, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -8767,13 +8734,12 @@ _ge_p2_dbl_x64: sbbq 8(%rsi), %r10 sbbq 16(%rsi), %r11 sbbq 24(%rsi), %r12 - sbbq %rax, %rax - shldq $0x01, %r12, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r12 + sbbq %r8, %r8 + shldq $0x01, %r12, %r8 + imulq $-19, %r8 + btr $63, %r12 # Add modulus (if underflow) - subq %rax, %r9 + subq %r8, %r9 sbbq $0x00, %r10 
sbbq $0x00, %r11 sbbq $0x00, %r12 @@ -8904,13 +8870,12 @@ _ge_p2_dbl_x64: sbbq 8(%rsi), %r10 sbbq 16(%rsi), %r11 sbbq 24(%rsi), %r12 - sbbq %rax, %rax - shldq $0x01, %r12, %rax - movq $0x7fffffffffffffff, %rdx - imulq $-19, %rax - andq %rdx, %r12 + sbbq %r8, %r8 + shldq $0x01, %r12, %r8 + imulq $-19, %r8 + btr $63, %r12 # Add modulus (if underflow) - subq %rax, %r9 + subq %r8, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 sbbq $0x00, %r12 @@ -8970,14 +8935,13 @@ _ge_madd_x64: adcq 16(%r8), %r12 movq %r13, %rbp adcq 24(%r8), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -8986,12 +8950,12 @@ _ge_madd_x64: sbbq 8(%r8), %r15 sbbq 16(%r8), %rbx sbbq 24(%r8), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -9412,14 +9376,13 @@ _ge_madd_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -9428,12 +9391,12 @@ _ge_madd_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, 
%r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -9455,14 +9418,13 @@ _ge_madd_x64: movq 24(%r8), %r13 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -9479,14 +9441,13 @@ _ge_madd_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -9495,12 +9456,12 @@ _ge_madd_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -9565,14 +9526,13 @@ _ge_msub_x64: adcq 16(%r8), %r12 movq %r13, %rbp adcq 24(%r8), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -9581,12 +9541,12 @@ _ge_msub_x64: sbbq 8(%r8), %r15 sbbq 16(%r8), %rbx sbbq 24(%r8), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, 
%r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10007,14 +9967,13 @@ _ge_msub_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10023,12 +9982,12 @@ _ge_msub_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10051,14 +10010,13 @@ _ge_msub_x64: movq 24(%r8), %r13 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10074,14 +10032,13 @@ _ge_msub_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10090,12 +10047,12 @@ _ge_msub_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, 
%rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10160,14 +10117,13 @@ _ge_add_x64: adcq 16(%r8), %r12 movq %r13, %rbp adcq 24(%r8), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10176,12 +10132,12 @@ _ge_add_x64: sbbq 8(%r8), %r15 sbbq 16(%r8), %rbx sbbq 24(%r8), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10603,14 +10559,13 @@ _ge_add_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10619,12 +10574,12 @@ _ge_add_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10772,14 +10727,13 @@ _ge_add_x64: adcq %r11, %r11 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, 
%r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10795,14 +10749,13 @@ _ge_add_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10811,12 +10764,12 @@ _ge_add_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -10881,14 +10834,13 @@ _ge_sub_x64: adcq 16(%r8), %r12 movq %r13, %rbp adcq 24(%r8), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -10897,12 +10849,12 @@ _ge_sub_x64: sbbq 8(%r8), %r15 sbbq 16(%r8), %rbx sbbq 24(%r8), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -11323,14 +11275,13 @@ _ge_sub_x64: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq 
$0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -11339,12 +11290,12 @@ _ge_sub_x64: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -11491,14 +11442,13 @@ _ge_sub_x64: adcq %r11, %r11 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -11515,14 +11465,13 @@ _ge_sub_x64: adcq 16(%rdi), %r12 movq %r13, %rbp adcq 24(%rdi), %r13 - movq $0x00, %rax - adcq $0x00, %rax - shldq $0x01, %r13, %rax - movq $0x7fffffffffffffff, %rdx - imulq $19, %rax - andq %rdx, %r13 + movq $0x00, %r9 + adcq $0x00, %r9 + shldq $0x01, %r13, %r9 + imulq $19, %r9 + btr $63, %r13 # Sub modulus (if overflow) - addq %rax, %r10 + addq %r9, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -11531,12 +11480,12 @@ _ge_sub_x64: sbbq 8(%rdi), %r15 sbbq 16(%rdi), %rbx sbbq 24(%rdi), %rbp - sbbq %rax, %rax - shldq $0x01, %rbp, %rax - imulq $-19, %rax - andq %rdx, %rbp + sbbq %r9, %r9 + shldq $0x01, %rbp, %r9 + imulq $-19, %r9 + btr $63, %rbp # Add modulus (if underflow) - subq %rax, %r14 + subq %r9, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -12173,6 +12122,236 @@ _sc_muladd_x64: #ifndef __APPLE__ .size sc_muladd_x64,.-sc_muladd_x64 #endif /* __APPLE__ */ +/* Non-constant time modular inversion. 
+ * + * @param [out] r Resulting number. + * @param [in] a Number to invert. + * @return MP_OKAY on success. + */ +#ifndef __APPLE__ +.text +.globl fe_invert_nct_x64 +.type fe_invert_nct_x64,@function +.align 16 +fe_invert_nct_x64: +#else +.section __TEXT,__text +.globl _fe_invert_nct_x64 +.p2align 4 +_fe_invert_nct_x64: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x201, %rsp + movq $-19, %rcx + movq $-1, %r8 + movq $-1, %r9 + movq $0x7fffffffffffffff, %r10 + movq (%rsi), %r11 + movq 8(%rsi), %r12 + movq 16(%rsi), %r13 + movq 24(%rsi), %r14 + movq $0x00, %r15 + testb $0x01, %r11b + jnz fe_invert_nct_v_even_end +fe_invert_nct_v_even_start: + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrdq $0x01, %r14, %r13 + shrq $0x01, %r14 + movb $0x01, (%rsp,%r15,1) + incq %r15 + testb $0x01, %r11b + jz fe_invert_nct_v_even_start +fe_invert_nct_v_even_end: +L_fe_invert_nct_uv_start: + cmpq %r14, %r10 + jb L_fe_invert_nct_uv_v + ja L_fe_invert_nct_uv_u + cmpq %r13, %r9 + jb L_fe_invert_nct_uv_v + ja L_fe_invert_nct_uv_u + cmpq %r12, %r8 + jb L_fe_invert_nct_uv_v + ja L_fe_invert_nct_uv_u + cmpq %r11, %rcx + jb L_fe_invert_nct_uv_v +L_fe_invert_nct_uv_u: + movb $2, (%rsp,%r15,1) + incq %r15 + subq %r11, %rcx + sbbq %r12, %r8 + sbbq %r13, %r9 + sbbq %r14, %r10 + shrdq $0x01, %r8, %rcx + shrdq $0x01, %r9, %r8 + shrdq $0x01, %r10, %r9 + shrq $0x01, %r10 + testb $0x01, %cl + jnz fe_invert_nct_usubv_even_end +fe_invert_nct_usubv_even_start: + shrdq $0x01, %r8, %rcx + shrdq $0x01, %r9, %r8 + shrdq $0x01, %r10, %r9 + shrq $0x01, %r10 + movb $0x00, (%rsp,%r15,1) + incq %r15 + testb $0x01, %cl + jz fe_invert_nct_usubv_even_start +fe_invert_nct_usubv_even_end: + cmpq $0x01, %rcx + jne L_fe_invert_nct_uv_start + movq %r8, %rdx + orq %r9, %rdx + jne L_fe_invert_nct_uv_start + orq %r10, %rdx + jne L_fe_invert_nct_uv_start + movb $0x01, %al + jmp L_fe_invert_nct_uv_end +L_fe_invert_nct_uv_v: + movb $3, (%rsp,%r15,1) + incq %r15 + subq %rcx, %r11 
+ sbbq %r8, %r12 + sbbq %r9, %r13 + sbbq %r10, %r14 + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrdq $0x01, %r14, %r13 + shrq $0x01, %r14 + testb $0x01, %r11b + jnz fe_invert_nct_vsubu_even_end +fe_invert_nct_vsubu_even_start: + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrdq $0x01, %r14, %r13 + shrq $0x01, %r14 + movb $0x01, (%rsp,%r15,1) + incq %r15 + testb $0x01, %r11b + jz fe_invert_nct_vsubu_even_start +fe_invert_nct_vsubu_even_end: + cmpq $0x01, %r11 + jne L_fe_invert_nct_uv_start + movq %r12, %rdx + orq %r13, %rdx + jne L_fe_invert_nct_uv_start + orq %r14, %rdx + jne L_fe_invert_nct_uv_start + movb $0x00, %al +L_fe_invert_nct_uv_end: + movq $-19, %rcx + movq $-1, %r8 + movq $-1, %r9 + movq $0x7fffffffffffffff, %r10 + movq $0x01, %r11 + xorq %r12, %r12 + xorq %r13, %r13 + xorq %r14, %r14 + movb $7, (%rsp,%r15,1) + movb (%rsp), %dl + movq $0x01, %r15 + cmpb $0x01, %dl + je L_fe_invert_nct_op_div2_d + jl L_fe_invert_nct_op_div2_b + cmpb $3, %dl + je L_fe_invert_nct_op_d_sub_b + jl L_fe_invert_nct_op_b_sub_d + jmp L_fe_invert_nct_op_end +L_fe_invert_nct_op_b_sub_d: + subq %r11, %rcx + sbbq %r12, %r8 + sbbq %r13, %r9 + sbbq %r14, %r10 + jnc L_fe_invert_nct_op_div2_b + movq $-1, %rdx + addq $-19, %rcx + adcq %rdx, %r8 + adcq %rdx, %r9 + movq $0x7fffffffffffffff, %rdx + adcq %rdx, %r10 +L_fe_invert_nct_op_div2_b: + testb $0x01, %cl + jz L_fe_invert_nct_op_div2_b_mod + addq $-19, %rcx + movq $-1, %rdx + adcq %rdx, %r8 + adcq %rdx, %r9 + movq $0x7fffffffffffffff, %rdx + adcq %rdx, %r10 +L_fe_invert_nct_op_div2_b_mod: + shrdq $0x01, %r8, %rcx + shrdq $0x01, %r9, %r8 + shrdq $0x01, %r10, %r9 + shrq $0x01, %r10 + movb (%rsp,%r15,1), %dl + incq %r15 + cmpb $0x01, %dl + je L_fe_invert_nct_op_div2_d + jl L_fe_invert_nct_op_div2_b + cmpb $3, %dl + je L_fe_invert_nct_op_d_sub_b + jl L_fe_invert_nct_op_b_sub_d + jmp L_fe_invert_nct_op_end +L_fe_invert_nct_op_d_sub_b: + subq %rcx, %r11 + sbbq %r8, %r12 + sbbq %r9, %r13 + sbbq %r10, %r14 + jnc 
L_fe_invert_nct_op_div2_d + movq $-1, %rdx + addq $-19, %r11 + adcq %rdx, %r12 + adcq %rdx, %r13 + movq $0x7fffffffffffffff, %rdx + adcq %rdx, %r14 +L_fe_invert_nct_op_div2_d: + testb $0x01, %r11b + jz L_fe_invert_nct_op_div2_d_mod + addq $-19, %r11 + movq $-1, %rdx + adcq %rdx, %r12 + adcq %rdx, %r13 + movq $0x7fffffffffffffff, %rdx + adcq %rdx, %r14 +L_fe_invert_nct_op_div2_d_mod: + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrdq $0x01, %r14, %r13 + shrq $0x01, %r14 + movb (%rsp,%r15,1), %dl + incq %r15 + cmpb $0x01, %dl + je L_fe_invert_nct_op_div2_d + jl L_fe_invert_nct_op_div2_b + cmpb $3, %dl + je L_fe_invert_nct_op_d_sub_b + jl L_fe_invert_nct_op_b_sub_d +L_fe_invert_nct_op_end: + cmpb $0x01, %al + jne L_fe_invert_nct_store_d + movq %rcx, (%rdi) + movq %r8, 8(%rdi) + movq %r9, 16(%rdi) + movq %r10, 24(%rdi) + jmp L_fe_invert_nct_store_end +L_fe_invert_nct_store_d: + movq %r11, (%rdi) + movq %r12, 8(%rdi) + movq %r13, 16(%rdi) + movq %r14, 24(%rdi) +L_fe_invert_nct_store_end: + addq $0x201, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_invert_nct_x64,.-fe_invert_nct_x64 +#endif /* __APPLE__ */ #endif /* HAVE_ED25519 */ #ifdef HAVE_INTEL_AVX2 #ifndef __APPLE__ @@ -12358,16 +12537,18 @@ fe_mul_avx2: .p2align 4 _fe_mul_avx2: #endif /* __APPLE__ */ + pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 - pushq %rbx - movq %rdx, %rbx + pushq %rbp + movq %rdx, %rbp + movq (%rsi), %rbx # Multiply # A[0] * B[0] - movq (%rbx), %rdx - mulxq (%rsi), %r8, %r9 + movq (%rbp), %rdx + mulxq %rbx, %r8, %r9 # A[2] * B[0] mulxq 16(%rsi), %r10, %r11 # A[1] * B[0] @@ -12375,66 +12556,66 @@ _fe_mul_avx2: xorq %r15, %r15 adcxq %rax, %r9 # A[3] * B[1] - movq 8(%rbx), %rdx + movq 8(%rbp), %rdx mulxq 24(%rsi), %r12, %r13 adcxq %rcx, %r10 # A[0] * B[1] - mulxq (%rsi), %rax, %rcx + mulxq %rbx, %rax, %rcx adoxq %rax, %r9 # A[2] * B[1] mulxq 16(%rsi), %rax, %r14 adoxq %rcx, %r10 adcxq %rax, %r11 # A[1] * B[2] - movq 16(%rbx), 
%rdx + movq 16(%rbp), %rdx mulxq 8(%rsi), %rax, %rcx adcxq %r14, %r12 adoxq %rax, %r11 adcxq %r15, %r13 adoxq %rcx, %r12 # A[0] * B[2] - mulxq (%rsi), %rax, %rcx + mulxq %rbx, %rax, %rcx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rax, %r10 # A[1] * B[1] - movq 8(%rbx), %rdx + movq 8(%rbp), %rdx mulxq 8(%rsi), %rdx, %rax adcxq %rcx, %r11 adoxq %rdx, %r10 # A[1] * B[3] - movq 24(%rbx), %rdx + movq 24(%rbp), %rdx adoxq %rax, %r11 mulxq 8(%rsi), %rax, %rcx adcxq %rax, %r12 # A[2] * B[2] - movq 16(%rbx), %rdx + movq 16(%rbp), %rdx mulxq 16(%rsi), %rdx, %rax adcxq %rcx, %r13 adoxq %rdx, %r12 # A[3] * B[3] - movq 24(%rbx), %rdx + movq 24(%rbp), %rdx adoxq %rax, %r13 mulxq 24(%rsi), %rax, %rcx adoxq %r15, %r14 adcxq %rax, %r14 # A[0] * B[3] - mulxq (%rsi), %rdx, %rax + mulxq %rbx, %rdx, %rax adcxq %rcx, %r15 xorq %rcx, %rcx adcxq %rdx, %r11 # A[3] * B[0] movq 24(%rsi), %rdx adcxq %rax, %r12 - mulxq (%rbx), %rdx, %rax + mulxq (%rbp), %rdx, %rax adoxq %rdx, %r11 adoxq %rax, %r12 # A[3] * B[2] movq 24(%rsi), %rdx - mulxq 16(%rbx), %rdx, %rax + mulxq 16(%rbp), %rdx, %rax adcxq %rdx, %r13 # A[2] * B[3] - movq 24(%rbx), %rdx + movq 24(%rbp), %rdx adcxq %rax, %r14 mulxq 16(%rsi), %rax, %rdx adcxq %rcx, %r15 @@ -12475,11 +12656,12 @@ _fe_mul_avx2: movq %r9, 8(%rdi) movq %r10, 16(%rdi) movq %r11, 24(%rdi) - popq %rbx + popq %rbp popq %r15 popq %r14 popq %r13 popq %r12 + popq %rbx repz retq #ifndef __APPLE__ .size fe_mul_avx2,.-fe_mul_avx2 @@ -12729,14 +12911,13 @@ _fe_mul121666_avx2: mulxq (%rsi), %rax, %r13 mulxq 8(%rsi), %rcx, %r12 mulxq 16(%rsi), %r8, %r11 - mulxq 24(%rsi), %r9, %r10 addq %r13, %rcx + mulxq 24(%rsi), %r9, %r10 adcq %r12, %r8 adcq %r11, %r9 adcq $0x00, %r10 - movq $0x7fffffffffffffff, %r13 shldq $0x01, %r9, %r10 - andq %r13, %r9 + btr $63, %r9 imulq $19, %r10, %r10 addq %r10, %rax adcq $0x00, %rcx @@ -13008,7 +13189,7 @@ _fe_invert_avx2: movq 128(%rsp), %rdi addq $0x90, %rsp repz retq -#if !defined(HAVE_ED25519) && !defined(WOLFSSL_CURVE25519_USE_ED25519) +#if 
defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) #ifndef __APPLE__ .data #else @@ -13083,43 +13264,42 @@ L_curve25519_base_avx2_bits: movq 8(%rdi), %r9 movq 16(%rdi), %r10 movq 24(%rdi), %r11 + movq (%rsp), %r12 + movq 8(%rsp), %r13 + movq 16(%rsp), %r14 + movq 24(%rsp), %r15 xorq 64(%rsp), %r8 xorq 72(%rsp), %r9 xorq 80(%rsp), %r10 xorq 88(%rsp), %r11 + xorq 32(%rsp), %r12 + xorq 40(%rsp), %r13 + xorq 48(%rsp), %r14 + xorq 56(%rsp), %r15 andq %rax, %r8 andq %rax, %r9 andq %rax, %r10 andq %rax, %r11 + andq %rax, %r12 + andq %rax, %r13 + andq %rax, %r14 + andq %rax, %r15 xorq %r8, (%rdi) xorq %r9, 8(%rdi) xorq %r10, 16(%rdi) xorq %r11, 24(%rdi) + xorq %r12, (%rsp) + xorq %r13, 8(%rsp) + xorq %r14, 16(%rsp) + xorq %r15, 24(%rsp) xorq %r8, 64(%rsp) xorq %r9, 72(%rsp) xorq %r10, 80(%rsp) xorq %r11, 88(%rsp) - # Conditional Swap - movq (%rsp), %r8 - movq 8(%rsp), %r9 - movq 16(%rsp), %r10 - movq 24(%rsp), %r11 - xorq 32(%rsp), %r8 - xorq 40(%rsp), %r9 - xorq 48(%rsp), %r10 - xorq 56(%rsp), %r11 - andq %rax, %r8 - andq %rax, %r9 - andq %rax, %r10 - andq %rax, %r11 - xorq %r8, (%rsp) - xorq %r9, 8(%rsp) - xorq %r10, 16(%rsp) - xorq %r11, 24(%rsp) - xorq %r8, 32(%rsp) - xorq %r9, 40(%rsp) - xorq %r10, 48(%rsp) - xorq %r11, 56(%rsp) + xorq %r12, 32(%rsp) + xorq %r13, 40(%rsp) + xorq %r14, 48(%rsp) + xorq %r15, 56(%rsp) movq %rbx, 168(%rsp) # Add-Sub # Add @@ -13135,14 +13315,13 @@ L_curve25519_base_avx2_bits: adcq 16(%rsp), %r10 movq %r11, %r15 adcq 24(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r11, %rbx + imulq $19, %rbx + btr $63, %r11 # Sub modulus (if overflow) - addq %rcx, %r8 + addq %rbx, %r8 adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -13151,12 +13330,12 @@ L_curve25519_base_avx2_bits: sbbq 8(%rsp), %r13 sbbq 16(%rsp), %r14 sbbq 24(%rsp), %r15 - sbbq %rcx, %rcx - shldq $0x01, %r15, %rcx - imulq $-19, %rcx - 
andq %rbx, %r15 + sbbq %rbx, %rbx + shldq $0x01, %r15, %rbx + imulq $-19, %rbx + btr $63, %r15 # Add modulus (if underflow) - subq %rcx, %r12 + subq %rbx, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -13182,14 +13361,13 @@ L_curve25519_base_avx2_bits: adcq 48(%rsp), %r10 movq %r11, %r15 adcq 56(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r11, %rbx + imulq $19, %rbx + btr $63, %r11 # Sub modulus (if overflow) - addq %rcx, %r8 + addq %rbx, %r8 adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -13198,12 +13376,12 @@ L_curve25519_base_avx2_bits: sbbq 40(%rsp), %r13 sbbq 48(%rsp), %r14 sbbq 56(%rsp), %r15 - sbbq %rcx, %rcx - shldq $0x01, %r15, %rcx - imulq $-19, %rcx - andq %rbx, %r15 + sbbq %rbx, %rbx + shldq $0x01, %r15, %rbx + imulq $-19, %rbx + btr $63, %r15 # Add modulus (if underflow) - subq %rcx, %r12 + subq %rbx, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -13215,10 +13393,11 @@ L_curve25519_base_avx2_bits: movq %r13, 104(%rsp) movq %r14, 112(%rsp) movq %r15, 120(%rsp) + movq 32(%rsp), %rax # Multiply # A[0] * B[0] movq 128(%rsp), %rdx - mulxq 32(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 48(%rsp), %r10, %r11 # A[1] * B[0] @@ -13230,7 +13409,7 @@ L_curve25519_base_avx2_bits: mulxq 56(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 32(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 48(%rsp), %rcx, %r14 @@ -13244,7 +13423,7 @@ L_curve25519_base_avx2_bits: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 32(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -13270,7 +13449,7 @@ L_curve25519_base_avx2_bits: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 32(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -13317,10 
+13496,11 @@ L_curve25519_base_avx2_bits: movq %r9, 40(%rsp) movq %r10, 48(%rsp) movq %r11, 56(%rsp) + movq 96(%rsp), %rax # Multiply # A[0] * B[0] movq (%rdi), %rdx - mulxq 96(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 112(%rsp), %r10, %r11 # A[1] * B[0] @@ -13332,7 +13512,7 @@ L_curve25519_base_avx2_bits: mulxq 120(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 96(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 112(%rsp), %rcx, %r14 @@ -13346,7 +13526,7 @@ L_curve25519_base_avx2_bits: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 96(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -13372,7 +13552,7 @@ L_curve25519_base_avx2_bits: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 96(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -13593,14 +13773,13 @@ L_curve25519_base_avx2_bits: adcq 48(%rsp), %r10 movq %r11, %r15 adcq 56(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r11, %rbx + imulq $19, %rbx + btr $63, %r11 # Sub modulus (if overflow) - addq %rcx, %r8 + addq %rbx, %r8 adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -13609,12 +13788,12 @@ L_curve25519_base_avx2_bits: sbbq 40(%rsp), %r13 sbbq 48(%rsp), %r14 sbbq 56(%rsp), %r15 - sbbq %rcx, %rcx - shldq $0x01, %r15, %rcx - imulq $-19, %rcx - andq %rbx, %r15 + sbbq %rbx, %rbx + shldq $0x01, %r15, %rbx + imulq $-19, %rbx + btr $63, %r15 # Add modulus (if underflow) - subq %rcx, %r12 + subq %rbx, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -13626,10 +13805,11 @@ L_curve25519_base_avx2_bits: movq %r13, 40(%rsp) movq %r14, 48(%rsp) movq %r15, 56(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] 
mulxq 144(%rsp), %r10, %r11 # A[1] * B[0] @@ -13641,7 +13821,7 @@ L_curve25519_base_avx2_bits: mulxq 152(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r14 @@ -13655,7 +13835,7 @@ L_curve25519_base_avx2_bits: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -13681,7 +13861,7 @@ L_curve25519_base_avx2_bits: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -13737,13 +13917,12 @@ L_curve25519_base_avx2_bits: sbbq 104(%rsp), %r9 sbbq 112(%rsp), %r10 sbbq 120(%rsp), %r11 - sbbq %rcx, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $-19, %rcx - andq %rbx, %r11 + sbbq %rbx, %rbx + shldq $0x01, %r11, %rbx + imulq $-19, %rbx + btr $63, %r11 # Add modulus (if underflow) - subq %rcx, %r8 + subq %rbx, %r8 sbbq $0x00, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 @@ -13831,27 +14010,6 @@ L_curve25519_base_avx2_bits: movq %r9, 40(%rsp) movq %r10, 48(%rsp) movq %r11, 56(%rsp) - movq $0x1db42, %rdx - mulxq 128(%rsp), %r8, %r15 - mulxq 136(%rsp), %r9, %r14 - mulxq 144(%rsp), %r10, %r13 - mulxq 152(%rsp), %r11, %r12 - addq %r15, %r9 - adcq %r14, %r10 - adcq %r13, %r11 - adcq $0x00, %r12 - movq $0x7fffffffffffffff, %r15 - shldq $0x01, %r11, %r12 - andq %r15, %r11 - imulq $19, %r12, %r12 - addq %r12, %r8 - adcq $0x00, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 - movq %r8, (%rsp) - movq %r9, 8(%rsp) - movq %r10, 16(%rsp) - movq %r11, 24(%rsp) # Square movq 64(%rsp), %rdx movq 72(%rsp), %rax @@ -13932,23 +14090,24 @@ L_curve25519_base_avx2_bits: movq %r9, 72(%rsp) movq %r10, 80(%rsp) movq %r11, 88(%rsp) - # Add - movq 96(%rsp), %r8 - movq 104(%rsp), %r9 - addq (%rsp), %r8 - movq 112(%rsp), %r10 - adcq 8(%rsp), %r9 - movq 120(%rsp), %r11 - adcq 16(%rsp), 
%r10 - adcq 24(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 - # Sub modulus (if overflow) - addq %rcx, %r8 + movq $0x1db42, %rdx + mulxq 128(%rsp), %r8, %r15 + mulxq 136(%rsp), %r9, %r14 + mulxq 144(%rsp), %r10, %r13 + addq %r15, %r9 + mulxq 152(%rsp), %r11, %r12 + adcq %r14, %r10 + adcq %r13, %r11 + adcq $0x00, %r12 + addq 96(%rsp), %r8 + adcq 104(%rsp), %r9 + adcq 112(%rsp), %r10 + adcq 120(%rsp), %r11 + adcq $0x00, %r12 + shldq $0x01, %r11, %r12 + btr $63, %r11 + imulq $19, %r12, %r12 + addq %r12, %r8 adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -13960,14 +14119,13 @@ L_curve25519_base_avx2_bits: mulxq 32(%rsp), %r8, %r15 mulxq 40(%rsp), %r9, %r14 mulxq 48(%rsp), %r10, %r13 - mulxq 56(%rsp), %r11, %r12 addq %r15, %r9 + mulxq 56(%rsp), %r11, %r12 adcq %r14, %r10 adcq %r13, %r11 adcq $0x00, %r12 - movq $0x7fffffffffffffff, %r15 shldq $0x01, %r11, %r12 - andq %r15, %r11 + btr $63, %r11 imulq $19, %r12, %r12 addq %r12, %r8 adcq $0x00, %r9 @@ -13977,10 +14135,11 @@ L_curve25519_base_avx2_bits: movq %r9, 40(%rsp) movq %r10, 48(%rsp) movq %r11, 56(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 144(%rsp), %r10, %r11 # A[1] * B[0] @@ -13992,7 +14151,7 @@ L_curve25519_base_avx2_bits: mulxq 152(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r14 @@ -14006,7 +14165,7 @@ L_curve25519_base_avx2_bits: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -14032,7 +14191,7 @@ L_curve25519_base_avx2_bits: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -14089,43 
+14248,42 @@ L_curve25519_base_avx2_bits: movq 8(%rdi), %r9 movq 16(%rdi), %r10 movq 24(%rdi), %r11 + movq (%rsp), %r12 + movq 8(%rsp), %r13 + movq 16(%rsp), %r14 + movq 24(%rsp), %r15 xorq 64(%rsp), %r8 xorq 72(%rsp), %r9 xorq 80(%rsp), %r10 xorq 88(%rsp), %r11 + xorq 32(%rsp), %r12 + xorq 40(%rsp), %r13 + xorq 48(%rsp), %r14 + xorq 56(%rsp), %r15 andq %rax, %r8 andq %rax, %r9 andq %rax, %r10 andq %rax, %r11 + andq %rax, %r12 + andq %rax, %r13 + andq %rax, %r14 + andq %rax, %r15 xorq %r8, (%rdi) xorq %r9, 8(%rdi) xorq %r10, 16(%rdi) xorq %r11, 24(%rdi) + xorq %r12, (%rsp) + xorq %r13, 8(%rsp) + xorq %r14, 16(%rsp) + xorq %r15, 24(%rsp) xorq %r8, 64(%rsp) xorq %r9, 72(%rsp) xorq %r10, 80(%rsp) xorq %r11, 88(%rsp) - # Conditional Swap - movq (%rsp), %r8 - movq 8(%rsp), %r9 - movq 16(%rsp), %r10 - movq 24(%rsp), %r11 - xorq 32(%rsp), %r8 - xorq 40(%rsp), %r9 - xorq 48(%rsp), %r10 - xorq 56(%rsp), %r11 - andq %rax, %r8 - andq %rax, %r9 - andq %rax, %r10 - andq %rax, %r11 - xorq %r8, (%rsp) - xorq %r9, 8(%rsp) - xorq %r10, 16(%rsp) - xorq %r11, 24(%rsp) - xorq %r8, 32(%rsp) - xorq %r9, 40(%rsp) - xorq %r10, 48(%rsp) - xorq %r11, 56(%rsp) + xorq %r12, 32(%rsp) + xorq %r13, 40(%rsp) + xorq %r14, 48(%rsp) + xorq %r15, 56(%rsp) L_curve25519_base_avx2_last_3: # Add-Sub # Add @@ -14141,14 +14299,13 @@ L_curve25519_base_avx2_last_3: adcq 16(%rsp), %r10 movq %r11, %r15 adcq 24(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r11, %rbx + imulq $19, %rbx + btr $63, %r11 # Sub modulus (if overflow) - addq %rcx, %r8 + addq %rbx, %r8 adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 @@ -14157,12 +14314,12 @@ L_curve25519_base_avx2_last_3: sbbq 8(%rsp), %r13 sbbq 16(%rsp), %r14 sbbq 24(%rsp), %r15 - sbbq %rcx, %rcx - shldq $0x01, %r15, %rcx - imulq $-19, %rcx - andq %rbx, %r15 + sbbq %rbx, %rbx + shldq $0x01, %r15, %rbx + imulq $-19, 
%rbx + btr $63, %r15 # Add modulus (if underflow) - subq %rcx, %r12 + subq %rbx, %r12 sbbq $0x00, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 @@ -14334,10 +14491,11 @@ L_curve25519_base_avx2_last_3: movq %r9, 136(%rsp) movq %r10, 144(%rsp) movq %r11, 152(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 144(%rsp), %r10, %r11 # A[1] * B[0] @@ -14349,7 +14507,7 @@ L_curve25519_base_avx2_last_3: mulxq 152(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r14 @@ -14363,7 +14521,7 @@ L_curve25519_base_avx2_last_3: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -14389,7 +14547,7 @@ L_curve25519_base_avx2_last_3: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -14445,13 +14603,12 @@ L_curve25519_base_avx2_last_3: sbbq 104(%rsp), %r9 sbbq 112(%rsp), %r10 sbbq 120(%rsp), %r11 - sbbq %rcx, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $-19, %rcx - andq %rbx, %r11 + sbbq %rbx, %rbx + shldq $0x01, %r11, %rbx + imulq $-19, %rbx + btr $63, %r11 # Add modulus (if underflow) - subq %rcx, %r8 + subq %rbx, %r8 sbbq $0x00, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 @@ -14463,51 +14620,32 @@ L_curve25519_base_avx2_last_3: mulxq 128(%rsp), %r8, %r15 mulxq 136(%rsp), %r9, %r14 mulxq 144(%rsp), %r10, %r13 - mulxq 152(%rsp), %r11, %r12 addq %r15, %r9 + mulxq 152(%rsp), %r11, %r12 adcq %r14, %r10 adcq %r13, %r11 adcq $0x00, %r12 - movq $0x7fffffffffffffff, %r15 + addq 96(%rsp), %r8 + adcq 104(%rsp), %r9 + adcq 112(%rsp), %r10 + adcq 120(%rsp), %r11 + adcq $0x00, %r12 shldq $0x01, %r11, %r12 - andq %r15, %r11 + btr $63, %r11 imulq $19, %r12, %r12 addq %r12, %r8 
adcq $0x00, %r9 adcq $0x00, %r10 adcq $0x00, %r11 - movq %r8, (%rsp) - movq %r9, 8(%rsp) - movq %r10, 16(%rsp) - movq %r11, 24(%rsp) - # Add - movq 96(%rsp), %r8 - movq 104(%rsp), %r9 - addq (%rsp), %r8 - movq 112(%rsp), %r10 - adcq 8(%rsp), %r9 - movq 120(%rsp), %r11 - adcq 16(%rsp), %r10 - adcq 24(%rsp), %r11 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r11, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r11 - # Sub modulus (if overflow) - addq %rcx, %r8 - adcq $0x00, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 movq %r8, 96(%rsp) movq %r9, 104(%rsp) movq %r10, 112(%rsp) movq %r11, 120(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 144(%rsp), %r10, %r11 # A[1] * B[0] @@ -14519,7 +14657,7 @@ L_curve25519_base_avx2_last_3: mulxq 152(%rsp), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r14 @@ -14533,7 +14671,7 @@ L_curve25519_base_avx2_last_3: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -14559,7 +14697,7 @@ L_curve25519_base_avx2_last_3: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -14846,10 +14984,11 @@ L_curve25519_base_avx2_last_3: callq _fe_mul_avx2 #endif /* __APPLE__ */ movq 160(%rsp), %rdi + movq (%rdi), %rax # Multiply # A[0] * B[0] movq (%rsp), %rdx - mulxq (%rdi), %r8, %r9 + mulxq %rax, %r8, %r9 # A[2] * B[0] mulxq 16(%rdi), %r10, %r11 # A[1] * B[0] @@ -14861,7 +15000,7 @@ L_curve25519_base_avx2_last_3: mulxq 24(%rdi), %r12, %r13 adcxq %rbx, %r10 # A[0] * B[1] - mulxq (%rdi), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r9 # A[2] * B[1] mulxq 16(%rdi), %rcx, %r14 @@ -14875,7 +15014,7 @@ 
L_curve25519_base_avx2_last_3: adcxq %r15, %r13 adoxq %rbx, %r12 # A[0] * B[2] - mulxq (%rdi), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %r15, %r13 xorq %r14, %r14 adcxq %rcx, %r10 @@ -14901,7 +15040,7 @@ L_curve25519_base_avx2_last_3: adoxq %r15, %r14 adcxq %rcx, %r14 # A[0] * B[3] - mulxq (%rdi), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %r15 xorq %rbx, %rbx adcxq %rdx, %r11 @@ -14985,7 +15124,7 @@ L_curve25519_base_avx2_last_3: #ifndef __APPLE__ .size curve25519_base_avx2,.-curve25519_base_avx2 #endif /* __APPLE__ */ -#endif /* !HAVE_ED25519 && !WOLFSSL_CURVE25519_USE_ED25519 */ +#endif /* WOLFSSL_CURVE25519_NOT_USE_ED25519 */ #ifndef __APPLE__ .text .globl curve25519_avx2 @@ -15034,59 +15173,58 @@ _curve25519_avx2: movq %r12, 88(%rsp) movq $0xfe, %rbx L_curve25519_avx2_bits: - movq 176(%rsp), %rax movq %rbx, 160(%rsp) movq %rbx, %rcx + movq 176(%rsp), %rax andq $63, %rcx shrq $6, %rbx movq (%rsi,%rbx,8), %rbx shrq %cl, %rbx andq $0x01, %rbx xorq %rbx, %rax + movq %rbx, 176(%rsp) negq %rax # Conditional Swap movq (%rdi), %r9 movq 8(%rdi), %r10 movq 16(%rdi), %r11 movq 24(%rdi), %r12 + movq (%rsp), %r13 + movq 8(%rsp), %r14 + movq 16(%rsp), %r15 + movq 24(%rsp), %rbp xorq 64(%rsp), %r9 xorq 72(%rsp), %r10 xorq 80(%rsp), %r11 xorq 88(%rsp), %r12 + xorq 32(%rsp), %r13 + xorq 40(%rsp), %r14 + xorq 48(%rsp), %r15 + xorq 56(%rsp), %rbp andq %rax, %r9 andq %rax, %r10 andq %rax, %r11 andq %rax, %r12 + andq %rax, %r13 + andq %rax, %r14 + andq %rax, %r15 + andq %rax, %rbp xorq %r9, (%rdi) xorq %r10, 8(%rdi) xorq %r11, 16(%rdi) xorq %r12, 24(%rdi) + xorq %r13, (%rsp) + xorq %r14, 8(%rsp) + xorq %r15, 16(%rsp) + xorq %rbp, 24(%rsp) xorq %r9, 64(%rsp) xorq %r10, 72(%rsp) xorq %r11, 80(%rsp) xorq %r12, 88(%rsp) - # Conditional Swap - movq (%rsp), %r9 - movq 8(%rsp), %r10 - movq 16(%rsp), %r11 - movq 24(%rsp), %r12 - xorq 32(%rsp), %r9 - xorq 40(%rsp), %r10 - xorq 48(%rsp), %r11 - xorq 56(%rsp), %r12 - andq %rax, %r9 - andq %rax, %r10 - andq %rax, %r11 - andq %rax, %r12 
- xorq %r9, (%rsp) - xorq %r10, 8(%rsp) - xorq %r11, 16(%rsp) - xorq %r12, 24(%rsp) - xorq %r9, 32(%rsp) - xorq %r10, 40(%rsp) - xorq %r11, 48(%rsp) - xorq %r12, 56(%rsp) - movq %rbx, 176(%rsp) + xorq %r13, 32(%rsp) + xorq %r14, 40(%rsp) + xorq %r15, 48(%rsp) + xorq %rbp, 56(%rsp) # Add-Sub # Add movq (%rdi), %r9 @@ -15101,14 +15239,13 @@ L_curve25519_avx2_bits: adcq 16(%rsp), %r11 movq %r12, %rbp adcq 24(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r12 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r12, %rbx + imulq $19, %rbx + btr $63, %r12 # Sub modulus (if overflow) - addq %rcx, %r9 + addq %rbx, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -15117,12 +15254,12 @@ L_curve25519_avx2_bits: sbbq 8(%rsp), %r14 sbbq 16(%rsp), %r15 sbbq 24(%rsp), %rbp - sbbq %rcx, %rcx - shldq $0x01, %rbp, %rcx - imulq $-19, %rcx - andq %rbx, %rbp + sbbq %rbx, %rbx + shldq $0x01, %rbp, %rbx + imulq $-19, %rbx + btr $63, %rbp # Add modulus (if underflow) - subq %rcx, %r13 + subq %rbx, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbp @@ -15148,14 +15285,13 @@ L_curve25519_avx2_bits: adcq 48(%rsp), %r11 movq %r12, %rbp adcq 56(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r12 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r12, %rbx + imulq $19, %rbx + btr $63, %r12 # Sub modulus (if overflow) - addq %rcx, %r9 + addq %rbx, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -15164,12 +15300,12 @@ L_curve25519_avx2_bits: sbbq 40(%rsp), %r14 sbbq 48(%rsp), %r15 sbbq 56(%rsp), %rbp - sbbq %rcx, %rcx - shldq $0x01, %rbp, %rcx - imulq $-19, %rcx - andq %rbx, %rbp + sbbq %rbx, %rbx + shldq $0x01, %rbp, %rbx + imulq $-19, %rbx + btr $63, %rbp # Add modulus (if underflow) - subq %rcx, %r13 + subq %rbx, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbp @@ -15181,10 
+15317,11 @@ L_curve25519_avx2_bits: movq %r14, 104(%rsp) movq %r15, 112(%rsp) movq %rbp, 120(%rsp) + movq 32(%rsp), %rax # Multiply # A[0] * B[0] movq 128(%rsp), %rdx - mulxq 32(%rsp), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 48(%rsp), %r11, %r12 # A[1] * B[0] @@ -15196,7 +15333,7 @@ L_curve25519_avx2_bits: mulxq 56(%rsp), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 32(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] mulxq 48(%rsp), %rcx, %r15 @@ -15210,7 +15347,7 @@ L_curve25519_avx2_bits: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 32(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -15236,7 +15373,7 @@ L_curve25519_avx2_bits: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 32(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 @@ -15283,10 +15420,11 @@ L_curve25519_avx2_bits: movq %r10, 40(%rsp) movq %r11, 48(%rsp) movq %r12, 56(%rsp) + movq 96(%rsp), %rax # Multiply # A[0] * B[0] movq (%rdi), %rdx - mulxq 96(%rsp), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 112(%rsp), %r11, %r12 # A[1] * B[0] @@ -15298,7 +15436,7 @@ L_curve25519_avx2_bits: mulxq 120(%rsp), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 96(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] mulxq 112(%rsp), %rcx, %r15 @@ -15312,7 +15450,7 @@ L_curve25519_avx2_bits: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 96(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -15338,7 +15476,7 @@ L_curve25519_avx2_bits: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 96(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 @@ -15559,14 +15697,13 @@ L_curve25519_avx2_bits: adcq 48(%rsp), %r11 movq %r12, %rbp adcq 56(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx 
- imulq $19, %rcx - andq %rbx, %r12 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r12, %rbx + imulq $19, %rbx + btr $63, %r12 # Sub modulus (if overflow) - addq %rcx, %r9 + addq %rbx, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -15575,12 +15712,12 @@ L_curve25519_avx2_bits: sbbq 40(%rsp), %r14 sbbq 48(%rsp), %r15 sbbq 56(%rsp), %rbp - sbbq %rcx, %rcx - shldq $0x01, %rbp, %rcx - imulq $-19, %rcx - andq %rbx, %rbp + sbbq %rbx, %rbx + shldq $0x01, %rbp, %rbx + imulq $-19, %rbx + btr $63, %rbp # Add modulus (if underflow) - subq %rcx, %r13 + subq %rbx, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbp @@ -15592,10 +15729,11 @@ L_curve25519_avx2_bits: movq %r14, 40(%rsp) movq %r15, 48(%rsp) movq %rbp, 56(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 144(%rsp), %r11, %r12 # A[1] * B[0] @@ -15607,7 +15745,7 @@ L_curve25519_avx2_bits: mulxq 152(%rsp), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r15 @@ -15621,7 +15759,7 @@ L_curve25519_avx2_bits: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -15647,7 +15785,7 @@ L_curve25519_avx2_bits: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 @@ -15703,13 +15841,12 @@ L_curve25519_avx2_bits: sbbq 104(%rsp), %r10 sbbq 112(%rsp), %r11 sbbq 120(%rsp), %r12 - sbbq %rcx, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $-19, %rcx - andq %rbx, %r12 + sbbq %rbx, %rbx + shldq $0x01, %r12, %rbx + imulq $-19, %rbx + btr $63, %r12 # Add modulus (if underflow) - subq %rcx, %r9 + subq %rbx, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 sbbq $0x00, %r12 @@ -15797,27 +15934,6 @@ 
L_curve25519_avx2_bits: movq %r10, 40(%rsp) movq %r11, 48(%rsp) movq %r12, 56(%rsp) - movq $0x1db42, %rdx - mulxq 128(%rsp), %r9, %rbp - mulxq 136(%rsp), %r10, %r15 - mulxq 144(%rsp), %r11, %r14 - mulxq 152(%rsp), %r12, %r13 - addq %rbp, %r10 - adcq %r15, %r11 - adcq %r14, %r12 - adcq $0x00, %r13 - movq $0x7fffffffffffffff, %rbp - shldq $0x01, %r12, %r13 - andq %rbp, %r12 - imulq $19, %r13, %r13 - addq %r13, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 - adcq $0x00, %r12 - movq %r9, (%rsp) - movq %r10, 8(%rsp) - movq %r11, 16(%rsp) - movq %r12, 24(%rsp) # Square movq 64(%rsp), %rdx movq 72(%rsp), %rax @@ -15898,23 +16014,24 @@ L_curve25519_avx2_bits: movq %r10, 72(%rsp) movq %r11, 80(%rsp) movq %r12, 88(%rsp) - # Add - movq 96(%rsp), %r9 - movq 104(%rsp), %r10 - addq (%rsp), %r9 - movq 112(%rsp), %r11 - adcq 8(%rsp), %r10 - movq 120(%rsp), %r12 - adcq 16(%rsp), %r11 - adcq 24(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r12 - # Sub modulus (if overflow) - addq %rcx, %r9 + movq $0x1db42, %rdx + mulxq 128(%rsp), %r9, %rbp + mulxq 136(%rsp), %r10, %r15 + mulxq 144(%rsp), %r11, %r14 + addq %rbp, %r10 + mulxq 152(%rsp), %r12, %r13 + adcq %r15, %r11 + adcq %r14, %r12 + adcq $0x00, %r13 + addq 96(%rsp), %r9 + adcq 104(%rsp), %r10 + adcq 112(%rsp), %r11 + adcq 120(%rsp), %r12 + adcq $0x00, %r13 + shldq $0x01, %r12, %r13 + btr $63, %r12 + imulq $19, %r13, %r13 + addq %r13, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -15922,10 +16039,11 @@ L_curve25519_avx2_bits: movq %r10, 104(%rsp) movq %r11, 112(%rsp) movq %r12, 120(%rsp) + movq (%r8), %rax # Multiply # A[0] * B[0] movq 32(%rsp), %rdx - mulxq (%r8), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 16(%r8), %r11, %r12 # A[1] * B[0] @@ -15937,7 +16055,7 @@ L_curve25519_avx2_bits: mulxq 24(%r8), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq (%r8), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * 
B[1] mulxq 16(%r8), %rcx, %r15 @@ -15951,7 +16069,7 @@ L_curve25519_avx2_bits: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq (%r8), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -15977,7 +16095,7 @@ L_curve25519_avx2_bits: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq (%r8), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 @@ -16024,79 +16142,80 @@ L_curve25519_avx2_bits: movq %r10, 40(%rsp) movq %r11, 48(%rsp) movq %r12, 56(%rsp) + movq 96(%rsp), %rax # Multiply # A[0] * B[0] - movq 96(%rsp), %rdx - mulxq 128(%rsp), %r9, %r10 + movq 128(%rsp), %rdx + mulxq %rax, %r9, %r10 # A[2] * B[0] - mulxq 144(%rsp), %r11, %r12 + mulxq 112(%rsp), %r11, %r12 # A[1] * B[0] - mulxq 136(%rsp), %rcx, %rbx + mulxq 104(%rsp), %rcx, %rbx xorq %rbp, %rbp adcxq %rcx, %r10 # A[3] * B[1] - movq 104(%rsp), %rdx - mulxq 152(%rsp), %r13, %r14 + movq 136(%rsp), %rdx + mulxq 120(%rsp), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] - mulxq 144(%rsp), %rcx, %r15 + mulxq 112(%rsp), %rcx, %r15 adoxq %rbx, %r11 adcxq %rcx, %r12 # A[1] * B[2] - movq 112(%rsp), %rdx - mulxq 136(%rsp), %rcx, %rbx + movq 144(%rsp), %rdx + mulxq 104(%rsp), %rcx, %rbx adcxq %r15, %r13 adoxq %rcx, %r12 adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 # A[1] * B[1] - movq 104(%rsp), %rdx - mulxq 136(%rsp), %rdx, %rcx + movq 136(%rsp), %rdx + mulxq 104(%rsp), %rdx, %rcx adcxq %rbx, %r12 adoxq %rdx, %r11 # A[1] * B[3] - movq 120(%rsp), %rdx + movq 152(%rsp), %rdx adoxq %rcx, %r12 - mulxq 136(%rsp), %rcx, %rbx + mulxq 104(%rsp), %rcx, %rbx adcxq %rcx, %r13 # A[2] * B[2] - movq 112(%rsp), %rdx - mulxq 144(%rsp), %rdx, %rcx + movq 144(%rsp), %rdx + mulxq 112(%rsp), %rdx, %rcx adcxq %rbx, %r14 adoxq %rdx, %r13 # A[3] * B[3] - movq 120(%rsp), %rdx + 
movq 152(%rsp), %rdx adoxq %rcx, %r14 - mulxq 152(%rsp), %rcx, %rbx + mulxq 120(%rsp), %rcx, %rbx adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 # A[3] * B[0] - movq 152(%rsp), %rdx + movq 120(%rsp), %rdx adcxq %rcx, %r13 - mulxq 96(%rsp), %rdx, %rcx + mulxq 128(%rsp), %rdx, %rcx adoxq %rdx, %r12 adoxq %rcx, %r13 # A[3] * B[2] - movq 152(%rsp), %rdx - mulxq 112(%rsp), %rdx, %rcx + movq 120(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx adcxq %rdx, %r14 # A[2] * B[3] - movq 120(%rsp), %rdx + movq 152(%rsp), %rdx adcxq %rcx, %r15 - mulxq 144(%rsp), %rcx, %rdx + mulxq 112(%rsp), %rcx, %rdx adcxq %rbx, %rbp adoxq %rcx, %r14 adoxq %rdx, %r15 @@ -16130,7 +16249,7 @@ L_curve25519_avx2_bits: decq %rbx cmpq $3, %rbx jge L_curve25519_avx2_bits - movq %rbx, 160(%rsp) + movq $2, 160(%rsp) movq 176(%rsp), %rax negq %rax # Conditional Swap @@ -16138,43 +16257,42 @@ L_curve25519_avx2_bits: movq 8(%rdi), %r10 movq 16(%rdi), %r11 movq 24(%rdi), %r12 + movq (%rsp), %r13 + movq 8(%rsp), %r14 + movq 16(%rsp), %r15 + movq 24(%rsp), %rbp xorq 64(%rsp), %r9 xorq 72(%rsp), %r10 xorq 80(%rsp), %r11 xorq 88(%rsp), %r12 + xorq 32(%rsp), %r13 + xorq 40(%rsp), %r14 + xorq 48(%rsp), %r15 + xorq 56(%rsp), %rbp andq %rax, %r9 andq %rax, %r10 andq %rax, %r11 andq %rax, %r12 + andq %rax, %r13 + andq %rax, %r14 + andq %rax, %r15 + andq %rax, %rbp xorq %r9, (%rdi) xorq %r10, 8(%rdi) xorq %r11, 16(%rdi) xorq %r12, 24(%rdi) + xorq %r13, (%rsp) + xorq %r14, 8(%rsp) + xorq %r15, 16(%rsp) + xorq %rbp, 24(%rsp) xorq %r9, 64(%rsp) xorq %r10, 72(%rsp) xorq %r11, 80(%rsp) xorq %r12, 88(%rsp) - # Conditional Swap - movq (%rsp), %r9 - movq 8(%rsp), %r10 - movq 16(%rsp), %r11 - movq 24(%rsp), %r12 - xorq 32(%rsp), %r9 - xorq 40(%rsp), %r10 - xorq 48(%rsp), %r11 - xorq 56(%rsp), %r12 - andq %rax, %r9 - andq %rax, %r10 - andq %rax, %r11 - andq %rax, %r12 - xorq %r9, (%rsp) - xorq %r10, 8(%rsp) - xorq %r11, 16(%rsp) 
- xorq %r12, 24(%rsp) - xorq %r9, 32(%rsp) - xorq %r10, 40(%rsp) - xorq %r11, 48(%rsp) - xorq %r12, 56(%rsp) + xorq %r13, 32(%rsp) + xorq %r14, 40(%rsp) + xorq %r15, 48(%rsp) + xorq %rbp, 56(%rsp) L_curve25519_avx2_last_3: # Add-Sub # Add @@ -16190,14 +16308,13 @@ L_curve25519_avx2_last_3: adcq 16(%rsp), %r11 movq %r12, %rbp adcq 24(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r12 + movq $0x00, %rbx + adcq $0x00, %rbx + shldq $0x01, %r12, %rbx + imulq $19, %rbx + btr $63, %r12 # Sub modulus (if overflow) - addq %rcx, %r9 + addq %rbx, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 @@ -16206,12 +16323,12 @@ L_curve25519_avx2_last_3: sbbq 8(%rsp), %r14 sbbq 16(%rsp), %r15 sbbq 24(%rsp), %rbp - sbbq %rcx, %rcx - shldq $0x01, %rbp, %rcx - imulq $-19, %rcx - andq %rbx, %rbp + sbbq %rbx, %rbx + shldq $0x01, %rbp, %rbx + imulq $-19, %rbx + btr $63, %rbp # Add modulus (if underflow) - subq %rcx, %r13 + subq %rbx, %r13 sbbq $0x00, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbp @@ -16383,10 +16500,11 @@ L_curve25519_avx2_last_3: movq %r10, 136(%rsp) movq %r11, 144(%rsp) movq %r12, 152(%rsp) + movq 128(%rsp), %rax # Multiply # A[0] * B[0] movq 96(%rsp), %rdx - mulxq 128(%rsp), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 144(%rsp), %r11, %r12 # A[1] * B[0] @@ -16398,7 +16516,7 @@ L_curve25519_avx2_last_3: mulxq 152(%rsp), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] mulxq 144(%rsp), %rcx, %r15 @@ -16412,7 +16530,7 @@ L_curve25519_avx2_last_3: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -16438,7 +16556,7 @@ L_curve25519_avx2_last_3: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 
@@ -16494,13 +16612,12 @@ L_curve25519_avx2_last_3: sbbq 104(%rsp), %r10 sbbq 112(%rsp), %r11 sbbq 120(%rsp), %r12 - sbbq %rcx, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $-19, %rcx - andq %rbx, %r12 + sbbq %rbx, %rbx + shldq $0x01, %r12, %rbx + imulq $-19, %rbx + btr $63, %r12 # Add modulus (if underflow) - subq %rcx, %r9 + subq %rbx, %r9 sbbq $0x00, %r10 sbbq $0x00, %r11 sbbq $0x00, %r12 @@ -16512,120 +16629,101 @@ L_curve25519_avx2_last_3: mulxq 128(%rsp), %r9, %rbp mulxq 136(%rsp), %r10, %r15 mulxq 144(%rsp), %r11, %r14 - mulxq 152(%rsp), %r12, %r13 addq %rbp, %r10 + mulxq 152(%rsp), %r12, %r13 adcq %r15, %r11 adcq %r14, %r12 adcq $0x00, %r13 - movq $0x7fffffffffffffff, %rbp + addq 96(%rsp), %r9 + adcq 104(%rsp), %r10 + adcq 112(%rsp), %r11 + adcq 120(%rsp), %r12 + adcq $0x00, %r13 shldq $0x01, %r12, %r13 - andq %rbp, %r12 + btr $63, %r12 imulq $19, %r13, %r13 addq %r13, %r9 adcq $0x00, %r10 adcq $0x00, %r11 adcq $0x00, %r12 - movq %r9, (%rsp) - movq %r10, 8(%rsp) - movq %r11, 16(%rsp) - movq %r12, 24(%rsp) - # Add - movq 96(%rsp), %r9 - movq 104(%rsp), %r10 - addq (%rsp), %r9 - movq 112(%rsp), %r11 - adcq 8(%rsp), %r10 - movq 120(%rsp), %r12 - adcq 16(%rsp), %r11 - adcq 24(%rsp), %r12 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r12, %rcx - movq $0x7fffffffffffffff, %rbx - imulq $19, %rcx - andq %rbx, %r12 - # Sub modulus (if overflow) - addq %rcx, %r9 - adcq $0x00, %r10 - adcq $0x00, %r11 - adcq $0x00, %r12 movq %r9, 96(%rsp) movq %r10, 104(%rsp) movq %r11, 112(%rsp) movq %r12, 120(%rsp) + movq 96(%rsp), %rax # Multiply # A[0] * B[0] - movq 96(%rsp), %rdx - mulxq 128(%rsp), %r9, %r10 + movq 128(%rsp), %rdx + mulxq %rax, %r9, %r10 # A[2] * B[0] - mulxq 144(%rsp), %r11, %r12 + mulxq 112(%rsp), %r11, %r12 # A[1] * B[0] - mulxq 136(%rsp), %rcx, %rbx + mulxq 104(%rsp), %rcx, %rbx xorq %rbp, %rbp adcxq %rcx, %r10 # A[3] * B[1] - movq 104(%rsp), %rdx - mulxq 152(%rsp), %r13, %r14 + movq 136(%rsp), %rdx + mulxq 120(%rsp), %r13, 
%r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] - mulxq 144(%rsp), %rcx, %r15 + mulxq 112(%rsp), %rcx, %r15 adoxq %rbx, %r11 adcxq %rcx, %r12 # A[1] * B[2] - movq 112(%rsp), %rdx - mulxq 136(%rsp), %rcx, %rbx + movq 144(%rsp), %rdx + mulxq 104(%rsp), %rcx, %rbx adcxq %r15, %r13 adoxq %rcx, %r12 adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq 128(%rsp), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 # A[1] * B[1] - movq 104(%rsp), %rdx - mulxq 136(%rsp), %rdx, %rcx + movq 136(%rsp), %rdx + mulxq 104(%rsp), %rdx, %rcx adcxq %rbx, %r12 adoxq %rdx, %r11 # A[1] * B[3] - movq 120(%rsp), %rdx + movq 152(%rsp), %rdx adoxq %rcx, %r12 - mulxq 136(%rsp), %rcx, %rbx + mulxq 104(%rsp), %rcx, %rbx adcxq %rcx, %r13 # A[2] * B[2] - movq 112(%rsp), %rdx - mulxq 144(%rsp), %rdx, %rcx + movq 144(%rsp), %rdx + mulxq 112(%rsp), %rdx, %rcx adcxq %rbx, %r14 adoxq %rdx, %r13 # A[3] * B[3] - movq 120(%rsp), %rdx + movq 152(%rsp), %rdx adoxq %rcx, %r14 - mulxq 152(%rsp), %rcx, %rbx + mulxq 120(%rsp), %rcx, %rbx adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq 128(%rsp), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 # A[3] * B[0] - movq 152(%rsp), %rdx + movq 120(%rsp), %rdx adcxq %rcx, %r13 - mulxq 96(%rsp), %rdx, %rcx + mulxq 128(%rsp), %rdx, %rcx adoxq %rdx, %r12 adoxq %rcx, %r13 # A[3] * B[2] - movq 152(%rsp), %rdx - mulxq 112(%rsp), %rdx, %rcx + movq 120(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx adcxq %rdx, %r14 # A[2] * B[3] - movq 120(%rsp), %rdx + movq 152(%rsp), %rdx adcxq %rcx, %r15 - mulxq 144(%rsp), %rcx, %rdx + mulxq 112(%rsp), %rcx, %rdx adcxq %rbx, %rbp adoxq %rcx, %r14 adoxq %rdx, %r15 @@ -16655,9 +16753,7 @@ L_curve25519_avx2_last_3: movq %r10, 8(%rsp) movq %r11, 16(%rsp) movq %r12, 24(%rsp) - movq 160(%rsp), %rbx - decq %rbx - movq %rbx, 160(%rsp) + decq 160(%rsp) jge L_curve25519_avx2_last_3 # Invert leaq 
32(%rsp), %rdi @@ -16897,10 +16993,11 @@ L_curve25519_avx2_last_3: callq _fe_mul_avx2 #endif /* __APPLE__ */ movq 168(%rsp), %rdi + movq (%rdi), %rax # Multiply # A[0] * B[0] movq (%rsp), %rdx - mulxq (%rdi), %r9, %r10 + mulxq %rax, %r9, %r10 # A[2] * B[0] mulxq 16(%rdi), %r11, %r12 # A[1] * B[0] @@ -16912,7 +17009,7 @@ L_curve25519_avx2_last_3: mulxq 24(%rdi), %r13, %r14 adcxq %rbx, %r11 # A[0] * B[1] - mulxq (%rdi), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rcx, %r10 # A[2] * B[1] mulxq 16(%rdi), %rcx, %r15 @@ -16926,7 +17023,7 @@ L_curve25519_avx2_last_3: adcxq %rbp, %r14 adoxq %rbx, %r13 # A[0] * B[2] - mulxq (%rdi), %rcx, %rbx + mulxq %rax, %rcx, %rbx adoxq %rbp, %r14 xorq %r15, %r15 adcxq %rcx, %r11 @@ -16952,7 +17049,7 @@ L_curve25519_avx2_last_3: adoxq %rbp, %r15 adcxq %rcx, %r15 # A[0] * B[3] - mulxq (%rdi), %rdx, %rcx + mulxq %rax, %rdx, %rcx adcxq %rbx, %rbp xorq %rbx, %rbx adcxq %rdx, %r12 @@ -17308,321 +17405,326 @@ _ge_p1p1_to_p2_avx2: pushq %r14 pushq %r15 pushq %rbx + pushq %rbp subq $16, %rsp movq %rdi, (%rsp) movq %rsi, 8(%rsp) leaq 96(%rsi), %rax + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, 
%r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) 
- movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) leaq 64(%rsi), %rsi leaq 64(%rdi), %rdi + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, 
%r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) leaq -32(%rsi), %rax leaq -32(%rdi), %rdi + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, 
%r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, 
%r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) addq $16, %rsp + popq %rbp popq %rbx popq %r15 popq %r14 @@ -17649,425 +17751,431 @@ _ge_p1p1_to_p3_avx2: pushq %r14 pushq %r15 pushq %rbx + pushq %rbp subq $16, %rsp movq %rdi, (%rsp) movq %rsi, 8(%rsp) leaq 96(%rsi), %rax + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - 
adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) leaq 32(%rsi), %rax leaq 96(%rdi), %rdi + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 
24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 
adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) leaq 64(%rsi), %rsi leaq -64(%rdi), %rdi + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), %rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, 
%r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) leaq 32(%rsi), %rax leaq 32(%rdi), %rdi + movq (%rsi), %r9 # Multiply # A[0] * B[0] movq (%rax), %rdx - mulxq (%rsi), %r9, %r10 + mulxq %r9, %r10, %r11 # A[2] * B[0] - mulxq 16(%rsi), %r11, %r12 + mulxq 16(%rsi), %r12, %r13 # A[1] * B[0] mulxq 8(%rsi), 
%rcx, %r8 - xorq %rbx, %rbx - adcxq %rcx, %r10 + xorq %rbp, %rbp + adcxq %rcx, %r11 # A[3] * B[1] movq 8(%rax), %rdx - mulxq 24(%rsi), %r13, %r14 - adcxq %r8, %r11 + mulxq 24(%rsi), %r14, %r15 + adcxq %r8, %r12 # A[0] * B[1] - mulxq (%rsi), %rcx, %r8 - adoxq %rcx, %r10 + mulxq %r9, %rcx, %r8 + adoxq %rcx, %r11 # A[2] * B[1] - mulxq 16(%rsi), %rcx, %r15 - adoxq %r8, %r11 - adcxq %rcx, %r12 + mulxq 16(%rsi), %rcx, %rbx + adoxq %r8, %r12 + adcxq %rcx, %r13 # A[1] * B[2] movq 16(%rax), %rdx mulxq 8(%rsi), %rcx, %r8 - adcxq %r15, %r13 - adoxq %rcx, %r12 adcxq %rbx, %r14 - adoxq %r8, %r13 + adoxq %rcx, %r13 + adcxq %rbp, %r15 + adoxq %r8, %r14 # A[0] * B[2] - mulxq (%rsi), %rcx, %r8 - adoxq %rbx, %r14 - xorq %r15, %r15 - adcxq %rcx, %r11 + mulxq %r9, %rcx, %r8 + adoxq %rbp, %r15 + xorq %rbx, %rbx + adcxq %rcx, %r12 # A[1] * B[1] movq 8(%rax), %rdx mulxq 8(%rsi), %rdx, %rcx - adcxq %r8, %r12 - adoxq %rdx, %r11 + adcxq %r8, %r13 + adoxq %rdx, %r12 # A[1] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r12 + adoxq %rcx, %r13 mulxq 8(%rsi), %rcx, %r8 - adcxq %rcx, %r13 + adcxq %rcx, %r14 # A[2] * B[2] movq 16(%rax), %rdx mulxq 16(%rsi), %rdx, %rcx - adcxq %r8, %r14 - adoxq %rdx, %r13 + adcxq %r8, %r15 + adoxq %rdx, %r14 # A[3] * B[3] movq 24(%rax), %rdx - adoxq %rcx, %r14 + adoxq %rcx, %r15 mulxq 24(%rsi), %rcx, %r8 - adoxq %rbx, %r15 - adcxq %rcx, %r15 + adoxq %rbp, %rbx + adcxq %rcx, %rbx # A[0] * B[3] - mulxq (%rsi), %rdx, %rcx - adcxq %r8, %rbx + mulxq %r9, %rdx, %rcx + adcxq %r8, %rbp xorq %r8, %r8 - adcxq %rdx, %r12 + adcxq %rdx, %r13 # A[3] * B[0] movq 24(%rsi), %rdx - adcxq %rcx, %r13 + adcxq %rcx, %r14 mulxq (%rax), %rdx, %rcx - adoxq %rdx, %r12 - adoxq %rcx, %r13 + adoxq %rdx, %r13 + adoxq %rcx, %r14 # A[3] * B[2] movq 24(%rsi), %rdx mulxq 16(%rax), %rdx, %rcx - adcxq %rdx, %r14 + adcxq %rdx, %r15 # A[2] * B[3] movq 24(%rax), %rdx - adcxq %rcx, %r15 + adcxq %rcx, %rbx mulxq 16(%rsi), %rcx, %rdx - adcxq %r8, %rbx - adoxq %rcx, %r14 - adoxq %rdx, %r15 - adoxq %r8, %rbx + 
adcxq %r8, %rbp + adoxq %rcx, %r15 + adoxq %rdx, %rbx + adoxq %r8, %rbp movq $38, %rdx - mulxq %rbx, %rbx, %rcx - addq %rbx, %r12 + mulxq %rbp, %rbp, %rcx + addq %rbp, %r13 adcq $0x00, %rcx movq $0x7fffffffffffffff, %r8 - shldq $0x01, %r12, %rcx + shldq $0x01, %r13, %rcx imulq $19, %rcx, %rcx - andq %r8, %r12 + andq %r8, %r13 xorq %r8, %r8 - adoxq %rcx, %r9 - mulxq %r13, %rcx, %r13 - adcxq %rcx, %r9 - adoxq %r13, %r10 + adoxq %rcx, %r10 mulxq %r14, %rcx, %r14 adcxq %rcx, %r10 adoxq %r14, %r11 mulxq %r15, %rcx, %r15 adcxq %rcx, %r11 adoxq %r15, %r12 - adcxq %r8, %r12 + mulxq %rbx, %rcx, %rbx + adcxq %rcx, %r12 + adoxq %rbx, %r13 + adcxq %r8, %r13 # Store - movq %r9, (%rdi) - movq %r10, 8(%rdi) - movq %r11, 16(%rdi) - movq %r12, 24(%rdi) + movq %r10, (%rdi) + movq %r11, 8(%rdi) + movq %r12, 16(%rdi) + movq %r13, 24(%rdi) addq $16, %rsp + popq %rbp popq %rbx popq %r15 popq %r14 @@ -18268,14 +18376,13 @@ _ge_p2_dbl_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r13, %rcx - movq $0x7fffffffffffffff, %r8 - imulq $19, %rcx - andq %r8, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %rcx, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -18284,12 +18391,12 @@ _ge_p2_dbl_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %rcx, %rcx - shldq $0x01, %rbp, %rcx - imulq $-19, %rcx - andq %r8, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %rcx, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -18313,14 +18420,13 @@ _ge_p2_dbl_avx2: movq 24(%rsi), %r13 adcq 16(%rax), %r12 adcq 24(%rax), %r13 - movq $0x00, %rcx - adcq $0x00, %rcx - shldq $0x01, %r13, %rcx - movq $0x7fffffffffffffff, %r8 - imulq $19, %rcx - andq %r8, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, 
%r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %rcx, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -18410,13 +18516,12 @@ _ge_p2_dbl_avx2: sbbq 8(%rsi), %r11 sbbq 16(%rsi), %r12 sbbq 24(%rsi), %r13 - sbbq %rcx, %rcx - shldq $0x01, %r13, %rcx - movq $0x7fffffffffffffff, %r8 - imulq $-19, %rcx - andq %r8, %r13 + sbbq %rdx, %rdx + shldq $0x01, %r13, %rdx + imulq $-19, %rdx + btr $63, %r13 # Add modulus (if underflow) - subq %rcx, %r10 + subq %rdx, %r10 sbbq $0x00, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 @@ -18521,13 +18626,12 @@ _ge_p2_dbl_avx2: sbbq 8(%rsi), %r11 sbbq 16(%rsi), %r12 sbbq 24(%rsi), %r13 - sbbq %rcx, %rcx - shldq $0x01, %r13, %rcx - movq $0x7fffffffffffffff, %r8 - imulq $-19, %rcx - andq %r8, %r13 + sbbq %rdx, %rdx + shldq $0x01, %r13, %rdx + imulq $-19, %rdx + btr $63, %r13 # Add modulus (if underflow) - subq %rcx, %r10 + subq %rdx, %r10 sbbq $0x00, %r11 sbbq $0x00, %r12 sbbq $0x00, %r13 @@ -18692,14 +18796,13 @@ _ge_madd_avx2: adcq 16(%rcx), %r12 movq %r13, %rbp adcq 24(%rcx), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -18708,12 +18811,12 @@ _ge_madd_avx2: sbbq 8(%rcx), %r15 sbbq 16(%rcx), %rbx sbbq 24(%rcx), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -18945,14 +19048,13 @@ _ge_madd_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq 
%r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -18961,12 +19063,12 @@ _ge_madd_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -18988,14 +19090,13 @@ _ge_madd_avx2: movq 24(%rcx), %r13 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19011,14 +19112,13 @@ _ge_madd_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19027,12 +19127,12 @@ _ge_madd_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -19201,14 +19301,13 @@ _ge_msub_avx2: adcq 16(%rcx), %r12 movq %r13, %rbp adcq 24(%rcx), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq 
$0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19217,12 +19316,12 @@ _ge_msub_avx2: sbbq 8(%rcx), %r15 sbbq 16(%rcx), %rbx sbbq 24(%rcx), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -19455,14 +19554,13 @@ _ge_msub_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19471,12 +19569,12 @@ _ge_msub_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -19498,14 +19596,13 @@ _ge_msub_avx2: movq 24(%rcx), %r13 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19521,14 +19618,13 @@ _ge_msub_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - 
adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19537,12 +19633,12 @@ _ge_msub_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -19711,14 +19807,13 @@ _ge_add_avx2: adcq 16(%rcx), %r12 movq %r13, %rbp adcq 24(%rcx), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -19727,12 +19822,12 @@ _ge_add_avx2: sbbq 8(%rcx), %r15 sbbq 16(%rcx), %rbx sbbq 24(%rcx), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -20056,14 +20151,13 @@ _ge_add_avx2: adcq %r11, %r11 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20086,14 +20180,13 @@ _ge_add_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp 
adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20102,12 +20195,12 @@ _ge_add_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -20135,14 +20228,13 @@ _ge_add_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20151,12 +20243,12 @@ _ge_add_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -20325,14 +20417,13 @@ _ge_sub_avx2: adcq 16(%rcx), %r12 movq %r13, %rbp adcq 24(%rcx), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20341,12 +20432,12 @@ 
_ge_sub_avx2: sbbq 8(%rcx), %r15 sbbq 16(%rcx), %rbx sbbq 24(%rcx), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -20671,14 +20762,13 @@ _ge_sub_avx2: adcq %r11, %r11 adcq %r12, %r12 adcq %r13, %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20701,14 +20791,13 @@ _ge_sub_avx2: adcq 16(%rsi), %r12 movq %r13, %rbp adcq 24(%rsi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq $0x00, %r13 @@ -20717,12 +20806,12 @@ _ge_sub_avx2: sbbq 8(%rsi), %r15 sbbq 16(%rsi), %rbx sbbq 24(%rsi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -20750,14 +20839,13 @@ _ge_sub_avx2: adcq 16(%rdi), %r12 movq %r13, %rbp adcq 24(%rdi), %r13 - movq $0x00, %r8 - adcq $0x00, %r8 - shldq $0x01, %r13, %r8 - movq $0x7fffffffffffffff, %r9 - imulq $19, %r8 - andq %r9, %r13 + movq $0x00, %rdx + adcq $0x00, %rdx + shldq $0x01, %r13, %rdx + imulq $19, %rdx + btr $63, %r13 # Sub modulus (if overflow) - addq %r8, %r10 + addq %rdx, %r10 adcq $0x00, %r11 adcq $0x00, %r12 adcq 
$0x00, %r13 @@ -20766,12 +20854,12 @@ _ge_sub_avx2: sbbq 8(%rdi), %r15 sbbq 16(%rdi), %rbx sbbq 24(%rdi), %rbp - sbbq %r8, %r8 - shldq $0x01, %rbp, %r8 - imulq $-19, %r8 - andq %r9, %rbp + sbbq %rdx, %rdx + shldq $0x01, %rbp, %rdx + imulq $-19, %rdx + btr $63, %rbp # Add modulus (if underflow) - subq %r8, %r14 + subq %rdx, %r14 sbbq $0x00, %r15 sbbq $0x00, %rbx sbbq $0x00, %rbp @@ -21322,6 +21410,385 @@ _sc_muladd_avx2: #ifndef __APPLE__ .size sc_muladd_avx2,.-sc_muladd_avx2 #endif /* __APPLE__ */ +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__prime: +.long 0x03ffffed,0x03ffffff,0x03ffffff,0x03ffffff +.long 0x03ffffff,0x00000000,0x00000000,0x00000000 +.long 0x03ffffff,0x03ffffff,0x03ffffff,0x03ffffff +.long 0x001fffff,0x00000000,0x00000000,0x00000000 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.align 32 +#else +.p2align 5 +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__one: +.quad 0x1, 0x0 +.quad 0x0, 0x0 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__all_one: +.long 0x00000001,0x00000001,0x00000001,0x00000001 +.long 0x00000001,0x00000001,0x00000001,0x00000001 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__mask01111: +.long 0x00000000,0x00000001,0x00000001,0x00000001 +.long 0x00000001,0x00000000,0x00000000,0x00000000 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__down_one_dword: +.long 0x00000001,0x00000002,0x00000003,0x00000004 +.long 0x00000005,0x00000006,0x00000007,0x00000007 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__neg: +.long 0x00000000,0x00000000,0x00000000,0x00000000 +.long 0x80000000,0x00000000,0x00000000,0x00000000 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__up_one_dword: 
+.long 0x00000007,0x00000000,0x00000001,0x00000002 +.long 0x00000003,0x00000007,0x00000007,0x00000007 +#ifndef __APPLE__ +.data +#else +.section __DATA,__data +#endif /* __APPLE__ */ +L_sp_mod_inv_avx2__mask26: +.long 0x03ffffff,0x03ffffff,0x03ffffff,0x03ffffff +.long 0x03ffffff,0x00000000,0x00000000,0x00000000 +/* Non-constant time modular inversion. + * + * @param [out] r Resulting number. + * @param [in] a Number to invert. + * @param [in] m Modulus. + * @return MP_OKAY on success. + */ +#ifndef __APPLE__ +.text +.globl fe_invert_nct_avx2 +.type fe_invert_nct_avx2,@function +.align 16 +fe_invert_nct_avx2: +#else +.section __TEXT,__text +.globl _fe_invert_nct_avx2 +.p2align 4 +_fe_invert_nct_avx2: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + pushq %rbx + movq $-19, %rax + movq $-1, %rcx + movq $-1, %r8 + movq $0x7fffffffffffffff, %r9 + movq (%rsi), %r10 + movq 8(%rsi), %r11 + movq 16(%rsi), %r12 + movq 24(%rsi), %r13 + leaq L_sp_mod_inv_avx2__prime(%rip), %rbx + vmovupd (%rbx), %ymm6 + vmovupd 32(%rbx), %ymm7 + leaq L_sp_mod_inv_avx2__one(%rip), %rbx + vmovupd (%rbx), %ymm8 + leaq L_sp_mod_inv_avx2__mask01111(%rip), %rbx + vmovupd (%rbx), %ymm9 + leaq L_sp_mod_inv_avx2__all_one(%rip), %rbx + vmovupd (%rbx), %ymm10 + leaq L_sp_mod_inv_avx2__down_one_dword(%rip), %rbx + vmovupd (%rbx), %ymm11 + leaq L_sp_mod_inv_avx2__neg(%rip), %rbx + vmovupd (%rbx), %ymm12 + leaq L_sp_mod_inv_avx2__up_one_dword(%rip), %rbx + vmovupd (%rbx), %ymm13 + leaq L_sp_mod_inv_avx2__mask26(%rip), %rbx + vmovupd (%rbx), %ymm14 + vpxor %xmm0, %xmm0, %xmm0 + vpxor %xmm1, %xmm1, %xmm1 + vmovdqu %ymm8, %ymm2 + vpxor %xmm3, %xmm3, %xmm3 + testb $0x01, %r10b + jnz L__mod_inv_avx2__v_even_end +L__mod_inv_avx2__v_even_start: + shrdq $0x01, %r11, %r10 + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrq $0x01, %r13 + vptest %ymm8, %ymm2 + jz L__mod_inv_avx2__v_even_shr1 + vpaddd %ymm6, %ymm2, %ymm2 + vpaddd %ymm7, %ymm3, %ymm3 +L__mod_inv_avx2__v_even_shr1: + 
vpand %ymm9, %ymm2, %ymm4 + vpand %ymm10, %ymm3, %ymm5 + vpermd %ymm4, %ymm11, %ymm4 + vpsrad $0x01, %ymm2, %ymm2 + vpsrad $0x01, %ymm3, %ymm3 + vpslld $25, %ymm5, %ymm5 + vpslld $25, %xmm4, %xmm4 + vpaddd %ymm5, %ymm2, %ymm2 + vpaddd %ymm4, %ymm3, %ymm3 + testb $0x01, %r10b + jz L__mod_inv_avx2__v_even_start +L__mod_inv_avx2__v_even_end: +L__mod_inv_avx2__uv_start: + cmpq %r13, %r9 + jb L__mod_inv_avx2__uv_v + ja L__mod_inv_avx2__uv_u + cmpq %r12, %r8 + jb L__mod_inv_avx2__uv_v + ja L__mod_inv_avx2__uv_u + cmpq %r11, %rcx + jb L__mod_inv_avx2__uv_v + ja L__mod_inv_avx2__uv_u + cmpq %r10, %rax + jb L__mod_inv_avx2__uv_v +L__mod_inv_avx2__uv_u: + subq %r10, %rax + sbbq %r11, %rcx + vpsubd %ymm2, %ymm0, %ymm0 + sbbq %r12, %r8 + vpsubd %ymm3, %ymm1, %ymm1 + sbbq %r13, %r9 + vptest %ymm12, %ymm1 + jz L__mod_inv_avx2__usubv_done_neg + vpaddd %ymm6, %ymm0, %ymm0 + vpaddd %ymm7, %ymm1, %ymm1 +L__mod_inv_avx2__usubv_done_neg: +L__mod_inv_avx2__usubv_shr1: + shrdq $0x01, %rcx, %rax + shrdq $0x01, %r8, %rcx + shrdq $0x01, %r9, %r8 + shrq $0x01, %r9 + vptest %ymm8, %ymm0 + jz L__mod_inv_avx2__usubv_sub_shr1 + vpaddd %ymm6, %ymm0, %ymm0 + vpaddd %ymm7, %ymm1, %ymm1 +L__mod_inv_avx2__usubv_sub_shr1: + vpand %ymm9, %ymm0, %ymm4 + vpand %ymm10, %ymm1, %ymm5 + vpermd %ymm4, %ymm11, %ymm4 + vpsrad $0x01, %ymm0, %ymm0 + vpsrad $0x01, %ymm1, %ymm1 + vpslld $25, %ymm5, %ymm5 + vpslld $25, %xmm4, %xmm4 + vpaddd %ymm5, %ymm0, %ymm0 + vpaddd %ymm4, %ymm1, %ymm1 + testb $0x01, %al + jz L__mod_inv_avx2__usubv_shr1 + cmpq $0x01, %rax + jne L__mod_inv_avx2__uv_start + movq %rcx, %rdx + orq %r8, %rdx + jne L__mod_inv_avx2__uv_start + orq %r9, %rdx + jne L__mod_inv_avx2__uv_start + vpextrd $0x00, %xmm0, %eax + vpextrd $0x01, %xmm0, %r8d + vpextrd $2, %xmm0, %r10d + vpextrd $3, %xmm0, %r12d + vpextrd $0x00, %xmm1, %ecx + vpextrd $0x01, %xmm1, %r9d + vpextrd $2, %xmm1, %r11d + vpextrd $3, %xmm1, %r13d + vextracti128 $0x01, %ymm0, %xmm0 + vextracti128 $0x01, %ymm1, %xmm1 + vpextrd $0x00, %xmm0, 
%r14d + vpextrd $0x00, %xmm1, %r15d + jmp L__mod_inv_avx2__store_done +L__mod_inv_avx2__uv_v: + subq %rax, %r10 + sbbq %rcx, %r11 + vpsubd %ymm0, %ymm2, %ymm2 + sbbq %r8, %r12 + vpsubd %ymm1, %ymm3, %ymm3 + sbbq %r9, %r13 + vptest %ymm12, %ymm3 + jz L__mod_inv_avx2__vsubu_done_neg + vpaddd %ymm6, %ymm2, %ymm2 + vpaddd %ymm7, %ymm3, %ymm3 +L__mod_inv_avx2__vsubu_done_neg: +L__mod_inv_avx2__vsubu_shr1: + shrdq $0x01, %r11, %r10 + shrdq $0x01, %r12, %r11 + shrdq $0x01, %r13, %r12 + shrq $0x01, %r13 + vptest %ymm8, %ymm2 + jz L__mod_inv_avx2__vsubu_sub_shr1 + vpaddd %ymm6, %ymm2, %ymm2 + vpaddd %ymm7, %ymm3, %ymm3 +L__mod_inv_avx2__vsubu_sub_shr1: + vpand %ymm9, %ymm2, %ymm4 + vpand %ymm10, %ymm3, %ymm5 + vpermd %ymm4, %ymm11, %ymm4 + vpsrad $0x01, %ymm2, %ymm2 + vpsrad $0x01, %ymm3, %ymm3 + vpslld $25, %ymm5, %ymm5 + vpslld $25, %xmm4, %xmm4 + vpaddd %ymm5, %ymm2, %ymm2 + vpaddd %ymm4, %ymm3, %ymm3 + testb $0x01, %r10b + jz L__mod_inv_avx2__vsubu_shr1 + cmpq $0x01, %r10 + jne L__mod_inv_avx2__uv_start + movq %r11, %rdx + orq %r12, %rdx + jne L__mod_inv_avx2__uv_start + orq %r13, %rdx + jne L__mod_inv_avx2__uv_start + vpextrd $0x00, %xmm2, %eax + vpextrd $0x01, %xmm2, %r8d + vpextrd $2, %xmm2, %r10d + vpextrd $3, %xmm2, %r12d + vpextrd $0x00, %xmm3, %ecx + vpextrd $0x01, %xmm3, %r9d + vpextrd $2, %xmm3, %r11d + vpextrd $3, %xmm3, %r13d + vextracti128 $0x01, %ymm2, %xmm2 + vextracti128 $0x01, %ymm3, %xmm3 + vpextrd $0x00, %xmm2, %r14d + vpextrd $0x00, %xmm3, %r15d +L__mod_inv_avx2__store_done: + movl %eax, %edx + andl $0x3ffffff, %eax + sarl $26, %edx + addl %edx, %ecx + movl %ecx, %edx + andl $0x3ffffff, %ecx + sarl $26, %edx + addl %edx, %r8d + movl %r8d, %edx + andl $0x3ffffff, %r8d + sarl $26, %edx + addl %edx, %r9d + movl %r9d, %edx + andl $0x3ffffff, %r9d + sarl $26, %edx + addl %edx, %r10d + movl %r10d, %edx + andl $0x3ffffff, %r10d + sarl $26, %edx + addl %edx, %r11d + movl %r11d, %edx + andl $0x3ffffff, %r11d + sarl $26, %edx + addl %edx, %r12d + movl %r12d, 
%edx + andl $0x3ffffff, %r12d + sarl $26, %edx + addl %edx, %r13d + movl %r13d, %edx + andl $0x3ffffff, %r13d + sarl $26, %edx + addl %edx, %r14d + movl %r14d, %edx + andl $0x3ffffff, %r14d + sarl $26, %edx + addl %edx, %r15d + movslq %ecx, %rcx + movslq %r9d, %r9 + movslq %r11d, %r11 + movslq %r13d, %r13 + movslq %r15d, %r15 + shlq $26, %rcx + shlq $26, %r9 + shlq $26, %r11 + shlq $26, %r13 + shlq $26, %r15 + movslq %eax, %rax + addq %rcx, %rax + movslq %r8d, %r8 + adcq %r9, %r8 + movslq %r10d, %r10 + adcq %r11, %r10 + movslq %r12d, %r12 + adcq %r13, %r12 + movslq %r14d, %r14 + adcq %r15, %r14 + jge L__mod_inv_avx2__3_no_add_prime + movq $0xfffffffffffed, %rcx + movq $0xfffffffffffff, %r9 + movq $0xfffffffffffff, %r11 + movq $0xfffffffffffff, %r13 + movq $0x7fffffffffff, %r15 + addq %rcx, %rax + addq %r9, %r8 + addq %r11, %r10 + addq %r13, %r12 + addq %r15, %r14 + movq $0xfffffffffffff, %rdx + movq %rax, %rcx + andq %rdx, %rax + sarq $52, %rcx + addq %rcx, %r8 + movq %r8, %r9 + andq %rdx, %r8 + sarq $52, %r9 + addq %r9, %r10 + movq %r10, %r11 + andq %rdx, %r10 + sarq $52, %r11 + addq %r11, %r12 + movq %r12, %r13 + andq %rdx, %r12 + sarq $52, %r13 + addq %r13, %r14 +L__mod_inv_avx2__3_no_add_prime: + movq %r8, %rcx + movq %r10, %r9 + movq %r12, %r11 + shlq $52, %rcx + sarq $12, %r8 + shlq $40, %r9 + sarq $24, %r10 + shlq $28, %r11 + sarq $36, %r12 + shlq $16, %r14 + addq %rcx, %rax + adcq %r9, %r8 + adcq %r11, %r10 + adcq %r14, %r12 + movq %rax, (%rdi) + movq %r8, 8(%rdi) + movq %r10, 16(%rdi) + movq %r12, 24(%rdi) + vzeroupper + popq %rbx + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_invert_nct_avx2,.-fe_invert_nct_avx2 +#endif /* __APPLE__ */ #endif /* HAVE_ED25519 */ #endif /* HAVE_INTEL_AVX2 */ diff --git a/wolfcrypt/src/ge_operations.c b/wolfcrypt/src/ge_operations.c index c3264e94f..d9056cbcd 100644 --- a/wolfcrypt/src/ge_operations.c +++ b/wolfcrypt/src/ge_operations.c @@ -9175,24 +9175,25 @@ void 
ge_scalarmult_base(ge_p3 *h,const unsigned char *a) #define SLIDE_SIZE 256 /* ge double scalar mult */ -static void slide(signed char *r,const unsigned char *a) +static void slide(signed char *r,const unsigned char *a, int max) { int i; int b; int k; - for (i = 0;i < SLIDE_SIZE;++i) + for (i = 0;i < SLIDE_SIZE;++i) { r[i] = 1 & (a[i >> 3] >> (i & 7)); + } - for (i = 0;i < SLIDE_SIZE;++i) + for (i = 0;i < SLIDE_SIZE;++i) { if (r[i]) { for (b = 1;b <= 6 && i + b < SLIDE_SIZE;++b) { if (r[i + b]) { signed char rb = (signed char)((unsigned char)r[i + b] << b); - if (r[i] + rb <= 15) { + if (r[i] + rb <= max) { r[i] = (signed char)(r[i] + rb); r[i + b] = 0; - } else if (r[i] - rb >= -15) { + } else if (r[i] - rb >= -max) { r[i] = (signed char)(r[i] - rb); for (k = i + b;k < SLIDE_SIZE;++k) { if (!r[k]) { @@ -9206,180 +9207,470 @@ static void slide(signed char *r,const unsigned char *a) } } } + } } +/* Generated using command: ruby ../scripts/x25519/ed25519.rb */ #ifdef CURVED25519_ASM_64BIT -static const ge_precomp Bi[8] = { +static const ge_precomp Bi[32] = { { - { 0x2fbc93c6f58c3b85, -0x306cd2390473f1e7, 0x270b4898643d42c2, 0x07cf9d3a33d4ba65, }, - { -0x62efc6fa28bf6ec2, -0x02c660fa2ebf414d, -0x5a3e7bcb977075f7, 0x44fd2f9298f81267, }, - { -0x5436edfa78855598, 0x26d9e823ccaac49e, 0x5a1b7dcbdd43598c, 0x6f117b689f0c65a8, }, + { 0x2fbc93c6f58c3b85, -0x306cd2390473f1e7, 0x270b4898643d42c2, + 0x07cf9d3a33d4ba65 }, + { -0x62efc6fa28bf6ec2, -0x02c660fa2ebf414d, -0x5a3e7bcb977075f7, + 0x44fd2f9298f81267 }, + { -0x5436edfa78855598, 0x26d9e823ccaac49e, 0x5a1b7dcbdd43598c, + 0x6f117b689f0c65a8 }, }, { - { -0x50da4f57b31168d0, 0x025a8430e8864b8a, -0x3ee4affd60fe98ce, 0x7a164e1b9a80f8f4, }, - { 0x56611fe8a4fcd265, 0x3bd353fde5c1ba7d, -0x7ece0ce5deb42943, 0x2ab91587555bda62, }, - { 0x14ae933f0dd0d889, 0x589423221c35da62, -0x2e8f1aba730d24b4, 0x5a2826af12b9b4c6, }, + { -0x50da4f57b31168d0, 0x025a8430e8864b8a, -0x3ee4affd60fe98ce, + 0x7a164e1b9a80f8f4 }, + { 0x56611fe8a4fcd265, 
0x3bd353fde5c1ba7d, -0x7ece0ce5deb42943, + 0x2ab91587555bda62 }, + { 0x14ae933f0dd0d889, 0x589423221c35da62, -0x2e8f1aba730d24b4, + 0x5a2826af12b9b4c6 }, }, { - { -0x5ded43bbf75a44cd, -0x72afb73c38a112fe, -0x22e414f3a54013bc, 0x2945ccf146e206eb, }, - { 0x7f9182c3a447d6ba, -0x2affeb2eb4d8d649, -0x1cc30ee3479b5f79, 0x154a7e73eb1b55f3, }, - { -0x4344240e7ed57d7b, 0x270e0807d0bdd1fc, -0x4be498f4e44258d3, 0x43aabe696b3bb69a, }, + { -0x5ded43bbf75a44cd, -0x72afb73c38a112fe, -0x22e414f3a54013bc, + 0x2945ccf146e206eb }, + { 0x7f9182c3a447d6ba, -0x2affeb2eb4d8d649, -0x1cc30ee3479b5f79, + 0x154a7e73eb1b55f3 }, + { -0x4344240e7ed57d7b, 0x270e0807d0bdd1fc, -0x4be498f4e44258d3, + 0x43aabe696b3bb69a }, }, { - { 0x6b1a5cd0944ea3bf, 0x7470353ab39dc0d2, 0x71b2528228542e49, 0x461bea69283c927e, }, - { -0x4590d36555cdde4f, 0x6ca021533bba23a7, -0x621589b06de6d3c6, 0x1d6edd5d2e5317e0, }, - { -0x0e7c9237fe474c5e, -0x4cfca0b8fac15b66, 0x529c41ba5877adf3, 0x7a9fbb1c6a0f90a7, }, + { 0x6b1a5cd0944ea3bf, 0x7470353ab39dc0d2, 0x71b2528228542e49, + 0x461bea69283c927e }, + { -0x4590d36555cdde4f, 0x6ca021533bba23a7, -0x621589b06de6d3c6, + 0x1d6edd5d2e5317e0 }, + { -0x0e7c9237fe474c5e, -0x4cfca0b8fac15b66, 0x529c41ba5877adf3, + 0x7a9fbb1c6a0f90a7 }, }, { - { -0x64d1987559579cd1, -0x59af6190ae43b93b, -0x314dcc3639790a4b, 0x34b9ed338add7f59, }, - { -0x0c91de81fc627f9c, -0x675f7e490adfbe65, -0x693439f718a14fbc, 0x49c05a51fadc9c8f, }, - { 0x06b4e8bf9045af1b, -0x1d007c1758e62dd1, -0x550903d66c2b30ea, 0x73c172021b008b06, }, + { -0x64d1987559579cd1, -0x59af6190ae43b93b, -0x314dcc3639790a4b, + 0x34b9ed338add7f59 }, + { -0x0c91de81fc627f9c, -0x675f7e490adfbe65, -0x693439f718a14fbc, + 0x49c05a51fadc9c8f }, + { 0x06b4e8bf9045af1b, -0x1d007c1758e62dd1, -0x550903d66c2b30ea, + 0x73c172021b008b06 }, }, { - { 0x2fbf00848a802ade, -0x1a260130fdcfd1d9, 0x113e847117703406, 0x4275aae2546d8faf, }, - { 0x315f5b0249864348, 0x3ed6b36977088381, -0x5c5f8aaa9572146b, 0x18ab598029d5c77f, }, - { -0x27d4d33a029f7617, 
0x031eb4a13282e4a4, 0x44311199b51a8622, 0x3dc65522b53df948, }, + { 0x2fbf00848a802ade, -0x1a260130fdcfd1d9, 0x113e847117703406, + 0x4275aae2546d8faf }, + { 0x315f5b0249864348, 0x3ed6b36977088381, -0x5c5f8aaa9572146b, + 0x18ab598029d5c77f }, + { -0x27d4d33a029f7617, 0x031eb4a13282e4a4, 0x44311199b51a8622, + 0x3dc65522b53df948 }, }, { - { -0x408f3ddd5dff8093, -0x407b4c654a432125, 0x537a0e12fb07ba07, 0x234fd7eec346f241, }, - { 0x506f013b327fbf93, -0x5103143664889095, -0x62ed4dcd5552a698, 0x0267882d176024a7, }, - { 0x5360a119732ea378, 0x2437e6b1df8dd471, -0x5d10c8076e581acd, 0x497ba6fdaa097863, }, + { -0x408f3ddd5dff8093, -0x407b4c654a432125, 0x537a0e12fb07ba07, + 0x234fd7eec346f241 }, + { 0x506f013b327fbf93, -0x5103143664889095, -0x62ed4dcd5552a698, + 0x0267882d176024a7 }, + { 0x5360a119732ea378, 0x2437e6b1df8dd471, -0x5d10c8076e581acd, + 0x497ba6fdaa097863 }, }, { - { 0x24cecc0313cfeaa0, -0x79b73d72e763db93, 0x2dbdbdfac1f2d4d0, 0x61e22917f12de72b, }, - { 0x040bcd86468ccf0b, -0x2c7d645bd566ef2a, 0x7508300807b25192, 0x43b5cd4218d05ebf, }, - { 0x5d9a762f9bd0b516, -0x14c750b1c8c02112, 0x032e5a7d93d64270, 0x511d61210ae4d842, }, + { 0x24cecc0313cfeaa0, -0x79b73d72e763db93, 0x2dbdbdfac1f2d4d0, + 0x61e22917f12de72b }, + { 0x040bcd86468ccf0b, -0x2c7d645bd566ef2a, 0x7508300807b25192, + 0x43b5cd4218d05ebf }, + { 0x5d9a762f9bd0b516, -0x14c750b1c8c02112, 0x032e5a7d93d64270, + 0x511d61210ae4d842 }, + }, + { + { -0x6d3989106af1627f, -0x5ab9df323f28fbb1, -0x5564c99b9070edb8, + 0x6d325924ddb855e3 }, + { 0x081386484420de87, -0x75e30fe94a6d124c, 0x39fa4e2729942d25, + 0x71a7fe6fe2482810 }, + { 0x6c7182b8a5c8c854, 0x33fd1479fe5f2a03, 0x72cf591883778d0c, + 0x4746c4b6559eeaa9 }, + }, + { + { -0x2c8884c3923965d5, -0x21054dd8907609e9, 0x45651cf7b53a16b5, + 0x5c9a51de34fe9fb7 }, + { 0x348546c864741147, 0x7d35aedd0efcc849, -0x006c6589f98d5cce, + 0x219663497db5e6d6 }, + { -0x0aef0e30860ef199, -0x0022255e19a7aea5, 0x09c3a71710142277, + 0x4804503c608223bb }, + }, + { + { -0x3bdb612fd35c8039, 
-0x5fa65f1c59ea5355, -0x775691283691f1dd, + 0x553398a51650696d }, + { 0x3b6821d23a36d175, -0x444bf558166461ce, 0x5d9e5ce420838a47, + 0x771e098858de4c5e }, + { -0x65ed0a2d87bae121, 0x3ada5d7985899ccb, 0x477f4a2d9fa59508, + 0x5a5ed1d68ff5a611 }, + }, + { + { 0x1195122afe150e83, -0x30df65da81b4ca28, 0x7387f8291e711e20, + 0x44acb897d8bf92f0 }, + { -0x451a1f3aa7ad8ca7, 0x392e5c19cadb9d7e, 0x28653c1eda1cabe9, + 0x019b60135fefdc44 }, + { 0x1e6068145e134b83, -0x3b0a19b0dbcfb3ea, 0x506e88a8fc1a3ed7, + 0x150c49fde6ad2f92 }, + }, + { + { -0x71840d6af6b8eec8, 0x5d6fef394f75a651, 0x10af79c425a708ad, + 0x6b2b5a075bb99922 }, + { -0x47b679c363235798, -0x37c0bb24478eb530, -0x01c11ca9f3c9e973, + 0x78a6d7791e05fbc1 }, + { 0x58bf704b47a0b976, -0x59fe4caa8be8b72b, -0x55d4e04e2abd0a70, + 0x725c7ffc4ad55d00 }, + }, + { + { -0x1bbd98ea2e30664e, 0x7352d51102a20d34, 0x23d1157b8b12109f, + 0x794cc9277cb1f3a3 }, + { -0x6e7fd408e32f6740, -0x01be935b12a19c9a, -0x20a7a28eb6fd66b4, + 0x4cd54625f855fae7 }, + { 0x4af6c426c2ac5053, -0x43651252cd098da8, 0x2ad032f10a311021, + 0x7008357b6fcc8e85 }, + }, + { + { 0x0b88672738773f01, -0x473337056a043305, -0x72d22a5c4652d64a, + 0x06ef7e9851ad0f6a }, + { -0x2fe460447da7b5cc, 0x47ab6463d2b4792b, -0x49ce9c63b7ac9dfe, + 0x13a92a3669d6d428 }, + { -0x356c88e33fa8821b, 0x7540e41e5035dc5c, 0x24680f01d802e071, + 0x3c296ddf8a2af86a }, + }, + { + { -0x5152ea0626eb58ed, -0x56d08406737006ee, -0x5007dce860ac28d0, + 0x7a99d393490c77ba }, + { -0x0314b2d144e0dabf, -0x476aef38bf5246e1, -0x038e5c822f5e52fb, + 0x0a892c700747717b }, + { -0x70ad12dbc9425c18, 0x77a8c84157e80794, -0x5a569a9cd9d06320, + 0x286762d28302f7d2 }, + }, + { + { 0x4e7836093ce35b25, -0x7d1ee7e24d945569, 0x0cc192d3cbc7b83f, + 0x32f1da046a9d9d3a }, + { 0x7c558e2bce2ef5bd, -0x1b67934b98b8439d, 0x154a179f3bbb89b8, + 0x7686f2a3d6f1767a }, + { -0x5572ed5992a68396, -0x70ee6cfcfb2c7ad5, 0x3f91dc73c209b022, + 0x561305f8a9ad28a6 }, + }, + { + { 0x100c978dec92aed1, -0x35bc2abcb2928c1b, -0x7cece4dd27b845b8, + 
0x00aaec53e35d4d2c }, + { 0x6722cc28e7b0c0d5, 0x709de9bbdb075c53, -0x3509725828fef59f, + 0x030a1aef2c57cc6c }, + { 0x7bb1f773003ad2aa, 0x0b3f29802b216608, 0x7821dc86520ed23e, + 0x20be9c1c24065480 }, + }, + { + { -0x1eac7827db698c5a, 0x5943bc2df546e493, 0x1c7f9a81c36f63b5, + 0x750ab3361f0ac1de }, + { 0x20e0e44ae2025e60, -0x4fc4c4d0342346c8, 0x105d639cf95a0d1c, + 0x69764c545067e311 }, + { 0x1e8a3283a2f81037, 0x6f2eda23bd7fcbf1, -0x48d02ea453d1da9d, + 0x54f96b3fb7075040 }, + }, + { + { 0x0fadf20429669279, 0x3adda2047d7d724a, 0x6f3d94828c5760f1, + 0x3d7fe9c52bb7539e }, + { 0x177dafc616b11ecd, -0x7689b46305a89b87, -0x48575eef1913187b, + 0x78e6839fbe85dbf0 }, + { 0x70332df737b8856b, 0x75d05d43041a178a, 0x320ff74aa0e59e22, + 0x70f268f350088242 }, + }, + { + { 0x66864583b1805f47, -0x0aca3a2e9f2283e7, -0x1678b148e1b34ffa, + 0x7c0d345cfad889d9 }, + { 0x2324112070dcf355, 0x380cc97ee7fce117, -0x4ce22112caad4968, + 0x404e56c039b8c4b9 }, + { 0x591f1f4b8c78338a, -0x5fc9954e981f4a1f, 0x5cbc4152b45f3d44, + 0x20d754762aaec777 }, + }, + { + { 0x5e8fc36fc73bb758, -0x531abc5ac9c34466, -0x566cb5826fc436de, + 0x2b8f1e46f3ceec62 }, + { -0x628b014eca460abd, -0x7b4c820e21736a94, -0x16cdd4f8a8ec7457, + 0x38b8ada8790b4ce1 }, + { -0x4a3fb56320ae06a3, 0x2b3952aecb1fdeac, 0x1d106d8b328b66da, + 0x049aeb32ceba1953 }, + }, + { + { -0x55af82f48a0386cf, 0x0fef924b7a6725d3, 0x1d82542b396b3930, + 0x795ee17530f674fc }, + { -0x288982c39c230182, 0x209c594897856e40, -0x4998979e1eb083ed, + 0x51c665e0c8d625fc }, + { 0x254a5b0a52ecbd81, 0x5d411f6ee034afe7, -0x195db2f23511b5cf, + 0x6cd19bf49dc54477 }, + }, + { + { 0x1ffe612165afc386, 0x082a2a88b8d51b10, 0x76f6627e20990baa, + 0x5e01b3a7429e43e7 }, + { 0x7e87619052179ca3, 0x571d0a060b2c9f85, -0x7f5d45577b668ee2, + 0x7520f3db40b2e638 }, + { 0x3db50be3d39357a1, -0x69849322a6616b5b, 0x1a309a64df311e6e, + 0x71092c9ccef3c986 }, + }, + { + { -0x7a9427538bfae231, 0x03f6a40855b7aa1e, 0x3a4ae7cbc9743ceb, + 0x4173a5bb7137abde }, + { 0x53d8523f0364918c, 
-0x5d4bfb0bc05494e4, 0x080b4a9e6681e5a4, + 0x0ea15b03d0257ba7 }, + { 0x17c56e31f0f9218a, 0x5a696e2b1afc4708, -0x086ce9970b4d0e8a, + 0x5fc565614a4e3a67 }, + }, + { + { 0x4892e1e67790988e, 0x01d5950f1c5cd722, -0x1c4f7e651a6dc113, + 0x3214c7409d46651b }, + { 0x136e570dc46d7ae5, 0x0fd0aacc54f8dc8f, 0x59549f03310dad86, + 0x62711c414c454aa1 }, + { 0x1329827406651770, 0x3ba4a0668a279436, -0x26494713e7a2ddc4, + 0x5bea94073ecb833c }, + }, + { + { -0x4b8f319c0cbc2d08, 0x0067ba8f0543e8f1, 0x35da51a1a2117b6f, + 0x4ad0785944f1bd2f }, + { 0x641dbf0912c89be4, -0x530c74ce8291a864, -0x540161fd09684f9b, + 0x3aacd5c148f61eec }, + { -0x7a71c4cb3cce7cff, -0x23663fb8f8ce97da, 0x34085b2ed39da88c, + 0x3aff0cb1d902853d }, + }, + { + { -0x6dd9bcf40b3acafb, 0x68e49c13261f2283, 0x09ef33788fd327c6, + 0x2ccf9f732bd99e7f }, + { -0x783a3814c5dfbfa2, -0x711cee101252a937, 0x29252e48ad29d5f9, + 0x110e7e86f4cd251d }, + { 0x57c0d89ed603f5e4, 0x12888628f0b0200c, 0x53172709a02e3bb7, + 0x05c557e0b9693a37 }, + }, + { + { -0x0889444f763df150, 0x61f85bf6fa0fd85c, -0x4946c0b19cbbde05, + 0x289fef0841861205 }, + { -0x270631cee0368191, 0x7a3f263011f9fdae, -0x1ea4815f7412da23, + 0x6e154c178fe9875a }, + { -0x309e9cc901296541, -0x64e91b187cca36b1, 0x13789765753a7fe7, + 0x6afbf642a95ca319 }, + }, + { + { 0x5de55070f913a8cc, 0x7d1d167b2b0cf561, -0x25d6a9496f152b77, + 0x12c093cedb801ed9 }, + { 0x7da8de0c62f5d2c1, -0x6703c25b4ff18466, 0x7deb6ada0dad70e0, + 0x0db4b851b95038c4 }, + { -0x03eb806cf747e6f1, 0x06969da0a11ae310, -0x3118aa8d25382803, + 0x33aa8799c6635ce6 }, + }, + { + { -0x7cb70a7703ea934f, 0x6da2ba9b1a0a6d27, -0x1dd9d2a37835a54a, + 0x212cd0c1c8d589a6 }, + { -0x50f00ae142f7a30e, 0x78f51a8967d33f1f, 0x6ec2bfe15060033c, + 0x233c6f29e8e21a86 }, + { -0x2d0b2aef80e7387f, 0x122ecdf2527e9d28, -0x58f579d5c2c2ccbf, + 0x1db7778911914ce3 }, + }, + { + { -0x4cc6b896228fe54a, -0x1d47212be630725b, 0x15df4161fd2ac852, + 0x7ae2ca8a017d24be }, + { -0x220cadc683943d91, 0x7a97e2cc53d50113, 0x7c74f43abf79a330, + 
0x31ad97ad26e2adfc }, + { -0x4817e812f6df469e, 0x1e8518cc3f19da9d, -0x1b6e3eb0daa9f59c, + 0x1ed1fc53a6622c83 }, }, }; #elif defined(CURVED25519_ASM_32BIT) static const ge_precomp Bi[8] = { { - { -0x0a73c47b, 0x2fbc93c6, -0x0473f1e7, -0x306cd23a, 0x643d42c2, 0x270b4898, 0x33d4ba65, 0x07cf9d3a, }, - { -0x28bf6ec2, -0x62efc6fb, -0x2ebf414d, -0x02c660fb, 0x688f8a09, -0x5a3e7bcc, -0x6707ed99, 0x44fd2f92, }, - { -0x78855598, -0x5436edfb, -0x33553b62, 0x26d9e823, -0x22bca674, 0x5a1b7dcb, -0x60f39a58, 0x6f117b68, }, + { -0x0a73c47b, 0x2fbc93c6, -0x0473f1e7, -0x306cd23a, 0x643d42c2, + 0x270b4898, 0x33d4ba65, 0x07cf9d3a }, + { -0x28bf6ec2, -0x62efc6fb, -0x2ebf414d, -0x02c660fb, 0x688f8a09, + -0x5a3e7bcc, -0x6707ed99, 0x44fd2f92 }, + { -0x78855598, -0x5436edfb, -0x33553b62, 0x26d9e823, -0x22bca674, + 0x5a1b7dcb, -0x60f39a58, 0x6f117b68 }, }, { - { 0x4cee9730, -0x50da4f58, -0x1779b476, 0x025a8430, -0x60fe98ce, -0x3ee4affe, -0x657f070c, 0x7a164e1b, }, - { -0x5b032d9b, 0x56611fe8, -0x1a3e4583, 0x3bd353fd, 0x214bd6bd, -0x7ece0ce6, 0x555bda62, 0x2ab91587, }, - { 0x0dd0d889, 0x14ae933f, 0x1c35da62, 0x58942322, -0x730d24b4, -0x2e8f1abb, 0x12b9b4c6, 0x5a2826af, }, + { 0x4cee9730, -0x50da4f58, -0x1779b476, 0x025a8430, -0x60fe98ce, + -0x3ee4affe, -0x657f070c, 0x7a164e1b }, + { -0x5b032d9b, 0x56611fe8, -0x1a3e4583, 0x3bd353fd, 0x214bd6bd, + -0x7ece0ce6, 0x555bda62, 0x2ab91587 }, + { 0x0dd0d889, 0x14ae933f, 0x1c35da62, 0x58942322, -0x730d24b4, + -0x2e8f1abb, 0x12b9b4c6, 0x5a2826af }, }, { - { 0x08a5bb33, -0x5ded43bc, -0x38a112fe, -0x72afb73d, 0x5abfec44, -0x22e414f4, 0x46e206eb, 0x2945ccf1, }, - { -0x5bb82946, 0x7f9182c3, 0x4b2729b7, -0x2affeb2f, -0x479b5f79, -0x1cc30ee4, -0x14e4aa0d, 0x154a7e73, }, - { -0x7ed57d7b, -0x4344240f, -0x2f422e04, 0x270e0807, 0x1bbda72d, -0x4be498f5, 0x6b3bb69a, 0x43aabe69, }, + { 0x08a5bb33, -0x5ded43bc, -0x38a112fe, -0x72afb73d, 0x5abfec44, + -0x22e414f4, 0x46e206eb, 0x2945ccf1 }, + { -0x5bb82946, 0x7f9182c3, 0x4b2729b7, -0x2affeb2f, -0x479b5f79, + 
-0x1cc30ee4, -0x14e4aa0d, 0x154a7e73 }, + { -0x7ed57d7b, -0x4344240f, -0x2f422e04, 0x270e0807, 0x1bbda72d, + -0x4be498f5, 0x6b3bb69a, 0x43aabe69 }, }, { - { -0x6bb15c41, 0x6b1a5cd0, -0x4c623f2e, 0x7470353a, 0x28542e49, 0x71b25282, 0x283c927e, 0x461bea69, }, - { -0x55cdde4f, -0x4590d366, 0x3bba23a7, 0x6ca02153, -0x6de6d3c6, -0x621589b1, 0x2e5317e0, 0x1d6edd5d, }, - { 0x01b8b3a2, -0x0e7c9238, 0x053ea49a, -0x4cfca0b9, 0x5877adf3, 0x529c41ba, 0x6a0f90a7, 0x7a9fbb1c, }, + { -0x6bb15c41, 0x6b1a5cd0, -0x4c623f2e, 0x7470353a, 0x28542e49, + 0x71b25282, 0x283c927e, 0x461bea69 }, + { -0x55cdde4f, -0x4590d366, 0x3bba23a7, 0x6ca02153, -0x6de6d3c6, + -0x621589b1, 0x2e5317e0, 0x1d6edd5d }, + { 0x01b8b3a2, -0x0e7c9238, 0x053ea49a, -0x4cfca0b9, 0x5877adf3, + 0x529c41ba, 0x6a0f90a7, 0x7a9fbb1c }, }, { - { -0x59579cd1, -0x64d19876, 0x51bc46c5, -0x59af6191, -0x39790a4b, -0x314dcc37, -0x752280a7, 0x34b9ed33, }, - { 0x039d8064, -0x0c91de82, -0x0adfbe65, -0x675f7e4a, -0x18a14fbc, -0x693439f8, -0x05236371, 0x49c05a51, }, - { -0x6fba50e5, 0x06b4e8bf, -0x58e62dd1, -0x1d007c18, -0x6c2b30ea, -0x550903d7, 0x1b008b06, 0x73c17202, }, + { -0x59579cd1, -0x64d19876, 0x51bc46c5, -0x59af6191, -0x39790a4b, + -0x314dcc37, -0x752280a7, 0x34b9ed33 }, + { 0x039d8064, -0x0c91de82, -0x0adfbe65, -0x675f7e4a, -0x18a14fbc, + -0x693439f8, -0x05236371, 0x49c05a51 }, + { -0x6fba50e5, 0x06b4e8bf, -0x58e62dd1, -0x1d007c18, -0x6c2b30ea, + -0x550903d7, 0x1b008b06, 0x73c17202 }, }, { - { -0x757fd522, 0x2fbf0084, 0x02302e27, -0x1a260131, 0x17703406, 0x113e8471, 0x546d8faf, 0x4275aae2, }, - { 0x49864348, 0x315f5b02, 0x77088381, 0x3ed6b369, 0x6a8deb95, -0x5c5f8aab, 0x29d5c77f, 0x18ab5980, }, - { -0x029f7617, -0x27d4d33b, 0x3282e4a4, 0x031eb4a1, -0x4ae579de, 0x44311199, -0x4ac206b8, 0x3dc65522, }, + { -0x757fd522, 0x2fbf0084, 0x02302e27, -0x1a260131, 0x17703406, + 0x113e8471, 0x546d8faf, 0x4275aae2 }, + { 0x49864348, 0x315f5b02, 0x77088381, 0x3ed6b369, 0x6a8deb95, + -0x5c5f8aab, 0x29d5c77f, 0x18ab5980 }, + { -0x029f7617, 
-0x27d4d33b, 0x3282e4a4, 0x031eb4a1, -0x4ae579de, + 0x44311199, -0x4ac206b8, 0x3dc65522 }, }, { - { -0x5dff8093, -0x408f3dde, -0x4a432125, -0x407b4c66, -0x04f845f9, 0x537a0e12, -0x3cb90dbf, 0x234fd7ee, }, - { 0x327fbf93, 0x506f013b, -0x64889095, -0x51031437, -0x5552a698, -0x62ed4dce, 0x176024a7, 0x0267882d, }, - { 0x732ea378, 0x5360a119, -0x20722b8f, 0x2437e6b1, -0x6e581acd, -0x5d10c808, -0x55f6879d, 0x497ba6fd, }, + { -0x5dff8093, -0x408f3dde, -0x4a432125, -0x407b4c66, -0x04f845f9, + 0x537a0e12, -0x3cb90dbf, 0x234fd7ee }, + { 0x327fbf93, 0x506f013b, -0x64889095, -0x51031437, -0x5552a698, + -0x62ed4dce, 0x176024a7, 0x0267882d }, + { 0x732ea378, 0x5360a119, -0x20722b8f, 0x2437e6b1, -0x6e581acd, + -0x5d10c808, -0x55f6879d, 0x497ba6fd }, }, { - { 0x13cfeaa0, 0x24cecc03, 0x189c246d, -0x79b73d73, -0x3e0d2b30, 0x2dbdbdfa, -0x0ed218d5, 0x61e22917, }, - { 0x468ccf0b, 0x040bcd86, 0x2a9910d6, -0x2c7d645c, 0x07b25192, 0x75083008, 0x18d05ebf, 0x43b5cd42, }, - { -0x642f4aea, 0x5d9a762f, 0x373fdeee, -0x14c750b2, -0x6c29bd90, 0x032e5a7d, 0x0ae4d842, 0x511d6121, }, + { 0x13cfeaa0, 0x24cecc03, 0x189c246d, -0x79b73d73, -0x3e0d2b30, + 0x2dbdbdfa, -0x0ed218d5, 0x61e22917 }, + { 0x468ccf0b, 0x040bcd86, 0x2a9910d6, -0x2c7d645c, 0x07b25192, + 0x75083008, 0x18d05ebf, 0x43b5cd42 }, + { -0x642f4aea, 0x5d9a762f, 0x373fdeee, -0x14c750b2, -0x6c29bd90, + 0x032e5a7d, 0x0ae4d842, 0x511d6121 }, }, }; #elif defined(CURVED25519_128BIT) static const ge_precomp Bi[8] = { { - { 0x493c6f58c3b85, 0x0df7181c325f7, 0x0f50b0b3e4cb7, 0x5329385a44c32, 0x07cf9d3a33d4b }, - { 0x03905d740913e, 0x0ba2817d673a2, 0x23e2827f4e67c, 0x133d2e0c21a34, 0x44fd2f9298f81 }, - { 0x11205877aaa68, 0x479955893d579, 0x50d66309b67a0, 0x2d42d0dbee5ee, 0x6f117b689f0c6 }, + { 0x493c6f58c3b85, 0x0df7181c325f7, 0x0f50b0b3e4cb7, 0x5329385a44c32, + 0x07cf9d3a33d4b }, + { 0x03905d740913e, 0x0ba2817d673a2, 0x23e2827f4e67c, 0x133d2e0c21a34, + 0x44fd2f9298f81 }, + { 0x11205877aaa68, 0x479955893d579, 0x50d66309b67a0, 0x2d42d0dbee5ee, + 
0x6f117b689f0c6 }, }, { - { 0x5b0a84cee9730, 0x61d10c97155e4, 0x4059cc8096a10, 0x47a608da8014f, 0x7a164e1b9a80f }, - { 0x11fe8a4fcd265, 0x7bcb8374faacc, 0x52f5af4ef4d4f, 0x5314098f98d10, 0x2ab91587555bd }, - { 0x6933f0dd0d889, 0x44386bb4c4295, 0x3cb6d3162508c, 0x26368b872a2c6, 0x5a2826af12b9b }, + { 0x5b0a84cee9730, 0x61d10c97155e4, 0x4059cc8096a10, 0x47a608da8014f, + 0x7a164e1b9a80f }, + { 0x11fe8a4fcd265, 0x7bcb8374faacc, 0x52f5af4ef4d4f, 0x5314098f98d10, + 0x2ab91587555bd }, + { 0x6933f0dd0d889, 0x44386bb4c4295, 0x3cb6d3162508c, 0x26368b872a2c6, + 0x5a2826af12b9b }, }, { - { 0x2bc4408a5bb33, 0x078ebdda05442, 0x2ffb112354123, 0x375ee8df5862d, 0x2945ccf146e20 }, - { 0x182c3a447d6ba, 0x22964e536eff2, 0x192821f540053, 0x2f9f19e788e5c, 0x154a7e73eb1b5 }, - { 0x3dbf1812a8285, 0x0fa17ba3f9797, 0x6f69cb49c3820, 0x34d5a0db3858d, 0x43aabe696b3bb }, + { 0x2bc4408a5bb33, 0x078ebdda05442, 0x2ffb112354123, 0x375ee8df5862d, + 0x2945ccf146e20 }, + { 0x182c3a447d6ba, 0x22964e536eff2, 0x192821f540053, 0x2f9f19e788e5c, + 0x154a7e73eb1b5 }, + { 0x3dbf1812a8285, 0x0fa17ba3f9797, 0x6f69cb49c3820, 0x34d5a0db3858d, + 0x43aabe696b3bb }, }, { - { 0x25cd0944ea3bf, 0x75673b81a4d63, 0x150b925d1c0d4, 0x13f38d9294114, 0x461bea69283c9 }, - { 0x72c9aaa3221b1, 0x267774474f74d, 0x064b0e9b28085, 0x3f04ef53b27c9, 0x1d6edd5d2e531 }, - { 0x36dc801b8b3a2, 0x0e0a7d4935e30, 0x1deb7cecc0d7d, 0x053a94e20dd2c, 0x7a9fbb1c6a0f9 }, + { 0x25cd0944ea3bf, 0x75673b81a4d63, 0x150b925d1c0d4, 0x13f38d9294114, + 0x461bea69283c9 }, + { 0x72c9aaa3221b1, 0x267774474f74d, 0x064b0e9b28085, 0x3f04ef53b27c9, + 0x1d6edd5d2e531 }, + { 0x36dc801b8b3a2, 0x0e0a7d4935e30, 0x1deb7cecc0d7d, 0x053a94e20dd2c, + 0x7a9fbb1c6a0f9 }, }, { - { 0x6678aa6a8632f, 0x5ea3788d8b365, 0x21bd6d6994279, 0x7ace75919e4e3, 0x34b9ed338add7 }, - { 0x6217e039d8064, 0x6dea408337e6d, 0x57ac112628206, 0x647cb65e30473, 0x49c05a51fadc9 }, - { 0x4e8bf9045af1b, 0x514e33a45e0d6, 0x7533c5b8bfe0f, 0x583557b7e14c9, 0x73c172021b008 }, + { 0x6678aa6a8632f, 
0x5ea3788d8b365, 0x21bd6d6994279, 0x7ace75919e4e3, + 0x34b9ed338add7 }, + { 0x6217e039d8064, 0x6dea408337e6d, 0x57ac112628206, 0x647cb65e30473, + 0x49c05a51fadc9 }, + { 0x4e8bf9045af1b, 0x514e33a45e0d6, 0x7533c5b8bfe0f, 0x583557b7e14c9, + 0x73c172021b008 }, }, { - { 0x700848a802ade, 0x1e04605c4e5f7, 0x5c0d01b9767fb, 0x7d7889f42388b, 0x4275aae2546d8 }, - { 0x75b0249864348, 0x52ee11070262b, 0x237ae54fb5acd, 0x3bfd1d03aaab5, 0x18ab598029d5c }, - { 0x32cc5fd6089e9, 0x426505c949b05, 0x46a18880c7ad2, 0x4a4221888ccda, 0x3dc65522b53df }, + { 0x700848a802ade, 0x1e04605c4e5f7, 0x5c0d01b9767fb, 0x7d7889f42388b, + 0x4275aae2546d8 }, + { 0x75b0249864348, 0x52ee11070262b, 0x237ae54fb5acd, 0x3bfd1d03aaab5, + 0x18ab598029d5c }, + { 0x32cc5fd6089e9, 0x426505c949b05, 0x46a18880c7ad2, 0x4a4221888ccda, + 0x3dc65522b53df }, }, { - { 0x0c222a2007f6d, 0x356b79bdb77ee, 0x41ee81efe12ce, 0x120a9bd07097d, 0x234fd7eec346f }, - { 0x7013b327fbf93, 0x1336eeded6a0d, 0x2b565a2bbf3af, 0x253ce89591955, 0x0267882d17602 }, - { 0x0a119732ea378, 0x63bf1ba8e2a6c, 0x69f94cc90df9a, 0x431d1779bfc48, 0x497ba6fdaa097 }, + { 0x0c222a2007f6d, 0x356b79bdb77ee, 0x41ee81efe12ce, 0x120a9bd07097d, + 0x234fd7eec346f }, + { 0x7013b327fbf93, 0x1336eeded6a0d, 0x2b565a2bbf3af, 0x253ce89591955, + 0x0267882d17602 }, + { 0x0a119732ea378, 0x63bf1ba8e2a6c, 0x69f94cc90df9a, 0x431d1779bfc48, + 0x497ba6fdaa097 }, }, { - { 0x6cc0313cfeaa0, 0x1a313848da499, 0x7cb534219230a, 0x39596dedefd60, 0x61e22917f12de }, - { 0x3cd86468ccf0b, 0x48553221ac081, 0x6c9464b4e0a6e, 0x75fba84180403, 0x43b5cd4218d05 }, - { 0x2762f9bd0b516, 0x1c6e7fbddcbb3, 0x75909c3ace2bd, 0x42101972d3ec9, 0x511d61210ae4d }, + { 0x6cc0313cfeaa0, 0x1a313848da499, 0x7cb534219230a, 0x39596dedefd60, + 0x61e22917f12de }, + { 0x3cd86468ccf0b, 0x48553221ac081, 0x6c9464b4e0a6e, 0x75fba84180403, + 0x43b5cd4218d05 }, + { 0x2762f9bd0b516, 0x1c6e7fbddcbb3, 0x75909c3ace2bd, 0x42101972d3ec9, + 0x511d61210ae4d }, }, }; #else static const ge_precomp Bi[8] = { - { - { 
25967493,-14356035,29566456,3660896,-12694345,4014787,27544626,-11754271,-6079156,2047605 }, - { -12545711,934262,-2722910,3049990,-727428,9406986,12720692,5043384,19500929,-15469378 }, - { -8738181,4489570,9688441,-14785194,10184609,-12363380,29287919,11864899,-24514362,-4438546 }, - }, - { - { 15636291,-9688557,24204773,-7912398,616977,-16685262,27787600,-14772189,28944400,-1550024 }, - { 16568933,4717097,-11556148,-1102322,15682896,-11807043,16354577,-11775962,7689662,11199574 }, - { 30464156,-5976125,-11779434,-15670865,23220365,15915852,7512774,10017326,-17749093,-9920357 }, - }, - { - { 10861363,11473154,27284546,1981175,-30064349,12577861,32867885,14515107,-15438304,10819380 }, - { 4708026,6336745,20377586,9066809,-11272109,6594696,-25653668,12483688,-12668491,5581306 }, - { 19563160,16186464,-29386857,4097519,10237984,-4348115,28542350,13850243,-23678021,-15815942 }, - }, - { - { 5153746,9909285,1723747,-2777874,30523605,5516873,19480852,5230134,-23952439,-15175766 }, - { -30269007,-3463509,7665486,10083793,28475525,1649722,20654025,16520125,30598449,7715701 }, - { 28881845,14381568,9657904,3680757,-20181635,7843316,-31400660,1370708,29794553,-1409300 }, - }, - { - { -22518993,-6692182,14201702,-8745502,-23510406,8844726,18474211,-1361450,-13062696,13821877 }, - { -6455177,-7839871,3374702,-4740862,-27098617,-10571707,31655028,-7212327,18853322,-14220951 }, - { 4566830,-12963868,-28974889,-12240689,-7602672,-2830569,-8514358,-10431137,2207753,-3209784 }, - }, - { - { -25154831,-4185821,29681144,7868801,-6854661,-9423865,-12437364,-663000,-31111463,-16132436 }, - { 25576264,-2703214,7349804,-11814844,16472782,9300885,3844789,15725684,171356,6466918 }, - { 23103977,13316479,9739013,-16149481,817875,-15038942,8965339,-14088058,-30714912,16193877 }, - }, - { - { -33521811,3180713,-2394130,14003687,-16903474,-16270840,17238398,4729455,-18074513,9256800 }, - { -25182317,-4174131,32336398,5036987,-21236817,11360617,22616405,9761698,-19827198,630305 }, - { 
-13720693,2639453,-24237460,-7406481,9494427,-5774029,-6554551,-15960994,-2449256,-14291300 }, - }, - { - { -3151181,-5046075,9282714,6866145,-31907062,-863023,-18940575,15033784,25105118,-7894876 }, - { -24326370,15950226,-31801215,-14592823,-11662737,-5090925,1573892,-2625887,2198790,-15804619 }, - { -3099351,10324967,-2241613,7453183,-5446979,-2735503,-13812022,-16236442,-32461234,-12290683 }, - }, -} ; + { + { 0x18c3b85, -0x0db0e43, 0x1c325f8, 0x037dc60, -0x0c1b349, + 0x03d42c3, 0x1a44c32, -0x0b35b1f, -0x05cc2b4, 0x01f3e75 }, + { -0x0bf6eaf, 0x00e4176, -0x0298c5e, 0x02e8a06, -0x00b1984, + 0x08f8a0a, 0x0c21a34, 0x04cf4b8, 0x1298f81, -0x0ec0b42 }, + { -0x0855585, 0x0448162, 0x093d579, -0x0e19aaa, 0x09b67a1, + -0x0bca674, 0x1bee5ef, 0x0b50b43, -0x1760f3a, -0x043ba12 }, + }, + { + { 0x0ee9743, -0x093d5ed, 0x17155e5, -0x078bbce, 0x0096a11, + -0x0fe98ce, 0x1a80150, -0x0e167dd, 0x1b9a810, -0x017a6c8 }, + { 0x0fcd265, 0x047fa29, -0x0b05534, -0x010d1f2, 0x0ef4d50, + -0x0b42943, 0x0f98d11, -0x0b3afda, 0x07555be, 0x0aae456 }, + { 0x1d0d89c, -0x05b303d, -0x0b3bd6a, -0x0ef1e51, 0x162508d, + 0x0f2db4c, 0x072a2c6, 0x098da2e, -0x10ed465, -0x0975f65 }, + }, + { + { 0x0a5bb33, 0x0af1102, 0x1a05442, 0x01e3af7, -0x1cabedd, + 0x0bfec45, 0x1f5862d, 0x0dd7ba3, -0x0eb91e0, 0x0a51734 }, + { 0x047d6ba, 0x060b0e9, 0x136eff2, 0x08a5939, -0x0abffad, + 0x064a088, -0x18771a4, 0x0be7c68, -0x0c14e4b, 0x05529fa }, + { 0x12a8298, 0x0f6fc60, -0x1c06869, 0x03e85ef, 0x09c3820, + -0x04258d3, 0x1b3858e, 0x0d35683, -0x1694c45, -0x0f15506 }, + }, + { + { 0x04ea3d2, 0x0973425, 0x01a4d63, -0x02a6312, 0x1d1c0d5, + 0x0542e49, 0x1294114, 0x04fce36, -0x16d7c37, -0x0e79056 }, + { -0x1cdde4f, -0x034d955, 0x074f74e, 0x099ddd1, 0x1b28085, + 0x0192c3a, 0x13b27c9, 0x0fc13bd, 0x1d2e531, 0x075bb75 }, + { 0x1b8b3b5, 0x0db7200, 0x0935e30, 0x03829f5, -0x133f283, + 0x077adf4, -0x1df22d4, 0x014ea54, 0x1c6a0f9, -0x0158114 }, + }, + { + { -0x1579cd1, -0x0661d56, 0x0d8b366, -0x085721e, -0x166bd86, + 0x086f5b6, 0x119e4e3, 
-0x014c62a, -0x0c75228, 0x0d2e7b5 }, + { -0x0627f89, -0x077a07f, 0x0337e6e, -0x04856fe, -0x19d7df9, + -0x0a14fbb, 0x1e30474, -0x06e0d27, 0x11fadca, -0x0d8fe97 }, + { 0x045af2e, -0x0c5d01c, -0x1ba1f29, -0x0bac731, -0x07401f0, + -0x02b30e9, -0x081eb36, -0x09f2aa1, 0x021b009, -0x030fa38 }, + }, + { + { -0x17fd50f, -0x03fdedd, 0x1c4e5f8, 0x0781181, -0x0689805, + -0x08fcbf9, -0x0bdc774, -0x00a1dd8, -0x1dab927, -0x0f62954 }, + { 0x1864348, -0x0293f6e, 0x070262c, -0x0b447bc, 0x0fb5ace, + 0x08deb95, 0x03aaab5, 0x0eff474, 0x0029d5c, 0x062ad66 }, + { 0x16089e9, 0x0cb317f, 0x0949b05, -0x0f66be9, 0x00c7ad3, + -0x0e579de, 0x088ccdb, -0x0d6f77a, -0x1d4ac20, 0x0f71955 }, + }, + { + { -0x1ff8093, 0x03088a9, -0x0248812, 0x0d5ade7, -0x101ed32, + -0x0f845f8, 0x107097e, 0x0482a6f, -0x113cb91, 0x08d3f60 }, + { -0x180406d, -0x03fb133, 0x1ed6a0e, 0x04cdbbb, -0x1440c51, + 0x0ad5969, 0x1591955, 0x094f3a2, -0x12e89fe, 0x0099e21 }, + { -0x0d15c75, 0x028465d, -0x171d594, -0x0710391, 0x090df9b, + -0x0581acd, -0x06403b7, -0x0f38ba2, -0x0255f68, -0x0da1164 }, + }, + { + { -0x030154d, -0x04cff3b, 0x08da49a, 0x068c4e1, -0x1e6dcf6, + -0x00d2b2f, -0x121029f, 0x0e565b8, 0x17f12de, -0x078775c }, + { -0x17330e2, 0x0f36192, -0x1e53f7f, -0x0deab37, -0x0b1f591, + -0x04dae6d, 0x0180404, -0x028115f, 0x0218d06, -0x0f128cb }, + { -0x02f4ad7, 0x09d8be7, -0x022344d, 0x071b9ff, -0x0531d43, + -0x029bd8f, -0x0d2c136, -0x0f7bf9a, -0x1ef51b2, -0x0bb8a7b }, + }, +}; #endif @@ -9405,7 +9696,7 @@ int ge_double_scalarmult_vartime(ge_p2 *r, const unsigned char *a, #else signed char aslide[SLIDE_SIZE]; signed char bslide[SLIDE_SIZE]; - ge_cached Ai[8]; /* A,3A,5A,7A,9A,11A,13A,15A */ + ge_cached Ai[16]; /* A,3A,5A,7A,9A,11A,13A,15A */ ge_p1p1 t[1]; ge_p3 u[1]; @@ -9416,7 +9707,7 @@ int ge_double_scalarmult_vartime(ge_p2 *r, const unsigned char *a, #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) if (((aslide = (signed char *)XMALLOC(SLIDE_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || ((bslide = (signed 
char *)XMALLOC(SLIDE_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || - ((Ai = (ge_cached *)XMALLOC(8 * sizeof(*Ai), NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || + ((Ai = (ge_cached *)XMALLOC(16 * sizeof(*Ai), NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || ((t = (ge_p1p1 *)XMALLOC(sizeof(*t), NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || ((u = (ge_p3 *)XMALLOC(sizeof(*u), NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL) || ((A2 = (ge_p3 *)XMALLOC(sizeof(*A2), NULL, DYNAMIC_TYPE_TMP_BUFFER))== NULL)) @@ -9427,8 +9718,12 @@ int ge_double_scalarmult_vartime(ge_p2 *r, const unsigned char *a, ret = 0; #endif - slide(aslide,a); - slide(bslide,b); + slide(aslide,a,15); +#ifdef CURVED25519_ASM_64BIT + slide(bslide,b,63); +#else + slide(bslide,b,15); +#endif ge_p3_to_cached(&Ai[0],A); ge_p3_dbl(t,A); ge_p1p1_to_p3(A2,t); @@ -9442,8 +9737,60 @@ int ge_double_scalarmult_vartime(ge_p2 *r, const unsigned char *a, ge_p2_0(r); - for (i = 255;i >= 0;--i) { - if (aslide[i] || bslide[i]) break; + for (i = SLIDE_SIZE-1;i >= 0;--i) { + if (aslide[i] || bslide[i]) { +#ifdef CURVED25519_ASM_64BIT + if (aslide[i] > 0) { + fe_copy(t->Z, Ai[aslide[i]/2].YplusX); + fe_copy(t->Y, Ai[aslide[i]/2].YminusX); + fe_sub(t->X,t->Z,t->Y); + fe_add(t->Y,t->Z,t->Y); + fe_add(t->T,Ai[aslide[i]/2].Z, Ai[aslide[i]/2].Z); + fe_copy(t->Z,t->T); + } else if (aslide[i] < 0) { + fe_copy(t->Z, Ai[aslide[i]/2].YminusX); + fe_copy(t->Y, Ai[aslide[i]/2].YplusX); + fe_sub(t->X,t->Z,t->Y); + fe_add(t->Y,t->Z,t->Y); + fe_add(t->T,Ai[aslide[i]/2].Z, Ai[aslide[i]/2].Z); + fe_copy(t->Z,t->T); + } + + if (bslide[i] > 0) { + if (aslide[i] != 0) { + ge_p1p1_to_p3(u,t); + ge_madd(t,u,&Bi[bslide[i]/2]); + } else { + fe_copy(t->Z,Bi[bslide[i]/2].yplusx); + fe_copy(t->Y,Bi[bslide[i]/2].yminusx); + fe_sub(t->X,t->Z,t->Y); + fe_add(t->Y,t->Z,t->Y); + fe_0(t->T); + t->T[0] = 0x2; + fe_0(t->Z); + t->Z[0] = 0x2; + } + } else if (bslide[i] < 0) { + if (aslide[i] != 0) { + ge_p1p1_to_p3(u,t); + ge_msub(t,u,&Bi[(-bslide[i])/2]); + } else { + 
fe_copy(t->Z,Bi[bslide[i]/2].yminusx); + fe_copy(t->Y,Bi[bslide[i]/2].yplusx); + fe_sub(t->X,t->Z,t->Y); + fe_add(t->Y,t->Z,t->Y); + fe_0(t->T); + t->T[0] = 0x2; + fe_0(t->Z); + t->Z[0] = 0x2; + } + } + + ge_p1p1_to_p2(r,t); + --i; +#endif + break; + } } for (;i >= 0;--i) { @@ -9556,6 +9903,11 @@ int ge_frombytes_negate_vartime(ge_p3 *h,const unsigned char *s) fe_mul(h->X,h->X,u); /* x = uv^7 */ fe_pow22523(h->X,h->X); /* x = (uv^7)^((q-5)/8) */ + /* Alternative if inversion very fast. + * x^2^252 * invert(x^3) + * = x^2^252 * x^-3 + * = x^(2^252 - 3) + */ fe_mul(h->X,h->X,v3); fe_mul(h->X,h->X,u); /* x = uv^3(uv^7)^((q-5)/8) */ @@ -9823,6 +10175,26 @@ void ge_tobytes(unsigned char *s,const ge_p2 *h) s[31] ^= (unsigned char)((unsigned char)fe_isnegative(x) << 7); } +#ifdef HAVE_ED25519_VERIFY +#ifndef CURVED25519_ASM_64BIT + #define fe_invert_nct fe_invert +#endif + +/* ge tobytes */ +void ge_tobytes_nct(unsigned char *s,const ge_p2 *h) +{ + ge recip; + ge x; + ge y; + + fe_invert_nct(recip,h->Z); + fe_mul(x,h->X,recip); + fe_mul(y,h->Y,recip); + fe_tobytes(s,y); + s[31] ^= (unsigned char)((unsigned char)fe_isnegative(x) << 7); +} +#endif + #endif /* !ED25519_SMALL */ /* if HAVE_ED25519 but not HAVE_CURVE25519, and an asm implementation is built, diff --git a/wolfcrypt/src/logging.c b/wolfcrypt/src/logging.c index 284b80822..d0a411b1d 100644 --- a/wolfcrypt/src/logging.c +++ b/wolfcrypt/src/logging.c @@ -430,7 +430,11 @@ static void wolfssl_log(const int logLevel, const char* const file_name, #endif /* (!WOLFSSL_DEBUG_CERTS && !DEBUG_WOLFSSL) || NO_WOLFSSL_DEBUG_CERTS */ #if defined(XVSNPRINTF) && !defined(NO_WOLFSSL_MSG_EX) -#include /* for var args */ +#if defined(WOLFSSL_BSDKM) + #include /* for var args */ +#else + #include /* for var args */ +#endif /* WOLFSSL_BSDKM */ #ifndef WOLFSSL_MSG_EX_BUF_SZ #define WOLFSSL_MSG_EX_BUF_SZ 100 diff --git a/wolfcrypt/src/misc.c b/wolfcrypt/src/misc.c index 655c11672..9b67f6c5d 100644 --- a/wolfcrypt/src/misc.c +++ 
b/wolfcrypt/src/misc.c @@ -775,7 +775,9 @@ WC_MISC_STATIC WC_INLINE void ctMaskCopy(byte mask, byte* dst, byte* src, #if !defined(WOLFSSL_NO_CT_OPS) && !defined(WOLFSSL_NO_CT_MAX_MIN) && \ defined(WORD64_AVAILABLE) volatile word32 gte_mask = (word32)ctMaskWord32GTE(a, b); - return (a & ~gte_mask) | (b & gte_mask); + word32 r = (a & ~gte_mask); + r |= (b & gte_mask); + return r; #else /* WOLFSSL_NO_CT_OPS */ return a > b ? b : a; #endif /* WOLFSSL_NO_CT_OPS */ diff --git a/wolfcrypt/src/pkcs7.c b/wolfcrypt/src/pkcs7.c index 9c99f8cf5..677441e42 100644 --- a/wolfcrypt/src/pkcs7.c +++ b/wolfcrypt/src/pkcs7.c @@ -2074,6 +2074,11 @@ static int wc_PKCS7_BuildSignedAttributes(wc_PKCS7* pkcs7, ESD* esd, /* add custom signed attributes if set */ if (pkcs7->signedAttribsSz > 0 && pkcs7->signedAttribs != NULL) { + word32 availableSpace = MAX_SIGNED_ATTRIBS_SZ - atrIdx; + + if (pkcs7->signedAttribsSz > availableSpace) + return BUFFER_E; + esd->signedAttribsCount += pkcs7->signedAttribsSz; esd->signedAttribsSz += (word32)EncodeAttributes( &esd->signedAttribs[atrIdx], (int)esd->signedAttribsCount, diff --git a/wolfcrypt/src/port/arm/armv8-32-aes-asm.S b/wolfcrypt/src/port/arm/armv8-32-aes-asm.S index 4cd585287..dfa78538e 100644 --- a/wolfcrypt/src/port/arm/armv8-32-aes-asm.S +++ b/wolfcrypt/src/port/arm/armv8-32-aes-asm.S @@ -9048,6 +9048,7 @@ L_AES_set_encrypt_key_loop_128: L_AES_set_encrypt_key_end: pop {r4, r5, r6, r7, r8, pc} .size AES_set_encrypt_key,.-AES_set_encrypt_key +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE .text .align 4 .globl AES_encrypt_block @@ -9691,6 +9692,7 @@ L_AES_encrypt_block_nr: eor r7, r7, r11 pop {pc} .size AES_encrypt_block,.-AES_encrypt_block +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ #if defined(HAVE_AESCCM) || defined(HAVE_AESGCM) || \ defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_ECB) @@ -9751,7 +9753,645 @@ L_AES_ECB_encrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef 
WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_ECB_encrypt_block_nr_256: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, 
r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_encrypt_block_nr_256 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, 
r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, 
#24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -9823,7 +10463,645 @@ L_AES_ECB_encrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_ECB_encrypt_block_nr_192: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, 
r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, 
r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor 
r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, 
[r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_encrypt_block_nr_192 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + 
ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -9895,7 +11173,645 @@ L_AES_ECB_encrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_ECB_encrypt_block_nr_128: +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, 
r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, 
#24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_encrypt_block_nr_128 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + 
ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr 
r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx 
lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -9999,7 +11915,645 @@ L_AES_CBC_encrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CBC_encrypt_block_nr_256: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor 
r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor 
r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, 
r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_encrypt_block_nr_256 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, 
r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx 
r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10075,7 +12629,645 @@ L_AES_CBC_encrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CBC_encrypt_block_nr_192: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 
+#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 
+#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, 
lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_encrypt_block_nr_192 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, 
#24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10151,7 +13343,645 @@ L_AES_CBC_encrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CBC_encrypt_block_nr_128: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, 
r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl 
r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, 
r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor 
r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_encrypt_block_nr_128 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + 
eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10254,7 +14084,645 @@ L_AES_CTR_encrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_256: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, 
r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CTR_encrypt_block_nr_256 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 
+ lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + 
lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl 
#16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + 
ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10315,7 +14783,645 @@ L_AES_CTR_encrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_192: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + 
eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 
+#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne 
L_AES_CTR_encrypt_block_nr_192 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb 
lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10376,7 +15482,645 @@ L_AES_CTR_encrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_128: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, 
r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, 
[r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 
+#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CTR_encrypt_block_nr_128 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, 
ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, 
lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH 
< 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl 
#2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10452,6 +16196,7 @@ L_AES_CTR_encrypt_end: #ifdef HAVE_AES_DECRYPT #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_CBC) || defined(HAVE_AES_ECB) +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE .text .align 4 .globl AES_decrypt_block @@ -11095,6 +16840,7 @@ L_AES_decrypt_block_nr: eor r7, r7, r11 pop {pc} .size AES_decrypt_block,.-AES_decrypt_block +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ .text .type L_AES_ARM32_td_ecb, %object .size L_AES_ARM32_td_ecb, 12 @@ -11102,10 +16848,10 @@ L_AES_decrypt_block_nr: L_AES_ARM32_td_ecb: .word L_AES_ARM32_td_data .text - .type L_AES_ARM32_td4, %object - .size L_AES_ARM32_td4, 256 + .type L_AES_ARM32_ecb_td4, %object + .size L_AES_ARM32_ecb_td4, 256 .align 4 -L_AES_ARM32_td4: +L_AES_ARM32_ecb_td4: .byte 0x52 .byte 0x9 .byte 0x6a @@ -11374,7 +17120,7 @@ AES_ECB_decrypt: adr r0, L_AES_ARM32_td_ecb ldr r0, [r0] mov r12, r2 - adr r2, L_AES_ARM32_td4 + adr r2, L_AES_ARM32_ecb_td4 cmp r8, #10 beq L_AES_ECB_decrypt_start_block_128 cmp r8, #12 @@ -11415,7 +17161,645 @@ L_AES_ECB_decrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_256: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 
+#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + 
uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, 
[r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb 
r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_decrypt_block_nr_256 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + 
eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, 
#16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl 
#16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r3, r12, lr} #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11486,7 +17870,645 @@ L_AES_ECB_decrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_192: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) 
+ lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, 
r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_decrypt_block_nr_192 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else 
+ ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, 
[r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r3, r12, lr} #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11557,7 +18579,645 @@ L_AES_ECB_decrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_128: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, 
lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_ECB_decrypt_block_nr_128 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, 
r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + 
uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, 
lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r3, r12, lr} #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11595,6 +19255,267 @@ L_AES_ECB_decrypt_end: .size AES_ECB_decrypt,.-AES_ECB_decrypt #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER || defined(HAVE_AES_ECB) */ #ifdef HAVE_AES_CBC + .text + .type L_AES_ARM32_cbc_td4, %object + .size L_AES_ARM32_cbc_td4, 256 + .align 4 +L_AES_ARM32_cbc_td4: + .byte 0x52 + .byte 0x9 + .byte 0x6a + .byte 0xd5 + .byte 0x30 + .byte 0x36 + .byte 0xa5 + .byte 0x38 + .byte 0xbf + .byte 0x40 + .byte 0xa3 + .byte 0x9e + .byte 0x81 + .byte 0xf3 + .byte 0xd7 + .byte 0xfb + .byte 0x7c + .byte 0xe3 + .byte 0x39 + .byte 0x82 + .byte 0x9b + .byte 0x2f + .byte 0xff + .byte 0x87 + .byte 0x34 + .byte 0x8e + .byte 0x43 + .byte 0x44 + .byte 0xc4 + .byte 0xde + .byte 0xe9 + .byte 0xcb + .byte 0x54 + .byte 0x7b + .byte 0x94 + .byte 0x32 + .byte 0xa6 + .byte 0xc2 + .byte 0x23 + .byte 0x3d + .byte 0xee + .byte 0x4c + .byte 0x95 + .byte 0xb + .byte 0x42 + .byte 0xfa + .byte 0xc3 + .byte 0x4e + .byte 0x8 + .byte 0x2e + .byte 0xa1 + .byte 0x66 + .byte 0x28 + .byte 0xd9 + .byte 0x24 + .byte 0xb2 + .byte 0x76 + .byte 0x5b + .byte 0xa2 + .byte 0x49 + .byte 0x6d + .byte 0x8b + .byte 0xd1 + .byte 0x25 + .byte 0x72 + .byte 0xf8 + .byte 0xf6 + .byte 0x64 + .byte 0x86 + .byte 0x68 + .byte 0x98 + .byte 0x16 + .byte 0xd4 + .byte 0xa4 + .byte 0x5c + .byte 0xcc + .byte 0x5d + .byte 0x65 + .byte 0xb6 + .byte 0x92 + .byte 0x6c + .byte 0x70 + .byte 0x48 + .byte 0x50 + .byte 0xfd + .byte 0xed + .byte 0xb9 + .byte 0xda + .byte 0x5e + .byte 0x15 + .byte 0x46 + .byte 0x57 + .byte 0xa7 + .byte 0x8d + .byte 0x9d + .byte 0x84 + .byte 0x90 + .byte 0xd8 + .byte 0xab + .byte 0x0 + .byte 0x8c + .byte 0xbc + .byte 0xd3 + .byte 0xa + .byte 0xf7 + .byte 0xe4 + .byte 0x58 + .byte 0x5 + .byte 0xb8 + .byte 0xb3 + .byte 0x45 + .byte 
0x6 + .byte 0xd0 + .byte 0x2c + .byte 0x1e + .byte 0x8f + .byte 0xca + .byte 0x3f + .byte 0xf + .byte 0x2 + .byte 0xc1 + .byte 0xaf + .byte 0xbd + .byte 0x3 + .byte 0x1 + .byte 0x13 + .byte 0x8a + .byte 0x6b + .byte 0x3a + .byte 0x91 + .byte 0x11 + .byte 0x41 + .byte 0x4f + .byte 0x67 + .byte 0xdc + .byte 0xea + .byte 0x97 + .byte 0xf2 + .byte 0xcf + .byte 0xce + .byte 0xf0 + .byte 0xb4 + .byte 0xe6 + .byte 0x73 + .byte 0x96 + .byte 0xac + .byte 0x74 + .byte 0x22 + .byte 0xe7 + .byte 0xad + .byte 0x35 + .byte 0x85 + .byte 0xe2 + .byte 0xf9 + .byte 0x37 + .byte 0xe8 + .byte 0x1c + .byte 0x75 + .byte 0xdf + .byte 0x6e + .byte 0x47 + .byte 0xf1 + .byte 0x1a + .byte 0x71 + .byte 0x1d + .byte 0x29 + .byte 0xc5 + .byte 0x89 + .byte 0x6f + .byte 0xb7 + .byte 0x62 + .byte 0xe + .byte 0xaa + .byte 0x18 + .byte 0xbe + .byte 0x1b + .byte 0xfc + .byte 0x56 + .byte 0x3e + .byte 0x4b + .byte 0xc6 + .byte 0xd2 + .byte 0x79 + .byte 0x20 + .byte 0x9a + .byte 0xdb + .byte 0xc0 + .byte 0xfe + .byte 0x78 + .byte 0xcd + .byte 0x5a + .byte 0xf4 + .byte 0x1f + .byte 0xdd + .byte 0xa8 + .byte 0x33 + .byte 0x88 + .byte 0x7 + .byte 0xc7 + .byte 0x31 + .byte 0xb1 + .byte 0x12 + .byte 0x10 + .byte 0x59 + .byte 0x27 + .byte 0x80 + .byte 0xec + .byte 0x5f + .byte 0x60 + .byte 0x51 + .byte 0x7f + .byte 0xa9 + .byte 0x19 + .byte 0xb5 + .byte 0x4a + .byte 0xd + .byte 0x2d + .byte 0xe5 + .byte 0x7a + .byte 0x9f + .byte 0x93 + .byte 0xc9 + .byte 0x9c + .byte 0xef + .byte 0xa0 + .byte 0xe0 + .byte 0x3b + .byte 0x4d + .byte 0xae + .byte 0x2a + .byte 0xf5 + .byte 0xb0 + .byte 0xc8 + .byte 0xeb + .byte 0xbb + .byte 0x3c + .byte 0x83 + .byte 0x53 + .byte 0x99 + .byte 0x61 + .byte 0x17 + .byte 0x2b + .byte 0x4 + .byte 0x7e + .byte 0xba + .byte 0x77 + .byte 0xd6 + .byte 0x26 + .byte 0xe1 + .byte 0x69 + .byte 0x14 + .byte 0x63 + .byte 0x55 + .byte 0x21 + .byte 0xc + .byte 0x7d .text .align 4 .globl AES_CBC_decrypt @@ -11605,7 +19526,7 @@ AES_CBC_decrypt: adr r0, L_AES_ARM32_td_ecb ldr r0, [r0] mov r12, r2 - 
adr r2, L_AES_ARM32_td4 + adr r2, L_AES_ARM32_cbc_td4 ldr r8, [sp, #36] ldr r4, [sp, #40] push {r3, r4} @@ -11662,7 +19583,645 @@ L_AES_CBC_decrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_256_odd: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_256_odd +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb 
r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11749,7 +20308,645 @@ L_AES_CBC_decrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_256_even: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + 
ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl 
r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 
+#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, 
#16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_256_even +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, 
#8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else 
+ ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11850,7 +21047,645 @@ L_AES_CBC_decrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_192_odd: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH 
< 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_192_odd +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, 
#8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr 
r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -11937,7 +21772,645 @@ L_AES_CBC_decrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_192_even: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, 
lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, 
r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, 
lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_192_even +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, 
r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -12038,7 +22511,645 @@ 
L_AES_CBC_decrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_128_odd: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + 
lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 
+#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb 
r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_128_odd +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + 
ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, 
#8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, #24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -12125,7 +23236,645 @@ L_AES_CBC_decrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_128_even: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #8 + lsr r4, r4, #24 +#else + uxtb r4, r11, ror #16 +#endif +#else + ubfx r4, r11, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r10, #16 + lsr r12, r12, #24 +#else + uxtb r12, r10, ror #8 +#endif +#else + ubfx r12, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #24 + lsr lr, lr, #24 +#else + uxtb lr, r9 +#endif +#else + ubfx lr, r9, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #8 + lsr r5, r5, #24 +#else + uxtb r5, r8, ror #16 +#endif +#else + ubfx r5, r8, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #16 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #8 +#endif +#else + ubfx r12, r11, #8, #8 +#endif + eor r4, r4, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #24 + lsr lr, lr, #24 +#else + uxtb lr, r10 +#endif +#else + ubfx lr, r10, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #8 + lsr r6, r6, #24 +#else + uxtb r6, r9, ror #16 +#endif +#else + ubfx r6, r9, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #16 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #8 +#endif +#else + ubfx r12, r8, #8, #8 +#endif + eor r5, r5, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #24 + lsr lr, lr, #24 +#else + uxtb lr, r11 +#endif +#else + ubfx lr, r11, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r8, #24 + lsr r8, r8, #24 +#else + uxtb r8, r8 +#endif +#else + ubfx r8, r8, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #8 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #16 +#endif +#else + ubfx r7, r10, #16, #8 +#endif + eor r6, r6, r12, ror #8 + lsr r12, r11, #24 + eor r6, r6, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH 
< 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + eor r12, r12, r8, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #8 + eor r7, r7, r12, ror #24 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_CBC_decrypt_block_nr_128_even +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r7, #8 + lsr r8, r8, #24 +#else + uxtb r8, r7, ror #16 +#endif +#else + ubfx r8, r7, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r6, #16 + lsr r12, r12, #24 +#else + uxtb r12, r6, ror #8 +#endif +#else + ubfx r12, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #24 + lsr lr, lr, #24 +#else + uxtb lr, r5 +#endif +#else + ubfx lr, r5, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r4, #8 + lsr r9, r9, #24 +#else + uxtb r9, r4, ror #16 +#endif +#else + ubfx r9, r4, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r7, #16 + lsr r12, r12, #24 +#else + uxtb r12, r7, ror #8 +#endif +#else + ubfx r12, r7, #8, #8 +#endif + eor r8, r8, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #24 + lsr lr, lr, #24 +#else + uxtb lr, r6 +#endif +#else + ubfx lr, r6, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r5, #8 + lsr r10, r10, #24 +#else + uxtb r10, r5, ror #16 +#endif +#else + ubfx r10, r5, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, r12, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r4, #16 + lsr r12, r12, #24 +#else + uxtb r12, r4, ror #8 +#endif +#else + ubfx r12, r4, #8, #8 +#endif + eor r9, r9, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #24 + lsr lr, lr, #24 +#else + uxtb lr, r7 +#endif +#else + ubfx lr, r7, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr lr, [r0, lr, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r4, #24 + lsr r4, r4, #24 +#else + uxtb r4, r4 +#endif +#else + ubfx r4, r4, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r6, #8 + lsr r11, r11, #24 +#else + uxtb r11, r6, ror #16 +#endif +#else + ubfx r11, r6, #16, #8 +#endif + eor r10, r10, r12, ror #8 + lsr r12, r7, #24 + eor r10, r10, lr, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r5, #16 + lsr lr, lr, #24 +#else + uxtb lr, r5, ror #8 +#endif +#else + ubfx lr, r5, #8, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r12, [r0, r12, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, 
lr, lsl #2] + eor r12, r12, r4, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #8 + eor r11, r11, r12, ror #24 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #24 + lsr r4, r4, #24 +#else + uxtb r4, r9 +#endif +#else + ubfx r4, r9, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r11, #8 + lsr r12, r12, #24 +#else + uxtb r12, r11, ror #16 +#endif +#else + ubfx r12, r11, #16, #8 +#endif + lsr lr, r8, #24 + ldrb r4, [r2, r4] + ldrb r7, [r2, r7] + ldrb r12, [r2, r12] + ldrb lr, [r2, lr] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #24 + lsr r5, r5, #24 +#else + uxtb r5, r10 +#endif +#else + ubfx r5, r10, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r8, #8 + lsr r12, r12, #24 +#else + uxtb r12, r8, ror #16 +#endif +#else + ubfx r12, r8, #16, #8 +#endif + eor r4, r4, lr, lsl #24 + lsr lr, r9, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r5, [r2, r5] + ldrb r12, [r2, r12] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #24 + lsr r6, r6, 
#24 +#else + uxtb r6, r11 +#endif +#else + ubfx r6, r11, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #8 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #16 +#endif +#else + ubfx r12, r9, #16, #8 +#endif + eor r5, r5, lr, lsl #24 + lsr lr, r10, #24 + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + ldrb r6, [r2, r6] + ldrb r12, [r2, r12] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #24 + lsr r7, r7, #24 +#else + uxtb r7, r8 +#endif +#else + ubfx r7, r8, #0, #8 +#endif + eor r6, r6, r12, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r12, r9, #16 + lsr r12, r12, #24 +#else + uxtb r12, r9, ror #8 +#endif +#else + ubfx r12, r9, #8, #8 +#endif + eor r6, r6, lr, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + ldrb r11, [r2, r11] + ldrb r12, [r2, r12] + ldrb r7, [r2, r7] + ldrb lr, [r2, lr] + eor r12, r12, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, r12, lsl #8 + eor r7, r7, lr, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ ldr lr, [sp, #16] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) eor r8, r4, r4, ror #16 @@ -12869,7 +24618,645 @@ L_AES_GCM_encrypt_loop_block_256: eor r6, r6, r10 eor r7, r7, 
r11 mov r1, #6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_GCM_encrypt_block_nr_256: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, 
#16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_GCM_encrypt_block_nr_256 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, 
[r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -12927,7 +25314,645 @@ L_AES_GCM_encrypt_loop_block_192: eor r6, r6, r10 eor r7, r7, r11 mov r1, #5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else +L_AES_GCM_encrypt_block_nr_192: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) 
+ lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror 
#8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 
+#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_GCM_encrypt_block_nr_192 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + 
ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor 
r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -12985,7 +26010,645 @@ L_AES_GCM_encrypt_loop_block_128: eor r6, r6, r10 eor r7, r7, r11 mov r1, #4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE bl AES_encrypt_block +#else 
+L_AES_GCM_encrypt_block_nr_128: +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else + ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r9, #8 + lsr r4, r4, #24 +#else + uxtb r4, r9, ror #16 +#endif +#else + ubfx r4, r9, #16, #8 +#endif + lsr r7, r8, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #16 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #8 +#endif +#else + ubfx lr, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r11, #24 + lsr r2, r2, #24 +#else + uxtb r2, r11 +#endif +#else + ubfx r2, r11, #0, #8 +#endif + ldr r4, [r0, r4, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r10, #8 + lsr r5, r5, #24 +#else + uxtb r5, r10, ror #16 +#endif +#else + ubfx r5, r10, #16, #8 +#endif + eor r4, r4, r7, ror #24 + lsr r7, r9, #24 + eor r4, r4, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #16 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #8 +#endif +#else + ubfx lr, r11, #8, #8 +#endif + eor r4, r4, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #24 + lsr r2, r2, #24 +#else + uxtb r2, r8 +#endif +#else + ubfx r2, r8, #0, #8 +#endif + ldr r5, [r0, r5, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r11, #8 + lsr r6, r6, #24 +#else + uxtb r6, r11, ror #16 +#endif +#else + ubfx r6, r11, #16, #8 +#endif + eor r5, r5, r7, ror #24 + lsr r7, r10, #24 + eor r5, r5, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r8, #16 + lsr lr, lr, #24 +#else + uxtb lr, r8, ror #8 +#endif +#else + ubfx lr, r8, #8, #8 +#endif + eor r5, r5, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) 
+ lsl r2, r9, #24 + lsr r2, r2, #24 +#else + uxtb r2, r9 +#endif +#else + ubfx r2, r9, #0, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r10, #24 + lsr r10, r10, #24 +#else + uxtb r10, r10 +#endif +#else + ubfx r10, r10, #0, #8 +#endif + eor r6, r6, r7, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #8 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #16 +#endif +#else + ubfx r7, r8, #16, #8 +#endif + eor r6, r6, lr, ror #8 + lsr lr, r11, #24 + eor r6, r6, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r9, #16 + lsr r2, r2, #24 +#else + uxtb r2, r9, ror #8 +#endif +#else + ubfx r2, r9, #8, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r7, [r0, r7, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r10, ror #24 + ldm r3!, {r8, r9, r10, r11} + eor r7, r7, lr, ror #24 + eor r7, r7, r2, ror #8 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 + subs r1, r1, #1 + bne L_AES_GCM_encrypt_block_nr_128 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r8, r5, #8 + lsr r8, r8, #24 +#else + uxtb r8, r5, ror #16 +#endif +#else + ubfx r8, r5, #16, #8 +#endif + lsr r11, r4, #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r6, #16 + lsr lr, lr, #24 +#else + uxtb lr, r6, ror #8 +#endif +#else + ubfx lr, r6, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r7, #24 + lsr r2, r2, #24 +#else + uxtb r2, r7 +#endif +#else 
+ ubfx r2, r7, #0, #8 +#endif + ldr r8, [r0, r8, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r9, r6, #8 + lsr r9, r9, #24 +#else + uxtb r9, r6, ror #16 +#endif +#else + ubfx r9, r6, #16, #8 +#endif + eor r8, r8, r11, ror #24 + lsr r11, r5, #24 + eor r8, r8, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r7, #16 + lsr lr, lr, #24 +#else + uxtb lr, r7, ror #8 +#endif +#else + ubfx lr, r7, #8, #8 +#endif + eor r8, r8, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r4, #24 + lsr r2, r2, #24 +#else + uxtb r2, r4 +#endif +#else + ubfx r2, r4, #0, #8 +#endif + ldr r9, [r0, r9, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r10, r7, #8 + lsr r10, r10, #24 +#else + uxtb r10, r7, ror #16 +#endif +#else + ubfx r10, r7, #16, #8 +#endif + eor r9, r9, r11, ror #24 + lsr r11, r6, #24 + eor r9, r9, lr, ror #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r4, #16 + lsr lr, lr, #24 +#else + uxtb lr, r4, ror #8 +#endif +#else + ubfx lr, r4, #8, #8 +#endif + eor r9, r9, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #24 + lsr r2, r2, #24 +#else + uxtb r2, r5 +#endif +#else + ubfx r2, r5, #0, #8 +#endif + ldr r10, [r0, r10, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH 
< 6) + lsl r6, r6, #24 + lsr r6, r6, #24 +#else + uxtb r6, r6 +#endif +#else + ubfx r6, r6, #0, #8 +#endif + eor r10, r10, r11, ror #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r11, r4, #8 + lsr r11, r11, #24 +#else + uxtb r11, r4, ror #16 +#endif +#else + ubfx r11, r4, #16, #8 +#endif + eor r10, r10, lr, ror #8 + lsr lr, r7, #24 + eor r10, r10, r2, ror #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r5, #16 + lsr r2, r2, #24 +#else + uxtb r2, r5, ror #8 +#endif +#else + ubfx r2, r5, #8, #8 +#endif + ldr r6, [r0, r6, lsl #2] + ldr lr, [r0, lr, lsl #2] + ldr r11, [r0, r11, lsl #2] + ldr r2, [r0, r2, lsl #2] + eor lr, lr, r6, ror #24 + ldm r3!, {r4, r5, r6, r7} + eor r11, r11, lr, ror #24 + eor r11, r11, r2, ror #8 + # XOR in Key Schedule + eor r8, r8, r4 + eor r9, r9, r5 + eor r10, r10, r6 + eor r11, r11, r7 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r4, r11, #24 + lsr r4, r4, #24 +#else + uxtb r4, r11 +#endif +#else + ubfx r4, r11, #0, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #16 + lsr r7, r7, #24 +#else + uxtb r7, r10, ror #8 +#endif +#else + ubfx r7, r10, #8, #8 +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #8 + lsr lr, lr, #24 +#else + uxtb lr, r9, ror #16 +#endif +#else + ubfx lr, r9, #16, #8 +#endif + lsr r2, r8, #24 + ldrb r4, [r0, r4, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r5, r8, #24 + lsr r5, r5, #24 +#else + uxtb r5, r8 +#endif +#else + ubfx r5, r8, #0, #8 +#endif + 
eor r4, r4, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r11, #16 + lsr r7, r7, #24 +#else + uxtb r7, r11, ror #8 +#endif +#else + ubfx r7, r11, #8, #8 +#endif + eor r4, r4, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r10, #8 + lsr lr, lr, #24 +#else + uxtb lr, r10, ror #16 +#endif +#else + ubfx lr, r10, #16, #8 +#endif + eor r4, r4, r2, lsl #24 + lsr r2, r9, #24 + ldrb r5, [r0, r5, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r6, r9, #24 + lsr r6, r6, #24 +#else + uxtb r6, r9 +#endif +#else + ubfx r6, r9, #0, #8 +#endif + eor r5, r5, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r8, #16 + lsr r7, r7, #24 +#else + uxtb r7, r8, ror #8 +#endif +#else + ubfx r7, r8, #8, #8 +#endif + eor r5, r5, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r11, #8 + lsr lr, lr, #24 +#else + uxtb lr, r11, ror #16 +#endif +#else + ubfx lr, r11, #16, #8 +#endif + eor r5, r5, r2, lsl #24 + lsr r2, r10, #24 + ldrb r6, [r0, r6, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + lsr r11, r11, #24 + eor r6, r6, r7, lsl #8 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r7, r10, #24 + lsr r7, r7, #24 +#else + uxtb r7, r10 +#endif +#else + ubfx r7, r10, #0, #8 +#endif + eor r6, r6, lr, lsl #16 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl lr, r9, #16 + lsr lr, lr, #24 +#else + uxtb lr, r9, 
ror #8 +#endif +#else + ubfx lr, r9, #8, #8 +#endif + eor r6, r6, r2, lsl #24 +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + lsl r2, r8, #8 + lsr r2, r2, #24 +#else + uxtb r2, r8, ror #16 +#endif +#else + ubfx r2, r8, #16, #8 +#endif + ldrb r11, [r0, r11, lsl #2] + ldrb r7, [r0, r7, lsl #2] + ldrb lr, [r0, lr, lsl #2] + ldrb r2, [r0, r2, lsl #2] + eor lr, lr, r11, lsl #16 + ldm r3, {r8, r9, r10, r11} + eor r7, r7, lr, lsl #8 + eor r7, r7, r2, lsl #16 + # XOR in Key Schedule + eor r4, r4, r8 + eor r5, r5, r9 + eor r6, r6, r10 + eor r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ pop {r1, r2, lr} ldr r3, [sp] #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) diff --git a/wolfcrypt/src/port/arm/armv8-32-aes-asm_c.c b/wolfcrypt/src/port/arm/armv8-32-aes-asm_c.c index 7ff7e3d24..c3aae6f5f 100644 --- a/wolfcrypt/src/port/arm/armv8-32-aes-asm_c.c +++ b/wolfcrypt/src/port/arm/armv8-32-aes-asm_c.c @@ -9348,6 +9348,7 @@ WC_OMIT_FRAME_POINTER void AES_set_encrypt_key(const unsigned char* key, ); } +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE void AES_encrypt_block(const word32* te_p, int nr_p, int len_p, const word32* ks_p); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG @@ -10013,6 +10014,7 @@ WC_OMIT_FRAME_POINTER void AES_encrypt_block(const word32* te, int nr, int len, ); } +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ #if defined(HAVE_AESCCM) || defined(HAVE_AESGCM) || \ defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_ECB) @@ -10087,7 +10089,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_ECB_encrypt_block_nr_256_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + 
"uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror 
#24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, 
r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_encrypt_block_nr_256_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + 
"ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" 
+ "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, 
r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, 
#8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10161,7 +10802,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_ECB_encrypt_block_nr_192_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, 
#24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, 
#24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl 
#2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_encrypt_block_nr_192_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, 
#24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" 
+#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10235,7 +11515,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef 
WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_ECB_encrypt_block_nr_128_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl 
#2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, 
lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + 
"ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, 
r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_encrypt_block_nr_128_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, 
ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, 
#16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10370,7 +12289,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CBC_encrypt_block_nr_256_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, 
[r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, 
lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_encrypt_block_nr_256_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, 
#24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" 
+#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr 
%[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10448,7 +13006,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CBC_encrypt_block_nr_192_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, 
#24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, 
#16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" 
+#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, 
r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_encrypt_block_nr_192_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif 
+#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr 
r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10526,7 +13723,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CBC_encrypt_block_nr_128_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" 
+#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, 
r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_encrypt_block_nr_128_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr 
r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + 
"eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, 
r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl 
#8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10663,7 +14499,646 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CTR_encrypt_block_nr_256_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + 
"uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, 
#8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CTR_encrypt_block_nr_256_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, 
r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl 
#2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl 
#2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10726,7 +15201,646 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CTR_encrypt_block_nr_192_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, 
#24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx 
r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, 
r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CTR_encrypt_block_nr_192_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + 
"eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" 
+ "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, 
#16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10789,7 +15903,646 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_CTR_encrypt_block_nr_128_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor 
r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, 
ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CTR_encrypt_block_nr_128_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + 
"ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, 
[r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, 
#24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -10881,6 +16634,7 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, #ifdef HAVE_AES_DECRYPT #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_CBC) || defined(HAVE_AES_ECB) +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE void 
AES_decrypt_block(const word32* td_p, int nr_p, const byte* td4_p); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG WC_OMIT_FRAME_POINTER void AES_decrypt_block(const word32* td_p, int nr_p, @@ -11544,8 +17298,9 @@ WC_OMIT_FRAME_POINTER void AES_decrypt_block(const word32* td, int nr, ); } +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ static const word32* L_AES_ARM32_td_ecb = L_AES_ARM32_td_data; -static const byte L_AES_ARM32_td4[] = { +static const byte L_AES_ARM32_ecb_td4[] = { 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, @@ -11601,20 +17356,21 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, register int nr asm ("r12") = (int)nr_p; register word32* L_AES_ARM32_td_ecb_c asm ("lr") = (word32*)L_AES_ARM32_td_ecb; - register byte* L_AES_ARM32_td4_c asm ("r4") = (byte*)&L_AES_ARM32_td4; + register byte* L_AES_ARM32_ecb_td4_c asm ("r4") = + (byte*)&L_AES_ARM32_ecb_td4; #else register word32* L_AES_ARM32_td_ecb_c = (word32*)L_AES_ARM32_td_ecb; - register byte* L_AES_ARM32_td4_c = (byte*)&L_AES_ARM32_td4; + register byte* L_AES_ARM32_ecb_td4_c = (byte*)&L_AES_ARM32_ecb_td4; #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ __asm__ __volatile__ ( - "push {%[L_AES_ARM32_td4]}\n\t" + "push {%[L_AES_ARM32_ecb_td4]}\n\t" "push {%[nr], %[L_AES_ARM32_td_ecb]}\n\t" "ldr r8, [sp]\n\t" "mov lr, %[in]\n\t" "ldr r0, [sp, #4]\n\t" "mov r12, %[len]\n\t" - "mov r2, %[L_AES_ARM32_td4]\n\t" + "mov r2, %[L_AES_ARM32_ecb_td4]\n\t" "cmp r8, #10\n\t" "beq L_AES_ECB_decrypt_start_block_128_%=\n\t" "cmp r8, #12\n\t" @@ -11656,7 +17412,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_ECB_decrypt_block_nr_256_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, 
ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor 
r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + 
"eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_decrypt_block_nr_256_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr 
lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + 
"lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[ks], r12, lr}\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -11729,7 +18124,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_ECB_decrypt_block_nr_192_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, 
[r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr 
r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_decrypt_block_nr_192_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, 
r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[ks], r12, lr}\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -11802,7 +18836,646 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, 
r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_ECB_decrypt_block_nr_128_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr 
r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl 
#2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, 
r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, 
#16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_ECB_decrypt_block_nr_128_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + 
"uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, 
r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[ks], r12, lr}\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -11838,17 +19511,17 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "\n" "L_AES_ECB_decrypt_end_%=: \n\t" "pop {%[nr], %[L_AES_ARM32_td_ecb]}\n\t" - "pop {%[L_AES_ARM32_td4]}\n\t" + "pop {%[L_AES_ARM32_ecb_td4]}\n\t" #ifndef WOLFSSL_NO_VAR_ASSIGN_REG : [in] "+r" (in), [out] "+r" (out), [len] "+r" (len), [ks] "+r" (ks), [nr] "+r" (nr), [L_AES_ARM32_td_ecb] "+r" (L_AES_ARM32_td_ecb_c), - [L_AES_ARM32_td4] "+r" (L_AES_ARM32_td4_c) + [L_AES_ARM32_ecb_td4] "+r" (L_AES_ARM32_ecb_td4_c) : #else : : [in] "r" (in), [out] "r" (out), [len] "r" (len), [ks] "r" (ks), [nr] "r" (nr), [L_AES_ARM32_td_ecb] "r" (L_AES_ARM32_td_ecb_c), - [L_AES_ARM32_td4] "r" (L_AES_ARM32_td4_c) + [L_AES_ARM32_ecb_td4] "r" (L_AES_ARM32_ecb_td4_c) #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ : "memory", "cc", "r5", "r6", "r7", "r8", "r9", "r10", "r11" ); @@ -11856,6 +19529,41 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, #endif /* WOLFSSL_AES_DIRECT || 
WOLFSSL_AES_COUNTER || defined(HAVE_AES_ECB) */ #ifdef HAVE_AES_CBC +static const byte L_AES_ARM32_cbc_td4[] = { + 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, + 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, + 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, + 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, + 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, + 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, + 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, + 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, + 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, + 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, + 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, + 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, + 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, + 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, + 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, + 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, + 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, + 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, + 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, + 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, + 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, + 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, + 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, + 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, + 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, + 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, + 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, + 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, + 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, + 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, + 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, + 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d, +}; + void AES_CBC_decrypt(const unsigned char* in_p, unsigned char* out_p, unsigned long len_p, const unsigned char* ks_p, int nr_p, unsigned char* iv_p); @@ -11878,19 +19586,20 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, register unsigned char* iv asm 
("lr") = (unsigned char*)iv_p; register word32* L_AES_ARM32_td_ecb_c asm ("r4") = (word32*)L_AES_ARM32_td_ecb; - register byte* L_AES_ARM32_td4_c asm ("r5") = (byte*)&L_AES_ARM32_td4; + register byte* L_AES_ARM32_cbc_td4_c asm ("r5") = + (byte*)&L_AES_ARM32_cbc_td4; #else register word32* L_AES_ARM32_td_ecb_c = (word32*)L_AES_ARM32_td_ecb; - register byte* L_AES_ARM32_td4_c = (byte*)&L_AES_ARM32_td4; + register byte* L_AES_ARM32_cbc_td4_c = (byte*)&L_AES_ARM32_cbc_td4; #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ __asm__ __volatile__ ( - "push {%[L_AES_ARM32_td_ecb], %[L_AES_ARM32_td4]}\n\t" + "push {%[L_AES_ARM32_td_ecb], %[L_AES_ARM32_cbc_td4]}\n\t" "push {%[nr], %[iv]}\n\t" "mov lr, %[in]\n\t" "ldr r0, [sp, #8]\n\t" "mov r12, %[len]\n\t" - "mov r2, %[L_AES_ARM32_td4]\n\t" + "mov r2, %[L_AES_ARM32_cbc_td4]\n\t" "ldr r8, [sp]\n\t" "ldr r4, [sp, #4]\n\t" "push {%[ks]-r4}\n\t" @@ -11948,7 +19657,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_256_odd_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl 
#2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, 
r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 
6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_256_odd_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, 
#16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, 
#24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, 
#8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" 
@@ -12035,7 +20383,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_256_even_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, 
r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + 
"uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror 
#16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_256_even_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, 
r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx 
r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -12137,7 +21124,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_192_odd_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, 
#16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, 
#24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_192_odd_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" 
+#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + 
"eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + 
"eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, 
r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, 
r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -12224,7 +21850,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_192_even_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, 
r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror 
#8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_192_even_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, 
#8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl 
r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" 
+ "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + 
"eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -12326,7 +22591,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_128_odd_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, 
#24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" 
+#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor 
r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_128_odd_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl 
#2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + 
"lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + 
"ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -12413,7 +23317,646 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_decrypt_block\n\t" +#else + "\n" + "L_AES_CBC_decrypt_block_nr_128_even_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor 
r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11, ror #16\n\t" +#endif +#else + "ubfx r4, r11, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r10, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r10, ror #8\n\t" +#endif +#else + "ubfx r12, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9\n\t" +#endif +#else + "ubfx lr, r9, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, 
#8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8, ror #16\n\t" +#endif +#else + "ubfx r5, r8, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r11, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #8\n\t" +#endif +#else + "ubfx r12, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10\n\t" +#endif +#else + "ubfx lr, r10, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9, ror #16\n\t" +#endif +#else + "ubfx r6, r9, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #8\n\t" +#endif +#else + "ubfx r12, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11\n\t" +#endif +#else + "ubfx lr, r11, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, 
r8, #24\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r8\n\t" +#endif +#else + "ubfx r8, r8, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #16\n\t" +#endif +#else + "ubfx r7, r10, #16, #8\n\t" +#endif + "eor r6, r6, r12, ror #8\n\t" + "lsr r12, r11, #24\n\t" + "eor r6, r6, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r8, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #8\n\t" + "eor r7, r7, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_CBC_decrypt_block_nr_128_even_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r7, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r7, ror #16\n\t" +#endif +#else + "ubfx r8, r7, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r6, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r6, ror #8\n\t" +#endif +#else + "ubfx r12, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5\n\t" +#endif +#else + "ubfx lr, r5, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl 
#2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r4, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r4, ror #16\n\t" +#endif +#else + "ubfx r9, r4, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r7, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r7, ror #8\n\t" +#endif +#else + "ubfx r12, r7, #8, #8\n\t" +#endif + "eor r8, r8, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6\n\t" +#endif +#else + "ubfx lr, r6, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r5, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r5, ror #16\n\t" +#endif +#else + "ubfx r10, r5, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, r12, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r4, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r4, ror #8\n\t" +#endif +#else + "ubfx r12, r4, #8, #8\n\t" +#endif + "eor r9, r9, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #24\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7\n\t" +#endif +#else + "ubfx lr, r7, #0, #8\n\t" +#endif + "ldr r10, [r0, 
r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r4, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r4\n\t" +#endif +#else + "ubfx r4, r4, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r6, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r6, ror #16\n\t" +#endif +#else + "ubfx r11, r6, #16, #8\n\t" +#endif + "eor r10, r10, r12, ror #8\n\t" + "lsr r12, r7, #24\n\t" + "eor r10, r10, lr, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r5, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r5, ror #8\n\t" +#endif +#else + "ubfx lr, r5, #8, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r12, [r0, r12, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "eor r12, r12, r4, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #8\n\t" + "eor r11, r11, r12, ror #24\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9\n\t" +#endif +#else + "ubfx r4, r9, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl r12, r11, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r11, ror #16\n\t" +#endif +#else + "ubfx r12, r11, #16, #8\n\t" +#endif + "lsr lr, r8, #24\n\t" + "ldrb r4, [r2, r4]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb lr, [r2, lr]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10\n\t" +#endif +#else + "ubfx r5, r10, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r8, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r8, ror #16\n\t" +#endif +#else + "ubfx r12, r8, #16, #8\n\t" +#endif + "eor r4, r4, lr, lsl #24\n\t" + "lsr lr, r9, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r5, [r2, r5]\n\t" + "ldrb r12, [r2, r12]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11\n\t" +#endif +#else + "ubfx r6, r11, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #8\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror 
#16\n\t" +#endif +#else + "ubfx r12, r9, #16, #8\n\t" +#endif + "eor r5, r5, lr, lsl #24\n\t" + "lsr lr, r10, #24\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "ldrb r6, [r2, r6]\n\t" + "ldrb r12, [r2, r12]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8\n\t" +#endif +#else + "ubfx r7, r8, #0, #8\n\t" +#endif + "eor r6, r6, r12, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r12, r9, #16\n\t" + "lsr r12, r12, #24\n\t" +#else + "uxtb r12, r9, ror #8\n\t" +#endif +#else + "ubfx r12, r9, #8, #8\n\t" +#endif + "eor r6, r6, lr, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "ldrb r11, [r2, r11]\n\t" + "ldrb r12, [r2, r12]\n\t" + "ldrb r7, [r2, r7]\n\t" + "ldrb lr, [r2, lr]\n\t" + "eor r12, r12, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, r12, lsl #8\n\t" + "eor r7, r7, lr, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "ldr lr, [sp, #16]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) "eor r8, r4, r4, ror #16\n\t" @@ -12495,19 +24038,19 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "L_AES_CBC_decrypt_end_%=: \n\t" "pop {%[ks]-r4}\n\t" "pop {%[nr], %[iv]}\n\t" - "pop {%[L_AES_ARM32_td_ecb], %[L_AES_ARM32_td4]}\n\t" + "pop {%[L_AES_ARM32_td_ecb], %[L_AES_ARM32_cbc_td4]}\n\t" #ifndef WOLFSSL_NO_VAR_ASSIGN_REG : [in] "+r" (in), [out] "+r" (out), [len] "+r" (len), [ks] 
"+r" (ks), [nr] "+r" (nr), [iv] "+r" (iv), [L_AES_ARM32_td_ecb] "+r" (L_AES_ARM32_td_ecb_c), - [L_AES_ARM32_td4] "+r" (L_AES_ARM32_td4_c) + [L_AES_ARM32_cbc_td4] "+r" (L_AES_ARM32_cbc_td4_c) : #else : : [in] "r" (in), [out] "r" (out), [len] "r" (len), [ks] "r" (ks), [nr] "r" (nr), [iv] "r" (iv), [L_AES_ARM32_td_ecb] "r" (L_AES_ARM32_td_ecb_c), - [L_AES_ARM32_td4] "r" (L_AES_ARM32_td4_c) + [L_AES_ARM32_cbc_td4] "r" (L_AES_ARM32_cbc_td4_c) #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ : "memory", "cc", "r6", "r7", "r8", "r9", "r10", "r11" ); @@ -13212,7 +24755,646 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_GCM_encrypt_block_nr_256_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" 
+ "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + 
"lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, 
r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_GCM_encrypt_block_nr_256_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + 
"ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr 
r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, 
lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror 
#16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -13272,7 +25454,646 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_GCM_encrypt_block_nr_192_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) 
+#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" 
+#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule 
*/ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + "ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, 
lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_GCM_encrypt_block_nr_192_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else 
+ "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + 
"uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if 
defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) @@ -13332,7 +26153,646 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "eor r6, r6, r10\n\t" "eor r7, r7, r11\n\t" "mov r1, #4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "bl AES_encrypt_block\n\t" +#else + "\n" + "L_AES_GCM_encrypt_block_nr_128_%=: \n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) 
&& (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r9, #8\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r9, ror #16\n\t" +#endif +#else + "ubfx r4, r9, #16, #8\n\t" +#endif + "lsr r7, r8, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #8\n\t" +#endif +#else + "ubfx lr, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r11, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r11\n\t" +#endif +#else + "ubfx r2, r11, #0, #8\n\t" +#endif + 
"ldr r4, [r0, r4, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r10, #8\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r10, ror #16\n\t" +#endif +#else + "ubfx r5, r10, #16, #8\n\t" +#endif + "eor r4, r4, r7, ror #24\n\t" + "lsr r7, r9, #24\n\t" + "eor r4, r4, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #8\n\t" +#endif +#else + "ubfx lr, r11, #8, #8\n\t" +#endif + "eor r4, r4, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8\n\t" +#endif +#else + "ubfx r2, r8, #0, #8\n\t" +#endif + "ldr r5, [r0, r5, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r11, #8\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r11, ror #16\n\t" +#endif +#else + "ubfx r6, r11, #16, #8\n\t" +#endif + "eor r5, r5, r7, ror #24\n\t" + "lsr r7, r10, #24\n\t" + "eor r5, r5, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r8, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r8, ror #8\n\t" +#endif +#else + "ubfx lr, r8, #8, #8\n\t" +#endif + "eor r5, r5, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9\n\t" +#endif +#else + "ubfx r2, r9, #0, #8\n\t" +#endif + "ldr r6, [r0, 
r6, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r10, #24\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r10\n\t" +#endif +#else + "ubfx r10, r10, #0, #8\n\t" +#endif + "eor r6, r6, r7, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #8\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #16\n\t" +#endif +#else + "ubfx r7, r8, #16, #8\n\t" +#endif + "eor r6, r6, lr, ror #8\n\t" + "lsr lr, r11, #24\n\t" + "eor r6, r6, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r9, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r9, ror #8\n\t" +#endif +#else + "ubfx r2, r9, #8, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r7, [r0, r7, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r10, ror #24\n\t" + "ldm %[ks]!, {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, ror #24\n\t" + "eor r7, r7, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" + "subs r1, r1, #1\n\t" + "bne L_AES_GCM_encrypt_block_nr_128_%=\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r8, r5, #8\n\t" + "lsr r8, r8, #24\n\t" +#else + "uxtb r8, r5, ror #16\n\t" +#endif +#else + "ubfx r8, r5, #16, #8\n\t" +#endif + "lsr r11, r4, #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r6, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r6, ror #8\n\t" +#endif +#else + "ubfx lr, r6, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r7, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r7\n\t" +#endif +#else + "ubfx r2, r7, #0, #8\n\t" +#endif + "ldr r8, [r0, r8, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r9, r6, #8\n\t" + "lsr r9, r9, #24\n\t" +#else + "uxtb r9, r6, ror #16\n\t" +#endif +#else + "ubfx r9, r6, #16, #8\n\t" +#endif + "eor r8, r8, r11, ror #24\n\t" + "lsr r11, r5, #24\n\t" + "eor r8, r8, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r7, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r7, ror #8\n\t" +#endif +#else + "ubfx lr, r7, #8, #8\n\t" +#endif + "eor r8, r8, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r4, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r4\n\t" +#endif +#else + "ubfx r2, r4, #0, #8\n\t" +#endif + "ldr r9, [r0, r9, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r10, r7, #8\n\t" + "lsr r10, r10, #24\n\t" +#else + "uxtb r10, r7, ror #16\n\t" +#endif +#else + "ubfx r10, r7, #16, #8\n\t" +#endif + "eor r9, r9, r11, ror #24\n\t" + "lsr r11, r6, #24\n\t" + "eor r9, r9, lr, ror #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r4, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r4, ror #8\n\t" +#endif +#else + "ubfx lr, r4, #8, #8\n\t" +#endif + "eor r9, r9, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && 
(WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #24\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5\n\t" +#endif +#else + "ubfx r2, r5, #0, #8\n\t" +#endif + "ldr r10, [r0, r10, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r6, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r6\n\t" +#endif +#else + "ubfx r6, r6, #0, #8\n\t" +#endif + "eor r10, r10, r11, ror #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r11, r4, #8\n\t" + "lsr r11, r11, #24\n\t" +#else + "uxtb r11, r4, ror #16\n\t" +#endif +#else + "ubfx r11, r4, #16, #8\n\t" +#endif + "eor r10, r10, lr, ror #8\n\t" + "lsr lr, r7, #24\n\t" + "eor r10, r10, r2, ror #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r5, #16\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r5, ror #8\n\t" +#endif +#else + "ubfx r2, r5, #8, #8\n\t" +#endif + "ldr r6, [r0, r6, lsl #2]\n\t" + "ldr lr, [r0, lr, lsl #2]\n\t" + "ldr r11, [r0, r11, lsl #2]\n\t" + "ldr r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r6, ror #24\n\t" + "ldm %[ks]!, {r4, r5, r6, r7}\n\t" + "eor r11, r11, lr, ror #24\n\t" + "eor r11, r11, r2, ror #8\n\t" + /* XOR in Key Schedule */ + "eor r8, r8, r4\n\t" + "eor r9, r9, r5\n\t" + "eor r10, r10, r6\n\t" + "eor r11, r11, r7\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r4, r11, #24\n\t" + "lsr r4, r4, #24\n\t" +#else + "uxtb r4, r11\n\t" +#endif +#else + "ubfx r4, r11, #0, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #16\n\t" + "lsr r7, 
r7, #24\n\t" +#else + "uxtb r7, r10, ror #8\n\t" +#endif +#else + "ubfx r7, r10, #8, #8\n\t" +#endif +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #16\n\t" +#endif +#else + "ubfx lr, r9, #16, #8\n\t" +#endif + "lsr r2, r8, #24\n\t" + "ldrb r4, [r0, r4, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r5, r8, #24\n\t" + "lsr r5, r5, #24\n\t" +#else + "uxtb r5, r8\n\t" +#endif +#else + "ubfx r5, r8, #0, #8\n\t" +#endif + "eor r4, r4, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r11, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r11, ror #8\n\t" +#endif +#else + "ubfx r7, r11, #8, #8\n\t" +#endif + "eor r4, r4, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r10, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r10, ror #16\n\t" +#endif +#else + "ubfx lr, r10, #16, #8\n\t" +#endif + "eor r4, r4, r2, lsl #24\n\t" + "lsr r2, r9, #24\n\t" + "ldrb r5, [r0, r5, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r6, r9, #24\n\t" + "lsr r6, r6, #24\n\t" +#else + "uxtb r6, r9\n\t" +#endif +#else + "ubfx r6, r9, #0, #8\n\t" +#endif + "eor r5, r5, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r8, #16\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r8, ror #8\n\t" +#endif +#else + "ubfx 
r7, r8, #8, #8\n\t" +#endif + "eor r5, r5, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r11, #8\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r11, ror #16\n\t" +#endif +#else + "ubfx lr, r11, #16, #8\n\t" +#endif + "eor r5, r5, r2, lsl #24\n\t" + "lsr r2, r10, #24\n\t" + "ldrb r6, [r0, r6, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "lsr r11, r11, #24\n\t" + "eor r6, r6, r7, lsl #8\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r7, r10, #24\n\t" + "lsr r7, r7, #24\n\t" +#else + "uxtb r7, r10\n\t" +#endif +#else + "ubfx r7, r10, #0, #8\n\t" +#endif + "eor r6, r6, lr, lsl #16\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl lr, r9, #16\n\t" + "lsr lr, lr, #24\n\t" +#else + "uxtb lr, r9, ror #8\n\t" +#endif +#else + "ubfx lr, r9, #8, #8\n\t" +#endif + "eor r6, r6, r2, lsl #24\n\t" +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 7) +#if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) + "lsl r2, r8, #8\n\t" + "lsr r2, r2, #24\n\t" +#else + "uxtb r2, r8, ror #16\n\t" +#endif +#else + "ubfx r2, r8, #16, #8\n\t" +#endif + "ldrb r11, [r0, r11, lsl #2]\n\t" + "ldrb r7, [r0, r7, lsl #2]\n\t" + "ldrb lr, [r0, lr, lsl #2]\n\t" + "ldrb r2, [r0, r2, lsl #2]\n\t" + "eor lr, lr, r11, lsl #16\n\t" + "ldm %[ks], {r8, r9, r10, r11}\n\t" + "eor r7, r7, lr, lsl #8\n\t" + "eor r7, r7, r2, lsl #16\n\t" + /* XOR in Key Schedule */ + "eor r4, r4, r8\n\t" + "eor r5, r5, r9\n\t" + "eor r6, r6, r10\n\t" + "eor r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "pop {r1, %[len], lr}\n\t" "ldr %[ks], [sp]\n\t" #if defined(WOLFSSL_ARM_ARCH) && (WOLFSSL_ARM_ARCH < 6) diff --git a/wolfcrypt/src/port/arm/armv8-aes-asm.S 
b/wolfcrypt/src/port/arm/armv8-aes-asm.S index 431dbb1dd..f0fd188a8 100644 --- a/wolfcrypt/src/port/arm/armv8-aes-asm.S +++ b/wolfcrypt/src/port/arm/armv8-aes-asm.S @@ -43294,14 +43294,14 @@ _AES_set_encrypt_key_NEON: add x4, x4, :lo12:L_AES_ARM64_NEON_rcon #else adrp x4, L_AES_ARM64_NEON_rcon@PAGE - add x4, x4, :lo12:L_AES_ARM64_NEON_rcon@PAGEOFF + add x4, x4, L_AES_ARM64_NEON_rcon@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x5, L_AES_ARM64_NEON_te add x5, x5, :lo12:L_AES_ARM64_NEON_te #else adrp x5, L_AES_ARM64_NEON_te@PAGE - add x5, x5, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x5, x5, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ ld1 {v6.16b, v7.16b, v8.16b, v9.16b}, [x5], #0x40 ld1 {v10.16b, v11.16b, v12.16b, v13.16b}, [x5], #0x40 @@ -43543,14 +43543,14 @@ _AES_ECB_encrypt_NEON: add x5, x5, :lo12:L_AES_ARM64_NEON_te #else adrp x5, L_AES_ARM64_NEON_te@PAGE - add x5, x5, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x5, x5, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x6, L_AES_ARM64_NEON_shift_rows_shuffle add x6, x6, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x6, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x6, x6, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x6, x6, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x5], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x5], #0x40 @@ -44324,14 +44324,14 @@ _AES_CBC_encrypt_NEON: add x6, x6, :lo12:L_AES_ARM64_NEON_te #else adrp x6, L_AES_ARM64_NEON_te@PAGE - add x6, x6, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x6, x6, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x7, L_AES_ARM64_NEON_shift_rows_shuffle add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x7, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x7, x7, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v10.16b, 
v11.16b, v12.16b, v13.16b}, [x6], #0x40 ld1 {v14.16b, v15.16b, v16.16b, v17.16b}, [x6], #0x40 @@ -44489,14 +44489,14 @@ _AES_CTR_encrypt_NEON: add x6, x6, :lo12:L_AES_ARM64_NEON_te #else adrp x6, L_AES_ARM64_NEON_te@PAGE - add x6, x6, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x6, x6, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x7, L_AES_ARM64_NEON_shift_rows_shuffle add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x7, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x7, x7, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x6], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x6], #0x40 @@ -45391,14 +45391,14 @@ _AES_ECB_decrypt_NEON: add x5, x5, :lo12:L_AES_ARM64_NEON_td #else adrp x5, L_AES_ARM64_NEON_td@PAGE - add x5, x5, :lo12:L_AES_ARM64_NEON_td@PAGEOFF + add x5, x5, L_AES_ARM64_NEON_td@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x6, L_AES_ARM64_NEON_shift_rows_invshuffle add x6, x6, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle #else adrp x6, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGE - add x6, x6, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF + add x6, x6, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x5], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x5], #0x40 @@ -46441,14 +46441,14 @@ _AES_CBC_decrypt_NEON: add x6, x6, :lo12:L_AES_ARM64_NEON_td #else adrp x6, L_AES_ARM64_NEON_td@PAGE - add x6, x6, :lo12:L_AES_ARM64_NEON_td@PAGEOFF + add x6, x6, L_AES_ARM64_NEON_td@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x7, L_AES_ARM64_NEON_shift_rows_invshuffle add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle #else adrp x7, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGE - add x7, x7, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF + add x7, x7, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF #endif 
/* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x6], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x6], #0x40 @@ -47857,14 +47857,14 @@ _AES_GCM_encrypt_NEON: add x9, x9, :lo12:L_AES_ARM64_NEON_te #else adrp x9, L_AES_ARM64_NEON_te@PAGE - add x9, x9, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x9, x9, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x10, L_AES_ARM64_NEON_shift_rows_shuffle add x10, x10, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x10, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x10, x10, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x10, x10, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x9], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x9], #0x40 @@ -48666,14 +48666,14 @@ _AES_XTS_encrypt_NEON: add x19, x19, :lo12:L_AES_ARM64_NEON_te #else adrp x19, L_AES_ARM64_NEON_te@PAGE - add x19, x19, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x19, x19, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x20, L_AES_ARM64_NEON_shift_rows_shuffle add x20, x20, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x20, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x20, x20, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x20, x20, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x19], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x19], #0x40 @@ -49749,28 +49749,28 @@ _AES_XTS_decrypt_NEON: add x20, x20, :lo12:L_AES_ARM64_NEON_te #else adrp x20, L_AES_ARM64_NEON_te@PAGE - add x20, x20, :lo12:L_AES_ARM64_NEON_te@PAGEOFF + add x20, x20, L_AES_ARM64_NEON_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x21, L_AES_ARM64_NEON_td add x21, x21, :lo12:L_AES_ARM64_NEON_td #else adrp x21, L_AES_ARM64_NEON_td@PAGE - add x21, x21, :lo12:L_AES_ARM64_NEON_td@PAGEOFF + add x21, x21, L_AES_ARM64_NEON_td@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp 
x22, L_AES_ARM64_NEON_shift_rows_shuffle add x22, x22, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle #else adrp x22, L_AES_ARM64_NEON_shift_rows_shuffle@PAGE - add x22, x22, :lo12:L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF + add x22, x22, L_AES_ARM64_NEON_shift_rows_shuffle@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x23, L_AES_ARM64_NEON_shift_rows_invshuffle add x23, x23, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle #else adrp x23, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGE - add x23, x23, :lo12:L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF + add x23, x23, L_AES_ARM64_NEON_shift_rows_invshuffle@PAGEOFF #endif /* __APPLE__ */ ld1 {v16.16b, v17.16b, v18.16b, v19.16b}, [x20], #0x40 ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x20], #0x40 @@ -51851,14 +51851,14 @@ _AES_invert_key: add x2, x2, :lo12:L_AES_ARM64_te #else adrp x2, L_AES_ARM64_te@PAGE - add x2, x2, :lo12:L_AES_ARM64_te@PAGEOFF + add x2, x2, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_AES_ARM64_td add x3, x3, :lo12:L_AES_ARM64_td #else adrp x3, L_AES_ARM64_td@PAGE - add x3, x3, :lo12:L_AES_ARM64_td@PAGEOFF + add x3, x3, L_AES_ARM64_td@PAGEOFF #endif /* __APPLE__ */ add x12, x0, x1, lsl 4 mov w13, w1 @@ -52008,14 +52008,14 @@ _AES_set_encrypt_key: add x5, x5, :lo12:L_AES_ARM64_rcon #else adrp x5, L_AES_ARM64_rcon@PAGE - add x5, x5, :lo12:L_AES_ARM64_rcon@PAGEOFF + add x5, x5, L_AES_ARM64_rcon@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x12, L_AES_ARM64_te add x12, x12, :lo12:L_AES_ARM64_te #else adrp x12, L_AES_ARM64_te@PAGE - add x12, x12, :lo12:L_AES_ARM64_te@PAGEOFF + add x12, x12, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ cmp x1, #0x80 beq L_AES_set_encrypt_key_start_128 @@ -52271,7 +52271,7 @@ _AES_ECB_encrypt: add x5, x5, :lo12:L_AES_ARM64_te #else adrp x5, L_AES_ARM64_te@PAGE - add x5, x5, :lo12:L_AES_ARM64_te@PAGEOFF + add x5, x5, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ L_AES_ECB_encrypt_loop_block_128: mov x17, x3 @@ -52604,7 +52604,7 @@ 
_AES_CBC_encrypt: add x6, x6, :lo12:L_AES_ARM64_te #else adrp x6, L_AES_ARM64_te@PAGE - add x6, x6, :lo12:L_AES_ARM64_te@PAGEOFF + add x6, x6, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ ldp x7, x8, [x5] L_AES_CBC_encrypt_loop_block: @@ -52941,7 +52941,7 @@ _AES_CTR_encrypt: add x6, x6, :lo12:L_AES_ARM64_te #else adrp x6, L_AES_ARM64_te@PAGE - add x6, x6, :lo12:L_AES_ARM64_te@PAGEOFF + add x6, x6, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ ldp x15, x16, [x5] rev32 x15, x15 @@ -53335,14 +53335,14 @@ _AES_ECB_decrypt: add x5, x5, :lo12:L_AES_ARM64_td #else adrp x5, L_AES_ARM64_td@PAGE - add x5, x5, :lo12:L_AES_ARM64_td@PAGEOFF + add x5, x5, L_AES_ARM64_td@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x6, L_AES_ARM64_td4 add x6, x6, :lo12:L_AES_ARM64_td4 #else adrp x6, L_AES_ARM64_td4@PAGE - add x6, x6, :lo12:L_AES_ARM64_td4@PAGEOFF + add x6, x6, L_AES_ARM64_td4@PAGEOFF #endif /* __APPLE__ */ L_AES_ECB_decrypt_loop_block: mov x19, x3 @@ -53659,14 +53659,14 @@ _AES_CBC_decrypt: add x6, x6, :lo12:L_AES_ARM64_td4 #else adrp x6, L_AES_ARM64_td4@PAGE - add x6, x6, :lo12:L_AES_ARM64_td4@PAGEOFF + add x6, x6, L_AES_ARM64_td4@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x7, L_AES_ARM64_td add x7, x7, :lo12:L_AES_ARM64_td #else adrp x7, L_AES_ARM64_td@PAGE - add x7, x7, :lo12:L_AES_ARM64_td@PAGEOFF + add x7, x7, L_AES_ARM64_td@PAGEOFF #endif /* __APPLE__ */ L_AES_CBC_decrypt_loop_block: mov x20, x3 @@ -54327,7 +54327,7 @@ _GCM_gmult_len: add x10, x10, :lo12:L_GCM_gmult_len_r #else adrp x10, L_GCM_gmult_len_r@PAGE - add x10, x10, :lo12:L_GCM_gmult_len_r@PAGEOFF + add x10, x10, L_GCM_gmult_len_r@PAGEOFF #endif /* __APPLE__ */ L_GCM_gmult_len_start_block: ldp x4, x5, [x0] @@ -54754,7 +54754,7 @@ _AES_GCM_encrypt: add x19, x19, :lo12:L_AES_ARM64_te #else adrp x19, L_AES_ARM64_te@PAGE - add x19, x19, :lo12:L_AES_ARM64_te@PAGEOFF + add x19, x19, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ ldp x16, x17, [x5] rev32 x16, x16 @@ -55100,7 +55100,7 @@ 
_AES_XTS_encrypt: add x8, x8, :lo12:L_AES_ARM64_te #else adrp x8, L_AES_ARM64_te@PAGE - add x8, x8, :lo12:L_AES_ARM64_te@PAGEOFF + add x8, x8, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ mov x9, #0x87 mov x26, x5 @@ -56056,21 +56056,21 @@ _AES_XTS_decrypt: add x8, x8, :lo12:L_AES_ARM64_td #else adrp x8, L_AES_ARM64_td@PAGE - add x8, x8, :lo12:L_AES_ARM64_td@PAGEOFF + add x8, x8, L_AES_ARM64_td@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x9, L_AES_ARM64_td4 add x9, x9, :lo12:L_AES_ARM64_td4 #else adrp x9, L_AES_ARM64_td4@PAGE - add x9, x9, :lo12:L_AES_ARM64_td4@PAGEOFF + add x9, x9, L_AES_ARM64_td4@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x10, L_AES_ARM64_te add x10, x10, :lo12:L_AES_ARM64_te #else adrp x10, L_AES_ARM64_te@PAGE - add x10, x10, :lo12:L_AES_ARM64_te@PAGEOFF + add x10, x10, L_AES_ARM64_te@PAGEOFF #endif /* __APPLE__ */ ands w11, w2, #15 cset w11, ne diff --git a/wolfcrypt/src/port/arm/armv8-chacha-asm.S b/wolfcrypt/src/port/arm/armv8-chacha-asm.S index 8a5481125..b4f102a2a 100644 --- a/wolfcrypt/src/port/arm/armv8-chacha-asm.S +++ b/wolfcrypt/src/port/arm/armv8-chacha-asm.S @@ -95,14 +95,14 @@ _wc_chacha_crypt_bytes: add x5, x5, :lo12:L_chacha20_arm64_rol8 #else adrp x5, L_chacha20_arm64_rol8@PAGE - add x5, x5, :lo12:L_chacha20_arm64_rol8@PAGEOFF + add x5, x5, L_chacha20_arm64_rol8@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x6, L_chacha20_arm64_ctr add x6, x6, :lo12:L_chacha20_arm64_ctr #else adrp x6, L_chacha20_arm64_ctr@PAGE - add x6, x6, :lo12:L_chacha20_arm64_ctr@PAGEOFF + add x6, x6, L_chacha20_arm64_ctr@PAGEOFF #endif /* __APPLE__ */ eor v29.16b, v29.16b, v29.16b mov x26, #5 @@ -493,321 +493,233 @@ L_chacha_crypt_bytes_arm64_round_start_320: L_chacha_crypt_bytes_arm64_lt_320: cmp x3, #0x100 blt L_chacha_crypt_bytes_arm64_lt_256 - # Move state into regular register - mov x8, v16.d[0] - mov x10, v16.d[1] - mov x12, v17.d[0] - mov x14, v17.d[1] - mov x16, v18.d[0] - mov x19, v18.d[1] - mov x21, v19.d[0] - mov x23, 
v19.d[1] # Move state into vector registers - mov v0.16b, v16.16b - mov v1.16b, v17.16b - lsr x9, x8, #32 - mov v2.16b, v18.16b - add w5, w21, #1 - mov v3.16b, v19.16b - lsr x11, x10, #32 - mov v4.16b, v16.16b - mov v5.16b, v17.16b - lsr x13, x12, #32 - mov v6.16b, v18.16b - add w6, w21, #2 - mov v7.16b, v19.16b - lsr x15, x14, #32 - mov v8.16b, v16.16b - mov v9.16b, v17.16b - lsr x17, x16, #32 - mov v10.16b, v18.16b - add w21, w21, #3 - mov v11.16b, v19.16b - lsr x20, x19, #32 - mov v7.s[0], w5 - lsr x22, x21, #32 - mov v11.s[0], w6 - lsr x24, x23, #32 - add w7, w21, #1 + dup v0.4s, v16.s[0] + dup v1.4s, v16.s[1] + dup v2.4s, v16.s[2] + dup v3.4s, v16.s[3] + dup v4.4s, v17.s[0] + dup v5.4s, v17.s[1] + dup v6.4s, v17.s[2] + dup v7.4s, v17.s[3] + dup v8.4s, v18.s[0] + dup v9.4s, v18.s[1] + dup v10.4s, v18.s[2] + dup v11.4s, v18.s[3] + dup v12.4s, v19.s[0] + dup v13.4s, v19.s[1] + dup v14.4s, v19.s[2] + dup v15.4s, v19.s[3] + # Add to counter word + add v12.4s, v12.4s, v28.4s # Set number of odd+even rounds to perform mov x26, #10 L_chacha_crypt_bytes_arm64_round_start_256: subs x26, x26, #1 # Round odd # a += b; d ^= a; d <<<= 16; - add v0.4s, v0.4s, v1.4s - add w8, w8, w12 - add v4.4s, v4.4s, v5.4s - add w9, w9, w13 - add v8.4s, v8.4s, v9.4s - add w10, w10, w14 - eor v3.16b, v3.16b, v0.16b - add w11, w11, w15 - eor v7.16b, v7.16b, v4.16b - eor w21, w21, w8 - eor v11.16b, v11.16b, v8.16b - eor w22, w22, w9 - rev32 v3.8h, v3.8h - eor w23, w23, w10 - rev32 v7.8h, v7.8h - eor w24, w24, w11 - rev32 v11.8h, v11.8h - ror w21, w21, #16 + add v0.4s, v0.4s, v4.4s + add v1.4s, v1.4s, v5.4s + add v2.4s, v2.4s, v6.4s + add v3.4s, v3.4s, v7.4s + eor v12.16b, v12.16b, v0.16b + eor v13.16b, v13.16b, v1.16b + eor v14.16b, v14.16b, v2.16b + eor v15.16b, v15.16b, v3.16b + rev32 v12.8h, v12.8h + rev32 v13.8h, v13.8h + rev32 v14.8h, v14.8h + rev32 v15.8h, v15.8h # c += d; b ^= c; b <<<= 12; - add v2.4s, v2.4s, v3.4s - ror w22, w22, #16 - add v6.4s, v6.4s, v7.4s - ror w23, w23, #16 - 
add v10.4s, v10.4s, v11.4s - ror w24, w24, #16 - eor v20.16b, v1.16b, v2.16b - add w16, w16, w21 - eor v21.16b, v5.16b, v6.16b - add w17, w17, w22 - eor v22.16b, v9.16b, v10.16b - add w19, w19, w23 - shl v1.4s, v20.4s, #12 - add w20, w20, w24 + add v8.4s, v8.4s, v12.4s + add v9.4s, v9.4s, v13.4s + add v10.4s, v10.4s, v14.4s + add v11.4s, v11.4s, v15.4s + eor v20.16b, v4.16b, v8.16b + eor v21.16b, v5.16b, v9.16b + eor v22.16b, v6.16b, v10.16b + eor v23.16b, v7.16b, v11.16b + shl v4.4s, v20.4s, #12 shl v5.4s, v21.4s, #12 - eor w12, w12, w16 - shl v9.4s, v22.4s, #12 - eor w13, w13, w17 - sri v1.4s, v20.4s, #20 - eor w14, w14, w19 + shl v6.4s, v22.4s, #12 + shl v7.4s, v23.4s, #12 + sri v4.4s, v20.4s, #20 sri v5.4s, v21.4s, #20 - eor w15, w15, w20 - sri v9.4s, v22.4s, #20 - ror w12, w12, #20 + sri v6.4s, v22.4s, #20 + sri v7.4s, v23.4s, #20 # a += b; d ^= a; d <<<= 8; - add v0.4s, v0.4s, v1.4s - ror w13, w13, #20 - add v4.4s, v4.4s, v5.4s - ror w14, w14, #20 - add v8.4s, v8.4s, v9.4s - ror w15, w15, #20 - eor v3.16b, v3.16b, v0.16b - add w8, w8, w12 - eor v7.16b, v7.16b, v4.16b - add w9, w9, w13 - eor v11.16b, v11.16b, v8.16b - add w10, w10, w14 - tbl v3.16b, {v3.16b}, v30.16b - add w11, w11, w15 - tbl v7.16b, {v7.16b}, v30.16b - eor w21, w21, w8 - tbl v11.16b, {v11.16b}, v30.16b - eor w22, w22, w9 + add v0.4s, v0.4s, v4.4s + add v1.4s, v1.4s, v5.4s + add v2.4s, v2.4s, v6.4s + add v3.4s, v3.4s, v7.4s + eor v12.16b, v12.16b, v0.16b + eor v13.16b, v13.16b, v1.16b + eor v14.16b, v14.16b, v2.16b + eor v15.16b, v15.16b, v3.16b + tbl v12.16b, {v12.16b}, v30.16b + tbl v13.16b, {v13.16b}, v30.16b + tbl v14.16b, {v14.16b}, v30.16b + tbl v15.16b, {v15.16b}, v30.16b # c += d; b ^= c; b <<<= 7; - add v2.4s, v2.4s, v3.4s - eor w23, w23, w10 - add v6.4s, v6.4s, v7.4s - eor w24, w24, w11 - add v10.4s, v10.4s, v11.4s - ror w21, w21, #24 - eor v20.16b, v1.16b, v2.16b - ror w22, w22, #24 - eor v21.16b, v5.16b, v6.16b - ror w23, w23, #24 - eor v22.16b, v9.16b, v10.16b - ror w24, w24, #24 
- shl v1.4s, v20.4s, #7 - add w16, w16, w21 + add v8.4s, v8.4s, v12.4s + add v9.4s, v9.4s, v13.4s + add v10.4s, v10.4s, v14.4s + add v11.4s, v11.4s, v15.4s + eor v20.16b, v4.16b, v8.16b + eor v21.16b, v5.16b, v9.16b + eor v22.16b, v6.16b, v10.16b + eor v23.16b, v7.16b, v11.16b + shl v4.4s, v20.4s, #7 shl v5.4s, v21.4s, #7 - add w17, w17, w22 - shl v9.4s, v22.4s, #7 - add w19, w19, w23 - sri v1.4s, v20.4s, #25 - add w20, w20, w24 + shl v6.4s, v22.4s, #7 + shl v7.4s, v23.4s, #7 + sri v4.4s, v20.4s, #25 sri v5.4s, v21.4s, #25 - eor w12, w12, w16 - sri v9.4s, v22.4s, #25 - eor w13, w13, w17 - ext v3.16b, v3.16b, v3.16b, #12 - eor w14, w14, w19 - ext v7.16b, v7.16b, v7.16b, #12 - eor w15, w15, w20 - ext v11.16b, v11.16b, v11.16b, #12 - ror w12, w12, #25 - ext v1.16b, v1.16b, v1.16b, #4 - ror w13, w13, #25 - ext v5.16b, v5.16b, v5.16b, #4 - ror w14, w14, #25 - ext v9.16b, v9.16b, v9.16b, #4 - ror w15, w15, #25 - ext v2.16b, v2.16b, v2.16b, #8 - ext v6.16b, v6.16b, v6.16b, #8 - ext v10.16b, v10.16b, v10.16b, #8 + sri v6.4s, v22.4s, #25 + sri v7.4s, v23.4s, #25 # Round even # a += b; d ^= a; d <<<= 16; - add v0.4s, v0.4s, v1.4s - add w8, w8, w13 - add v4.4s, v4.4s, v5.4s - add w9, w9, w14 - add v8.4s, v8.4s, v9.4s - add w10, w10, w15 - eor v3.16b, v3.16b, v0.16b - add w11, w11, w12 - eor v7.16b, v7.16b, v4.16b - eor w24, w24, w8 - eor v11.16b, v11.16b, v8.16b - eor w21, w21, w9 - rev32 v3.8h, v3.8h - eor w22, w22, w10 - rev32 v7.8h, v7.8h - eor w23, w23, w11 - rev32 v11.8h, v11.8h - ror w24, w24, #16 + add v0.4s, v0.4s, v5.4s + add v1.4s, v1.4s, v6.4s + add v2.4s, v2.4s, v7.4s + add v3.4s, v3.4s, v4.4s + eor v15.16b, v15.16b, v0.16b + eor v12.16b, v12.16b, v1.16b + eor v13.16b, v13.16b, v2.16b + eor v14.16b, v14.16b, v3.16b + rev32 v15.8h, v15.8h + rev32 v12.8h, v12.8h + rev32 v13.8h, v13.8h + rev32 v14.8h, v14.8h # c += d; b ^= c; b <<<= 12; - add v2.4s, v2.4s, v3.4s - ror w21, w21, #16 - add v6.4s, v6.4s, v7.4s - ror w22, w22, #16 - add v10.4s, v10.4s, v11.4s - ror w23, 
w23, #16 - eor v20.16b, v1.16b, v2.16b - add w19, w19, w24 - eor v21.16b, v5.16b, v6.16b - add w20, w20, w21 - eor v22.16b, v9.16b, v10.16b - add w16, w16, w22 - shl v1.4s, v20.4s, #12 - add w17, w17, w23 - shl v5.4s, v21.4s, #12 - eor w13, w13, w19 - shl v9.4s, v22.4s, #12 - eor w14, w14, w20 - sri v1.4s, v20.4s, #20 - eor w15, w15, w16 - sri v5.4s, v21.4s, #20 - eor w12, w12, w17 - sri v9.4s, v22.4s, #20 - ror w13, w13, #20 + add v10.4s, v10.4s, v15.4s + add v11.4s, v11.4s, v12.4s + add v8.4s, v8.4s, v13.4s + add v9.4s, v9.4s, v14.4s + eor v20.16b, v5.16b, v10.16b + eor v21.16b, v6.16b, v11.16b + eor v22.16b, v7.16b, v8.16b + eor v23.16b, v4.16b, v9.16b + shl v5.4s, v20.4s, #12 + shl v6.4s, v21.4s, #12 + shl v7.4s, v22.4s, #12 + shl v4.4s, v23.4s, #12 + sri v5.4s, v20.4s, #20 + sri v6.4s, v21.4s, #20 + sri v7.4s, v22.4s, #20 + sri v4.4s, v23.4s, #20 # a += b; d ^= a; d <<<= 8; - add v0.4s, v0.4s, v1.4s - ror w14, w14, #20 - add v4.4s, v4.4s, v5.4s - ror w15, w15, #20 - add v8.4s, v8.4s, v9.4s - ror w12, w12, #20 - eor v3.16b, v3.16b, v0.16b - add w8, w8, w13 - eor v7.16b, v7.16b, v4.16b - add w9, w9, w14 - eor v11.16b, v11.16b, v8.16b - add w10, w10, w15 - tbl v3.16b, {v3.16b}, v30.16b - add w11, w11, w12 - tbl v7.16b, {v7.16b}, v30.16b - eor w24, w24, w8 - tbl v11.16b, {v11.16b}, v30.16b - eor w21, w21, w9 + add v0.4s, v0.4s, v5.4s + add v1.4s, v1.4s, v6.4s + add v2.4s, v2.4s, v7.4s + add v3.4s, v3.4s, v4.4s + eor v15.16b, v15.16b, v0.16b + eor v12.16b, v12.16b, v1.16b + eor v13.16b, v13.16b, v2.16b + eor v14.16b, v14.16b, v3.16b + tbl v15.16b, {v15.16b}, v30.16b + tbl v12.16b, {v12.16b}, v30.16b + tbl v13.16b, {v13.16b}, v30.16b + tbl v14.16b, {v14.16b}, v30.16b # c += d; b ^= c; b <<<= 7; - add v2.4s, v2.4s, v3.4s - eor w22, w22, w10 - add v6.4s, v6.4s, v7.4s - eor w23, w23, w11 - add v10.4s, v10.4s, v11.4s - ror w24, w24, #24 - eor v20.16b, v1.16b, v2.16b - ror w21, w21, #24 - eor v21.16b, v5.16b, v6.16b - ror w22, w22, #24 - eor v22.16b, v9.16b, v10.16b - 
ror w23, w23, #24 - shl v1.4s, v20.4s, #7 - add w19, w19, w24 - shl v5.4s, v21.4s, #7 - add w20, w20, w21 - shl v9.4s, v22.4s, #7 - add w16, w16, w22 - sri v1.4s, v20.4s, #25 - add w17, w17, w23 - sri v5.4s, v21.4s, #25 - eor w13, w13, w19 - sri v9.4s, v22.4s, #25 - eor w14, w14, w20 - ext v3.16b, v3.16b, v3.16b, #4 - eor w15, w15, w16 - ext v7.16b, v7.16b, v7.16b, #4 - eor w12, w12, w17 - ext v11.16b, v11.16b, v11.16b, #4 - ror w13, w13, #25 - ext v1.16b, v1.16b, v1.16b, #12 - ror w14, w14, #25 - ext v5.16b, v5.16b, v5.16b, #12 - ror w15, w15, #25 - ext v9.16b, v9.16b, v9.16b, #12 - ror w12, w12, #25 - ext v2.16b, v2.16b, v2.16b, #8 - ext v6.16b, v6.16b, v6.16b, #8 - ext v10.16b, v10.16b, v10.16b, #8 + add v10.4s, v10.4s, v15.4s + add v11.4s, v11.4s, v12.4s + add v8.4s, v8.4s, v13.4s + add v9.4s, v9.4s, v14.4s + eor v20.16b, v5.16b, v10.16b + eor v21.16b, v6.16b, v11.16b + eor v22.16b, v7.16b, v8.16b + eor v23.16b, v4.16b, v9.16b + shl v5.4s, v20.4s, #7 + shl v6.4s, v21.4s, #7 + shl v7.4s, v22.4s, #7 + shl v4.4s, v23.4s, #7 + sri v5.4s, v20.4s, #25 + sri v6.4s, v21.4s, #25 + sri v7.4s, v22.4s, #25 + sri v4.4s, v23.4s, #25 bne L_chacha_crypt_bytes_arm64_round_start_256 + mov x26, #4 + # Add counter now rather than after transposed + add v12.4s, v12.4s, v28.4s # Load message - ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x2], #0x40 - # Add one (2 added during calculating vector results) - add w21, w21, #1 - # Add back state, XOR msg, store (load next block) - add v0.4s, v0.4s, v16.4s - add v1.4s, v1.4s, v17.4s - add v2.4s, v2.4s, v18.4s - add v3.4s, v3.4s, v19.4s - eor v0.16b, v0.16b, v20.16b - eor v1.16b, v1.16b, v21.16b - eor v2.16b, v2.16b, v22.16b - eor v3.16b, v3.16b, v23.16b - ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x2], #0x40 - st1 {v0.4s, v1.4s, v2.4s, v3.4s}, [x1], #0x40 - mov v19.s[0], w5 - add v4.4s, v4.4s, v16.4s - add v5.4s, v5.4s, v17.4s - add v6.4s, v6.4s, v18.4s - add v7.4s, v7.4s, v19.4s - eor v4.16b, v4.16b, v20.16b - eor v5.16b, v5.16b, v21.16b - 
eor v6.16b, v6.16b, v22.16b - eor v7.16b, v7.16b, v23.16b - ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x2], #0x40 - st1 {v4.4s, v5.4s, v6.4s, v7.4s}, [x1], #0x40 - mov v19.s[0], w6 - add v8.4s, v8.4s, v16.4s - add v9.4s, v9.4s, v17.4s - add v10.4s, v10.4s, v18.4s - add v11.4s, v11.4s, v19.4s - eor v8.16b, v8.16b, v20.16b - eor v9.16b, v9.16b, v21.16b - eor v10.16b, v10.16b, v22.16b - eor v11.16b, v11.16b, v23.16b - ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x2], #0x40 - st1 {v8.4s, v9.4s, v10.4s, v11.4s}, [x1], #0x40 - # Move regular registers into vector registers for adding and xor - orr x8, x8, x9, lsl 32 - orr x10, x10, x11, lsl 32 - orr x12, x12, x13, lsl 32 - mov v0.d[0], x8 - orr x14, x14, x15, lsl 32 - mov v0.d[1], x10 - orr x16, x16, x17, lsl 32 - mov v1.d[0], x12 - orr x19, x19, x20, lsl 32 - mov v1.d[1], x14 - orr x21, x21, x22, lsl 32 - mov v2.d[0], x16 - orr x23, x23, x24, lsl 32 - mov v2.d[1], x19 - mov v3.d[0], x21 - mov v3.d[1], x23 - # Add back state, XOR in message and store - add v0.4s, v0.4s, v16.4s - add v1.4s, v1.4s, v17.4s - add v2.4s, v2.4s, v18.4s - add v3.4s, v3.4s, v19.4s - eor v0.16b, v0.16b, v20.16b - eor v1.16b, v1.16b, v21.16b - eor v2.16b, v2.16b, v22.16b - eor v3.16b, v3.16b, v23.16b - st1 {v0.4s, v1.4s, v2.4s, v3.4s}, [x1], #0x40 - mov v19.d[0], x7 + ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [x2], #0x40 + # Transpose vectors + trn1 v20.4s, v0.4s, v1.4s + trn1 v22.4s, v2.4s, v3.4s + trn2 v21.4s, v0.4s, v1.4s + trn2 v23.4s, v2.4s, v3.4s + trn1 v0.2d, v20.2d, v22.2d + trn1 v1.2d, v21.2d, v23.2d + trn2 v2.2d, v20.2d, v22.2d + trn2 v3.2d, v21.2d, v23.2d + trn1 v20.4s, v4.4s, v5.4s + trn1 v22.4s, v6.4s, v7.4s + trn2 v21.4s, v4.4s, v5.4s + trn2 v23.4s, v6.4s, v7.4s + trn1 v4.2d, v20.2d, v22.2d + trn1 v5.2d, v21.2d, v23.2d + trn2 v6.2d, v20.2d, v22.2d + trn2 v7.2d, v21.2d, v23.2d + trn1 v20.4s, v8.4s, v9.4s + trn1 v22.4s, v10.4s, v11.4s + trn2 v21.4s, v8.4s, v9.4s + trn2 v23.4s, v10.4s, v11.4s + trn1 v8.2d, v20.2d, v22.2d + trn1 v9.2d, 
v21.2d, v23.2d + trn2 v10.2d, v20.2d, v22.2d + trn2 v11.2d, v21.2d, v23.2d + trn1 v20.4s, v12.4s, v13.4s + trn1 v22.4s, v14.4s, v15.4s + trn2 v21.4s, v12.4s, v13.4s + trn2 v23.4s, v14.4s, v15.4s + trn1 v12.2d, v20.2d, v22.2d + trn1 v13.2d, v21.2d, v23.2d + trn2 v14.2d, v20.2d, v22.2d + trn2 v15.2d, v21.2d, v23.2d + # Add back state, XOR in message and store (load next block) + add v20.4s, v0.4s, v16.4s + add v21.4s, v4.4s, v17.4s + add v22.4s, v8.4s, v18.4s + add v23.4s, v12.4s, v19.4s + eor v20.16b, v20.16b, v24.16b + eor v21.16b, v21.16b, v25.16b + eor v22.16b, v22.16b, v26.16b + eor v23.16b, v23.16b, v27.16b + ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [x2], #0x40 + st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x1], #0x40 + add v20.4s, v1.4s, v16.4s + add v21.4s, v5.4s, v17.4s + add v22.4s, v9.4s, v18.4s + add v23.4s, v13.4s, v19.4s + eor v20.16b, v20.16b, v24.16b + eor v21.16b, v21.16b, v25.16b + eor v22.16b, v22.16b, v26.16b + eor v23.16b, v23.16b, v27.16b + ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [x2], #0x40 + st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x1], #0x40 + add v20.4s, v2.4s, v16.4s + add v21.4s, v6.4s, v17.4s + add v22.4s, v10.4s, v18.4s + add v23.4s, v14.4s, v19.4s + eor v20.16b, v20.16b, v24.16b + eor v21.16b, v21.16b, v25.16b + eor v22.16b, v22.16b, v26.16b + eor v23.16b, v23.16b, v27.16b + ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [x2], #0x40 + st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x1], #0x40 + add v20.4s, v3.4s, v16.4s + add v21.4s, v7.4s, v17.4s + add v22.4s, v11.4s, v18.4s + add v23.4s, v15.4s, v19.4s + eor v20.16b, v20.16b, v24.16b + eor v21.16b, v21.16b, v25.16b + eor v22.16b, v22.16b, v26.16b + eor v23.16b, v23.16b, v27.16b + st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [x1], #0x40 + mov v29.s[0], w26 sub x3, x3, #0x100 + add v19.4s, v19.4s, v29.4s # Done 256-byte block L_chacha_crypt_bytes_arm64_lt_256: cmp x3, #0x80 @@ -1134,7 +1046,7 @@ _wc_chacha_setkey: add x3, x3, :lo12:L_chacha_setkey_arm64_constant #else adrp x3, 
L_chacha_setkey_arm64_constant@PAGE - add x3, x3, :lo12:L_chacha_setkey_arm64_constant@PAGEOFF + add x3, x3, L_chacha_setkey_arm64_constant@PAGEOFF #endif /* __APPLE__ */ subs x2, x2, #16 add x3, x3, x2 diff --git a/wolfcrypt/src/port/arm/armv8-chacha-asm_c.c b/wolfcrypt/src/port/arm/armv8-chacha-asm_c.c index 9722546fb..fbf67c193 100644 --- a/wolfcrypt/src/port/arm/armv8-chacha-asm_c.c +++ b/wolfcrypt/src/port/arm/armv8-chacha-asm_c.c @@ -439,42 +439,25 @@ void wc_chacha_crypt_bytes(ChaCha* ctx, byte* c, const byte* m, word32 len) "L_chacha_crypt_bytes_arm64_lt_320_%=: \n\t" "cmp %w[len], #0x100\n\t" "b.lt L_chacha_crypt_bytes_arm64_lt_256_%=\n\t" - /* Move state into regular register */ - "mov x8, v16.d[0]\n\t" - "mov x10, v16.d[1]\n\t" - "mov x12, v17.d[0]\n\t" - "mov x14, v17.d[1]\n\t" - "mov x16, v18.d[0]\n\t" - "mov x19, v18.d[1]\n\t" - "mov x21, v19.d[0]\n\t" - "mov x23, v19.d[1]\n\t" /* Move state into vector registers */ - "mov v0.16b, v16.16b\n\t" - "mov v1.16b, v17.16b\n\t" - "lsr x9, x8, #32\n\t" - "mov v2.16b, v18.16b\n\t" - "add %w[rol8], w21, #1\n\t" - "mov v3.16b, v19.16b\n\t" - "lsr x11, x10, #32\n\t" - "mov v4.16b, v16.16b\n\t" - "mov v5.16b, v17.16b\n\t" - "lsr x13, x12, #32\n\t" - "mov v6.16b, v18.16b\n\t" - "add %w[ctr], w21, #2\n\t" - "mov v7.16b, v19.16b\n\t" - "lsr x15, x14, #32\n\t" - "mov v8.16b, v16.16b\n\t" - "mov v9.16b, v17.16b\n\t" - "lsr x17, x16, #32\n\t" - "mov v10.16b, v18.16b\n\t" - "add w21, w21, #3\n\t" - "mov v11.16b, v19.16b\n\t" - "lsr x20, x19, #32\n\t" - "mov v7.s[0], %w[rol8]\n\t" - "lsr x22, x21, #32\n\t" - "mov v11.s[0], %w[ctr]\n\t" - "lsr x24, x23, #32\n\t" - "add w7, w21, #1\n\t" + "dup v0.4s, v16.s[0]\n\t" + "dup v1.4s, v16.s[1]\n\t" + "dup v2.4s, v16.s[2]\n\t" + "dup v3.4s, v16.s[3]\n\t" + "dup v4.4s, v17.s[0]\n\t" + "dup v5.4s, v17.s[1]\n\t" + "dup v6.4s, v17.s[2]\n\t" + "dup v7.4s, v17.s[3]\n\t" + "dup v8.4s, v18.s[0]\n\t" + "dup v9.4s, v18.s[1]\n\t" + "dup v10.4s, v18.s[2]\n\t" + "dup v11.4s, v18.s[3]\n\t" + 
"dup v12.4s, v19.s[0]\n\t" + "dup v13.4s, v19.s[1]\n\t" + "dup v14.4s, v19.s[2]\n\t" + "dup v15.4s, v19.s[3]\n\t" + /* Add to counter word */ + "add v12.4s, v12.4s, v28.4s\n\t" /* Set number of odd+even rounds to perform */ "mov x26, #10\n\t" "\n" @@ -482,279 +465,208 @@ void wc_chacha_crypt_bytes(ChaCha* ctx, byte* c, const byte* m, word32 len) "subs x26, x26, #1\n\t" /* Round odd */ /* a += b; d ^= a; d <<<= 16; */ - "add v0.4s, v0.4s, v1.4s\n\t" - "add w8, w8, w12\n\t" - "add v4.4s, v4.4s, v5.4s\n\t" - "add w9, w9, w13\n\t" - "add v8.4s, v8.4s, v9.4s\n\t" - "add w10, w10, w14\n\t" - "eor v3.16b, v3.16b, v0.16b\n\t" - "add w11, w11, w15\n\t" - "eor v7.16b, v7.16b, v4.16b\n\t" - "eor w21, w21, w8\n\t" - "eor v11.16b, v11.16b, v8.16b\n\t" - "eor w22, w22, w9\n\t" - "rev32 v3.8h, v3.8h\n\t" - "eor w23, w23, w10\n\t" - "rev32 v7.8h, v7.8h\n\t" - "eor w24, w24, w11\n\t" - "rev32 v11.8h, v11.8h\n\t" - "ror w21, w21, #16\n\t" + "add v0.4s, v0.4s, v4.4s\n\t" + "add v1.4s, v1.4s, v5.4s\n\t" + "add v2.4s, v2.4s, v6.4s\n\t" + "add v3.4s, v3.4s, v7.4s\n\t" + "eor v12.16b, v12.16b, v0.16b\n\t" + "eor v13.16b, v13.16b, v1.16b\n\t" + "eor v14.16b, v14.16b, v2.16b\n\t" + "eor v15.16b, v15.16b, v3.16b\n\t" + "rev32 v12.8h, v12.8h\n\t" + "rev32 v13.8h, v13.8h\n\t" + "rev32 v14.8h, v14.8h\n\t" + "rev32 v15.8h, v15.8h\n\t" /* c += d; b ^= c; b <<<= 12; */ - "add v2.4s, v2.4s, v3.4s\n\t" - "ror w22, w22, #16\n\t" - "add v6.4s, v6.4s, v7.4s\n\t" - "ror w23, w23, #16\n\t" - "add v10.4s, v10.4s, v11.4s\n\t" - "ror w24, w24, #16\n\t" - "eor v20.16b, v1.16b, v2.16b\n\t" - "add w16, w16, w21\n\t" - "eor v21.16b, v5.16b, v6.16b\n\t" - "add w17, w17, w22\n\t" - "eor v22.16b, v9.16b, v10.16b\n\t" - "add w19, w19, w23\n\t" - "shl v1.4s, v20.4s, #12\n\t" - "add w20, w20, w24\n\t" + "add v8.4s, v8.4s, v12.4s\n\t" + "add v9.4s, v9.4s, v13.4s\n\t" + "add v10.4s, v10.4s, v14.4s\n\t" + "add v11.4s, v11.4s, v15.4s\n\t" + "eor v20.16b, v4.16b, v8.16b\n\t" + "eor v21.16b, v5.16b, v9.16b\n\t" + "eor 
v22.16b, v6.16b, v10.16b\n\t" + "eor v23.16b, v7.16b, v11.16b\n\t" + "shl v4.4s, v20.4s, #12\n\t" "shl v5.4s, v21.4s, #12\n\t" - "eor w12, w12, w16\n\t" - "shl v9.4s, v22.4s, #12\n\t" - "eor w13, w13, w17\n\t" - "sri v1.4s, v20.4s, #20\n\t" - "eor w14, w14, w19\n\t" + "shl v6.4s, v22.4s, #12\n\t" + "shl v7.4s, v23.4s, #12\n\t" + "sri v4.4s, v20.4s, #20\n\t" "sri v5.4s, v21.4s, #20\n\t" - "eor w15, w15, w20\n\t" - "sri v9.4s, v22.4s, #20\n\t" - "ror w12, w12, #20\n\t" + "sri v6.4s, v22.4s, #20\n\t" + "sri v7.4s, v23.4s, #20\n\t" /* a += b; d ^= a; d <<<= 8; */ - "add v0.4s, v0.4s, v1.4s\n\t" - "ror w13, w13, #20\n\t" - "add v4.4s, v4.4s, v5.4s\n\t" - "ror w14, w14, #20\n\t" - "add v8.4s, v8.4s, v9.4s\n\t" - "ror w15, w15, #20\n\t" - "eor v3.16b, v3.16b, v0.16b\n\t" - "add w8, w8, w12\n\t" - "eor v7.16b, v7.16b, v4.16b\n\t" - "add w9, w9, w13\n\t" - "eor v11.16b, v11.16b, v8.16b\n\t" - "add w10, w10, w14\n\t" - "tbl v3.16b, {v3.16b}, v30.16b\n\t" - "add w11, w11, w15\n\t" - "tbl v7.16b, {v7.16b}, v30.16b\n\t" - "eor w21, w21, w8\n\t" - "tbl v11.16b, {v11.16b}, v30.16b\n\t" - "eor w22, w22, w9\n\t" + "add v0.4s, v0.4s, v4.4s\n\t" + "add v1.4s, v1.4s, v5.4s\n\t" + "add v2.4s, v2.4s, v6.4s\n\t" + "add v3.4s, v3.4s, v7.4s\n\t" + "eor v12.16b, v12.16b, v0.16b\n\t" + "eor v13.16b, v13.16b, v1.16b\n\t" + "eor v14.16b, v14.16b, v2.16b\n\t" + "eor v15.16b, v15.16b, v3.16b\n\t" + "tbl v12.16b, {v12.16b}, v30.16b\n\t" + "tbl v13.16b, {v13.16b}, v30.16b\n\t" + "tbl v14.16b, {v14.16b}, v30.16b\n\t" + "tbl v15.16b, {v15.16b}, v30.16b\n\t" /* c += d; b ^= c; b <<<= 7; */ - "add v2.4s, v2.4s, v3.4s\n\t" - "eor w23, w23, w10\n\t" - "add v6.4s, v6.4s, v7.4s\n\t" - "eor w24, w24, w11\n\t" - "add v10.4s, v10.4s, v11.4s\n\t" - "ror w21, w21, #24\n\t" - "eor v20.16b, v1.16b, v2.16b\n\t" - "ror w22, w22, #24\n\t" - "eor v21.16b, v5.16b, v6.16b\n\t" - "ror w23, w23, #24\n\t" - "eor v22.16b, v9.16b, v10.16b\n\t" - "ror w24, w24, #24\n\t" - "shl v1.4s, v20.4s, #7\n\t" - "add w16, w16, 
w21\n\t" + "add v8.4s, v8.4s, v12.4s\n\t" + "add v9.4s, v9.4s, v13.4s\n\t" + "add v10.4s, v10.4s, v14.4s\n\t" + "add v11.4s, v11.4s, v15.4s\n\t" + "eor v20.16b, v4.16b, v8.16b\n\t" + "eor v21.16b, v5.16b, v9.16b\n\t" + "eor v22.16b, v6.16b, v10.16b\n\t" + "eor v23.16b, v7.16b, v11.16b\n\t" + "shl v4.4s, v20.4s, #7\n\t" "shl v5.4s, v21.4s, #7\n\t" - "add w17, w17, w22\n\t" - "shl v9.4s, v22.4s, #7\n\t" - "add w19, w19, w23\n\t" - "sri v1.4s, v20.4s, #25\n\t" - "add w20, w20, w24\n\t" + "shl v6.4s, v22.4s, #7\n\t" + "shl v7.4s, v23.4s, #7\n\t" + "sri v4.4s, v20.4s, #25\n\t" "sri v5.4s, v21.4s, #25\n\t" - "eor w12, w12, w16\n\t" - "sri v9.4s, v22.4s, #25\n\t" - "eor w13, w13, w17\n\t" - "ext v3.16b, v3.16b, v3.16b, #12\n\t" - "eor w14, w14, w19\n\t" - "ext v7.16b, v7.16b, v7.16b, #12\n\t" - "eor w15, w15, w20\n\t" - "ext v11.16b, v11.16b, v11.16b, #12\n\t" - "ror w12, w12, #25\n\t" - "ext v1.16b, v1.16b, v1.16b, #4\n\t" - "ror w13, w13, #25\n\t" - "ext v5.16b, v5.16b, v5.16b, #4\n\t" - "ror w14, w14, #25\n\t" - "ext v9.16b, v9.16b, v9.16b, #4\n\t" - "ror w15, w15, #25\n\t" - "ext v2.16b, v2.16b, v2.16b, #8\n\t" - "ext v6.16b, v6.16b, v6.16b, #8\n\t" - "ext v10.16b, v10.16b, v10.16b, #8\n\t" + "sri v6.4s, v22.4s, #25\n\t" + "sri v7.4s, v23.4s, #25\n\t" /* Round even */ /* a += b; d ^= a; d <<<= 16; */ - "add v0.4s, v0.4s, v1.4s\n\t" - "add w8, w8, w13\n\t" - "add v4.4s, v4.4s, v5.4s\n\t" - "add w9, w9, w14\n\t" - "add v8.4s, v8.4s, v9.4s\n\t" - "add w10, w10, w15\n\t" - "eor v3.16b, v3.16b, v0.16b\n\t" - "add w11, w11, w12\n\t" - "eor v7.16b, v7.16b, v4.16b\n\t" - "eor w24, w24, w8\n\t" - "eor v11.16b, v11.16b, v8.16b\n\t" - "eor w21, w21, w9\n\t" - "rev32 v3.8h, v3.8h\n\t" - "eor w22, w22, w10\n\t" - "rev32 v7.8h, v7.8h\n\t" - "eor w23, w23, w11\n\t" - "rev32 v11.8h, v11.8h\n\t" - "ror w24, w24, #16\n\t" + "add v0.4s, v0.4s, v5.4s\n\t" + "add v1.4s, v1.4s, v6.4s\n\t" + "add v2.4s, v2.4s, v7.4s\n\t" + "add v3.4s, v3.4s, v4.4s\n\t" + "eor v15.16b, v15.16b, v0.16b\n\t" + 
"eor v12.16b, v12.16b, v1.16b\n\t" + "eor v13.16b, v13.16b, v2.16b\n\t" + "eor v14.16b, v14.16b, v3.16b\n\t" + "rev32 v15.8h, v15.8h\n\t" + "rev32 v12.8h, v12.8h\n\t" + "rev32 v13.8h, v13.8h\n\t" + "rev32 v14.8h, v14.8h\n\t" /* c += d; b ^= c; b <<<= 12; */ - "add v2.4s, v2.4s, v3.4s\n\t" - "ror w21, w21, #16\n\t" - "add v6.4s, v6.4s, v7.4s\n\t" - "ror w22, w22, #16\n\t" - "add v10.4s, v10.4s, v11.4s\n\t" - "ror w23, w23, #16\n\t" - "eor v20.16b, v1.16b, v2.16b\n\t" - "add w19, w19, w24\n\t" - "eor v21.16b, v5.16b, v6.16b\n\t" - "add w20, w20, w21\n\t" - "eor v22.16b, v9.16b, v10.16b\n\t" - "add w16, w16, w22\n\t" - "shl v1.4s, v20.4s, #12\n\t" - "add w17, w17, w23\n\t" - "shl v5.4s, v21.4s, #12\n\t" - "eor w13, w13, w19\n\t" - "shl v9.4s, v22.4s, #12\n\t" - "eor w14, w14, w20\n\t" - "sri v1.4s, v20.4s, #20\n\t" - "eor w15, w15, w16\n\t" - "sri v5.4s, v21.4s, #20\n\t" - "eor w12, w12, w17\n\t" - "sri v9.4s, v22.4s, #20\n\t" - "ror w13, w13, #20\n\t" + "add v10.4s, v10.4s, v15.4s\n\t" + "add v11.4s, v11.4s, v12.4s\n\t" + "add v8.4s, v8.4s, v13.4s\n\t" + "add v9.4s, v9.4s, v14.4s\n\t" + "eor v20.16b, v5.16b, v10.16b\n\t" + "eor v21.16b, v6.16b, v11.16b\n\t" + "eor v22.16b, v7.16b, v8.16b\n\t" + "eor v23.16b, v4.16b, v9.16b\n\t" + "shl v5.4s, v20.4s, #12\n\t" + "shl v6.4s, v21.4s, #12\n\t" + "shl v7.4s, v22.4s, #12\n\t" + "shl v4.4s, v23.4s, #12\n\t" + "sri v5.4s, v20.4s, #20\n\t" + "sri v6.4s, v21.4s, #20\n\t" + "sri v7.4s, v22.4s, #20\n\t" + "sri v4.4s, v23.4s, #20\n\t" /* a += b; d ^= a; d <<<= 8; */ - "add v0.4s, v0.4s, v1.4s\n\t" - "ror w14, w14, #20\n\t" - "add v4.4s, v4.4s, v5.4s\n\t" - "ror w15, w15, #20\n\t" - "add v8.4s, v8.4s, v9.4s\n\t" - "ror w12, w12, #20\n\t" - "eor v3.16b, v3.16b, v0.16b\n\t" - "add w8, w8, w13\n\t" - "eor v7.16b, v7.16b, v4.16b\n\t" - "add w9, w9, w14\n\t" - "eor v11.16b, v11.16b, v8.16b\n\t" - "add w10, w10, w15\n\t" - "tbl v3.16b, {v3.16b}, v30.16b\n\t" - "add w11, w11, w12\n\t" - "tbl v7.16b, {v7.16b}, v30.16b\n\t" - "eor w24, w24, 
w8\n\t" - "tbl v11.16b, {v11.16b}, v30.16b\n\t" - "eor w21, w21, w9\n\t" + "add v0.4s, v0.4s, v5.4s\n\t" + "add v1.4s, v1.4s, v6.4s\n\t" + "add v2.4s, v2.4s, v7.4s\n\t" + "add v3.4s, v3.4s, v4.4s\n\t" + "eor v15.16b, v15.16b, v0.16b\n\t" + "eor v12.16b, v12.16b, v1.16b\n\t" + "eor v13.16b, v13.16b, v2.16b\n\t" + "eor v14.16b, v14.16b, v3.16b\n\t" + "tbl v15.16b, {v15.16b}, v30.16b\n\t" + "tbl v12.16b, {v12.16b}, v30.16b\n\t" + "tbl v13.16b, {v13.16b}, v30.16b\n\t" + "tbl v14.16b, {v14.16b}, v30.16b\n\t" /* c += d; b ^= c; b <<<= 7; */ - "add v2.4s, v2.4s, v3.4s\n\t" - "eor w22, w22, w10\n\t" - "add v6.4s, v6.4s, v7.4s\n\t" - "eor w23, w23, w11\n\t" - "add v10.4s, v10.4s, v11.4s\n\t" - "ror w24, w24, #24\n\t" - "eor v20.16b, v1.16b, v2.16b\n\t" - "ror w21, w21, #24\n\t" - "eor v21.16b, v5.16b, v6.16b\n\t" - "ror w22, w22, #24\n\t" - "eor v22.16b, v9.16b, v10.16b\n\t" - "ror w23, w23, #24\n\t" - "shl v1.4s, v20.4s, #7\n\t" - "add w19, w19, w24\n\t" - "shl v5.4s, v21.4s, #7\n\t" - "add w20, w20, w21\n\t" - "shl v9.4s, v22.4s, #7\n\t" - "add w16, w16, w22\n\t" - "sri v1.4s, v20.4s, #25\n\t" - "add w17, w17, w23\n\t" - "sri v5.4s, v21.4s, #25\n\t" - "eor w13, w13, w19\n\t" - "sri v9.4s, v22.4s, #25\n\t" - "eor w14, w14, w20\n\t" - "ext v3.16b, v3.16b, v3.16b, #4\n\t" - "eor w15, w15, w16\n\t" - "ext v7.16b, v7.16b, v7.16b, #4\n\t" - "eor w12, w12, w17\n\t" - "ext v11.16b, v11.16b, v11.16b, #4\n\t" - "ror w13, w13, #25\n\t" - "ext v1.16b, v1.16b, v1.16b, #12\n\t" - "ror w14, w14, #25\n\t" - "ext v5.16b, v5.16b, v5.16b, #12\n\t" - "ror w15, w15, #25\n\t" - "ext v9.16b, v9.16b, v9.16b, #12\n\t" - "ror w12, w12, #25\n\t" - "ext v2.16b, v2.16b, v2.16b, #8\n\t" - "ext v6.16b, v6.16b, v6.16b, #8\n\t" - "ext v10.16b, v10.16b, v10.16b, #8\n\t" + "add v10.4s, v10.4s, v15.4s\n\t" + "add v11.4s, v11.4s, v12.4s\n\t" + "add v8.4s, v8.4s, v13.4s\n\t" + "add v9.4s, v9.4s, v14.4s\n\t" + "eor v20.16b, v5.16b, v10.16b\n\t" + "eor v21.16b, v6.16b, v11.16b\n\t" + "eor v22.16b, v7.16b, 
v8.16b\n\t" + "eor v23.16b, v4.16b, v9.16b\n\t" + "shl v5.4s, v20.4s, #7\n\t" + "shl v6.4s, v21.4s, #7\n\t" + "shl v7.4s, v22.4s, #7\n\t" + "shl v4.4s, v23.4s, #7\n\t" + "sri v5.4s, v20.4s, #25\n\t" + "sri v6.4s, v21.4s, #25\n\t" + "sri v7.4s, v22.4s, #25\n\t" + "sri v4.4s, v23.4s, #25\n\t" "b.ne L_chacha_crypt_bytes_arm64_round_start_256_%=\n\t" + "mov x26, #4\n\t" + /* Add counter now rather than after transposed */ + "add v12.4s, v12.4s, v28.4s\n\t" /* Load message */ - "ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[m]], #0x40\n\t" - /* Add one (2 added during calculating vector results) */ - "add w21, w21, #1\n\t" - /* Add back state, XOR msg, store (load next block) */ - "add v0.4s, v0.4s, v16.4s\n\t" - "add v1.4s, v1.4s, v17.4s\n\t" - "add v2.4s, v2.4s, v18.4s\n\t" - "add v3.4s, v3.4s, v19.4s\n\t" - "eor v0.16b, v0.16b, v20.16b\n\t" - "eor v1.16b, v1.16b, v21.16b\n\t" - "eor v2.16b, v2.16b, v22.16b\n\t" - "eor v3.16b, v3.16b, v23.16b\n\t" - "ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[m]], #0x40\n\t" - "st1 {v0.4s, v1.4s, v2.4s, v3.4s}, [%x[c]], #0x40\n\t" - "mov v19.s[0], %w[rol8]\n\t" - "add v4.4s, v4.4s, v16.4s\n\t" - "add v5.4s, v5.4s, v17.4s\n\t" - "add v6.4s, v6.4s, v18.4s\n\t" - "add v7.4s, v7.4s, v19.4s\n\t" - "eor v4.16b, v4.16b, v20.16b\n\t" - "eor v5.16b, v5.16b, v21.16b\n\t" - "eor v6.16b, v6.16b, v22.16b\n\t" - "eor v7.16b, v7.16b, v23.16b\n\t" - "ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[m]], #0x40\n\t" - "st1 {v4.4s, v5.4s, v6.4s, v7.4s}, [%x[c]], #0x40\n\t" - "mov v19.s[0], %w[ctr]\n\t" - "add v8.4s, v8.4s, v16.4s\n\t" - "add v9.4s, v9.4s, v17.4s\n\t" - "add v10.4s, v10.4s, v18.4s\n\t" - "add v11.4s, v11.4s, v19.4s\n\t" - "eor v8.16b, v8.16b, v20.16b\n\t" - "eor v9.16b, v9.16b, v21.16b\n\t" - "eor v10.16b, v10.16b, v22.16b\n\t" - "eor v11.16b, v11.16b, v23.16b\n\t" - "ld1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[m]], #0x40\n\t" - "st1 {v8.4s, v9.4s, v10.4s, v11.4s}, [%x[c]], #0x40\n\t" - /* Move regular registers into vector registers 
for adding and xor */ - "orr x8, x8, x9, lsl 32\n\t" - "orr x10, x10, x11, lsl 32\n\t" - "orr x12, x12, x13, lsl 32\n\t" - "mov v0.d[0], x8\n\t" - "orr x14, x14, x15, lsl 32\n\t" - "mov v0.d[1], x10\n\t" - "orr x16, x16, x17, lsl 32\n\t" - "mov v1.d[0], x12\n\t" - "orr x19, x19, x20, lsl 32\n\t" - "mov v1.d[1], x14\n\t" - "orr x21, x21, x22, lsl 32\n\t" - "mov v2.d[0], x16\n\t" - "orr x23, x23, x24, lsl 32\n\t" - "mov v2.d[1], x19\n\t" - "mov v3.d[0], x21\n\t" - "mov v3.d[1], x23\n\t" - /* Add back state, XOR in message and store */ - "add v0.4s, v0.4s, v16.4s\n\t" - "add v1.4s, v1.4s, v17.4s\n\t" - "add v2.4s, v2.4s, v18.4s\n\t" - "add v3.4s, v3.4s, v19.4s\n\t" - "eor v0.16b, v0.16b, v20.16b\n\t" - "eor v1.16b, v1.16b, v21.16b\n\t" - "eor v2.16b, v2.16b, v22.16b\n\t" - "eor v3.16b, v3.16b, v23.16b\n\t" - "st1 {v0.4s, v1.4s, v2.4s, v3.4s}, [%x[c]], #0x40\n\t" - "mov v19.d[0], x7\n\t" + "ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [%x[m]], #0x40\n\t" + /* Transpose vectors */ + "trn1 v20.4s, v0.4s, v1.4s\n\t" + "trn1 v22.4s, v2.4s, v3.4s\n\t" + "trn2 v21.4s, v0.4s, v1.4s\n\t" + "trn2 v23.4s, v2.4s, v3.4s\n\t" + "trn1 v0.2d, v20.2d, v22.2d\n\t" + "trn1 v1.2d, v21.2d, v23.2d\n\t" + "trn2 v2.2d, v20.2d, v22.2d\n\t" + "trn2 v3.2d, v21.2d, v23.2d\n\t" + "trn1 v20.4s, v4.4s, v5.4s\n\t" + "trn1 v22.4s, v6.4s, v7.4s\n\t" + "trn2 v21.4s, v4.4s, v5.4s\n\t" + "trn2 v23.4s, v6.4s, v7.4s\n\t" + "trn1 v4.2d, v20.2d, v22.2d\n\t" + "trn1 v5.2d, v21.2d, v23.2d\n\t" + "trn2 v6.2d, v20.2d, v22.2d\n\t" + "trn2 v7.2d, v21.2d, v23.2d\n\t" + "trn1 v20.4s, v8.4s, v9.4s\n\t" + "trn1 v22.4s, v10.4s, v11.4s\n\t" + "trn2 v21.4s, v8.4s, v9.4s\n\t" + "trn2 v23.4s, v10.4s, v11.4s\n\t" + "trn1 v8.2d, v20.2d, v22.2d\n\t" + "trn1 v9.2d, v21.2d, v23.2d\n\t" + "trn2 v10.2d, v20.2d, v22.2d\n\t" + "trn2 v11.2d, v21.2d, v23.2d\n\t" + "trn1 v20.4s, v12.4s, v13.4s\n\t" + "trn1 v22.4s, v14.4s, v15.4s\n\t" + "trn2 v21.4s, v12.4s, v13.4s\n\t" + "trn2 v23.4s, v14.4s, v15.4s\n\t" + "trn1 v12.2d, v20.2d, 
v22.2d\n\t" + "trn1 v13.2d, v21.2d, v23.2d\n\t" + "trn2 v14.2d, v20.2d, v22.2d\n\t" + "trn2 v15.2d, v21.2d, v23.2d\n\t" + /* Add back state, XOR in message and store (load next block) */ + "add v20.4s, v0.4s, v16.4s\n\t" + "add v21.4s, v4.4s, v17.4s\n\t" + "add v22.4s, v8.4s, v18.4s\n\t" + "add v23.4s, v12.4s, v19.4s\n\t" + "eor v20.16b, v20.16b, v24.16b\n\t" + "eor v21.16b, v21.16b, v25.16b\n\t" + "eor v22.16b, v22.16b, v26.16b\n\t" + "eor v23.16b, v23.16b, v27.16b\n\t" + "ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [%x[m]], #0x40\n\t" + "st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[c]], #0x40\n\t" + "add v20.4s, v1.4s, v16.4s\n\t" + "add v21.4s, v5.4s, v17.4s\n\t" + "add v22.4s, v9.4s, v18.4s\n\t" + "add v23.4s, v13.4s, v19.4s\n\t" + "eor v20.16b, v20.16b, v24.16b\n\t" + "eor v21.16b, v21.16b, v25.16b\n\t" + "eor v22.16b, v22.16b, v26.16b\n\t" + "eor v23.16b, v23.16b, v27.16b\n\t" + "ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [%x[m]], #0x40\n\t" + "st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[c]], #0x40\n\t" + "add v20.4s, v2.4s, v16.4s\n\t" + "add v21.4s, v6.4s, v17.4s\n\t" + "add v22.4s, v10.4s, v18.4s\n\t" + "add v23.4s, v14.4s, v19.4s\n\t" + "eor v20.16b, v20.16b, v24.16b\n\t" + "eor v21.16b, v21.16b, v25.16b\n\t" + "eor v22.16b, v22.16b, v26.16b\n\t" + "eor v23.16b, v23.16b, v27.16b\n\t" + "ld1 {v24.16b, v25.16b, v26.16b, v27.16b}, [%x[m]], #0x40\n\t" + "st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[c]], #0x40\n\t" + "add v20.4s, v3.4s, v16.4s\n\t" + "add v21.4s, v7.4s, v17.4s\n\t" + "add v22.4s, v11.4s, v18.4s\n\t" + "add v23.4s, v15.4s, v19.4s\n\t" + "eor v20.16b, v20.16b, v24.16b\n\t" + "eor v21.16b, v21.16b, v25.16b\n\t" + "eor v22.16b, v22.16b, v26.16b\n\t" + "eor v23.16b, v23.16b, v27.16b\n\t" + "st1 {v20.16b, v21.16b, v22.16b, v23.16b}, [%x[c]], #0x40\n\t" + "mov v29.s[0], w26\n\t" "sub %w[len], %w[len], #0x100\n\t" + "add v19.4s, v19.4s, v29.4s\n\t" /* Done 256-byte block */ "\n" "L_chacha_crypt_bytes_arm64_lt_256_%=: \n\t" diff --git 
a/wolfcrypt/src/port/arm/armv8-curve25519.S b/wolfcrypt/src/port/arm/armv8-curve25519.S index ffaf2d6b5..b3f0e31b4 100644 --- a/wolfcrypt/src/port/arm/armv8-curve25519.S +++ b/wolfcrypt/src/port/arm/armv8-curve25519.S @@ -555,6 +555,253 @@ _fe_cmov_table: #endif /* __APPLE__ */ #ifndef __APPLE__ .text +.globl fe_invert_nct +.type fe_invert_nct,@function +.align 2 +fe_invert_nct: +#else +.section __TEXT,__text +.globl _fe_invert_nct +.p2align 2 +_fe_invert_nct: +#endif /* __APPLE__ */ + stp x29, x30, [sp, #-80]! + add x29, sp, #0 + stp x17, x19, [x29, #24] + stp x20, x21, [x29, #40] + stp x22, x23, [x29, #56] + str x24, [x29, #72] + mov x19, #-19 + mov x20, #-1 + mov x21, #0x7fffffffffffffff + ldr x6, [x1] + ldr x7, [x1, #8] + ldr x8, [x1, #16] + ldr x9, [x1, #24] + mov x2, x19 + mov x3, x20 + mov x4, x20 + mov x5, x21 + mov x10, xzr + mov x11, xzr + mov x12, xzr + mov x13, xzr + mov x14, #1 + mov x15, xzr + mov x16, xzr + mov x17, xzr + mov x22, #0xff + cmp x9, #0 + beq L_fe_invert_nct_num_bits_init_v_0 + mov x24, #0x100 + clz x23, x9 + sub x23, x24, x23 + b L_fe_invert_nct_num_bits_init_v_3 +L_fe_invert_nct_num_bits_init_v_0: + cmp x8, #0 + beq L_fe_invert_nct_num_bits_init_v_1 + mov x24, #0xc0 + clz x23, x8 + sub x23, x24, x23 + b L_fe_invert_nct_num_bits_init_v_3 +L_fe_invert_nct_num_bits_init_v_1: + cmp x7, #0 + beq L_fe_invert_nct_num_bits_init_v_2 + mov x24, #0x80 + clz x23, x7 + sub x23, x24, x23 + b L_fe_invert_nct_num_bits_init_v_3 +L_fe_invert_nct_num_bits_init_v_2: + mov x24, #0x40 + clz x23, x6 + sub x23, x24, x23 +L_fe_invert_nct_num_bits_init_v_3: + tst x6, #1 + bne L_fe_invert_nct_loop +L_fe_invert_nct_even_init_v_0: + extr x6, x7, x6, #1 + extr x7, x8, x7, #1 + extr x8, x9, x8, #1 + lsr x9, x9, #1 + sub x23, x23, #1 + ands x24, x14, #1 + beq L_fe_invert_nct_even_init_v_1 + adds x14, x14, x19 + adcs x15, x15, x20 + adcs x16, x16, x20 + adcs x17, x17, x21 + cset x24, cs +L_fe_invert_nct_even_init_v_1: + extr x14, x15, x14, #1 + extr x15, x16, x15, #1 
+ extr x16, x17, x16, #1 + extr x17, x24, x17, #1 + tst x6, #1 + beq L_fe_invert_nct_even_init_v_0 +L_fe_invert_nct_loop: + cmp x22, #1 + beq L_fe_invert_nct_u_done + cmp x23, #1 + beq L_fe_invert_nct_v_done + cmp x22, x23 + bhi L_fe_invert_nct_u_larger + bcc L_fe_invert_nct_v_larger + cmp x5, x9 + bhi L_fe_invert_nct_u_larger + bcc L_fe_invert_nct_v_larger + cmp x4, x8 + bhi L_fe_invert_nct_u_larger + bcc L_fe_invert_nct_v_larger + cmp x3, x7 + bhi L_fe_invert_nct_u_larger + bcc L_fe_invert_nct_v_larger + cmp x2, x6 + bcc L_fe_invert_nct_v_larger +L_fe_invert_nct_u_larger: + subs x2, x2, x6 + sbcs x3, x3, x7 + sbcs x4, x4, x8 + sbc x5, x5, x9 + subs x10, x10, x14 + sbcs x11, x11, x15 + sbcs x12, x12, x16 + sbcs x13, x13, x17 + bcs L_fe_invert_nct_sub_uv + adds x10, x10, x19 + adcs x11, x11, x20 + adcs x12, x12, x20 + adc x13, x13, x21 +L_fe_invert_nct_sub_uv: + cmp x5, #0 + beq L_fe_invert_nct_nct_num_bits_u_0 + mov x24, #0x100 + clz x22, x5 + sub x22, x24, x22 + b L_fe_invert_nct_nct_num_bits_u_3 +L_fe_invert_nct_nct_num_bits_u_0: + cmp x4, #0 + beq L_fe_invert_nct_nct_num_bits_u_1 + mov x24, #0xc0 + clz x22, x4 + sub x22, x24, x22 + b L_fe_invert_nct_nct_num_bits_u_3 +L_fe_invert_nct_nct_num_bits_u_1: + cmp x3, #0 + beq L_fe_invert_nct_nct_num_bits_u_2 + mov x24, #0x80 + clz x22, x3 + sub x22, x24, x22 + b L_fe_invert_nct_nct_num_bits_u_3 +L_fe_invert_nct_nct_num_bits_u_2: + mov x24, #0x40 + clz x22, x2 + sub x22, x24, x22 +L_fe_invert_nct_nct_num_bits_u_3: +L_fe_invert_nct_even_u_0: + extr x2, x3, x2, #1 + extr x3, x4, x3, #1 + extr x4, x5, x4, #1 + lsr x5, x5, #1 + sub x22, x22, #1 + ands x24, x10, #1 + beq L_fe_invert_nct_even_u_1 + adds x10, x10, x19 + adcs x11, x11, x20 + adcs x12, x12, x20 + adcs x13, x13, x21 + cset x24, cs +L_fe_invert_nct_even_u_1: + extr x10, x11, x10, #1 + extr x11, x12, x11, #1 + extr x12, x13, x12, #1 + extr x13, x24, x13, #1 + tst x2, #1 + beq L_fe_invert_nct_even_u_0 + b L_fe_invert_nct_loop +L_fe_invert_nct_v_larger: + subs x6, 
x6, x2 + sbcs x7, x7, x3 + sbcs x8, x8, x4 + sbc x9, x9, x5 + subs x14, x14, x10 + sbcs x15, x15, x11 + sbcs x16, x16, x12 + sbcs x17, x17, x13 + bcs L_fe_invert_nct_sub_vu + adds x14, x14, x19 + adcs x15, x15, x20 + adcs x16, x16, x20 + adc x17, x17, x21 +L_fe_invert_nct_sub_vu: + cmp x9, #0 + beq L_fe_invert_nct_nct_num_bits_v_0 + mov x24, #0x100 + clz x23, x9 + sub x23, x24, x23 + b L_fe_invert_nct_nct_num_bits_v_3 +L_fe_invert_nct_nct_num_bits_v_0: + cmp x8, #0 + beq L_fe_invert_nct_nct_num_bits_v_1 + mov x24, #0xc0 + clz x23, x8 + sub x23, x24, x23 + b L_fe_invert_nct_nct_num_bits_v_3 +L_fe_invert_nct_nct_num_bits_v_1: + cmp x7, #0 + beq L_fe_invert_nct_nct_num_bits_v_2 + mov x24, #0x80 + clz x23, x7 + sub x23, x24, x23 + b L_fe_invert_nct_nct_num_bits_v_3 +L_fe_invert_nct_nct_num_bits_v_2: + mov x24, #0x40 + clz x23, x6 + sub x23, x24, x23 +L_fe_invert_nct_nct_num_bits_v_3: +L_fe_invert_nct_even_v_0: + extr x6, x7, x6, #1 + extr x7, x8, x7, #1 + extr x8, x9, x8, #1 + lsr x9, x9, #1 + sub x23, x23, #1 + ands x24, x14, #1 + beq L_fe_invert_nct_even_v_1 + adds x14, x14, x19 + adcs x15, x15, x20 + adcs x16, x16, x20 + adcs x17, x17, x21 + cset x24, cs +L_fe_invert_nct_even_v_1: + extr x14, x15, x14, #1 + extr x15, x16, x15, #1 + extr x16, x17, x16, #1 + extr x17, x24, x17, #1 + tst x6, #1 + beq L_fe_invert_nct_even_v_0 + b L_fe_invert_nct_loop +L_fe_invert_nct_u_done: + str x10, [x0] + str x11, [x0, #8] + str x12, [x0, #16] + str x13, [x0, #24] + b L_fe_invert_nct_done +L_fe_invert_nct_v_done: + str x14, [x0] + str x15, [x0, #8] + str x16, [x0, #16] + str x17, [x0, #24] +L_fe_invert_nct_done: + ldp x17, x19, [x29, #24] + ldp x20, x21, [x29, #40] + ldp x22, x23, [x29, #56] + ldr x24, [x29, #72] + ldp x29, x30, [sp], #0x50 + ret +#ifndef __APPLE__ + .size fe_invert_nct,.-fe_invert_nct +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text .globl fe_mul .type fe_mul,@function .align 2 @@ -1733,7 +1980,7 @@ _curve25519_base: add x2, x2, :lo12:L_curve25519_base_x2 #else 
adrp x2, L_curve25519_base_x2@PAGE - add x2, x2, :lo12:L_curve25519_base_x2@PAGEOFF + add x2, x2, L_curve25519_base_x2@PAGEOFF #endif /* __APPLE__ */ ldp x6, x7, [x2] ldp x8, x9, [x2, #16] diff --git a/wolfcrypt/src/port/arm/armv8-curve25519_c.c b/wolfcrypt/src/port/arm/armv8-curve25519_c.c index c91e32e1a..2ffeb85e4 100644 --- a/wolfcrypt/src/port/arm/armv8-curve25519_c.c +++ b/wolfcrypt/src/port/arm/armv8-curve25519_c.c @@ -471,6 +471,263 @@ void fe_cmov_table(fe* r, fe* base, signed char b) ); } +void fe_invert_nct(word64* r, const word64* a) +{ + __asm__ __volatile__ ( + "mov x19, #-19\n\t" + "mov x20, #-1\n\t" + "mov x21, #0x7fffffffffffffff\n\t" + "ldr x6, [%x[a]]\n\t" + "ldr x7, [%x[a], #8]\n\t" + "ldr x8, [%x[a], #16]\n\t" + "ldr x9, [%x[a], #24]\n\t" + "mov x2, x19\n\t" + "mov x3, x20\n\t" + "mov x4, x20\n\t" + "mov x5, x21\n\t" + "mov x10, xzr\n\t" + "mov x11, xzr\n\t" + "mov x12, xzr\n\t" + "mov x13, xzr\n\t" + "mov x14, #1\n\t" + "mov x15, xzr\n\t" + "mov x16, xzr\n\t" + "mov x17, xzr\n\t" + "mov x22, #0xff\n\t" + "cmp x9, #0\n\t" + "b.eq L_fe_invert_nct_num_bits_init_v_0_%=\n\t" + "mov x24, #0x100\n\t" + "clz x23, x9\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_num_bits_init_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_num_bits_init_v_0_%=: \n\t" + "cmp x8, #0\n\t" + "b.eq L_fe_invert_nct_num_bits_init_v_1_%=\n\t" + "mov x24, #0xc0\n\t" + "clz x23, x8\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_num_bits_init_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_num_bits_init_v_1_%=: \n\t" + "cmp x7, #0\n\t" + "b.eq L_fe_invert_nct_num_bits_init_v_2_%=\n\t" + "mov x24, #0x80\n\t" + "clz x23, x7\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_num_bits_init_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_num_bits_init_v_2_%=: \n\t" + "mov x24, #0x40\n\t" + "clz x23, x6\n\t" + "sub x23, x24, x23\n\t" + "\n" + "L_fe_invert_nct_num_bits_init_v_3_%=: \n\t" + "tst x6, #1\n\t" + "b.ne L_fe_invert_nct_loop_%=\n\t" + "\n" + "L_fe_invert_nct_even_init_v_0_%=: \n\t" + "extr x6, 
x7, x6, #1\n\t" + "extr x7, x8, x7, #1\n\t" + "extr x8, x9, x8, #1\n\t" + "lsr x9, x9, #1\n\t" + "sub x23, x23, #1\n\t" + "ands x24, x14, #1\n\t" + "b.eq L_fe_invert_nct_even_init_v_1_%=\n\t" + "adds x14, x14, x19\n\t" + "adcs x15, x15, x20\n\t" + "adcs x16, x16, x20\n\t" + "adcs x17, x17, x21\n\t" + "cset x24, cs\n\t" + "\n" + "L_fe_invert_nct_even_init_v_1_%=: \n\t" + "extr x14, x15, x14, #1\n\t" + "extr x15, x16, x15, #1\n\t" + "extr x16, x17, x16, #1\n\t" + "extr x17, x24, x17, #1\n\t" + "tst x6, #1\n\t" + "b.eq L_fe_invert_nct_even_init_v_0_%=\n\t" + "\n" + "L_fe_invert_nct_loop_%=: \n\t" + "cmp x22, #1\n\t" + "b.eq L_fe_invert_nct_u_done_%=\n\t" + "cmp x23, #1\n\t" + "b.eq L_fe_invert_nct_v_done_%=\n\t" + "cmp x22, x23\n\t" + "bhi L_fe_invert_nct_u_larger_%=\n\t" + "bcc L_fe_invert_nct_v_larger_%=\n\t" + "cmp x5, x9\n\t" + "bhi L_fe_invert_nct_u_larger_%=\n\t" + "bcc L_fe_invert_nct_v_larger_%=\n\t" + "cmp x4, x8\n\t" + "bhi L_fe_invert_nct_u_larger_%=\n\t" + "bcc L_fe_invert_nct_v_larger_%=\n\t" + "cmp x3, x7\n\t" + "bhi L_fe_invert_nct_u_larger_%=\n\t" + "bcc L_fe_invert_nct_v_larger_%=\n\t" + "cmp x2, x6\n\t" + "bcc L_fe_invert_nct_v_larger_%=\n\t" + "\n" + "L_fe_invert_nct_u_larger_%=: \n\t" + "subs x2, x2, x6\n\t" + "sbcs x3, x3, x7\n\t" + "sbcs x4, x4, x8\n\t" + "sbc x5, x5, x9\n\t" + "subs x10, x10, x14\n\t" + "sbcs x11, x11, x15\n\t" + "sbcs x12, x12, x16\n\t" + "sbcs x13, x13, x17\n\t" + "bcs L_fe_invert_nct_sub_uv_%=\n\t" + "adds x10, x10, x19\n\t" + "adcs x11, x11, x20\n\t" + "adcs x12, x12, x20\n\t" + "adc x13, x13, x21\n\t" + "\n" + "L_fe_invert_nct_sub_uv_%=: \n\t" + "cmp x5, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_u_0_%=\n\t" + "mov x24, #0x100\n\t" + "clz x22, x5\n\t" + "sub x22, x24, x22\n\t" + "b L_fe_invert_nct_nct_num_bits_u_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_u_0_%=: \n\t" + "cmp x4, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_u_1_%=\n\t" + "mov x24, #0xc0\n\t" + "clz x22, x4\n\t" + "sub x22, x24, x22\n\t" + "b 
L_fe_invert_nct_nct_num_bits_u_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_u_1_%=: \n\t" + "cmp x3, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_u_2_%=\n\t" + "mov x24, #0x80\n\t" + "clz x22, x3\n\t" + "sub x22, x24, x22\n\t" + "b L_fe_invert_nct_nct_num_bits_u_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_u_2_%=: \n\t" + "mov x24, #0x40\n\t" + "clz x22, x2\n\t" + "sub x22, x24, x22\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_u_3_%=: \n\t" + "\n" + "L_fe_invert_nct_even_u_0_%=: \n\t" + "extr x2, x3, x2, #1\n\t" + "extr x3, x4, x3, #1\n\t" + "extr x4, x5, x4, #1\n\t" + "lsr x5, x5, #1\n\t" + "sub x22, x22, #1\n\t" + "ands x24, x10, #1\n\t" + "b.eq L_fe_invert_nct_even_u_1_%=\n\t" + "adds x10, x10, x19\n\t" + "adcs x11, x11, x20\n\t" + "adcs x12, x12, x20\n\t" + "adcs x13, x13, x21\n\t" + "cset x24, cs\n\t" + "\n" + "L_fe_invert_nct_even_u_1_%=: \n\t" + "extr x10, x11, x10, #1\n\t" + "extr x11, x12, x11, #1\n\t" + "extr x12, x13, x12, #1\n\t" + "extr x13, x24, x13, #1\n\t" + "tst x2, #1\n\t" + "b.eq L_fe_invert_nct_even_u_0_%=\n\t" + "b L_fe_invert_nct_loop_%=\n\t" + "\n" + "L_fe_invert_nct_v_larger_%=: \n\t" + "subs x6, x6, x2\n\t" + "sbcs x7, x7, x3\n\t" + "sbcs x8, x8, x4\n\t" + "sbc x9, x9, x5\n\t" + "subs x14, x14, x10\n\t" + "sbcs x15, x15, x11\n\t" + "sbcs x16, x16, x12\n\t" + "sbcs x17, x17, x13\n\t" + "bcs L_fe_invert_nct_sub_vu_%=\n\t" + "adds x14, x14, x19\n\t" + "adcs x15, x15, x20\n\t" + "adcs x16, x16, x20\n\t" + "adc x17, x17, x21\n\t" + "\n" + "L_fe_invert_nct_sub_vu_%=: \n\t" + "cmp x9, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_v_0_%=\n\t" + "mov x24, #0x100\n\t" + "clz x23, x9\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_nct_num_bits_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_v_0_%=: \n\t" + "cmp x8, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_v_1_%=\n\t" + "mov x24, #0xc0\n\t" + "clz x23, x8\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_nct_num_bits_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_v_1_%=: \n\t" + 
"cmp x7, #0\n\t" + "b.eq L_fe_invert_nct_nct_num_bits_v_2_%=\n\t" + "mov x24, #0x80\n\t" + "clz x23, x7\n\t" + "sub x23, x24, x23\n\t" + "b L_fe_invert_nct_nct_num_bits_v_3_%=\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_v_2_%=: \n\t" + "mov x24, #0x40\n\t" + "clz x23, x6\n\t" + "sub x23, x24, x23\n\t" + "\n" + "L_fe_invert_nct_nct_num_bits_v_3_%=: \n\t" + "\n" + "L_fe_invert_nct_even_v_0_%=: \n\t" + "extr x6, x7, x6, #1\n\t" + "extr x7, x8, x7, #1\n\t" + "extr x8, x9, x8, #1\n\t" + "lsr x9, x9, #1\n\t" + "sub x23, x23, #1\n\t" + "ands x24, x14, #1\n\t" + "b.eq L_fe_invert_nct_even_v_1_%=\n\t" + "adds x14, x14, x19\n\t" + "adcs x15, x15, x20\n\t" + "adcs x16, x16, x20\n\t" + "adcs x17, x17, x21\n\t" + "cset x24, cs\n\t" + "\n" + "L_fe_invert_nct_even_v_1_%=: \n\t" + "extr x14, x15, x14, #1\n\t" + "extr x15, x16, x15, #1\n\t" + "extr x16, x17, x16, #1\n\t" + "extr x17, x24, x17, #1\n\t" + "tst x6, #1\n\t" + "b.eq L_fe_invert_nct_even_v_0_%=\n\t" + "b L_fe_invert_nct_loop_%=\n\t" + "\n" + "L_fe_invert_nct_u_done_%=: \n\t" + "str x10, [%x[r]]\n\t" + "str x11, [%x[r], #8]\n\t" + "str x12, [%x[r], #16]\n\t" + "str x13, [%x[r], #24]\n\t" + "b L_fe_invert_nct_done_%=\n\t" + "\n" + "L_fe_invert_nct_v_done_%=: \n\t" + "str x14, [%x[r]]\n\t" + "str x15, [%x[r], #8]\n\t" + "str x16, [%x[r], #16]\n\t" + "str x17, [%x[r], #24]\n\t" + "\n" + "L_fe_invert_nct_done_%=: \n\t" + : [r] "+r" (r) + : [a] "r" (a) + : "memory", "cc", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", + "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x19", "x20", + "x21", "x22", "x23", "x24" + ); +} + void fe_mul(fe r, const fe a, const fe b) { __asm__ __volatile__ ( diff --git a/wolfcrypt/src/port/arm/armv8-mlkem-asm.S b/wolfcrypt/src/port/arm/armv8-mlkem-asm.S index a45475c9f..1ded4afde 100644 --- a/wolfcrypt/src/port/arm/armv8-mlkem-asm.S +++ b/wolfcrypt/src/port/arm/armv8-mlkem-asm.S @@ -168,21 +168,21 @@ _mlkem_ntt: add x2, x2, :lo12:L_mlkem_aarch64_zetas #else adrp x2, 
L_mlkem_aarch64_zetas@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_zetas@PAGEOFF + add x2, x2, L_mlkem_aarch64_zetas@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_aarch64_zetas_qinv add x3, x3, :lo12:L_mlkem_aarch64_zetas_qinv #else adrp x3, L_mlkem_aarch64_zetas_qinv@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_qinv@PAGEOFF + add x3, x3, L_mlkem_aarch64_zetas_qinv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ add x1, x0, #0x100 ldr q4, [x4] @@ -1562,21 +1562,21 @@ _mlkem_invntt: add x2, x2, :lo12:L_mlkem_aarch64_zetas_inv #else adrp x2, L_mlkem_aarch64_zetas_inv@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_zetas_inv@PAGEOFF + add x2, x2, L_mlkem_aarch64_zetas_inv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_aarch64_zetas_inv_qinv add x3, x3, :lo12:L_mlkem_aarch64_zetas_inv_qinv #else adrp x3, L_mlkem_aarch64_zetas_inv_qinv@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_inv_qinv@PAGEOFF + add x3, x3, L_mlkem_aarch64_zetas_inv_qinv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ add x1, x0, #0x100 ldr q8, [x4] @@ -3013,21 +3013,21 @@ _mlkem_ntt_sqrdmlsh: add x2, x2, :lo12:L_mlkem_aarch64_zetas #else adrp x2, L_mlkem_aarch64_zetas@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_zetas@PAGEOFF + add x2, x2, L_mlkem_aarch64_zetas@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_aarch64_zetas_qinv add x3, x3, :lo12:L_mlkem_aarch64_zetas_qinv #else adrp x3, L_mlkem_aarch64_zetas_qinv@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_qinv@PAGEOFF + add x3, x3, 
L_mlkem_aarch64_zetas_qinv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ add x1, x0, #0x100 ldr q4, [x4] @@ -4195,21 +4195,21 @@ _mlkem_invntt_sqrdmlsh: add x2, x2, :lo12:L_mlkem_aarch64_zetas_inv #else adrp x2, L_mlkem_aarch64_zetas_inv@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_zetas_inv@PAGEOFF + add x2, x2, L_mlkem_aarch64_zetas_inv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_aarch64_zetas_inv_qinv add x3, x3, :lo12:L_mlkem_aarch64_zetas_inv_qinv #else adrp x3, L_mlkem_aarch64_zetas_inv_qinv@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_inv_qinv@PAGEOFF + add x3, x3, L_mlkem_aarch64_zetas_inv_qinv@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ add x1, x0, #0x100 ldr q8, [x4] @@ -5532,14 +5532,14 @@ _mlkem_basemul_mont: add x3, x3, :lo12:L_mlkem_aarch64_zetas_mul #else adrp x3, L_mlkem_aarch64_zetas_mul@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_mul@PAGEOFF + add x3, x3, L_mlkem_aarch64_zetas_mul@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q1, [x4] ldp q2, q3, [x1] @@ -6230,14 +6230,14 @@ _mlkem_basemul_mont_add: add x3, x3, :lo12:L_mlkem_aarch64_zetas_mul #else adrp x3, L_mlkem_aarch64_zetas_mul@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_zetas_mul@PAGEOFF + add x3, x3, L_mlkem_aarch64_zetas_mul@PAGEOFF #endif /* __APPLE__ */ #ifndef 
__APPLE__ adrp x4, L_mlkem_aarch64_consts add x4, x4, :lo12:L_mlkem_aarch64_consts #else adrp x4, L_mlkem_aarch64_consts@PAGE - add x4, x4, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x4, x4, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q1, [x4] ldp q2, q3, [x1] @@ -6991,7 +6991,7 @@ _mlkem_csubq_neon: add x1, x1, :lo12:L_mlkem_aarch64_q #else adrp x1, L_mlkem_aarch64_q@PAGE - add x1, x1, :lo12:L_mlkem_aarch64_q@PAGEOFF + add x1, x1, L_mlkem_aarch64_q@PAGEOFF #endif /* __APPLE__ */ ldr q20, [x1] ld4 {v0.8h, v1.8h, v2.8h, v3.8h}, [x0], #0x40 @@ -7172,7 +7172,7 @@ _mlkem_add_reduce: add x2, x2, :lo12:L_mlkem_aarch64_consts #else adrp x2, L_mlkem_aarch64_consts@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x2, x2, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q0, [x2] ld4 {v1.8h, v2.8h, v3.8h, v4.8h}, [x0], #0x40 @@ -7363,7 +7363,7 @@ _mlkem_add3_reduce: add x3, x3, :lo12:L_mlkem_aarch64_consts #else adrp x3, L_mlkem_aarch64_consts@PAGE - add x3, x3, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x3, x3, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q0, [x3] ld4 {v1.8h, v2.8h, v3.8h, v4.8h}, [x0], #0x40 @@ -7594,7 +7594,7 @@ _mlkem_rsub_reduce: add x2, x2, :lo12:L_mlkem_aarch64_consts #else adrp x2, L_mlkem_aarch64_consts@PAGE - add x2, x2, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x2, x2, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q0, [x2] ld4 {v1.8h, v2.8h, v3.8h, v4.8h}, [x0], #0x40 @@ -7785,7 +7785,7 @@ _mlkem_to_mont: add x1, x1, :lo12:L_mlkem_aarch64_consts #else adrp x1, L_mlkem_aarch64_consts@PAGE - add x1, x1, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x1, x1, L_mlkem_aarch64_consts@PAGEOFF #endif /* __APPLE__ */ ldr q0, [x1] ld4 {v1.8h, v2.8h, v3.8h, v4.8h}, [x0], #0x40 @@ -7999,7 +7999,7 @@ _mlkem_to_mont_sqrdmlsh: add x1, x1, :lo12:L_mlkem_aarch64_consts #else adrp x1, L_mlkem_aarch64_consts@PAGE - add x1, x1, :lo12:L_mlkem_aarch64_consts@PAGEOFF + add x1, x1, L_mlkem_aarch64_consts@PAGEOFF 
#endif /* __APPLE__ */ ldr q0, [x1] ld4 {v1.8h, v2.8h, v3.8h, v4.8h}, [x0], #0x40 @@ -8226,21 +8226,21 @@ _mlkem_to_msg_neon: add x2, x2, :lo12:L_mlkem_to_msg_low #else adrp x2, L_mlkem_to_msg_low@PAGE - add x2, x2, :lo12:L_mlkem_to_msg_low@PAGEOFF + add x2, x2, L_mlkem_to_msg_low@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_to_msg_high add x3, x3, :lo12:L_mlkem_to_msg_high #else adrp x3, L_mlkem_to_msg_high@PAGE - add x3, x3, :lo12:L_mlkem_to_msg_high@PAGEOFF + add x3, x3, L_mlkem_to_msg_high@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x4, L_mlkem_to_msg_bits add x4, x4, :lo12:L_mlkem_to_msg_bits #else adrp x4, L_mlkem_to_msg_bits@PAGE - add x4, x4, :lo12:L_mlkem_to_msg_bits@PAGEOFF + add x4, x4, L_mlkem_to_msg_bits@PAGEOFF #endif /* __APPLE__ */ ldr q0, [x2] ldr q1, [x3] @@ -8506,14 +8506,14 @@ _mlkem_from_msg_neon: add x2, x2, :lo12:L_mlkem_from_msg_q1half #else adrp x2, L_mlkem_from_msg_q1half@PAGE - add x2, x2, :lo12:L_mlkem_from_msg_q1half@PAGEOFF + add x2, x2, L_mlkem_from_msg_q1half@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x3, L_mlkem_from_msg_bits add x3, x3, :lo12:L_mlkem_from_msg_bits #else adrp x3, L_mlkem_from_msg_bits@PAGE - add x3, x3, :lo12:L_mlkem_from_msg_bits@PAGEOFF + add x3, x3, L_mlkem_from_msg_bits@PAGEOFF #endif /* __APPLE__ */ ld1 {v2.16b, v3.16b}, [x1] ldr q1, [x2] @@ -9517,28 +9517,28 @@ _mlkem_rej_uniform_neon: add x4, x4, :lo12:L_mlkem_rej_uniform_mask #else adrp x4, L_mlkem_rej_uniform_mask@PAGE - add x4, x4, :lo12:L_mlkem_rej_uniform_mask@PAGEOFF + add x4, x4, L_mlkem_rej_uniform_mask@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x5, L_mlkem_aarch64_q add x5, x5, :lo12:L_mlkem_aarch64_q #else adrp x5, L_mlkem_aarch64_q@PAGE - add x5, x5, :lo12:L_mlkem_aarch64_q@PAGEOFF + add x5, x5, L_mlkem_aarch64_q@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x6, L_mlkem_rej_uniform_bits add x6, x6, :lo12:L_mlkem_rej_uniform_bits #else adrp x6, L_mlkem_rej_uniform_bits@PAGE - add x6, x6, 
:lo12:L_mlkem_rej_uniform_bits@PAGEOFF + add x6, x6, L_mlkem_rej_uniform_bits@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x7, L_mlkem_rej_uniform_indices add x7, x7, :lo12:L_mlkem_rej_uniform_indices #else adrp x7, L_mlkem_rej_uniform_indices@PAGE - add x7, x7, :lo12:L_mlkem_rej_uniform_indices@PAGEOFF + add x7, x7, L_mlkem_rej_uniform_indices@PAGEOFF #endif /* __APPLE__ */ eor v1.16b, v1.16b, v1.16b eor v12.16b, v12.16b, v12.16b @@ -9754,7 +9754,7 @@ _mlkem_sha3_blocksx3_neon: add x27, x27, :lo12:L_sha3_aarch64_r #else adrp x27, L_sha3_aarch64_r@PAGE - add x27, x27, :lo12:L_sha3_aarch64_r@PAGEOFF + add x27, x27, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] ld4 {v0.d, v1.d, v2.d, v3.d}[0], [x0], #32 @@ -10079,7 +10079,7 @@ _mlkem_shake128_blocksx3_seed_neon: add x28, x28, :lo12:L_sha3_aarch64_r #else adrp x28, L_sha3_aarch64_r@PAGE - add x28, x28, :lo12:L_sha3_aarch64_r@PAGEOFF + add x28, x28, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] add x0, x0, #32 @@ -10426,7 +10426,7 @@ _mlkem_shake256_blocksx3_seed_neon: add x28, x28, :lo12:L_sha3_aarch64_r #else adrp x28, L_sha3_aarch64_r@PAGE - add x28, x28, :lo12:L_sha3_aarch64_r@PAGEOFF + add x28, x28, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] add x0, x0, #32 @@ -10774,7 +10774,7 @@ _mlkem_sha3_blocksx3_neon: add x27, x27, :lo12:L_sha3_aarch64_r #else adrp x27, L_sha3_aarch64_r@PAGE - add x27, x27, :lo12:L_sha3_aarch64_r@PAGEOFF + add x27, x27, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] ld4 {v0.d, v1.d, v2.d, v3.d}[0], [x0], #32 @@ -11184,7 +11184,7 @@ _mlkem_shake128_blocksx3_seed_neon: add x28, x28, :lo12:L_sha3_aarch64_r #else adrp x28, L_sha3_aarch64_r@PAGE - add x28, x28, :lo12:L_sha3_aarch64_r@PAGEOFF + add x28, x28, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] add x0, x0, #32 @@ -11616,7 +11616,7 @@ _mlkem_shake256_blocksx3_seed_neon: add x28, x28, :lo12:L_sha3_aarch64_r #else adrp x28, 
L_sha3_aarch64_r@PAGE - add x28, x28, :lo12:L_sha3_aarch64_r@PAGEOFF + add x28, x28, L_sha3_aarch64_r@PAGEOFF #endif /* __APPLE__ */ str x0, [x29, #40] add x0, x0, #32 diff --git a/wolfcrypt/src/port/arm/armv8-poly1305-asm.S b/wolfcrypt/src/port/arm/armv8-poly1305-asm.S index 880d041e3..dd50ac3e8 100644 --- a/wolfcrypt/src/port/arm/armv8-poly1305-asm.S +++ b/wolfcrypt/src/port/arm/armv8-poly1305-asm.S @@ -474,7 +474,7 @@ _poly1305_set_key: add x2, x2, :lo12:L_poly1305_set_key_arm64_clamp #else adrp x2, L_poly1305_set_key_arm64_clamp@PAGE - add x2, x2, :lo12:L_poly1305_set_key_arm64_clamp@PAGEOFF + add x2, x2, L_poly1305_set_key_arm64_clamp@PAGEOFF #endif /* __APPLE__ */ # Load key and pad. ldp x11, x12, [x1] diff --git a/wolfcrypt/src/port/arm/armv8-sha256-asm.S b/wolfcrypt/src/port/arm/armv8-sha256-asm.S index 7ed0fa286..fb0795365 100644 --- a/wolfcrypt/src/port/arm/armv8-sha256-asm.S +++ b/wolfcrypt/src/port/arm/armv8-sha256-asm.S @@ -133,7 +133,7 @@ _Transform_Sha256_Len_neon: add x3, x3, :lo12:L_SHA256_transform_neon_len_k #else adrp x3, L_SHA256_transform_neon_len_k@PAGE - add x3, x3, :lo12:L_SHA256_transform_neon_len_k@PAGEOFF + add x3, x3, L_SHA256_transform_neon_len_k@PAGEOFF #endif /* __APPLE__ */ # Load digest into working vars ldr w4, [x0] @@ -1200,7 +1200,7 @@ _Transform_Sha256_Len_crypto: add x3, x3, :lo12:L_SHA256_trans_crypto_len_k #else adrp x3, L_SHA256_trans_crypto_len_k@PAGE - add x3, x3, :lo12:L_SHA256_trans_crypto_len_k@PAGEOFF + add x3, x3, L_SHA256_trans_crypto_len_k@PAGEOFF #endif /* __APPLE__ */ # Load K into vector registers ld1 {v8.4s, v9.4s, v10.4s, v11.4s}, [x3], #0x40 diff --git a/wolfcrypt/src/port/arm/armv8-sha3-asm.S b/wolfcrypt/src/port/arm/armv8-sha3-asm.S index 5414ba713..e45626386 100644 --- a/wolfcrypt/src/port/arm/armv8-sha3-asm.S +++ b/wolfcrypt/src/port/arm/armv8-sha3-asm.S @@ -92,7 +92,7 @@ _BlockSha3_crypto: add x1, x1, :lo12:L_SHA3_transform_crypto_r #else adrp x1, L_SHA3_transform_crypto_r@PAGE - add x1, x1, 
:lo12:L_SHA3_transform_crypto_r@PAGEOFF + add x1, x1, L_SHA3_transform_crypto_r@PAGEOFF #endif /* __APPLE__ */ #ifdef __APPLE__ .arch_extension sha3 @@ -268,7 +268,7 @@ _BlockSha3_base: add x27, x27, :lo12:L_SHA3_transform_base_r #else adrp x27, L_SHA3_transform_base_r@PAGE - add x27, x27, :lo12:L_SHA3_transform_base_r@PAGEOFF + add x27, x27, L_SHA3_transform_base_r@PAGEOFF #endif /* __APPLE__ */ ldp x1, x2, [x0] ldp x3, x4, [x0, #16] diff --git a/wolfcrypt/src/port/arm/armv8-sha512-asm.S b/wolfcrypt/src/port/arm/armv8-sha512-asm.S index fde25d54e..9da9a236a 100644 --- a/wolfcrypt/src/port/arm/armv8-sha512-asm.S +++ b/wolfcrypt/src/port/arm/armv8-sha512-asm.S @@ -165,14 +165,14 @@ _Transform_Sha512_Len_neon: add x3, x3, :lo12:L_SHA512_transform_neon_len_k #else adrp x3, L_SHA512_transform_neon_len_k@PAGE - add x3, x3, :lo12:L_SHA512_transform_neon_len_k@PAGEOFF + add x3, x3, L_SHA512_transform_neon_len_k@PAGEOFF #endif /* __APPLE__ */ #ifndef __APPLE__ adrp x27, L_SHA512_transform_neon_len_r8 add x27, x27, :lo12:L_SHA512_transform_neon_len_r8 #else adrp x27, L_SHA512_transform_neon_len_r8@PAGE - add x27, x27, :lo12:L_SHA512_transform_neon_len_r8@PAGEOFF + add x27, x27, L_SHA512_transform_neon_len_r8@PAGEOFF #endif /* __APPLE__ */ ld1 {v11.16b}, [x27] # Load digest into working vars @@ -1202,7 +1202,7 @@ _Transform_Sha512_Len_crypto: add x4, x4, :lo12:L_SHA512_trans_crypto_len_k #else adrp x4, L_SHA512_trans_crypto_len_k@PAGE - add x4, x4, :lo12:L_SHA512_trans_crypto_len_k@PAGEOFF + add x4, x4, L_SHA512_trans_crypto_len_k@PAGEOFF #endif /* __APPLE__ */ #ifdef __APPLE__ .arch_extension sha3 diff --git a/wolfcrypt/src/port/arm/thumb2-aes-asm.S b/wolfcrypt/src/port/arm/thumb2-aes-asm.S index f2ef6be73..fb30f004b 100644 --- a/wolfcrypt/src/port/arm/thumb2-aes-asm.S +++ b/wolfcrypt/src/port/arm/thumb2-aes-asm.S @@ -922,6 +922,7 @@ L_AES_set_encrypt_key_end: POP {r4, r5, r6, r7, r8, r9, r10, pc} /* Cycle Count = 340 */ .size AES_set_encrypt_key,.-AES_set_encrypt_key 
+#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE .text .align 4 .globl AES_encrypt_block @@ -1138,6 +1139,7 @@ L_AES_encrypt_block_nr: POP {pc} /* Cycle Count = 285 */ .size AES_encrypt_block,.-AES_encrypt_block +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ #if defined(HAVE_AES_CBC) || defined(HAVE_AESCCM) || defined(HAVE_AESGCM) || \ defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) .text @@ -1190,7 +1192,217 @@ L_AES_ECB_encrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_ECB_encrypt_block_nr_256: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, 
LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_ECB_encrypt_block_nr_256 +#else + BNE.W L_AES_ECB_encrypt_block_nr_256 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + 
UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1232,7 +1444,217 @@ L_AES_ECB_encrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else 
+L_AES_ECB_encrypt_block_nr_192: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, 
LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_ECB_encrypt_block_nr_192 +#else + BNE.W L_AES_ECB_encrypt_block_nr_192 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, 
#24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1274,7 +1696,217 @@ L_AES_ECB_encrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_ECB_encrypt_block_nr_128: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, 
lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) 
+ BNE L_AES_ECB_encrypt_block_nr_128 +#else + BNE.W L_AES_ECB_encrypt_block_nr_128 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + 
LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1296,7 +1928,7 @@ L_AES_ECB_encrypt_loop_block_128: L_AES_ECB_encrypt_end: POP {r3} POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 212 */ + /* Cycle Count = 1055 */ .size AES_ECB_encrypt,.-AES_ECB_encrypt #endif /* HAVE_AESCCM || HAVE_AESGCM || WOLFSSL_AES_DIRECT || * WOLFSSL_AES_COUNTER || HAVE_AES_ECB */ @@ -1346,7 +1978,217 @@ L_AES_CBC_encrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_CBC_encrypt_block_nr_256: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, 
#16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_encrypt_block_nr_256 +#else + BNE.W L_AES_CBC_encrypt_block_nr_256 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] 
+ LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, 
r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1392,7 +2234,217 @@ L_AES_CBC_encrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_CBC_encrypt_block_nr_192: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + 
LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_encrypt_block_nr_192 +#else + BNE.W L_AES_CBC_encrypt_block_nr_192 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, 
r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1438,7 +2490,217 @@ L_AES_CBC_encrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else 
+L_AES_CBC_encrypt_block_nr_128: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, 
LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_encrypt_block_nr_128 +#else + BNE.W L_AES_CBC_encrypt_block_nr_128 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, 
#24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1461,10 +2723,16 @@ L_AES_CBC_encrypt_end: POP {r3, r9} STM r9, {r4, r5, r6, r7} POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 238 */ + /* Cycle Count = 1081 */ .size AES_CBC_encrypt,.-AES_CBC_encrypt #endif /* HAVE_AES_CBC */ #ifdef WOLFSSL_AES_COUNTER + .text + .type L_AES_Thumb2_te_ctr, %object + .size L_AES_Thumb2_te_ctr, 12 + .align 4 +L_AES_Thumb2_te_ctr: + .word L_AES_Thumb2_te_data .text .align 4 .globl AES_CTR_encrypt @@ -1474,7 +2742,7 @@ AES_CTR_encrypt: LDR r12, [sp, #36] LDR r8, [sp, #40] MOV lr, r0 - LDR r0, L_AES_Thumb2_te_ecb + LDR r0, L_AES_Thumb2_te_ctr LDM r8, {r4, r5, r6, r7} REV r4, r4 REV r5, r5 @@ -1509,7 +2777,217 @@ L_AES_CTR_encrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef 
WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_256: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, 
[r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CTR_encrypt_block_nr_256 +#else + BNE.W L_AES_CTR_encrypt_block_nr_256 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 
+ UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1559,7 +3037,217 @@ L_AES_CTR_encrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_192: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 
+ EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if 
defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CTR_encrypt_block_nr_192 +#else + BNE.W L_AES_CTR_encrypt_block_nr_192 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR 
r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1609,7 +3297,217 @@ L_AES_CTR_encrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_CTR_encrypt_block_nr_128: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, 
r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CTR_encrypt_block_nr_128 +#else + BNE.W L_AES_CTR_encrypt_block_nr_128 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR 
lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + 
EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -1646,12 +3544,13 @@ L_AES_CTR_encrypt_end: REV r7, r7 STM r8, {r4, r5, r6, r7} POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 293 */ + /* Cycle Count = 1136 */ .size AES_CTR_encrypt,.-AES_CTR_encrypt #endif /* WOLFSSL_AES_COUNTER */ #ifdef HAVE_AES_DECRYPT #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_CBC) || defined(HAVE_AES_ECB) +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE .text .align 4 .globl AES_decrypt_block @@ -1868,6 +3767,7 @@ L_AES_decrypt_block_nr: POP {pc} /* Cycle Count = 285 */ .size AES_decrypt_block,.-AES_decrypt_block +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ .text .type L_AES_Thumb2_td_ecb, %object .size L_AES_Thumb2_td_ecb, 12 @@ -2176,7 +4076,217 @@ L_AES_ECB_decrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_256: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, 
[r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_ECB_decrypt_block_nr_256 +#else + BNE.W L_AES_ECB_decrypt_block_nr_256 +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, 
r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + 
EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r3, r12, lr} REV r4, r4 REV r5, r5 @@ -2217,7 +4327,217 @@ L_AES_ECB_decrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_192: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, 
r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_ECB_decrypt_block_nr_192 +#else + BNE.W L_AES_ECB_decrypt_block_nr_192 +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, 
ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r3, r12, lr} REV r4, r4 REV r5, r5 @@ -2258,7 +4578,217 @@ L_AES_ECB_decrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_ECB_decrypt_block_nr_128: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + 
EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL 
#2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_ECB_decrypt_block_nr_128 +#else + BNE.W L_AES_ECB_decrypt_block_nr_128 +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, 
#8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r3, r12, lr} REV r4, r4 REV r5, r5 @@ -2278,7 +4808,7 @@ L_AES_ECB_decrypt_loop_block_128: #endif L_AES_ECB_decrypt_end: POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 210 */ + /* Cycle Count = 1053 */ .size AES_ECB_decrypt,.-AES_ECB_decrypt #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER || defined(HAVE_AES_ECB) */ #ifdef HAVE_AES_CBC @@ -2327,7 +4857,217 @@ L_AES_CBC_decrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_256_odd: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, 
r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || 
defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_256_odd +#else + BNE.W L_AES_CBC_decrypt_block_nr_256_odd +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, 
[r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2371,7 +5111,217 @@ L_AES_CBC_decrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_256_even: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key 
Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_256_even +#else + BNE.W L_AES_CBC_decrypt_block_nr_256_even +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + 
UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2422,7 
+5372,217 @@ L_AES_CBC_decrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_192_odd: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR 
r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_192_odd +#else + BNE.W L_AES_CBC_decrypt_block_nr_192_odd +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, 
lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2466,7 +5626,217 @@ L_AES_CBC_decrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_192_even: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, 
r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + 
EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_192_even +#else + BNE.W L_AES_CBC_decrypt_block_nr_192_even +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR 
r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2517,7 +5887,217 @@ L_AES_CBC_decrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_128_odd: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + 
LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_128_odd +#else + BNE.W L_AES_CBC_decrypt_block_nr_128_odd +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, 
LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 
+#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2561,7 +6141,217 @@ L_AES_CBC_decrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_decrypt_block +#else +L_AES_CBC_decrypt_block_nr_128_even: + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #16, #8 + LSR r7, r8, #24 + UBFX r12, r10, #8, #8 + UBFX lr, r9, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r5, r8, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, r12, ROR #8 + UBFX r12, r11, #8, #8 + EOR r4, r4, lr, ROR #16 + UBFX lr, r10, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + 
LDR lr, [r0, lr, LSL #2] + UBFX r6, r9, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, r12, ROR #8 + UBFX r12, r8, #8, #8 + EOR r5, r5, lr, ROR #16 + UBFX lr, r11, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r8, r8, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r10, #16, #8 + EOR r6, r6, r12, ROR #8 + LSR r12, r11, #24 + EOR r6, r6, lr, ROR #16 + UBFX lr, r9, #8, #8 + LDR r8, [r0, r8, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r8, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #8 + EOR r7, r7, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_CBC_decrypt_block_nr_128_even +#else + BNE.W L_AES_CBC_decrypt_block_nr_128_even +#endif + UBFX r8, r7, #16, #8 + LSR r11, r4, #24 + UBFX r12, r6, #8, #8 + UBFX lr, r5, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r9, r4, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, r12, ROR #8 + UBFX r12, r7, #8, #8 + EOR r8, r8, lr, ROR #16 + UBFX lr, r6, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r10, r5, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, r12, ROR #8 + UBFX r12, r4, #8, #8 + EOR r9, r9, lr, ROR #16 + UBFX lr, r7, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR lr, [r0, lr, LSL #2] + UBFX r4, r4, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r6, #16, #8 + EOR r10, r10, r12, ROR #8 + LSR r12, r7, #24 + EOR r10, r10, lr, ROR #16 + UBFX lr, r5, #8, #8 + LDR r4, [r0, r4, LSL #2] + LDR r12, [r0, r12, LSL #2] + LDR r11, [r0, r11, LSL #2] 
+ LDR lr, [r0, lr, LSL #2] + EOR r12, r12, r4, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #8 + EOR r11, r11, r12, ROR #24 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #0, #8 + UBFX r7, r10, #8, #8 + UBFX r12, r11, #16, #8 + LSR lr, r8, #24 + LDRB r4, [r2, r4] + LDRB r7, [r2, r7] + LDRB r12, [r2, r12] + LDRB lr, [r2, lr] + UBFX r5, r10, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, r12, LSL #16 + UBFX r12, r8, #16, #8 + EOR r4, r4, lr, LSL #24 + LSR lr, r9, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r5, [r2, r5] + LDRB r12, [r2, r12] + UBFX r6, r11, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, r12, LSL #16 + UBFX r12, r9, #16, #8 + EOR r5, r5, lr, LSL #24 + LSR lr, r10, #24 + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + LDRB r6, [r2, r6] + LDRB r12, [r2, r12] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r8, #0, #8 + EOR r6, r6, r12, LSL #16 + UBFX r12, r9, #8, #8 + EOR r6, r6, lr, LSL #24 + UBFX lr, r10, #16, #8 + LDRB r11, [r2, r11] + LDRB r12, [r2, r12] + LDRB r7, [r2, r7] + LDRB lr, [r2, lr] + EOR r12, r12, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, r12, LSL #8 + EOR r7, r7, lr, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ LDR lr, [sp, #16] REV r4, r4 REV r5, r5 @@ -2601,7 +6391,7 @@ L_AES_CBC_decrypt_end_odd: L_AES_CBC_decrypt_end: POP {r3, r4} POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 518 */ + /* Cycle Count = 2204 */ .size AES_CBC_decrypt,.-AES_CBC_decrypt #endif /* HAVE_AES_CBC */ #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER || HAVE_AES_CBC @@ -3236,7 +7026,217 @@ L_AES_GCM_encrypt_loop_block_256: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x6 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_GCM_encrypt_block_nr_256: + UBFX r8, r5, #16, #8 + LSR 
r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + 
UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_GCM_encrypt_block_nr_256 +#else + BNE.W L_AES_GCM_encrypt_block_nr_256 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + 
LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -3283,7 +7283,217 @@ L_AES_GCM_encrypt_loop_block_192: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x5 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_GCM_encrypt_block_nr_192: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + 
UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_GCM_encrypt_block_nr_192 +#else + BNE.W 
L_AES_GCM_encrypt_block_nr_192 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + 
LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] REV r4, r4 @@ -3330,7 +7540,217 @@ L_AES_GCM_encrypt_loop_block_128: EOR r6, r6, r10 EOR r7, r7, r11 MOV r1, #0x4 +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE BL AES_encrypt_block +#else +L_AES_GCM_encrypt_block_nr_128: + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, 
r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r9, #16, #8 + LSR r7, r8, #24 + UBFX lr, r10, #8, #8 + UBFX r2, r11, #0, #8 + LDR r4, [r0, r4, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r5, r10, #16, #8 + EOR r4, r4, r7, ROR #24 + LSR r7, r9, #24 + EOR r4, r4, lr, ROR #8 + UBFX lr, r11, #8, #8 + EOR r4, r4, r2, ROR #16 + UBFX r2, r8, #0, #8 + LDR r5, [r0, r5, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r11, #16, #8 + EOR r5, r5, r7, ROR #24 + LSR r7, r10, #24 + EOR r5, r5, lr, ROR #8 + UBFX lr, r8, #8, #8 + EOR r5, r5, r2, ROR #16 + UBFX r2, r9, #0, #8 + LDR r6, [r0, r6, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r10, #0, #8 + EOR r6, r6, r7, ROR #24 + UBFX r7, r8, #16, #8 + EOR r6, r6, lr, ROR #8 + LSR lr, r11, #24 + EOR r6, r6, r2, ROR #16 + UBFX r2, r9, #8, #8 + LDR r10, [r0, r10, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r7, [r0, r7, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r10, ROR #24 + LDM r3!, {r8, r9, r10, r11} + EOR r7, r7, lr, ROR #24 + EOR r7, r7, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 + SUBS r1, r1, #0x1 +#if defined(__GNUC__) || defined(__ICCARM__) || defined(__IAR_SYSTEMS_ICC__) + BNE L_AES_GCM_encrypt_block_nr_128 +#else + BNE.W L_AES_GCM_encrypt_block_nr_128 +#endif + UBFX r8, r5, #16, #8 + LSR r11, r4, #24 + UBFX lr, r6, #8, #8 + UBFX r2, r7, #0, #8 + LDR r8, [r0, r8, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r9, r6, #16, #8 + EOR r8, r8, r11, ROR #24 + LSR r11, r5, #24 + EOR r8, r8, lr, ROR #8 + UBFX lr, r7, #8, #8 + EOR r8, r8, r2, ROR #16 + UBFX r2, r4, #0, #8 + LDR r9, [r0, r9, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r10, r7, #16, #8 + EOR r9, r9, r11, ROR #24 + LSR r11, r6, #24 + EOR r9, 
r9, lr, ROR #8 + UBFX lr, r4, #8, #8 + EOR r9, r9, r2, ROR #16 + UBFX r2, r5, #0, #8 + LDR r10, [r0, r10, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r2, [r0, r2, LSL #2] + UBFX r6, r6, #0, #8 + EOR r10, r10, r11, ROR #24 + UBFX r11, r4, #16, #8 + EOR r10, r10, lr, ROR #8 + LSR lr, r7, #24 + EOR r10, r10, r2, ROR #16 + UBFX r2, r5, #8, #8 + LDR r6, [r0, r6, LSL #2] + LDR lr, [r0, lr, LSL #2] + LDR r11, [r0, r11, LSL #2] + LDR r2, [r0, r2, LSL #2] + EOR lr, lr, r6, ROR #24 + LDM r3!, {r4, r5, r6, r7} + EOR r11, r11, lr, ROR #24 + EOR r11, r11, r2, ROR #8 + /* XOR in Key Schedule */ + EOR r8, r8, r4 + EOR r9, r9, r5 + EOR r10, r10, r6 + EOR r11, r11, r7 + UBFX r4, r11, #0, #8 + UBFX r7, r10, #8, #8 + UBFX lr, r9, #16, #8 + LSR r2, r8, #24 + LDRB r4, [r0, r4, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r5, r8, #0, #8 + EOR r4, r4, r7, LSL #8 + UBFX r7, r11, #8, #8 + EOR r4, r4, lr, LSL #16 + UBFX lr, r10, #16, #8 + EOR r4, r4, r2, LSL #24 + LSR r2, r9, #24 + LDRB r5, [r0, r5, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + UBFX r6, r9, #0, #8 + EOR r5, r5, r7, LSL #8 + UBFX r7, r8, #8, #8 + EOR r5, r5, lr, LSL #16 + UBFX lr, r11, #16, #8 + EOR r5, r5, r2, LSL #24 + LSR r2, r10, #24 + LDRB r6, [r0, r6, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + LSR r11, r11, #24 + EOR r6, r6, r7, LSL #8 + UBFX r7, r10, #0, #8 + EOR r6, r6, lr, LSL #16 + UBFX lr, r9, #8, #8 + EOR r6, r6, r2, LSL #24 + UBFX r2, r8, #16, #8 + LDRB r11, [r0, r11, LSL #2] + LDRB r7, [r0, r7, LSL #2] + LDRB lr, [r0, lr, LSL #2] + LDRB r2, [r0, r2, LSL #2] + EOR lr, lr, r11, LSL #16 + LDM r3, {r8, r9, r10, r11} + EOR r7, r7, lr, LSL #8 + EOR r7, r7, r2, LSL #16 + /* XOR in Key Schedule */ + EOR r4, r4, r8 + EOR r5, r5, r9 + EOR r6, r6, r10 + EOR r7, r7, r11 +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ POP {r1, r2, lr} LDR r3, [sp] 
REV r4, r4 @@ -3367,7 +7787,7 @@ L_AES_GCM_encrypt_end: REV r7, r7 STM r8, {r4, r5, r6, r7} POP {r4, r5, r6, r7, r8, r9, r10, r11, pc} - /* Cycle Count = 275 */ + /* Cycle Count = 1118 */ .size AES_GCM_encrypt,.-AES_GCM_encrypt #endif /* HAVE_AESGCM */ #endif /* !NO_AES */ diff --git a/wolfcrypt/src/port/arm/thumb2-aes-asm_c.c b/wolfcrypt/src/port/arm/thumb2-aes-asm_c.c index b4d9d90ba..b02b436ec 100644 --- a/wolfcrypt/src/port/arm/thumb2-aes-asm_c.c +++ b/wolfcrypt/src/port/arm/thumb2-aes-asm_c.c @@ -637,6 +637,7 @@ WC_OMIT_FRAME_POINTER void AES_set_encrypt_key(const unsigned char* key, ); } +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE void AES_encrypt_block(const word32* te, int nr, int len, const word32* ks); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG WC_OMIT_FRAME_POINTER void AES_encrypt_block(const word32* te_p, int nr_p, @@ -874,6 +875,7 @@ WC_OMIT_FRAME_POINTER void AES_encrypt_block(const word32* te, int nr, int len, ); } +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ #if defined(HAVE_AES_CBC) || defined(HAVE_AESCCM) || defined(HAVE_AESGCM) || \ defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) static const word32* L_AES_Thumb2_te_ecb = L_AES_Thumb2_te_data; @@ -956,7 +958,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_ECB_encrypt_block_nr_256:\n\t" +#else + "L_AES_ECB_encrypt_block_nr_256_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + 
"UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + 
"LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_encrypt_block_nr_256_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_encrypt_block_nr_256\n\t" +#else + "BNE.W L_AES_ECB_encrypt_block_nr_256_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR 
#16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* 
!WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1012,7 +1231,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_ECB_encrypt_block_nr_192:\n\t" +#else + "L_AES_ECB_encrypt_block_nr_192_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, 
r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_encrypt_block_nr_192_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_encrypt_block_nr_192\n\t" +#else + "BNE.W L_AES_ECB_encrypt_block_nr_192_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR 
lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + 
"UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1068,7 +1504,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_ECB_encrypt_block_nr_128:\n\t" +#else + "L_AES_ECB_encrypt_block_nr_128_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, 
#8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR 
r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_encrypt_block_nr_128_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_encrypt_block_nr_128\n\t" +#else + "BNE.W L_AES_ECB_encrypt_block_nr_128_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, 
ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1192,7 +1845,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" 
"EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_encrypt_block_nr_256:\n\t" +#else + "L_AES_CBC_encrypt_block_nr_256_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, 
[r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_encrypt_block_nr_256_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_encrypt_block_nr_256\n\t" +#else + "BNE.W L_AES_CBC_encrypt_block_nr_256_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, 
ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, 
[r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1252,7 +2122,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_encrypt_block_nr_192:\n\t" +#else + "L_AES_CBC_encrypt_block_nr_192_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, 
[r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM 
%[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_encrypt_block_nr_192_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_encrypt_block_nr_192\n\t" +#else + "BNE.W L_AES_CBC_encrypt_block_nr_192_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, 
r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1312,7 +2399,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_encrypt_block_nr_128:\n\t" +#else + 
"L_AES_CBC_encrypt_block_nr_128_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR 
#16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_encrypt_block_nr_128_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_encrypt_block_nr_128\n\t" +#else + "BNE.W L_AES_CBC_encrypt_block_nr_128_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, 
r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, 
LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1351,6 +2655,7 @@ WC_OMIT_FRAME_POINTER void AES_CBC_encrypt(const unsigned char* in, #endif /* HAVE_AES_CBC */ #ifdef WOLFSSL_AES_COUNTER +static const word32* L_AES_Thumb2_te_ctr = L_AES_Thumb2_te_data; void AES_CTR_encrypt(const unsigned char* in, unsigned char* out, unsigned long len, const unsigned char* ks, int nr, unsigned char* ctr); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG @@ -1372,11 +2677,11 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, (const unsigned char*)ks_p; register int nr __asm__ ("r4") = (int)nr_p; register unsigned char* ctr __asm__ ("r5") = (unsigned char*)ctr_p; - register word32* L_AES_Thumb2_te_ecb_c __asm__ ("r6") = - (word32*)L_AES_Thumb2_te_ecb; + register word32* L_AES_Thumb2_te_ctr_c __asm__ ("r6") = + (word32*)L_AES_Thumb2_te_ctr; #else - register word32* L_AES_Thumb2_te_ecb_c = (word32*)L_AES_Thumb2_te_ecb; + register word32* L_AES_Thumb2_te_ctr_c = (word32*)L_AES_Thumb2_te_ctr; #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ @@ -1392,7 +2697,7 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "MOV r8, %[ctr]\n\t" #endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ "MOV lr, %[in]\n\t" - "MOV r0, %[L_AES_Thumb2_te_ecb]\n\t" + "MOV r0, %[L_AES_Thumb2_te_ctr]\n\t" "LDM r8, {r4, r5, r6, r7}\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -1436,7 +2741,224 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, 
#0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CTR_encrypt_block_nr_256:\n\t" +#else + "L_AES_CTR_encrypt_block_nr_256_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR 
r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CTR_encrypt_block_nr_256_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CTR_encrypt_block_nr_256\n\t" +#else + "BNE.W L_AES_CTR_encrypt_block_nr_256_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, 
#0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB 
r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1500,7 +3022,224 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CTR_encrypt_block_nr_192:\n\t" +#else + "L_AES_CTR_encrypt_block_nr_192_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR 
lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + 
"EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CTR_encrypt_block_nr_192_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CTR_encrypt_block_nr_192\n\t" +#else + "BNE.W L_AES_CTR_encrypt_block_nr_192_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, 
#8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1564,7 +3303,224 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CTR_encrypt_block_nr_128:\n\t" +#else + 
"L_AES_CTR_encrypt_block_nr_128_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR 
#16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CTR_encrypt_block_nr_128_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CTR_encrypt_block_nr_128\n\t" +#else + "BNE.W L_AES_CTR_encrypt_block_nr_128_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, 
r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, 
LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -1609,7 +3565,7 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, "STM r8, {r4, r5, r6, r7}\n\t" : [in] "+r" (in), [out] "+r" (out), [len] "+r" (len), [ks] "+r" (ks), [nr] "+r" (nr), [ctr] "+r" (ctr), - [L_AES_Thumb2_te_ecb] "+r" (L_AES_Thumb2_te_ecb_c) + [L_AES_Thumb2_te_ctr] "+r" (L_AES_Thumb2_te_ctr_c) : : "memory", "cc", "r12", "lr", "r7", "r8", "r9", "r10", "r11" ); @@ -1619,6 +3575,7 @@ WC_OMIT_FRAME_POINTER void AES_CTR_encrypt(const unsigned char* in, #ifdef HAVE_AES_DECRYPT #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) || \ defined(HAVE_AES_CBC) || defined(HAVE_AES_ECB) +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE void AES_decrypt_block(const word32* td, int nr, const byte* td4); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG WC_OMIT_FRAME_POINTER void AES_decrypt_block(const word32* td_p, int nr_p, @@ -1855,6 +3812,7 @@ WC_OMIT_FRAME_POINTER void AES_decrypt_block(const word32* td, int nr, ); } +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ static const word32* L_AES_Thumb2_td_ecb = L_AES_Thumb2_td_data; static const byte L_AES_Thumb2_td4[] = { 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, @@ -1972,7 +3930,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + 
"L_AES_ECB_decrypt_block_nr_256:\n\t" +#else + "L_AES_ECB_decrypt_block_nr_256_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, 
ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_decrypt_block_nr_256_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_decrypt_block_nr_256\n\t" +#else + "BNE.W L_AES_ECB_decrypt_block_nr_256_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" 
+ "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, 
r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[ks], r12, lr}\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2027,7 +4202,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_ECB_decrypt_block_nr_192:\n\t" +#else + "L_AES_ECB_decrypt_block_nr_192_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, 
r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, 
r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_decrypt_block_nr_192_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_decrypt_block_nr_192\n\t" +#else + "BNE.W L_AES_ECB_decrypt_block_nr_192_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, 
[r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[ks], r12, lr}\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2082,7 +4474,224 @@ WC_OMIT_FRAME_POINTER void AES_ECB_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_ECB_decrypt_block_nr_128:\n\t" +#else + "L_AES_ECB_decrypt_block_nr_128_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + 
"UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR 
#8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_ECB_decrypt_block_nr_128_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_ECB_decrypt_block_nr_128\n\t" +#else + "BNE.W L_AES_ECB_decrypt_block_nr_128_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" 
+ "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + 
"EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[ks], r12, lr}\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2209,7 +4818,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_256_odd:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_256_odd_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR 
#24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_256_odd_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_256_odd\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_256_odd_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX 
r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + 
"LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2255,7 +5081,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_256_even:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_256_even_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, 
lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + 
"EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_256_even_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_256_even\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_256_even_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL 
#2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2315,7 +5358,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, 
r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_192_odd:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_192_odd_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL 
#2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_192_odd_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_192_odd\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_192_odd_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR 
#8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, 
[r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2361,7 +5621,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_192_even:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_192_even_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + 
"LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, 
{r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_192_even_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_192_even\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_192_even_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, 
r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2421,7 +5898,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_128_odd:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_128_odd_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + 
"LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL 
#2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_128_odd_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_128_odd\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_128_odd_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, 
#24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB 
r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -2467,7 +6161,224 @@ WC_OMIT_FRAME_POINTER void AES_CBC_decrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_decrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_CBC_decrypt_block_nr_128_even:\n\t" +#else + "L_AES_CBC_decrypt_block_nr_128_even_%=:\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + 
"LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX r12, r10, #8, #8\n\t" + "UBFX lr, r9, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r5, r8, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, r12, ROR #8\n\t" + "UBFX r12, r11, #8, #8\n\t" + "EOR r4, r4, lr, ROR #16\n\t" + "UBFX lr, r10, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r6, r9, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, r12, ROR #8\n\t" + "UBFX r12, r8, #8, #8\n\t" + "EOR r5, r5, lr, ROR #16\n\t" + "UBFX lr, r11, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r8, r8, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r10, #16, #8\n\t" + "EOR r6, r6, r12, ROR #8\n\t" + "LSR r12, r11, #24\n\t" + "EOR r6, r6, lr, ROR #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r8, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #8\n\t" + "EOR r7, r7, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_CBC_decrypt_block_nr_128_even_%=\n\t" 
+#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_CBC_decrypt_block_nr_128_even\n\t" +#else + "BNE.W L_AES_CBC_decrypt_block_nr_128_even_%=\n\t" +#endif + "UBFX r8, r7, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX r12, r6, #8, #8\n\t" + "UBFX lr, r5, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r9, r4, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, r12, ROR #8\n\t" + "UBFX r12, r7, #8, #8\n\t" + "EOR r8, r8, lr, ROR #16\n\t" + "UBFX lr, r6, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r10, r5, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, r12, ROR #8\n\t" + "UBFX r12, r4, #8, #8\n\t" + "EOR r9, r9, lr, ROR #16\n\t" + "UBFX lr, r7, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "UBFX r4, r4, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r6, #16, #8\n\t" + "EOR r10, r10, r12, ROR #8\n\t" + "LSR r12, r7, #24\n\t" + "EOR r10, r10, lr, ROR #16\n\t" + "UBFX lr, r5, #8, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r12, [r0, r12, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "EOR r12, r12, r4, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #8\n\t" + "EOR r11, r11, r12, ROR #24\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX r12, r11, #16, #8\n\t" + "LSR lr, r8, #24\n\t" + "LDRB r4, [r2, r4]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB lr, [r2, lr]\n\t" + "UBFX r5, r10, #0, #8\n\t" + "EOR r4, r4, r7, LSL 
#8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, r12, LSL #16\n\t" + "UBFX r12, r8, #16, #8\n\t" + "EOR r4, r4, lr, LSL #24\n\t" + "LSR lr, r9, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r5, [r2, r5]\n\t" + "LDRB r12, [r2, r12]\n\t" + "UBFX r6, r11, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, r12, LSL #16\n\t" + "UBFX r12, r9, #16, #8\n\t" + "EOR r5, r5, lr, LSL #24\n\t" + "LSR lr, r10, #24\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "LDRB r6, [r2, r6]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r8, #0, #8\n\t" + "EOR r6, r6, r12, LSL #16\n\t" + "UBFX r12, r9, #8, #8\n\t" + "EOR r6, r6, lr, LSL #24\n\t" + "UBFX lr, r10, #16, #8\n\t" + "LDRB r11, [r2, r11]\n\t" + "LDRB r12, [r2, r12]\n\t" + "LDRB r7, [r2, r7]\n\t" + "LDRB lr, [r2, lr]\n\t" + "EOR r12, r12, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, r12, LSL #8\n\t" + "EOR r7, r7, lr, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "LDR lr, [sp, #16]\n\t" "REV r4, r4\n\t" "REV r5, r5\n\t" @@ -3216,7 +7127,224 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x6\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_GCM_encrypt_block_nr_256:\n\t" +#else + "L_AES_GCM_encrypt_block_nr_256_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, 
ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL 
#2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_GCM_encrypt_block_nr_256_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_GCM_encrypt_block_nr_256\n\t" +#else + "BNE.W L_AES_GCM_encrypt_block_nr_256_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, 
r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, 
r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -3277,7 +7405,224 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x5\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_GCM_encrypt_block_nr_192:\n\t" +#else + "L_AES_GCM_encrypt_block_nr_192_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + 
"EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" + "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_GCM_encrypt_block_nr_192_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_GCM_encrypt_block_nr_192\n\t" +#else + "BNE.W L_AES_GCM_encrypt_block_nr_192_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR 
r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" 
+ "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" @@ -3338,7 +7683,224 @@ WC_OMIT_FRAME_POINTER void AES_GCM_encrypt(const unsigned char* in, "EOR r6, r6, r10\n\t" "EOR r7, r7, r11\n\t" "MOV r1, #0x4\n\t" +#ifndef WOLFSSL_ARMASM_AES_BLOCK_INLINE "BL AES_encrypt_block\n\t" +#else + "\n" +#if defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "L_AES_GCM_encrypt_block_nr_128:\n\t" +#else + "L_AES_GCM_encrypt_block_nr_128_%=:\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL 
#2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r9, #16, #8\n\t" + "LSR r7, r8, #24\n\t" + "UBFX lr, r10, #8, #8\n\t" + "UBFX r2, r11, #0, #8\n\t" + "LDR r4, [r0, r4, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r10, #16, #8\n\t" + "EOR r4, r4, r7, ROR #24\n\t" + "LSR r7, r9, #24\n\t" + "EOR r4, r4, lr, ROR #8\n\t" + "UBFX lr, r11, #8, #8\n\t" + "EOR r4, r4, r2, ROR #16\n\t" + "UBFX r2, r8, #0, #8\n\t" + "LDR r5, [r0, r5, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r11, #16, #8\n\t" + "EOR r5, r5, r7, ROR #24\n\t" + "LSR r7, r10, #24\n\t" + "EOR r5, r5, lr, ROR #8\n\t" + "UBFX lr, r8, #8, #8\n\t" + "EOR r5, r5, r2, ROR #16\n\t" + "UBFX r2, r9, #0, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r10, #0, #8\n\t" + "EOR r6, r6, r7, ROR #24\n\t" + "UBFX r7, r8, #16, #8\n\t" + "EOR r6, r6, lr, ROR #8\n\t" 
+ "LSR lr, r11, #24\n\t" + "EOR r6, r6, r2, ROR #16\n\t" + "UBFX r2, r9, #8, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r7, [r0, r7, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r10, ROR #24\n\t" + "LDM %[ks]!, {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, ROR #24\n\t" + "EOR r7, r7, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" + "SUBS r1, r1, #0x1\n\t" +#if defined(__GNUC__) + "BNE L_AES_GCM_encrypt_block_nr_128_%=\n\t" +#elif defined(__IAR_SYSTEMS_ICC__) && (__VER__ < 9000000) + "BNE.W L_AES_GCM_encrypt_block_nr_128\n\t" +#else + "BNE.W L_AES_GCM_encrypt_block_nr_128_%=\n\t" +#endif + "UBFX r8, r5, #16, #8\n\t" + "LSR r11, r4, #24\n\t" + "UBFX lr, r6, #8, #8\n\t" + "UBFX r2, r7, #0, #8\n\t" + "LDR r8, [r0, r8, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r9, r6, #16, #8\n\t" + "EOR r8, r8, r11, ROR #24\n\t" + "LSR r11, r5, #24\n\t" + "EOR r8, r8, lr, ROR #8\n\t" + "UBFX lr, r7, #8, #8\n\t" + "EOR r8, r8, r2, ROR #16\n\t" + "UBFX r2, r4, #0, #8\n\t" + "LDR r9, [r0, r9, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r10, r7, #16, #8\n\t" + "EOR r9, r9, r11, ROR #24\n\t" + "LSR r11, r6, #24\n\t" + "EOR r9, r9, lr, ROR #8\n\t" + "UBFX lr, r4, #8, #8\n\t" + "EOR r9, r9, r2, ROR #16\n\t" + "UBFX r2, r5, #0, #8\n\t" + "LDR r10, [r0, r10, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r6, #0, #8\n\t" + "EOR r10, r10, r11, ROR #24\n\t" + "UBFX r11, r4, #16, #8\n\t" + "EOR r10, r10, lr, ROR #8\n\t" + "LSR lr, r7, #24\n\t" + "EOR r10, r10, r2, ROR #16\n\t" + "UBFX r2, r5, #8, #8\n\t" + "LDR r6, [r0, r6, LSL #2]\n\t" + "LDR lr, [r0, lr, LSL #2]\n\t" + "LDR r11, [r0, r11, LSL #2]\n\t" + "LDR r2, [r0, r2, 
LSL #2]\n\t" + "EOR lr, lr, r6, ROR #24\n\t" + "LDM %[ks]!, {r4, r5, r6, r7}\n\t" + "EOR r11, r11, lr, ROR #24\n\t" + "EOR r11, r11, r2, ROR #8\n\t" + /* XOR in Key Schedule */ + "EOR r8, r8, r4\n\t" + "EOR r9, r9, r5\n\t" + "EOR r10, r10, r6\n\t" + "EOR r11, r11, r7\n\t" + "UBFX r4, r11, #0, #8\n\t" + "UBFX r7, r10, #8, #8\n\t" + "UBFX lr, r9, #16, #8\n\t" + "LSR r2, r8, #24\n\t" + "LDRB r4, [r0, r4, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r5, r8, #0, #8\n\t" + "EOR r4, r4, r7, LSL #8\n\t" + "UBFX r7, r11, #8, #8\n\t" + "EOR r4, r4, lr, LSL #16\n\t" + "UBFX lr, r10, #16, #8\n\t" + "EOR r4, r4, r2, LSL #24\n\t" + "LSR r2, r9, #24\n\t" + "LDRB r5, [r0, r5, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "UBFX r6, r9, #0, #8\n\t" + "EOR r5, r5, r7, LSL #8\n\t" + "UBFX r7, r8, #8, #8\n\t" + "EOR r5, r5, lr, LSL #16\n\t" + "UBFX lr, r11, #16, #8\n\t" + "EOR r5, r5, r2, LSL #24\n\t" + "LSR r2, r10, #24\n\t" + "LDRB r6, [r0, r6, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "LSR r11, r11, #24\n\t" + "EOR r6, r6, r7, LSL #8\n\t" + "UBFX r7, r10, #0, #8\n\t" + "EOR r6, r6, lr, LSL #16\n\t" + "UBFX lr, r9, #8, #8\n\t" + "EOR r6, r6, r2, LSL #24\n\t" + "UBFX r2, r8, #16, #8\n\t" + "LDRB r11, [r0, r11, LSL #2]\n\t" + "LDRB r7, [r0, r7, LSL #2]\n\t" + "LDRB lr, [r0, lr, LSL #2]\n\t" + "LDRB r2, [r0, r2, LSL #2]\n\t" + "EOR lr, lr, r11, LSL #16\n\t" + "LDM %[ks], {r8, r9, r10, r11}\n\t" + "EOR r7, r7, lr, LSL #8\n\t" + "EOR r7, r7, r2, LSL #16\n\t" + /* XOR in Key Schedule */ + "EOR r4, r4, r8\n\t" + "EOR r5, r5, r9\n\t" + "EOR r6, r6, r10\n\t" + "EOR r7, r7, r11\n\t" +#endif /* !WOLFSSL_ARMASM_AES_BLOCK_INLINE */ "POP {r1, %[len], lr}\n\t" "LDR %[ks], [sp]\n\t" "REV r4, r4\n\t" diff --git a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c 
b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c new file mode 100644 index 000000000..b41126576 --- /dev/null +++ b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_cr.c @@ -0,0 +1,7673 @@ +/* ppc32-sha256-asm + * + * Copyright (C) 2006-2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +/* Generated using (from wolfssl): + * cd ../scripts + * ruby ./sha2/sha256.rb ppc32 \ + * ../wolfssl/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.c + */ + +#include +#include + +#ifdef WOLFSSL_PPC32_ASM +#include +#include +#ifdef WOLFSSL_PPC32_ASM_INLINE + +#ifdef __IAR_SYSTEMS_ICC__ +#define __asm__ asm +#define __volatile__ volatile +#define WOLFSSL_NO_VAR_ASSIGN_REG +#endif /* __IAR_SYSTEMS_ICC__ */ +#ifdef __KEIL__ +#define __asm__ __asm +#define __volatile__ volatile +#endif /* __KEIL__ */ +#ifdef __ghs__ +#define __asm__ __asm +#define __volatile__ +#define WOLFSSL_NO_VAR_ASSIGN_REG +#endif /* __ghs__ */ +#ifndef NO_SHA256 +#include + +#ifdef WOLFSSL_PPC32_ASM_SPE +static const word32 L_SHA256_transform_spe_len_k[] = { + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, + 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, + 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 
0x240ca1cc, + 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, + 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, + 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, + 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, + 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, + 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, +}; + +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, + word32 len_p); +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p) +#else +void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) +#endif /* WOLFSSL_NO_VAR_ASSIGN_REG */ +{ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + register wc_Sha256* sha256 asm ("3") = (wc_Sha256*)sha256_p; + register const byte* data asm ("4") = (const byte*)data_p; + register word32 len asm ("5") = (word32)len_p; + register word32* L_SHA256_transform_spe_len_k_c asm ("6") = + (word32*)&L_SHA256_transform_spe_len_k; +#else + register word32* L_SHA256_transform_spe_len_k_c = + (word32*)&L_SHA256_transform_spe_len_k; + +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + + __asm__ __volatile__ ( + "srwi %[len], %[len], 6\n\t" + "mr r30, %[L_SHA256_transform_spe_len_k]\n\t" + /* Load digest into registers */ + "lwz r14, 0(%[sha256])\n\t" + "lwz r15, 4(%[sha256])\n\t" + "lwz r16, 8(%[sha256])\n\t" + "lwz r17, 12(%[sha256])\n\t" + "lwz r18, 16(%[sha256])\n\t" + "lwz r19, 20(%[sha256])\n\t" + "lwz r20, 24(%[sha256])\n\t" + "lwz r21, 28(%[sha256])\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_spe_len_begin_%=: \n\t" + /* Load W */ + "lwz r22, 0(%[data])\n\t" + "lwz r0, 4(%[data])\n\t" + "evmergelo r22, r0, r22\n\t" + "lwz r23, 8(%[data])\n\t" + "lwz r0, 12(%[data])\n\t" + 
"evmergelo r23, r0, r23\n\t" + "lwz r24, 16(%[data])\n\t" + "lwz r0, 20(%[data])\n\t" + "evmergelo r24, r0, r24\n\t" + "lwz r25, 24(%[data])\n\t" + "lwz r0, 28(%[data])\n\t" + "evmergelo r25, r0, r25\n\t" + "lwz r26, 32(%[data])\n\t" + "lwz r0, 36(%[data])\n\t" + "evmergelo r26, r0, r26\n\t" + "lwz r27, 40(%[data])\n\t" + "lwz r0, 44(%[data])\n\t" + "evmergelo r27, r0, r27\n\t" + "lwz r28, 48(%[data])\n\t" + "lwz r0, 52(%[data])\n\t" + "evmergelo r28, r0, r28\n\t" + "lwz r29, 56(%[data])\n\t" + "lwz r0, 60(%[data])\n\t" + "evmergelo r29, r0, r29\n\t" + "li r0, 3\n\t" + "mtctr r0\n\t" + /* Start of 16 rounds */ + "\n" + "L_SHA256_transform_spe_len_start_%=: \n\t" + /* Round 0 */ + "mr r9, r22\n\t" + "rotlwi r6, r18, 26\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r18, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r18\n\t" + "rotlwi r8, r18, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + "lwz r6, 0(r30)\n\t" + "add r21, r21, r9\n\t" + "add r21, r21, r6\n\t" + "add r17, r17, r21\n\t" + "rotlwi r6, r14, 30\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r14, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r15, r16\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r14, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + /* Round 1 */ + "evmergehi r9, r22, r22\n\t" + /* Calc new W[0]-W[1] */ + "evmergelohi r12, r23, r22\n\t" + "rotlwi r6, r17, 26\n\t" + "evrlwi r10, r29, 15\n\t" + "xor r7, r18, r19\n\t" + "evrlwi r11, r29, 13\n\t" + "rotlwi r8, r17, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r29, 10\n\t" + "and r7, r7, r17\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r17, 7\n\t" + "evaddw r22, r22, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r27, r26\n\t" + "xor r7, r7, r19\n\t" + "evaddw r22, r22, r10\n\t" + "add r20, r20, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r20, r20, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 4(r30)\n\t" + 
"evxor r11, r11, r10\n\t" + "add r20, r20, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r20, r20, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r16, r16, r20\n\t" + "evaddw r22, r22, r11\n\t" + "rotlwi r6, r21, 30\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r21, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r14, r15\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r21, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + /* Round 2 */ + "mr r9, r23\n\t" + "rotlwi r6, r16, 26\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r16, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r16\n\t" + "rotlwi r8, r16, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + "lwz r6, 8(r30)\n\t" + "add r19, r19, r9\n\t" + "add r19, r19, r6\n\t" + "add r15, r15, r19\n\t" + "rotlwi r6, r20, 30\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r20, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r21, r14\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r20, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + /* Round 3 */ + "evmergehi r9, r23, r23\n\t" + /* Calc new W[2]-W[3] */ + "evmergelohi r12, r24, r23\n\t" + "rotlwi r6, r15, 26\n\t" + "evrlwi r10, r22, 15\n\t" + "xor r7, r16, r17\n\t" + "evrlwi r11, r22, 13\n\t" + "rotlwi r8, r15, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r22, 10\n\t" + "and r7, r7, r15\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r15, 7\n\t" + "evaddw r23, r23, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r28, r27\n\t" + "xor r7, r7, r17\n\t" + "evaddw r23, r23, r10\n\t" + "add r18, r18, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r18, r18, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 12(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r18, r18, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r18, r18, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r14, r14, r18\n\t" + "evaddw r23, r23, r11\n\t" + "rotlwi r6, r19, 
30\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r19, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r20, r21\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r19, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + /* Round 4 */ + "mr r9, r24\n\t" + "rotlwi r6, r14, 26\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r14, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r14\n\t" + "rotlwi r8, r14, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + "lwz r6, 16(r30)\n\t" + "add r17, r17, r9\n\t" + "add r17, r17, r6\n\t" + "add r21, r21, r17\n\t" + "rotlwi r6, r18, 30\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r18, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r19, r20\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r18, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r19\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + /* Round 5 */ + "evmergehi r9, r24, r24\n\t" + /* Calc new W[4]-W[5] */ + "evmergelohi r12, r25, r24\n\t" + "rotlwi r6, r21, 26\n\t" + "evrlwi r10, r23, 15\n\t" + "xor r7, r14, r15\n\t" + "evrlwi r11, r23, 13\n\t" + "rotlwi r8, r21, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r23, 10\n\t" + "and r7, r7, r21\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r21, 7\n\t" + "evaddw r24, r24, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r29, r28\n\t" + "xor r7, r7, r15\n\t" + "evaddw r24, r24, r10\n\t" + "add r16, r16, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r16, r16, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 20(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r16, r16, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r16, r16, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r20, r20, r16\n\t" + "evaddw r24, r24, r11\n\t" + "rotlwi r6, r17, 30\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r17, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r18, r19\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r17, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + 
"add r16, r16, r6\n\t" + "add r16, r16, r7\n\t" + /* Round 6 */ + "mr r9, r25\n\t" + "rotlwi r6, r20, 26\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r20, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r20\n\t" + "rotlwi r8, r20, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + "lwz r6, 24(r30)\n\t" + "add r15, r15, r9\n\t" + "add r15, r15, r6\n\t" + "add r19, r19, r15\n\t" + "rotlwi r6, r16, 30\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r16, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r17, r18\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r16, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + /* Round 7 */ + "evmergehi r9, r25, r25\n\t" + /* Calc new W[6]-W[7] */ + "evmergelohi r12, r26, r25\n\t" + "rotlwi r6, r19, 26\n\t" + "evrlwi r10, r24, 15\n\t" + "xor r7, r20, r21\n\t" + "evrlwi r11, r24, 13\n\t" + "rotlwi r8, r19, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r24, 10\n\t" + "and r7, r7, r19\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r19, 7\n\t" + "evaddw r25, r25, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r22, r29\n\t" + "xor r7, r7, r21\n\t" + "evaddw r25, r25, r10\n\t" + "add r14, r14, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r14, r14, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 28(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r14, r14, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r14, r14, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r18, r18, r14\n\t" + "evaddw r25, r25, r11\n\t" + "rotlwi r6, r15, 30\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r15, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r16, r17\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r15, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + /* Round 8 */ + "mr r9, r26\n\t" + "rotlwi r6, r18, 26\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r18, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r18\n\t" 
+ "rotlwi r8, r18, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + "lwz r6, 32(r30)\n\t" + "add r21, r21, r9\n\t" + "add r21, r21, r6\n\t" + "add r17, r17, r21\n\t" + "rotlwi r6, r14, 30\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r14, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r15, r16\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r14, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + /* Round 9 */ + "evmergehi r9, r26, r26\n\t" + /* Calc new W[8]-W[9] */ + "evmergelohi r12, r27, r26\n\t" + "rotlwi r6, r17, 26\n\t" + "evrlwi r10, r25, 15\n\t" + "xor r7, r18, r19\n\t" + "evrlwi r11, r25, 13\n\t" + "rotlwi r8, r17, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r25, 10\n\t" + "and r7, r7, r17\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r17, 7\n\t" + "evaddw r26, r26, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r23, r22\n\t" + "xor r7, r7, r19\n\t" + "evaddw r26, r26, r10\n\t" + "add r20, r20, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r20, r20, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 36(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r20, r20, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r20, r20, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r16, r16, r20\n\t" + "evaddw r26, r26, r11\n\t" + "rotlwi r6, r21, 30\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r21, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r14, r15\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r21, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + /* Round 10 */ + "mr r9, r27\n\t" + "rotlwi r6, r16, 26\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r16, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r16\n\t" + "rotlwi r8, r16, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + "lwz r6, 40(r30)\n\t" + "add r19, r19, r9\n\t" + "add r19, r19, r6\n\t" + "add 
r15, r15, r19\n\t" + "rotlwi r6, r20, 30\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r20, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r21, r14\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r20, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + /* Round 11 */ + "evmergehi r9, r27, r27\n\t" + /* Calc new W[10]-W[11] */ + "evmergelohi r12, r28, r27\n\t" + "rotlwi r6, r15, 26\n\t" + "evrlwi r10, r26, 15\n\t" + "xor r7, r16, r17\n\t" + "evrlwi r11, r26, 13\n\t" + "rotlwi r8, r15, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r26, 10\n\t" + "and r7, r7, r15\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r15, 7\n\t" + "evaddw r27, r27, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r24, r23\n\t" + "xor r7, r7, r17\n\t" + "evaddw r27, r27, r10\n\t" + "add r18, r18, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r18, r18, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 44(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r18, r18, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r18, r18, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r14, r14, r18\n\t" + "evaddw r27, r27, r11\n\t" + "rotlwi r6, r19, 30\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r19, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r20, r21\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r19, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + /* Round 12 */ + "mr r9, r28\n\t" + "rotlwi r6, r14, 26\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r14, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r14\n\t" + "rotlwi r8, r14, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + "lwz r6, 48(r30)\n\t" + "add r17, r17, r9\n\t" + "add r17, r17, r6\n\t" + "add r21, r21, r17\n\t" + "rotlwi r6, r18, 30\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r18, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r19, r20\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r18, 10\n\t" + "xor 
r6, r6, r8\n\t" + "xor r7, r7, r19\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + /* Round 13 */ + "evmergehi r9, r28, r28\n\t" + /* Calc new W[12]-W[13] */ + "evmergelohi r12, r29, r28\n\t" + "rotlwi r6, r21, 26\n\t" + "evrlwi r10, r27, 15\n\t" + "xor r7, r14, r15\n\t" + "evrlwi r11, r27, 13\n\t" + "rotlwi r8, r21, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r27, 10\n\t" + "and r7, r7, r21\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r21, 7\n\t" + "evaddw r28, r28, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r25, r24\n\t" + "xor r7, r7, r15\n\t" + "evaddw r28, r28, r10\n\t" + "add r16, r16, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r16, r16, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 52(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r16, r16, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r16, r16, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r20, r20, r16\n\t" + "evaddw r28, r28, r11\n\t" + "rotlwi r6, r17, 30\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r17, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r18, r19\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r17, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r16, r16, r6\n\t" + "add r16, r16, r7\n\t" + /* Round 14 */ + "mr r9, r29\n\t" + "rotlwi r6, r20, 26\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r20, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r20\n\t" + "rotlwi r8, r20, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + "lwz r6, 56(r30)\n\t" + "add r15, r15, r9\n\t" + "add r15, r15, r6\n\t" + "add r19, r19, r15\n\t" + "rotlwi r6, r16, 30\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r16, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r17, r18\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r16, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + /* Round 15 */ + "evmergehi r9, r29, r29\n\t" + /* Calc new W[14]-W[15] */ + "evmergelohi r12, r22, r29\n\t" + 
"rotlwi r6, r19, 26\n\t" + "evrlwi r10, r28, 15\n\t" + "xor r7, r20, r21\n\t" + "evrlwi r11, r28, 13\n\t" + "rotlwi r8, r19, 21\n\t" + "evxor r11, r11, r10\n\t" + "xor r6, r6, r8\n\t" + "evsrwiu r10, r28, 10\n\t" + "and r7, r7, r19\n\t" + "evxor r11, r11, r10\n\t" + "rotlwi r8, r19, 7\n\t" + "evaddw r29, r29, r11\n\t" + "xor r6, r6, r8\n\t" + "evmergelohi r10, r26, r25\n\t" + "xor r7, r7, r21\n\t" + "evaddw r29, r29, r10\n\t" + "add r14, r14, r6\n\t" + "evrlwi r10, r12, 25\n\t" + "add r14, r14, r7\n\t" + "evrlwi r11, r12, 14\n\t" + "lwz r6, 60(r30)\n\t" + "evxor r11, r11, r10\n\t" + "add r14, r14, r9\n\t" + "evsrwiu r12, r12, 3\n\t" + "add r14, r14, r6\n\t" + "evxor r11, r11, r12\n\t" + "add r18, r18, r14\n\t" + "evaddw r29, r29, r11\n\t" + "rotlwi r6, r15, 30\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r15, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r16, r17\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r15, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + "addi r30, r30, 0x40\n\t" + "bdnz L_SHA256_transform_spe_len_start_%=\n\t" + /* Round 0 */ + "mr r9, r22\n\t" + "rotlwi r6, r18, 26\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r18, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r18\n\t" + "rotlwi r8, r18, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + "lwz r6, 0(r30)\n\t" + "add r21, r21, r9\n\t" + "add r21, r21, r6\n\t" + "add r17, r17, r21\n\t" + "rotlwi r6, r14, 30\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r14, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r15, r16\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r14, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + /* Round 1 */ + "evmergehi r9, r22, r22\n\t" + "rotlwi r6, r17, 26\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r17, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r17\n\t" + "rotlwi r8, r17, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, 
r19\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + "lwz r6, 4(r30)\n\t" + "add r20, r20, r9\n\t" + "add r20, r20, r6\n\t" + "add r16, r16, r20\n\t" + "rotlwi r6, r21, 30\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r21, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r14, r15\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r21, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + /* Round 2 */ + "mr r9, r23\n\t" + "rotlwi r6, r16, 26\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r16, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r16\n\t" + "rotlwi r8, r16, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + "lwz r6, 8(r30)\n\t" + "add r19, r19, r9\n\t" + "add r19, r19, r6\n\t" + "add r15, r15, r19\n\t" + "rotlwi r6, r20, 30\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r20, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r21, r14\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r20, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + /* Round 3 */ + "evmergehi r9, r23, r23\n\t" + "rotlwi r6, r15, 26\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r15, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r15\n\t" + "rotlwi r8, r15, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + "lwz r6, 12(r30)\n\t" + "add r18, r18, r9\n\t" + "add r18, r18, r6\n\t" + "add r14, r14, r18\n\t" + "rotlwi r6, r19, 30\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r19, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r20, r21\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r19, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + /* Round 4 */ + "mr r9, r24\n\t" + "rotlwi r6, r14, 26\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r14, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r14\n\t" + "rotlwi r8, r14, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add 
r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + "lwz r6, 16(r30)\n\t" + "add r17, r17, r9\n\t" + "add r17, r17, r6\n\t" + "add r21, r21, r17\n\t" + "rotlwi r6, r18, 30\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r18, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r19, r20\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r18, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r19\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + /* Round 5 */ + "evmergehi r9, r24, r24\n\t" + "rotlwi r6, r21, 26\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r21, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r21\n\t" + "rotlwi r8, r21, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r16, r16, r6\n\t" + "add r16, r16, r7\n\t" + "lwz r6, 20(r30)\n\t" + "add r16, r16, r9\n\t" + "add r16, r16, r6\n\t" + "add r20, r20, r16\n\t" + "rotlwi r6, r17, 30\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r17, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r18, r19\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r17, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r16, r16, r6\n\t" + "add r16, r16, r7\n\t" + /* Round 6 */ + "mr r9, r25\n\t" + "rotlwi r6, r20, 26\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r20, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r20\n\t" + "rotlwi r8, r20, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + "lwz r6, 24(r30)\n\t" + "add r15, r15, r9\n\t" + "add r15, r15, r6\n\t" + "add r19, r19, r15\n\t" + "rotlwi r6, r16, 30\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r16, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r17, r18\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r16, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + /* Round 7 */ + "evmergehi r9, r25, r25\n\t" + "rotlwi r6, r19, 26\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r19, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r19\n\t" + "rotlwi r8, r19, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add 
r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + "lwz r6, 28(r30)\n\t" + "add r14, r14, r9\n\t" + "add r14, r14, r6\n\t" + "add r18, r18, r14\n\t" + "rotlwi r6, r15, 30\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r15, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r16, r17\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r15, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + /* Round 8 */ + "mr r9, r26\n\t" + "rotlwi r6, r18, 26\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r18, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r18\n\t" + "rotlwi r8, r18, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + "lwz r6, 32(r30)\n\t" + "add r21, r21, r9\n\t" + "add r21, r21, r6\n\t" + "add r17, r17, r21\n\t" + "rotlwi r6, r14, 30\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r14, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r15, r16\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r14, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r21, r21, r6\n\t" + "add r21, r21, r7\n\t" + /* Round 9 */ + "evmergehi r9, r26, r26\n\t" + "rotlwi r6, r17, 26\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r17, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r17\n\t" + "rotlwi r8, r17, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r19\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + "lwz r6, 36(r30)\n\t" + "add r20, r20, r9\n\t" + "add r20, r20, r6\n\t" + "add r16, r16, r20\n\t" + "rotlwi r6, r21, 30\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r21, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r14, r15\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r21, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r20, r20, r6\n\t" + "add r20, r20, r7\n\t" + /* Round 10 */ + "mr r9, r27\n\t" + "rotlwi r6, r16, 26\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r16, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r16\n\t" + "rotlwi r8, r16, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r19, r19, 
r6\n\t" + "add r19, r19, r7\n\t" + "lwz r6, 40(r30)\n\t" + "add r19, r19, r9\n\t" + "add r19, r19, r6\n\t" + "add r15, r15, r19\n\t" + "rotlwi r6, r20, 30\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r20, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r21, r14\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r20, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add r19, r19, r6\n\t" + "add r19, r19, r7\n\t" + /* Round 11 */ + "evmergehi r9, r27, r27\n\t" + "rotlwi r6, r15, 26\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r15, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r15\n\t" + "rotlwi r8, r15, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + "lwz r6, 44(r30)\n\t" + "add r18, r18, r9\n\t" + "add r18, r18, r6\n\t" + "add r14, r14, r18\n\t" + "rotlwi r6, r19, 30\n\t" + "xor r7, r19, r20\n\t" + "rotlwi r8, r19, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r20, r21\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r19, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r20\n\t" + "add r18, r18, r6\n\t" + "add r18, r18, r7\n\t" + /* Round 12 */ + "mr r9, r28\n\t" + "rotlwi r6, r14, 26\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r14, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r14\n\t" + "rotlwi r8, r14, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + "lwz r6, 48(r30)\n\t" + "add r17, r17, r9\n\t" + "add r17, r17, r6\n\t" + "add r21, r21, r17\n\t" + "rotlwi r6, r18, 30\n\t" + "xor r7, r18, r19\n\t" + "rotlwi r8, r18, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r19, r20\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r18, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r19\n\t" + "add r17, r17, r6\n\t" + "add r17, r17, r7\n\t" + /* Round 13 */ + "evmergehi r9, r28, r28\n\t" + "rotlwi r6, r21, 26\n\t" + "xor r7, r14, r15\n\t" + "rotlwi r8, r21, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r21\n\t" + "rotlwi r8, r21, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r15\n\t" + "add r16, 
r16, r6\n\t" + "add r16, r16, r7\n\t" + "lwz r6, 52(r30)\n\t" + "add r16, r16, r9\n\t" + "add r16, r16, r6\n\t" + "add r20, r20, r16\n\t" + "rotlwi r6, r17, 30\n\t" + "xor r7, r17, r18\n\t" + "rotlwi r8, r17, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r18, r19\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r17, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r18\n\t" + "add r16, r16, r6\n\t" + "add r16, r16, r7\n\t" + /* Round 14 */ + "mr r9, r29\n\t" + "rotlwi r6, r20, 26\n\t" + "xor r7, r21, r14\n\t" + "rotlwi r8, r20, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r20\n\t" + "rotlwi r8, r20, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r14\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + "lwz r6, 56(r30)\n\t" + "add r15, r15, r9\n\t" + "add r15, r15, r6\n\t" + "add r19, r19, r15\n\t" + "rotlwi r6, r16, 30\n\t" + "xor r7, r16, r17\n\t" + "rotlwi r8, r16, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r17, r18\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r16, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r17\n\t" + "add r15, r15, r6\n\t" + "add r15, r15, r7\n\t" + /* Round 15 */ + "evmergehi r9, r29, r29\n\t" + "rotlwi r6, r19, 26\n\t" + "xor r7, r20, r21\n\t" + "rotlwi r8, r19, 21\n\t" + "xor r6, r6, r8\n\t" + "and r7, r7, r19\n\t" + "rotlwi r8, r19, 7\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r21\n\t" + "add r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + "lwz r6, 60(r30)\n\t" + "add r14, r14, r9\n\t" + "add r14, r14, r6\n\t" + "add r18, r18, r14\n\t" + "rotlwi r6, r15, 30\n\t" + "xor r7, r15, r16\n\t" + "rotlwi r8, r15, 19\n\t" + "xor r6, r6, r8\n\t" + "xor r8, r16, r17\n\t" + "and r7, r7, r8\n\t" + "rotlwi r8, r15, 10\n\t" + "xor r6, r6, r8\n\t" + "xor r7, r7, r16\n\t" + "add r14, r14, r6\n\t" + "add r14, r14, r7\n\t" + /* Add in digest from start */ + "lwz r6, 0(%[sha256])\n\t" + "lwz r7, 4(%[sha256])\n\t" + "add r14, r14, r6\n\t" + "add r15, r15, r7\n\t" + "lwz r6, 8(%[sha256])\n\t" + "lwz r7, 12(%[sha256])\n\t" + "add r16, r16, r6\n\t" + "add r17, r17, r7\n\t" + "lwz 
r6, 16(%[sha256])\n\t" + "lwz r7, 20(%[sha256])\n\t" + "add r18, r18, r6\n\t" + "add r19, r19, r7\n\t" + "lwz r6, 24(%[sha256])\n\t" + "lwz r7, 28(%[sha256])\n\t" + "add r20, r20, r6\n\t" + "add r21, r21, r7\n\t" + "stw r14, 0(%[sha256])\n\t" + "stw r15, 4(%[sha256])\n\t" + "stw r16, 8(%[sha256])\n\t" + "stw r17, 12(%[sha256])\n\t" + "stw r18, 16(%[sha256])\n\t" + "stw r19, 20(%[sha256])\n\t" + "stw r20, 24(%[sha256])\n\t" + "stw r21, 28(%[sha256])\n\t" + "subi r30, r30, 0xc0\n\t" + "mtctr %[len]\n\t" + "addi %[data], %[data], 0x40\n\t" + "subi %[len], %[len], 1\n\t" + "bdnz L_SHA256_transform_spe_len_begin_%=\n\t" +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + : [sha256] "+r" (sha256), [data] "+r" (data), [len] "+r" (len), + [L_SHA256_transform_spe_len_k] "+r" (L_SHA256_transform_spe_len_k_c) + : +#else + : + : [sha256] "r" (sha256), [data] "r" (data), [len] "r" (len), + [L_SHA256_transform_spe_len_k] "r" (L_SHA256_transform_spe_len_k_c) +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + : "memory", "cc", "r0", "r7", "r8", "r9", "r10", "r11", "r12", "r14", + "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23", + "r24", "r25", "r26", "r27", "r28", "r29", "r30" + ); +} + +#endif /* WOLFSSL_PPC32_ASM_SPE */ +#ifndef WOLFSSL_PPC32_ASM_SPE +#include + +static const word32 L_SHA256_transform_len_k[] = { + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, + 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, + 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, + 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, + 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, + 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, + 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, + 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 
0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, + 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, +}; + +#ifndef __PIC__ +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, + word32 len_p); +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p) +#else +void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) +#endif /* WOLFSSL_NO_VAR_ASSIGN_REG */ +{ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + register wc_Sha256* sha256 asm ("3") = (wc_Sha256*)sha256_p; + register const byte* data asm ("4") = (const byte*)data_p; + register word32 len asm ("5") = (word32)len_p; + register word32* L_SHA256_transform_len_k_c asm ("6") = + (word32*)&L_SHA256_transform_len_k; +#else + register word32* L_SHA256_transform_len_k_c = + (word32*)&L_SHA256_transform_len_k; + +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + + __asm__ __volatile__ ( + "srwi %[len], %[len], 6\n\t" + "mr r6, %[L_SHA256_transform_len_k]\n\t" + /* Copy digest to add in at end */ + "lwz r7, 0(%[sha256])\n\t" + "lwz r8, 4(%[sha256])\n\t" + "lwz r9, 8(%[sha256])\n\t" + "lwz r10, 12(%[sha256])\n\t" + "lwz r11, 16(%[sha256])\n\t" + "lwz r12, 20(%[sha256])\n\t" + "lwz r14, 24(%[sha256])\n\t" + "lwz r15, 28(%[sha256])\n\t" +#ifndef WOLFSSL_PPC32_ASM_SMALL + "mtctr %[len]\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz r16, 0(%[data])\n\t" + "lwz r17, 4(%[data])\n\t" + "lwz r18, 8(%[data])\n\t" + "lwz r19, 12(%[data])\n\t" + "lwz r20, 16(%[data])\n\t" + "lwz r21, 20(%[data])\n\t" + "lwz r22, 24(%[data])\n\t" + "lwz r23, 28(%[data])\n\t" + "lwz r24, 32(%[data])\n\t" + "lwz r25, 36(%[data])\n\t" + "lwz r26, 40(%[data])\n\t" + "lwz r27, 44(%[data])\n\t" + "lwz r28, 48(%[data])\n\t" + "lwz r29, 52(%[data])\n\t" + "lwz r30, 56(%[data])\n\t" + "lwz r31, 60(%[data])\n\t" + /* Start of 16 rounds */ + /* Round 0 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi 
%[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 0(r6)\n\t" + "add r15, r15, r16\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[0] */ + "rotlwi r0, r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "rotlwi r0, r30, 15\n\t" + "rotlwi %[len], r30, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "add r16, r16, r25\n\t" + /* Round 1 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 4(r6)\n\t" + "add r14, r14, r17\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[1] */ + "rotlwi r0, r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "rotlwi r0, r31, 15\n\t" + "rotlwi %[len], r31, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 10\n\t" + "xor r0, r0, %[len]\n\t" + 
"add r17, r17, r0\n\t" + "add r17, r17, r26\n\t" + /* Round 2 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 8(r6)\n\t" + "add r12, r12, r18\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[2] */ + "rotlwi r0, r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "rotlwi r0, r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "add r18, r18, r27\n\t" + /* Round 3 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 12(r6)\n\t" + "add r11, r11, r19\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[3] */ + "rotlwi r0, r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "rotlwi r0, r17, 15\n\t" + "rotlwi 
%[len], r17, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "add r19, r19, r28\n\t" + /* Round 4 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 16(r6)\n\t" + "add r10, r10, r20\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[4] */ + "rotlwi r0, r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "rotlwi r0, r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "add r20, r20, r29\n\t" + /* Round 5 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 20(r6)\n\t" + "add r9, r9, r21\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[5] */ + "rotlwi r0, r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 
3\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "rotlwi r0, r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "add r21, r21, r30\n\t" + /* Round 6 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 24(r6)\n\t" + "add r8, r8, r22\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[6] */ + "rotlwi r0, r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "rotlwi r0, r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "add r22, r22, r31\n\t" + /* Round 7 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 28(r6)\n\t" + "add r7, r7, r23\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[7] */ + "rotlwi r0, r24, 25\n\t" + 
"rotlwi %[len], r24, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "rotlwi r0, r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "add r23, r23, r16\n\t" + /* Round 8 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 32(r6)\n\t" + "add r15, r15, r24\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[8] */ + "rotlwi r0, r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "rotlwi r0, r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "add r24, r24, r17\n\t" + /* Round 9 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 36(r6)\n\t" + "add r14, r14, r25\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, 
%[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[9] */ + "rotlwi r0, r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "rotlwi r0, r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "add r25, r25, r18\n\t" + /* Round 10 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 40(r6)\n\t" + "add r12, r12, r26\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[10] */ + "rotlwi r0, r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "rotlwi r0, r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "add r26, r26, r19\n\t" + /* Round 11 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 44(r6)\n\t" + "add r11, r11, r27\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, 
%[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[11] */ + "rotlwi r0, r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "rotlwi r0, r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "add r27, r27, r20\n\t" + /* Round 12 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 48(r6)\n\t" + "add r10, r10, r28\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[12] */ + "rotlwi r0, r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "rotlwi r0, r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "add r28, r28, r21\n\t" + /* Round 13 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 52(r6)\n\t" + "add r9, r9, r29\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + 
"rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[13] */ + "rotlwi r0, r30, 25\n\t" + "rotlwi %[len], r30, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "rotlwi r0, r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "add r29, r29, r22\n\t" + /* Round 14 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 56(r6)\n\t" + "add r8, r8, r30\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[14] */ + "rotlwi r0, r31, 25\n\t" + "rotlwi %[len], r31, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "rotlwi r0, r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "add r30, r30, r23\n\t" + /* Round 15 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 60(r6)\n\t" + "add r7, 
r7, r31\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[15] */ + "rotlwi r0, r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "rotlwi r0, r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "add r31, r31, r24\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 0(r6)\n\t" + "add r15, r15, r16\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[0] */ + "rotlwi r0, r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "rotlwi r0, r30, 15\n\t" + "rotlwi %[len], r30, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "add r16, r16, r25\n\t" + /* Round 1 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, 
r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 4(r6)\n\t" + "add r14, r14, r17\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[1] */ + "rotlwi r0, r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "rotlwi r0, r31, 15\n\t" + "rotlwi %[len], r31, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "add r17, r17, r26\n\t" + /* Round 2 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 8(r6)\n\t" + "add r12, r12, r18\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[2] */ + "rotlwi r0, r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "rotlwi r0, r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "add r18, r18, r27\n\t" + /* Round 3 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, 
%[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 12(r6)\n\t" + "add r11, r11, r19\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[3] */ + "rotlwi r0, r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "rotlwi r0, r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "add r19, r19, r28\n\t" + /* Round 4 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 16(r6)\n\t" + "add r10, r10, r20\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[4] */ + "rotlwi r0, r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "rotlwi r0, r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "add 
r20, r20, r29\n\t" + /* Round 5 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 20(r6)\n\t" + "add r9, r9, r21\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[5] */ + "rotlwi r0, r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "rotlwi r0, r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "add r21, r21, r30\n\t" + /* Round 6 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 24(r6)\n\t" + "add r8, r8, r22\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[6] */ + "rotlwi r0, r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "rotlwi r0, r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor r0, r0, %[len]\n\t" + 
"srwi %[len], r20, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "add r22, r22, r31\n\t" + /* Round 7 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 28(r6)\n\t" + "add r7, r7, r23\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[7] */ + "rotlwi r0, r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "rotlwi r0, r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "add r23, r23, r16\n\t" + /* Round 8 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 32(r6)\n\t" + "add r15, r15, r24\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[8] */ + "rotlwi r0, r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, 
r0\n\t" + "rotlwi r0, r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "add r24, r24, r17\n\t" + /* Round 9 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 36(r6)\n\t" + "add r14, r14, r25\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[9] */ + "rotlwi r0, r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "rotlwi r0, r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "add r25, r25, r18\n\t" + /* Round 10 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 40(r6)\n\t" + "add r12, r12, r26\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[10] */ + "rotlwi r0, r27, 25\n\t" + "rotlwi %[len], r27, 
14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "rotlwi r0, r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "add r26, r26, r19\n\t" + /* Round 11 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 44(r6)\n\t" + "add r11, r11, r27\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[11] */ + "rotlwi r0, r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "rotlwi r0, r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "add r27, r27, r20\n\t" + /* Round 12 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 48(r6)\n\t" + "add r10, r10, r28\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, 
r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[12] */ + "rotlwi r0, r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "rotlwi r0, r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "add r28, r28, r21\n\t" + /* Round 13 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 52(r6)\n\t" + "add r9, r9, r29\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[13] */ + "rotlwi r0, r30, 25\n\t" + "rotlwi %[len], r30, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "rotlwi r0, r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "add r29, r29, r22\n\t" + /* Round 14 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 56(r6)\n\t" + "add r8, r8, r30\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor 
%[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[14] */ + "rotlwi r0, r31, 25\n\t" + "rotlwi %[len], r31, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "rotlwi r0, r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "add r30, r30, r23\n\t" + /* Round 15 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 60(r6)\n\t" + "add r7, r7, r31\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[15] */ + "rotlwi r0, r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "rotlwi r0, r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "add r31, r31, r24\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 0(r6)\n\t" + "add r15, r15, r16\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 
19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[0] */ + "rotlwi r0, r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "rotlwi r0, r30, 15\n\t" + "rotlwi %[len], r30, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "add r16, r16, r25\n\t" + /* Round 1 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 4(r6)\n\t" + "add r14, r14, r17\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[1] */ + "rotlwi r0, r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "rotlwi r0, r31, 15\n\t" + "rotlwi %[len], r31, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "add r17, r17, r26\n\t" + /* Round 2 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 8(r6)\n\t" + "add r12, r12, r18\n\t" + 
"add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[2] */ + "rotlwi r0, r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "rotlwi r0, r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "add r18, r18, r27\n\t" + /* Round 3 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 12(r6)\n\t" + "add r11, r11, r19\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[3] */ + "rotlwi r0, r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "rotlwi r0, r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "add r19, r19, r28\n\t" + /* Round 4 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" 
+ "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 16(r6)\n\t" + "add r10, r10, r20\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[4] */ + "rotlwi r0, r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "rotlwi r0, r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "add r20, r20, r29\n\t" + /* Round 5 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 20(r6)\n\t" + "add r9, r9, r21\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[5] */ + "rotlwi r0, r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "rotlwi r0, r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "add r21, r21, r30\n\t" + /* Round 6 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + 
"xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 24(r6)\n\t" + "add r8, r8, r22\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[6] */ + "rotlwi r0, r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "rotlwi r0, r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "add r22, r22, r31\n\t" + /* Round 7 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 28(r6)\n\t" + "add r7, r7, r23\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[7] */ + "rotlwi r0, r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "rotlwi r0, r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "add r23, r23, r16\n\t" + /* Round 8 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi 
%[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 32(r6)\n\t" + "add r15, r15, r24\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Calc new W[8] */ + "rotlwi r0, r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "rotlwi r0, r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "add r24, r24, r17\n\t" + /* Round 9 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 36(r6)\n\t" + "add r14, r14, r25\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Calc new W[9] */ + "rotlwi r0, r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "rotlwi r0, r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor r0, r0, %[len]\n\t" + 
"add r25, r25, r0\n\t" + "add r25, r25, r18\n\t" + /* Round 10 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 40(r6)\n\t" + "add r12, r12, r26\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Calc new W[10] */ + "rotlwi r0, r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "rotlwi r0, r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "add r26, r26, r19\n\t" + /* Round 11 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 44(r6)\n\t" + "add r11, r11, r27\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Calc new W[11] */ + "rotlwi r0, r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "rotlwi r0, r25, 15\n\t" + "rotlwi 
%[len], r25, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "add r27, r27, r20\n\t" + /* Round 12 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 48(r6)\n\t" + "add r10, r10, r28\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Calc new W[12] */ + "rotlwi r0, r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "rotlwi r0, r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "add r28, r28, r21\n\t" + /* Round 13 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 52(r6)\n\t" + "add r9, r9, r29\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Calc new W[13] */ + "rotlwi r0, r30, 25\n\t" + "rotlwi %[len], r30, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], 
r30, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "rotlwi r0, r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "add r29, r29, r22\n\t" + /* Round 14 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 56(r6)\n\t" + "add r8, r8, r30\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Calc new W[14] */ + "rotlwi r0, r31, 25\n\t" + "rotlwi %[len], r31, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "rotlwi r0, r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "add r30, r30, r23\n\t" + /* Round 15 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 60(r6)\n\t" + "add r7, r7, r31\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Calc new W[15] */ + "rotlwi r0, r16, 
25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "rotlwi r0, r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "add r31, r31, r24\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 0(r6)\n\t" + "add r15, r15, r16\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Round 1 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 4(r6)\n\t" + "add r14, r14, r17\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Round 2 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 
8(r6)\n\t" + "add r12, r12, r18\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Round 3 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 12(r6)\n\t" + "add r11, r11, r19\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Round 4 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 16(r6)\n\t" + "add r10, r10, r20\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Round 5 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, 
r0\n\t" + "lwz r0, 20(r6)\n\t" + "add r9, r9, r21\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Round 6 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 24(r6)\n\t" + "add r8, r8, r22\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Round 7 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 28(r6)\n\t" + "add r7, r7, r23\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + /* Round 8 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + 
"lwz r0, 32(r6)\n\t" + "add r15, r15, r24\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + /* Round 9 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 36(r6)\n\t" + "add r14, r14, r25\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + /* Round 10 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 40(r6)\n\t" + "add r12, r12, r26\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + /* Round 11 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, 
r11, r0\n\t" + "lwz r0, 44(r6)\n\t" + "add r11, r11, r27\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + /* Round 12 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 48(r6)\n\t" + "add r10, r10, r28\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + /* Round 13 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 52(r6)\n\t" + "add r9, r9, r29\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + /* Round 14 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" 
+ "add r8, r8, r0\n\t" + "lwz r0, 56(r6)\n\t" + "add r8, r8, r30\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + /* Round 15 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 60(r6)\n\t" + "add r7, r7, r31\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + "subi r6, r6, 0xc0\n\t" + /* Add in digest from start */ + "lwz r0, 0(%[sha256])\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add r7, r7, r0\n\t" + "add r8, r8, %[len]\n\t" + "lwz r0, 8(%[sha256])\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add r9, r9, r0\n\t" + "add r10, r10, %[len]\n\t" + "lwz r0, 16(%[sha256])\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add r11, r11, r0\n\t" + "add r12, r12, %[len]\n\t" + "lwz r0, 24(%[sha256])\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add r14, r14, r0\n\t" + "add r15, r15, %[len]\n\t" + "stw r7, 0(%[sha256])\n\t" + "stw r8, 4(%[sha256])\n\t" + "stw r9, 8(%[sha256])\n\t" + "stw r10, 12(%[sha256])\n\t" + "stw r11, 16(%[sha256])\n\t" + "stw r12, 20(%[sha256])\n\t" + "stw r14, 24(%[sha256])\n\t" + "stw r15, 28(%[sha256])\n\t" + "addi %[data], %[data], 0x40\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" +#else + "subi r1, r1, 4\n\t" + "stw %[len], 0(r1)\n\t" + /* Start of loop processing 
a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz r16, 0(%[data])\n\t" + "lwz r17, 4(%[data])\n\t" + "lwz r18, 8(%[data])\n\t" + "lwz r19, 12(%[data])\n\t" + "lwz r20, 16(%[data])\n\t" + "lwz r21, 20(%[data])\n\t" + "lwz r22, 24(%[data])\n\t" + "lwz r23, 28(%[data])\n\t" + "lwz r24, 32(%[data])\n\t" + "lwz r25, 36(%[data])\n\t" + "lwz r26, 40(%[data])\n\t" + "lwz r27, 44(%[data])\n\t" + "lwz r28, 48(%[data])\n\t" + "lwz r29, 52(%[data])\n\t" + "lwz r30, 56(%[data])\n\t" + "lwz r31, 60(%[data])\n\t" + "li r0, 4\n\t" + "mtctr r0\n\t" + /* Start of 16 rounds */ + "\n" + "L_SHA256_transform_len_start_%=: \n\t" + /* Round 0 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 0(r6)\n\t" + "add r15, r15, r16\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_0_%=\n\t" + /* Calc new W[0] */ + "rotlwi r0, r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "rotlwi r0, r30, 15\n\t" + "rotlwi %[len], r30, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r16, r16, r0\n\t" + "add r16, r16, r25\n\t" + "\n" + "L_SHA256_transform_len_after_blk_0_%=: \n\t" + /* Round 1 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + 
"xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 4(r6)\n\t" + "add r14, r14, r17\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_1_%=\n\t" + /* Calc new W[1] */ + "rotlwi r0, r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "rotlwi r0, r31, 15\n\t" + "rotlwi %[len], r31, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r17, r17, r0\n\t" + "add r17, r17, r26\n\t" + "\n" + "L_SHA256_transform_len_after_blk_1_%=: \n\t" + /* Round 2 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 8(r6)\n\t" + "add r12, r12, r18\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_2_%=\n\t" + /* Calc new W[2] */ + "rotlwi r0, r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor r0, r0, 
%[len]\n\t" + "add r18, r18, r0\n\t" + "rotlwi r0, r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r18, r18, r0\n\t" + "add r18, r18, r27\n\t" + "\n" + "L_SHA256_transform_len_after_blk_2_%=: \n\t" + /* Round 3 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 12(r6)\n\t" + "add r11, r11, r19\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_3_%=\n\t" + /* Calc new W[3] */ + "rotlwi r0, r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "rotlwi r0, r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r19, r19, r0\n\t" + "add r19, r19, r28\n\t" + "\n" + "L_SHA256_transform_len_after_blk_3_%=: \n\t" + /* Round 4 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 16(r6)\n\t" + "add r10, r10, r20\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, 
%[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_4_%=\n\t" + /* Calc new W[4] */ + "rotlwi r0, r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "rotlwi r0, r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r20, r20, r0\n\t" + "add r20, r20, r29\n\t" + "\n" + "L_SHA256_transform_len_after_blk_4_%=: \n\t" + /* Round 5 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 20(r6)\n\t" + "add r9, r9, r21\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_5_%=\n\t" + /* Calc new W[5] */ + "rotlwi r0, r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "rotlwi r0, r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r21, r21, r0\n\t" + "add r21, r21, r30\n\t" + "\n" + "L_SHA256_transform_len_after_blk_5_%=: \n\t" + /* Round 6 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, 
%[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 24(r6)\n\t" + "add r8, r8, r22\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_6_%=\n\t" + /* Calc new W[6] */ + "rotlwi r0, r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "rotlwi r0, r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r22, r22, r0\n\t" + "add r22, r22, r31\n\t" + "\n" + "L_SHA256_transform_len_after_blk_6_%=: \n\t" + /* Round 7 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 28(r6)\n\t" + "add r7, r7, r23\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_7_%=\n\t" + /* Calc new W[7] */ + "rotlwi r0, r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 
3\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "rotlwi r0, r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r23, r23, r0\n\t" + "add r23, r23, r16\n\t" + "\n" + "L_SHA256_transform_len_after_blk_7_%=: \n\t" + /* Round 8 */ + "rotlwi r0, r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, r11\n\t" + "xor r0, r0, r14\n\t" + "add r15, r15, r0\n\t" + "lwz r0, 32(r6)\n\t" + "add r15, r15, r24\n\t" + "add r15, r15, r0\n\t" + "add r10, r10, r15\n\t" + "rotlwi r0, r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r15, r15, r0\n\t" + "xor %[len], r7, r8\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r8\n\t" + "add r15, r15, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_8_%=\n\t" + /* Calc new W[8] */ + "rotlwi r0, r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "rotlwi r0, r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r24, r24, r0\n\t" + "add r24, r24, r17\n\t" + "\n" + "L_SHA256_transform_len_after_blk_8_%=: \n\t" + /* Round 9 */ + "rotlwi r0, r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, r10\n\t" + "xor r0, r0, r12\n\t" + "add r14, r14, r0\n\t" + "lwz r0, 36(r6)\n\t" + "add r14, r14, r25\n\t" + "add r14, r14, r0\n\t" + "add r9, r9, r14\n\t" + "rotlwi r0, r15, 30\n\t" + "rotlwi %[len], r15, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], 
r15, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r14, r14, r0\n\t" + "xor %[len], r15, r7\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r7\n\t" + "add r14, r14, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_9_%=\n\t" + /* Calc new W[9] */ + "rotlwi r0, r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "rotlwi r0, r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r25, r25, r0\n\t" + "add r25, r25, r18\n\t" + "\n" + "L_SHA256_transform_len_after_blk_9_%=: \n\t" + /* Round 10 */ + "rotlwi r0, r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, r9\n\t" + "xor r0, r0, r11\n\t" + "add r12, r12, r0\n\t" + "lwz r0, 40(r6)\n\t" + "add r12, r12, r26\n\t" + "add r12, r12, r0\n\t" + "add r8, r8, r12\n\t" + "rotlwi r0, r14, 30\n\t" + "rotlwi %[len], r14, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r12, r12, r0\n\t" + "xor %[len], r14, r15\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r15\n\t" + "add r12, r12, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_10_%=\n\t" + /* Calc new W[10] */ + "rotlwi r0, r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "rotlwi r0, r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r26, r26, r0\n\t" + "add r26, r26, r19\n\t" + "\n" + "L_SHA256_transform_len_after_blk_10_%=: \n\t" + /* Round 11 */ + "rotlwi r0, r8, 26\n\t" + "rotlwi 
%[len], r8, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, r8\n\t" + "xor r0, r0, r10\n\t" + "add r11, r11, r0\n\t" + "lwz r0, 44(r6)\n\t" + "add r11, r11, r27\n\t" + "add r11, r11, r0\n\t" + "add r7, r7, r11\n\t" + "rotlwi r0, r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r11, r11, r0\n\t" + "xor %[len], r12, r14\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r14\n\t" + "add r11, r11, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_11_%=\n\t" + /* Calc new W[11] */ + "rotlwi r0, r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "rotlwi r0, r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r27, r27, r0\n\t" + "add r27, r27, r20\n\t" + "\n" + "L_SHA256_transform_len_after_blk_11_%=: \n\t" + /* Round 12 */ + "rotlwi r0, r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor r0, r8, r9\n\t" + "and r0, r0, r7\n\t" + "xor r0, r0, r9\n\t" + "add r10, r10, r0\n\t" + "lwz r0, 48(r6)\n\t" + "add r10, r10, r28\n\t" + "add r10, r10, r0\n\t" + "add r15, r15, r10\n\t" + "rotlwi r0, r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r10, r10, r0\n\t" + "xor %[len], r11, r12\n\t" + "xor r0, r12, r14\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r12\n\t" + "add r10, r10, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_12_%=\n\t" + /* Calc new W[12] */ + "rotlwi r0, r29, 25\n\t" + "rotlwi 
%[len], r29, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "rotlwi r0, r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r28, r28, r0\n\t" + "add r28, r28, r21\n\t" + "\n" + "L_SHA256_transform_len_after_blk_12_%=: \n\t" + /* Round 13 */ + "rotlwi r0, r15, 26\n\t" + "rotlwi %[len], r15, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r15, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor r0, r7, r8\n\t" + "and r0, r0, r15\n\t" + "xor r0, r0, r8\n\t" + "add r9, r9, r0\n\t" + "lwz r0, 52(r6)\n\t" + "add r9, r9, r29\n\t" + "add r9, r9, r0\n\t" + "add r14, r14, r9\n\t" + "rotlwi r0, r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r9, r9, r0\n\t" + "xor %[len], r10, r11\n\t" + "xor r0, r11, r12\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r11\n\t" + "add r9, r9, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_13_%=\n\t" + /* Calc new W[13] */ + "rotlwi r0, r30, 25\n\t" + "rotlwi %[len], r30, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r30, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "rotlwi r0, r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r29, r29, r0\n\t" + "add r29, r29, r22\n\t" + "\n" + "L_SHA256_transform_len_after_blk_13_%=: \n\t" + /* Round 14 */ + "rotlwi r0, r14, 26\n\t" + "rotlwi %[len], r14, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r14, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor r0, r15, r7\n\t" + "and r0, r0, r14\n\t" + "xor r0, r0, r7\n\t" + "add r8, r8, r0\n\t" + "lwz r0, 56(r6)\n\t" + "add r8, r8, r30\n\t" + "add r8, r8, r0\n\t" + "add r12, r12, r8\n\t" + "rotlwi r0, r9, 30\n\t" + "rotlwi %[len], r9, 
19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r8, r8, r0\n\t" + "xor %[len], r9, r10\n\t" + "xor r0, r10, r11\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r10\n\t" + "add r8, r8, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_14_%=\n\t" + /* Calc new W[14] */ + "rotlwi r0, r31, 25\n\t" + "rotlwi %[len], r31, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r31, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "rotlwi r0, r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r30, r30, r0\n\t" + "add r30, r30, r23\n\t" + "\n" + "L_SHA256_transform_len_after_blk_14_%=: \n\t" + /* Round 15 */ + "rotlwi r0, r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor r0, r14, r15\n\t" + "and r0, r0, r12\n\t" + "xor r0, r0, r15\n\t" + "add r7, r7, r0\n\t" + "lwz r0, 60(r6)\n\t" + "add r7, r7, r31\n\t" + "add r7, r7, r0\n\t" + "add r11, r11, r7\n\t" + "rotlwi r0, r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor r0, r0, %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r7, r7, r0\n\t" + "xor %[len], r8, r9\n\t" + "xor r0, r9, r10\n\t" + "and r0, r0, %[len]\n\t" + "xor r0, r0, r9\n\t" + "add r7, r7, r0\n\t" + "mfctr %[len]\n\t" + "cmpwi r0, %[len], 1\n\t" + "beq r0, L_SHA256_transform_len_after_blk_15_%=\n\t" + /* Calc new W[15] */ + "rotlwi r0, r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "rotlwi r0, r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor r0, r0, %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor r0, r0, %[len]\n\t" + "add r31, r31, r0\n\t" + "add r31, r31, r24\n\t" + "\n" + "L_SHA256_transform_len_after_blk_15_%=: \n\t" + "addi r6, r6, 
0x40\n\t" + "bdnz L_SHA256_transform_len_start_%=\n\t" + "subi r6, r6, 0x100\n\t" + /* Add in digest from start */ + "lwz r0, 0(%[sha256])\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add r7, r7, r0\n\t" + "add r8, r8, %[len]\n\t" + "lwz r0, 8(%[sha256])\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add r9, r9, r0\n\t" + "add r10, r10, %[len]\n\t" + "lwz r0, 16(%[sha256])\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add r11, r11, r0\n\t" + "add r12, r12, %[len]\n\t" + "lwz r0, 24(%[sha256])\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add r14, r14, r0\n\t" + "add r15, r15, %[len]\n\t" + "stw r7, 0(%[sha256])\n\t" + "stw r8, 4(%[sha256])\n\t" + "stw r9, 8(%[sha256])\n\t" + "stw r10, 12(%[sha256])\n\t" + "stw r11, 16(%[sha256])\n\t" + "stw r12, 20(%[sha256])\n\t" + "stw r14, 24(%[sha256])\n\t" + "stw r15, 28(%[sha256])\n\t" + "lwz %[len], 0(r1)\n\t" + "mtctr %[len]\n\t" + "subi %[len], %[len], 1\n\t" + "addi %[data], %[data], 0x40\n\t" + "stw %[len], 0(r1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi r1, r1, 4\n\t" +#endif /* WOLFSSL_PPC32_ASM_SMALL */ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + : [sha256] "+r" (sha256), [data] "+r" (data), [len] "+r" (len), + [L_SHA256_transform_len_k] "+r" (L_SHA256_transform_len_k_c) + : +#else + : + : [sha256] "r" (sha256), [data] "r" (data), [len] "r" (len), + [L_SHA256_transform_len_k] "r" (L_SHA256_transform_len_k_c) +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + : "memory", "cc", "r0", "r7", "r8", "r9", "r10", "r11", "r12", "r14", + "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23", + "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31" + ); +} + +#else +/* PIC version not using register 30 or 31 */ +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, + word32 len_p); +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p) +#else +void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) +#endif /* WOLFSSL_NO_VAR_ASSIGN_REG */ +{ 
+#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + register wc_Sha256* sha256 asm ("3") = (wc_Sha256*)sha256_p; + register const byte* data asm ("4") = (const byte*)data_p; + register word32 len asm ("5") = (word32)len_p; + register word32* L_SHA256_transform_len_k_c asm ("6") = + (word32*)&L_SHA256_transform_len_k; +#else + register word32* L_SHA256_transform_len_k_c = + (word32*)&L_SHA256_transform_len_k; + +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + + __asm__ __volatile__ ( + "srwi %[len], %[len], 6\n\t" + "mr r6, %[L_SHA256_transform_len_k]\n\t" +#ifndef WOLFSSL_PPC32_ASM_SMALL + "subi r1, r1, 8\n\t" + "stw %[sha256], 0(r1)\n\t" + "stw %[data], 4(r1)\n\t" + "mtctr %[len]\n\t" + /* Copy digest to add in at end */ + "lwz r0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz r7, 8(%[sha256])\n\t" + "lwz r8, 12(%[sha256])\n\t" + "lwz r9, 16(%[sha256])\n\t" + "lwz r10, 20(%[sha256])\n\t" + "lwz r11, 24(%[sha256])\n\t" + "lwz r12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(r1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz r14, 0(%[sha256])\n\t" + "lwz r15, 4(%[sha256])\n\t" + "lwz r16, 8(%[sha256])\n\t" + "lwz r17, 12(%[sha256])\n\t" + "lwz r18, 16(%[sha256])\n\t" + "lwz r19, 20(%[sha256])\n\t" + "lwz r20, 24(%[sha256])\n\t" + "lwz r21, 28(%[sha256])\n\t" + "lwz r22, 32(%[sha256])\n\t" + "lwz r23, 36(%[sha256])\n\t" + "lwz r24, 40(%[sha256])\n\t" + "lwz r25, 44(%[sha256])\n\t" + "lwz r26, 48(%[sha256])\n\t" + "lwz r27, 52(%[sha256])\n\t" + "lwz r28, 56(%[sha256])\n\t" + "lwz r29, 60(%[sha256])\n\t" + /* Start of 16 rounds */ + /* Round 0 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 
0(r6)\n\t" + "add r12, r12, r14\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], r15, 25\n\t" + "rotlwi %[len], r15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "rotlwi %[sha256], r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "add r14, r14, r23\n\t" + /* Round 1 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 4(r6)\n\t" + "add r11, r11, r15\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + 
"add r15, r15, %[sha256]\n\t" + "rotlwi %[sha256], r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "add r15, r15, r24\n\t" + /* Round 2 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 8(r6)\n\t" + "add r10, r10, r16\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "rotlwi %[sha256], r14, 15\n\t" + "rotlwi %[len], r14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "add r16, r16, r25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 12(r6)\n\t" + "add 
r9, r9, r17\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "rotlwi %[sha256], r15, 15\n\t" + "rotlwi %[len], r15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "add r17, r17, r26\n\t" + /* Round 4 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 16(r6)\n\t" + "add r8, r8, r18\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "rotlwi 
%[sha256], r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "add r18, r18, r27\n\t" + /* Round 5 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 20(r6)\n\t" + "add r7, r7, r19\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "rotlwi %[sha256], r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "add r19, r19, r28\n\t" + /* Round 6 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(r6)\n\t" + "add %[data], %[data], r20\n\t" + "add %[data], 
%[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "rotlwi %[sha256], r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "add r20, r20, r29\n\t" + /* Round 7 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 28(r6)\n\t" + "add r0, r0, r21\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + 
"rotlwi %[sha256], r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "add r21, r21, r14\n\t" + /* Round 8 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 32(r6)\n\t" + "add r12, r12, r22\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "rotlwi %[sha256], r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "add r22, r22, r15\n\t" + /* Round 9 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 36(r6)\n\t" + "add r11, r11, r23\n\t" + "add r11, r11, 
%[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "rotlwi %[sha256], r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "add r23, r23, r16\n\t" + /* Round 10 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 40(r6)\n\t" + "add r10, r10, r24\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "rotlwi %[sha256], 
r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "add r24, r24, r17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 44(r6)\n\t" + "add r9, r9, r25\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "rotlwi %[sha256], r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "add r25, r25, r18\n\t" + /* Round 12 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 48(r6)\n\t" + "add r8, r8, r26\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, 
r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "rotlwi %[sha256], r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "add r26, r26, r19\n\t" + /* Round 13 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 52(r6)\n\t" + "add r7, r7, r27\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "rotlwi %[sha256], r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor 
%[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "add r27, r27, r20\n\t" + /* Round 14 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(r6)\n\t" + "add %[data], %[data], r28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "rotlwi %[sha256], r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "add r28, r28, r21\n\t" + /* Round 15 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 60(r6)\n\t" + "add r0, r0, r29\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" 
+ "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], r14, 25\n\t" + "rotlwi %[len], r14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "rotlwi %[sha256], r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "add r29, r29, r22\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 0(r6)\n\t" + "add r12, r12, r14\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], r15, 25\n\t" + "rotlwi %[len], r15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "rotlwi %[sha256], r28, 
15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "add r14, r14, r23\n\t" + /* Round 1 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 4(r6)\n\t" + "add r11, r11, r15\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "rotlwi %[sha256], r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "add r15, r15, r24\n\t" + /* Round 2 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 8(r6)\n\t" + "add r10, r10, r16\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], 
r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "rotlwi %[sha256], r14, 15\n\t" + "rotlwi %[len], r14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "add r16, r16, r25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 12(r6)\n\t" + "add r9, r9, r17\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "rotlwi %[sha256], r15, 15\n\t" + "rotlwi %[len], r15, 
13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "add r17, r17, r26\n\t" + /* Round 4 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 16(r6)\n\t" + "add r8, r8, r18\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "rotlwi %[sha256], r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "add r18, r18, r27\n\t" + /* Round 5 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 20(r6)\n\t" + "add r7, r7, r19\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + 
"rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "rotlwi %[sha256], r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "add r19, r19, r28\n\t" + /* Round 6 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(r6)\n\t" + "add %[data], %[data], r20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "rotlwi %[sha256], r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "add r20, r20, r29\n\t" + /* Round 7 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 28(r6)\n\t" + "add r0, r0, r21\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "rotlwi %[sha256], r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "add r21, r21, r14\n\t" + /* Round 8 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 32(r6)\n\t" + "add r12, r12, r22\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + 
"rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "rotlwi %[sha256], r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "add r22, r22, r15\n\t" + /* Round 9 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 36(r6)\n\t" + "add r11, r11, r23\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "rotlwi %[sha256], r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "add r23, r23, r16\n\t" + /* Round 10 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 40(r6)\n\t" + "add r10, r10, r24\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "rotlwi %[sha256], r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "add r24, r24, r17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 44(r6)\n\t" + "add r9, r9, r25\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], 
r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "rotlwi %[sha256], r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "add r25, r25, r18\n\t" + /* Round 12 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 48(r6)\n\t" + "add r8, r8, r26\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "rotlwi %[sha256], r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 10\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "add r26, r26, r19\n\t" + /* Round 13 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 52(r6)\n\t" + "add r7, r7, r27\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "rotlwi %[sha256], r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "add r27, r27, r20\n\t" + /* Round 14 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(r6)\n\t" + "add %[data], %[data], r28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor 
%[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "rotlwi %[sha256], r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "add r28, r28, r21\n\t" + /* Round 15 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 60(r6)\n\t" + "add r0, r0, r29\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], r14, 25\n\t" + "rotlwi %[len], r14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "rotlwi %[sha256], r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 
10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "add r29, r29, r22\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 0(r6)\n\t" + "add r12, r12, r14\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], r15, 25\n\t" + "rotlwi %[len], r15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "rotlwi %[sha256], r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "add r14, r14, r23\n\t" + /* Round 1 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 4(r6)\n\t" + "add r11, r11, r15\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor 
%[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "rotlwi %[sha256], r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "add r15, r15, r24\n\t" + /* Round 2 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 8(r6)\n\t" + "add r10, r10, r16\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "rotlwi %[sha256], r14, 15\n\t" + "rotlwi %[len], r14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 10\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "add r16, r16, r25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 12(r6)\n\t" + "add r9, r9, r17\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "rotlwi %[sha256], r15, 15\n\t" + "rotlwi %[len], r15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "add r17, r17, r26\n\t" + /* Round 4 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 16(r6)\n\t" + "add r8, r8, r18\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + 
"rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "rotlwi %[sha256], r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "add r18, r18, r27\n\t" + /* Round 5 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 20(r6)\n\t" + "add r7, r7, r19\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "rotlwi %[sha256], r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, 
r19, %[sha256]\n\t" + "add r19, r19, r28\n\t" + /* Round 6 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(r6)\n\t" + "add %[data], %[data], r20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "rotlwi %[sha256], r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "add r20, r20, r29\n\t" + /* Round 7 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 28(r6)\n\t" + "add r0, r0, r21\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + 
"rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "rotlwi %[sha256], r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "add r21, r21, r14\n\t" + /* Round 8 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 32(r6)\n\t" + "add r12, r12, r22\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "rotlwi %[sha256], r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "add r22, r22, r15\n\t" + /* Round 9 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 36(r6)\n\t" + "add r11, r11, r23\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "rotlwi %[sha256], r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "add r23, r23, r16\n\t" + /* Round 10 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 40(r6)\n\t" + "add r10, r10, r24\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 
10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "rotlwi %[sha256], r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "add r24, r24, r17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 44(r6)\n\t" + "add r9, r9, r25\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "rotlwi %[sha256], r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, 
%[sha256]\n\t" + "add r25, r25, r18\n\t" + /* Round 12 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 48(r6)\n\t" + "add r8, r8, r26\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "rotlwi %[sha256], r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "add r26, r26, r19\n\t" + /* Round 13 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 52(r6)\n\t" + "add r7, r7, r27\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "rotlwi %[sha256], r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "add r27, r27, r20\n\t" + /* Round 14 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(r6)\n\t" + "add %[data], %[data], r28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "rotlwi %[sha256], r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, 
%[sha256]\n\t" + "add r28, r28, r21\n\t" + /* Round 15 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 60(r6)\n\t" + "add r0, r0, r29\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], r14, 25\n\t" + "rotlwi %[len], r14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "rotlwi %[sha256], r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "add r29, r29, r22\n\t" + "addi r6, r6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 0(r6)\n\t" + "add r12, r12, r14\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Round 1 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 4(r6)\n\t" + "add r11, r11, r15\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + /* Round 2 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 8(r6)\n\t" + "add r10, r10, r16\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Round 3 */ + "rotlwi 
%[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 12(r6)\n\t" + "add r9, r9, r17\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Round 4 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 16(r6)\n\t" + "add r8, r8, r18\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Round 5 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], 
%[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 20(r6)\n\t" + "add r7, r7, r19\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Round 6 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(r6)\n\t" + "add %[data], %[data], r20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 7 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 28(r6)\n\t" + "add r0, r0, r21\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + /* Round 8 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 32(r6)\n\t" + "add r12, r12, r22\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + /* Round 9 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 36(r6)\n\t" + "add r11, r11, r23\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, 
%[sha256]\n\t" + /* Round 10 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 40(r6)\n\t" + "add r10, r10, r24\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 44(r6)\n\t" + "add r9, r9, r25\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + /* Round 12 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and 
%[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 48(r6)\n\t" + "add r8, r8, r26\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + /* Round 13 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 52(r6)\n\t" + "add r7, r7, r27\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + /* Round 14 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(r6)\n\t" + "add %[data], %[data], r28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 
19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 15 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 60(r6)\n\t" + "add r0, r0, r29\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + "subi r6, r6, 0xc0\n\t" + "lwz %[sha256], 0(r1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add r0, r0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add r7, r7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add r8, r8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add r9, r9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add r10, r10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + "add r11, r11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add r12, r12, %[len]\n\t" + "stw r0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw r7, 8(%[sha256])\n\t" + "stw r8, 12(%[sha256])\n\t" + "stw r9, 16(%[sha256])\n\t" + "stw r10, 20(%[sha256])\n\t" + "stw r11, 24(%[sha256])\n\t" + 
"stw r12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(r1)\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(r1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi r1, r1, 8\n\t" +#else + "subi r1, r1, 12\n\t" + "stw %[sha256], 0(r1)\n\t" + "stw %[data], 4(r1)\n\t" + "stw %[len], 8(r1)\n\t" + /* Copy digest to add in at end */ + "lwz r0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz r7, 8(%[sha256])\n\t" + "lwz r8, 12(%[sha256])\n\t" + "lwz r9, 16(%[sha256])\n\t" + "lwz r10, 20(%[sha256])\n\t" + "lwz r11, 24(%[sha256])\n\t" + "lwz r12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(r1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz r14, 0(%[sha256])\n\t" + "lwz r15, 4(%[sha256])\n\t" + "lwz r16, 8(%[sha256])\n\t" + "lwz r17, 12(%[sha256])\n\t" + "lwz r18, 16(%[sha256])\n\t" + "lwz r19, 20(%[sha256])\n\t" + "lwz r20, 24(%[sha256])\n\t" + "lwz r21, 28(%[sha256])\n\t" + "lwz r22, 32(%[sha256])\n\t" + "lwz r23, 36(%[sha256])\n\t" + "lwz r24, 40(%[sha256])\n\t" + "lwz r25, 44(%[sha256])\n\t" + "lwz r26, 48(%[sha256])\n\t" + "lwz r27, 52(%[sha256])\n\t" + "lwz r28, 56(%[sha256])\n\t" + "lwz r29, 60(%[sha256])\n\t" + "li %[sha256], 4\n\t" + "mtctr %[sha256]\n\t" + /* Start of 16 rounds */ + "\n" + "L_SHA256_transform_len_start_%=: \n\t" + /* Round 0 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 0(r6)\n\t" + "add r12, r12, r14\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" 
+ "add r12, r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_0_%=\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], r15, 25\n\t" + "rotlwi %[len], r15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "rotlwi %[sha256], r28, 15\n\t" + "rotlwi %[len], r28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r14, r14, %[sha256]\n\t" + "add r14, r14, r23\n\t" + "\n" + "L_SHA256_transform_len_after_blk_0_%=: \n\t" + /* Round 1 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 4(r6)\n\t" + "add r11, r11, r15\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_1_%=\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], r16, 25\n\t" + "rotlwi %[len], r16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 3\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "rotlwi %[sha256], r29, 15\n\t" + "rotlwi %[len], r29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r15, r15, %[sha256]\n\t" + "add r15, r15, r24\n\t" + "\n" + "L_SHA256_transform_len_after_blk_1_%=: \n\t" + /* Round 2 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 8(r6)\n\t" + "add r10, r10, r16\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_2_%=\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], r17, 25\n\t" + "rotlwi %[len], r17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "rotlwi %[sha256], r14, 15\n\t" + "rotlwi %[len], r14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r16, r16, %[sha256]\n\t" + "add r16, r16, r25\n\t" + "\n" + "L_SHA256_transform_len_after_blk_2_%=: \n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 
7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 12(r6)\n\t" + "add r9, r9, r17\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_3_%=\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], r18, 25\n\t" + "rotlwi %[len], r18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "rotlwi %[sha256], r15, 15\n\t" + "rotlwi %[len], r15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r17, r17, %[sha256]\n\t" + "add r17, r17, r26\n\t" + "\n" + "L_SHA256_transform_len_after_blk_3_%=: \n\t" + /* Round 4 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 16(r6)\n\t" + "add r8, r8, r18\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, 
%[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_4_%=\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], r19, 25\n\t" + "rotlwi %[len], r19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "rotlwi %[sha256], r16, 15\n\t" + "rotlwi %[len], r16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r18, r18, %[sha256]\n\t" + "add r18, r18, r27\n\t" + "\n" + "L_SHA256_transform_len_after_blk_4_%=: \n\t" + /* Round 5 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 20(r6)\n\t" + "add r7, r7, r19\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_5_%=\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], r20, 25\n\t" + "rotlwi %[len], r20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, 
%[sha256]\n\t" + "rotlwi %[sha256], r17, 15\n\t" + "rotlwi %[len], r17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r19, r19, %[sha256]\n\t" + "add r19, r19, r28\n\t" + "\n" + "L_SHA256_transform_len_after_blk_5_%=: \n\t" + /* Round 6 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(r6)\n\t" + "add %[data], %[data], r20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_6_%=\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], r21, 25\n\t" + "rotlwi %[len], r21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "rotlwi %[sha256], r18, 15\n\t" + "rotlwi %[len], r18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r20, r20, %[sha256]\n\t" + "add r20, r20, r29\n\t" + "\n" + "L_SHA256_transform_len_after_blk_6_%=: \n\t" + /* Round 7 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 7\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 28(r6)\n\t" + "add r0, r0, r21\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_7_%=\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], r22, 25\n\t" + "rotlwi %[len], r22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "rotlwi %[sha256], r19, 15\n\t" + "rotlwi %[len], r19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r21, r21, %[sha256]\n\t" + "add r21, r21, r14\n\t" + "\n" + "L_SHA256_transform_len_after_blk_7_%=: \n\t" + /* Round 8 */ + "rotlwi %[sha256], r9, 26\n\t" + "rotlwi %[len], r9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, r12, %[sha256]\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], r9\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r12, r12, %[sha256]\n\t" + "lwz %[sha256], 32(r6)\n\t" + "add r12, r12, r22\n\t" + "add r12, r12, %[sha256]\n\t" + "add r8, r8, r12\n\t" + "rotlwi %[sha256], r0, 30\n\t" + "rotlwi %[len], r0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r12, 
r12, %[sha256]\n\t" + "xor %[len], r0, %[data]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r12, r12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_8_%=\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], r23, 25\n\t" + "rotlwi %[len], r23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "rotlwi %[sha256], r20, 15\n\t" + "rotlwi %[len], r20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r22, r22, %[sha256]\n\t" + "add r22, r22, r15\n\t" + "\n" + "L_SHA256_transform_len_after_blk_8_%=: \n\t" + /* Round 9 */ + "rotlwi %[sha256], r8, 26\n\t" + "rotlwi %[len], r8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], r8\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r11, r11, %[sha256]\n\t" + "lwz %[sha256], 36(r6)\n\t" + "add r11, r11, r23\n\t" + "add r11, r11, %[sha256]\n\t" + "add r7, r7, r11\n\t" + "rotlwi %[sha256], r12, 30\n\t" + "rotlwi %[len], r12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r11, r11, %[sha256]\n\t" + "xor %[len], r12, r0\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add r11, r11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_9_%=\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], r24, 25\n\t" + "rotlwi %[len], r24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 3\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "rotlwi %[sha256], r21, 15\n\t" + "rotlwi %[len], r21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r23, r23, %[sha256]\n\t" + "add r23, r23, r16\n\t" + "\n" + "L_SHA256_transform_len_after_blk_9_%=: \n\t" + /* Round 10 */ + "rotlwi %[sha256], r7, 26\n\t" + "rotlwi %[len], r7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], r7\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r10, r10, %[sha256]\n\t" + "lwz %[sha256], 40(r6)\n\t" + "add r10, r10, r24\n\t" + "add r10, r10, %[sha256]\n\t" + "add %[data], %[data], r10\n\t" + "rotlwi %[sha256], r11, 30\n\t" + "rotlwi %[len], r11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r10, r10, %[sha256]\n\t" + "xor %[len], r11, r12\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r10, r10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_10_%=\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], r25, 25\n\t" + "rotlwi %[len], r25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "rotlwi %[sha256], r22, 15\n\t" + "rotlwi %[len], r22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r24, r24, %[sha256]\n\t" + "add r24, r24, r17\n\t" + "\n" + "L_SHA256_transform_len_after_blk_10_%=: \n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + 
"xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add r9, r9, %[sha256]\n\t" + "lwz %[sha256], 44(r6)\n\t" + "add r9, r9, r25\n\t" + "add r9, r9, %[sha256]\n\t" + "add r0, r0, r9\n\t" + "rotlwi %[sha256], r10, 30\n\t" + "rotlwi %[len], r10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r9, r9, %[sha256]\n\t" + "xor %[len], r10, r11\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r11\n\t" + "add r9, r9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_11_%=\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], r26, 25\n\t" + "rotlwi %[len], r26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "rotlwi %[sha256], r23, 15\n\t" + "rotlwi %[len], r23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r25, r25, %[sha256]\n\t" + "add r25, r25, r18\n\t" + "\n" + "L_SHA256_transform_len_after_blk_11_%=: \n\t" + /* Round 12 */ + "rotlwi %[sha256], r0, 26\n\t" + "rotlwi %[len], r0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, %[sha256]\n\t" + "xor %[sha256], %[data], r7\n\t" + "and %[sha256], %[sha256], r0\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r8, r8, %[sha256]\n\t" + "lwz %[sha256], 48(r6)\n\t" + "add r8, r8, r26\n\t" + "add r8, r8, %[sha256]\n\t" + "add r12, r12, r8\n\t" + "rotlwi %[sha256], r9, 30\n\t" + "rotlwi %[len], r9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r8, r8, 
%[sha256]\n\t" + "xor %[len], r9, r10\n\t" + "xor %[sha256], r10, r11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r10\n\t" + "add r8, r8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_12_%=\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], r27, 25\n\t" + "rotlwi %[len], r27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "rotlwi %[sha256], r24, 15\n\t" + "rotlwi %[len], r24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r26, r26, %[sha256]\n\t" + "add r26, r26, r19\n\t" + "\n" + "L_SHA256_transform_len_after_blk_12_%=: \n\t" + /* Round 13 */ + "rotlwi %[sha256], r12, 26\n\t" + "rotlwi %[len], r12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[sha256], r0, %[data]\n\t" + "and %[sha256], %[sha256], r12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add r7, r7, %[sha256]\n\t" + "lwz %[sha256], 52(r6)\n\t" + "add r7, r7, r27\n\t" + "add r7, r7, %[sha256]\n\t" + "add r11, r11, r7\n\t" + "rotlwi %[sha256], r8, 30\n\t" + "rotlwi %[len], r8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r7, r7, %[sha256]\n\t" + "xor %[len], r8, r9\n\t" + "xor %[sha256], r9, r10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r9\n\t" + "add r7, r7, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_13_%=\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], r28, 25\n\t" + "rotlwi %[len], r28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, 
r27, %[sha256]\n\t" + "rotlwi %[sha256], r25, 15\n\t" + "rotlwi %[len], r25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r27, r27, %[sha256]\n\t" + "add r27, r27, r20\n\t" + "\n" + "L_SHA256_transform_len_after_blk_13_%=: \n\t" + /* Round 14 */ + "rotlwi %[sha256], r11, 26\n\t" + "rotlwi %[len], r11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], r12, r0\n\t" + "and %[sha256], %[sha256], r11\n\t" + "xor %[sha256], %[sha256], r0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(r6)\n\t" + "add %[data], %[data], r28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add r10, r10, %[data]\n\t" + "rotlwi %[sha256], r7, 30\n\t" + "rotlwi %[len], r7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], r7, r8\n\t" + "xor %[sha256], r8, r9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_14_%=\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], r29, 25\n\t" + "rotlwi %[len], r29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "rotlwi %[sha256], r26, 15\n\t" + "rotlwi %[len], r26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r28, r28, %[sha256]\n\t" + "add r28, r28, r21\n\t" + "\n" + "L_SHA256_transform_len_after_blk_14_%=: \n\t" + /* Round 15 */ + "rotlwi %[sha256], r10, 26\n\t" + "rotlwi %[len], r10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], r10, 
7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[sha256], r11, r12\n\t" + "and %[sha256], %[sha256], r10\n\t" + "xor %[sha256], %[sha256], r12\n\t" + "add r0, r0, %[sha256]\n\t" + "lwz %[sha256], 60(r6)\n\t" + "add r0, r0, r29\n\t" + "add r0, r0, %[sha256]\n\t" + "add r9, r9, r0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r0, r0, %[sha256]\n\t" + "xor %[len], %[data], r7\n\t" + "xor %[sha256], r7, r8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], r7\n\t" + "add r0, r0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_15_%=\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], r14, 25\n\t" + "rotlwi %[len], r14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "rotlwi %[sha256], r27, 15\n\t" + "rotlwi %[len], r27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], r27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add r29, r29, %[sha256]\n\t" + "add r29, r29, r22\n\t" + "\n" + "L_SHA256_transform_len_after_blk_15_%=: \n\t" + "addi r6, r6, 0x40\n\t" + "bdnz L_SHA256_transform_len_start_%=\n\t" + "subi r6, r6, 0x100\n\t" + "lwz %[sha256], 0(r1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add r0, r0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add r7, r7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add r8, r8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add r9, r9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add r10, r10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + "add r11, r11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add r12, r12, 
%[len]\n\t" + "stw r0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw r7, 8(%[sha256])\n\t" + "stw r8, 12(%[sha256])\n\t" + "stw r9, 16(%[sha256])\n\t" + "stw r10, 20(%[sha256])\n\t" + "stw r11, 24(%[sha256])\n\t" + "stw r12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(r1)\n\t" + "lwz %[len], 8(r1)\n\t" + "mtctr %[len]\n\t" + "subi %[len], %[len], 1\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(r1)\n\t" + "stw %[len], 8(r1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi r1, r1, 12\n\t" +#endif /* WOLFSSL_PPC32_ASM_SMALL */ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + : [sha256] "+r" (sha256), [data] "+r" (data), [len] "+r" (len), + [L_SHA256_transform_len_k] "+r" (L_SHA256_transform_len_k_c) + : +#else + : + : [sha256] "r" (sha256), [data] "r" (data), [len] "r" (len), + [L_SHA256_transform_len_k] "r" (L_SHA256_transform_len_k_c) +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + : "memory", "cc", "r0", "r7", "r8", "r9", "r10", "r11", "r12", "r14", + "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23", + "r24", "r25", "r26", "r27", "r28", "r29" + ); +} + +#endif /* __PIC__ */ +#endif /* !WOLFSSL_PPC32_ASM_SPE */ +#endif /* !NO_SHA256 */ + +#endif /* WOLFSSL_PPC32_ASM_INLINE */ +#endif /* WOLFSSL_PPC32_ASM */ diff --git a/wolfcrypt/src/port/riscv/riscv-64-sha512.c b/wolfcrypt/src/port/riscv/riscv-64-sha512.c index 238a574d0..5dfeab938 100644 --- a/wolfcrypt/src/port/riscv/riscv-64-sha512.c +++ b/wolfcrypt/src/port/riscv/riscv-64-sha512.c @@ -22,7 +22,7 @@ #include #ifdef WOLFSSL_RISCV_ASM -#if !defined(NO_SHA512) || defined(WOLFSSL_SHA384) +#if defined(WOLFSSL_SHA512) || defined(WOLFSSL_SHA384) #if FIPS_VERSION3_LT(6,0,0) && defined(HAVE_FIPS) #undef HAVE_FIPS @@ -984,7 +984,7 @@ static WC_INLINE void Sha512Final(wc_Sha512* sha512, byte* hash, int hashLen) } -#ifndef NO_SHA512 +#ifdef WOLFSSL_SHA512 /* Initialize SHA-512 object for hashing. 
* @@ -1494,7 +1494,7 @@ int wc_Sha512_256Transform(wc_Sha512* sha512, const unsigned char* data) #endif /* !HAVE_FIPS && !HAVE_SELFTEST */ -#endif /* !NO_SHA512 */ +#endif /* WOLFSSL_SHA512 */ #ifdef WOLFSSL_SHA384 @@ -1737,5 +1737,5 @@ int wc_Sha384Copy(wc_Sha384* src, wc_Sha384* dst) #endif /* WOLFSSL_SHA384 */ -#endif /* !NO_SHA512 || WOLFSSL_SHA384 */ +#endif /* WOLFSSL_SHA512 || WOLFSSL_SHA384 */ #endif /* WOLFSSL_RISCV_ASM */ diff --git a/wolfcrypt/src/port/st/README.md b/wolfcrypt/src/port/st/README.md index 1dae1036f..fc5baf42a 100644 --- a/wolfcrypt/src/port/st/README.md +++ b/wolfcrypt/src/port/st/README.md @@ -9,7 +9,9 @@ Support for the STM32 PKA on WB55, H7, MP13 and other devices with on-board public-key acceleration: - ECC192/ECC224/ECC256/ECC384 -Support for the STSAFE-A100 crypto hardware accelerator co-processor via I2C for ECC supporting NIST or Brainpool 256-bit and 384-bit curves. It requires the ST-Safe SDK including wolfSSL's `stsafe_interface.c/.h` files. Please contact us at support@wolfssl.com to get this code. +Support for the STSAFE-A secure element family via I2C for ECC supporting NIST P-256/P-384 and Brainpool 256/384-bit curves: + - **STSAFE-A100/A110**: Uses ST's proprietary STSAFE-A1xx middleware. Contact us at support@wolfssl.com for integration assistance. + - **STSAFE-A120**: Uses ST's open-source [STSELib](https://github.com/STMicroelectronics/STSELib) (BSD-3 license). For details see our [wolfSSL ST](https://www.wolfssl.com/docs/stm32/) page. @@ -65,29 +67,69 @@ To enable support define the following When the support is enabled, the ECC operations will be accelerated using the PKA crypto co-processor. -## STSAFE-A100 ECC Acceleration +## STSAFE-A ECC Acceleration -Using the wolfSSL PK callbacks and the reference ST Safe reference API's we support an ECC only cipher suite such as ECDHE-ECDSA-AES128-SHA256 for TLS client or server. 
+Using the wolfSSL PK callbacks or Crypto callbacks with the ST-Safe reference APIs, we support ECC operations for TLS client/server:
SSL_STSAFE_SharedSecretCb); wolfSSL_CTX_SetDevId(ctx, 0); /* enables wolfCrypt `wc_ecc_*` ST-Safe use */ ``` -The reference STSAFE-A100 PK callback functions are located in the `wolfcrypt/src/port/st/stsafe.c` file. +The reference STSAFE PK callback functions are located in the `wolfcrypt/src/port/st/stsafe.c` file. Adding a custom context to the callbacks: -``` +```c /* Setup PK Callbacks context */ WOLFSSL* ssl; void* myOwnCtx; -wolfSSL_SetEccKeyGenCtx(ssl, myOwnCtx); -wolfSSL_SetEccVerifyCtx(ssl, myOwnCtx); -wolfSSL_SetEccSignCtx(ssl, myOwnCtx); -wolfSSL_SetEccSharedSecretCtx(ssl, myOwnCtx); +SSL_STSAFE_SetupPkCallbackCtx(ssl, myOwnCtx); ``` +#### Using Crypto Callbacks (wolfCrypt) + +For direct wolfCrypt ECC operations using the hardware: + +```c +#include + +/* Register the crypto callback */ +wolfSTSAFE_CryptoCb_Ctx stsafeCtx; +stsafeCtx.devId = WOLF_STSAFE_DEVID; +wc_CryptoCb_RegisterDevice(WOLF_STSAFE_DEVID, wolfSSL_STSAFE_CryptoDevCb, &stsafeCtx); + +/* Use with ECC operations */ +ecc_key key; +wc_ecc_init_ex(&key, NULL, WOLF_STSAFE_DEVID); +/* ECC operations will now use STSAFE hardware */ +``` + +### Implementation Details + +The STSAFE support is self-contained in `wolfcrypt/src/port/st/stsafe.c` with SDK-specific implementations selected at compile time: + +| Macro | SDK | Description | +|-------|-----|-------------| +| `WOLFSSL_STSAFEA100` | STSAFE-A1xx Middleware | ST's proprietary SDK for A100/A110 | +| `WOLFSSL_STSAFEA120` | [STSELib](https://github.com/STMicroelectronics/STSELib) | ST's open-source SDK for A120 (BSD-3) | + +#### External Interface (Backwards Compatibility) + +For customers with existing custom implementations, define `WOLFSSL_STSAFE_INTERFACE_EXTERNAL` to use an external `stsafe_interface.h` file instead of the built-in implementation: + +```c +#define WOLFSSL_STSAFEA100 /* or WOLFSSL_STSAFEA120 */ +#define WOLFSSL_STSAFE_INTERFACE_EXTERNAL +``` + +When `WOLFSSL_STSAFE_INTERFACE_EXTERNAL` is defined, the customer must provide a 
`stsafe_interface.h` header that defines: + +| Item | Type | Description | +|------|------|-------------| +| `stsafe_curve_id_t` | typedef | Curve identifier type | +| `stsafe_slot_t` | typedef | Key slot identifier type | +| `STSAFE_ECC_CURVE_P256` | macro | P-256 curve ID value | +| `STSAFE_ECC_CURVE_P384` | macro | P-384 curve ID value | +| `STSAFE_KEY_SLOT_0/1/EPHEMERAL` | macros | Key slot values | +| `STSAFE_A_OK` | macro | Success return code | +| `STSAFE_MAX_KEY_LEN` | macro | Max key size in bytes (48) | +| `STSAFE_MAX_PUBKEY_RAW_LEN` | macro | Max public key size (96) | +| `STSAFE_MAX_SIG_LEN` | macro | Max signature size (96) | + +And provide implementations for these internal interface functions: +- `int stsafe_interface_init(void)` +- `int stsafe_create_key(stsafe_slot_t*, stsafe_curve_id_t, uint8_t*)` +- `int stsafe_sign(stsafe_slot_t, stsafe_curve_id_t, uint8_t*, uint8_t*)` +- `int stsafe_verify(stsafe_curve_id_t, uint8_t*, uint8_t*, uint8_t*, uint8_t*, int32_t*)` +- `int stsafe_shared_secret(stsafe_slot_t, stsafe_curve_id_t, uint8_t*, uint8_t*, uint8_t*, int32_t*)` +- `int stsafe_read_certificate(uint8_t**, uint32_t*)` +- `int stsafe_get_random(uint8_t*, uint32_t)` (if `USE_STSAFE_RNG_SEED` defined) + +When **NOT** defined (default behavior): All code is self-contained in `stsafe.c` using the appropriate SDK automatically. 
+ +The implementation provides these internal operations: + +| Operation | Description | +|-----------|-------------| +| `stsafe_interface_init()` | Initialize the STSAFE device (called by `wolfCrypt_Init()`) | +| `stsafe_sign()` | ECDSA signature generation (P-256/P-384) | +| `stsafe_verify()` | ECDSA signature verification (P-256/P-384) | +| `stsafe_create_key()` | Generate ECC key pair on device | +| `stsafe_shared_secret()` | ECDH shared secret computation | +| `stsafe_read_certificate()` | Read device certificate from secure storage | + +### STSELib Setup (A120) + +For STSAFE-A120, you need to include the STSELib library: + +1. Clone STSELib as a submodule or add to your project: + ```bash + git submodule add https://github.com/STMicroelectronics/STSELib.git lib/stselib + ``` + +2. Add STSELib headers to your include path + +3. Implement the platform abstraction files required by STSELib: + - `stse_conf.h` - Configuration (target device, features) + - `stse_platform_generic.h` - Platform callbacks (I2C, timing) + +4. See STSELib documentation for platform-specific integration details + +### Raspberry Pi with STSAFE-A120 + +For testing on a Raspberry Pi with an STSAFE-A120 connected via I2C: + +1. **Enable I2C** on the Raspberry Pi: + ```bash + sudo raspi-config + # Navigate to: Interface Options -> I2C -> Enable + ``` + +2. **Verify the STSAFE device is detected** (default I2C address is 0x20): + ```bash + sudo i2cdetect -y 1 + ``` + +3. **Build wolfSSL with STSAFE-A120 support**: + ```bash + ./configure --enable-pkcallbacks --enable-cryptocb \ + CFLAGS="-DWOLFSSL_STSAFEA120 -I/path/to/STSELib" + make + sudo make install + ``` + +4. **Platform abstraction**: Implement the STSELib I2C callbacks using the Linux I2C driver (`/dev/i2c-1`). 
+ ### Benchmarks and Memory Use Software only implementation (STM32L4 120Mhz, Cortex-M4, Fast Math): diff --git a/wolfcrypt/src/port/st/stsafe.c b/wolfcrypt/src/port/st/stsafe.c index 832b70214..7f308d6f1 100644 --- a/wolfcrypt/src/port/st/stsafe.c +++ b/wolfcrypt/src/port/st/stsafe.c @@ -26,91 +26,1113 @@ #include #include #include - -#ifndef STSAFE_INTERFACE_PRINTF -#define STSAFE_INTERFACE_PRINTF(...) WC_DO_NOTHING +#ifndef NO_ASN + #include #endif -#ifdef WOLFSSL_STSAFEA100 +#ifndef STSAFE_INTERFACE_PRINTF + #define STSAFE_INTERFACE_PRINTF(...) WC_DO_NOTHING +#endif -int SSL_STSAFE_LoadDeviceCertificate(byte** pRawCertificate, - word32* pRawCertificateLen) +/* Combined STSAFE macro - set in stsafe.h when either A100/A120 is defined */ +#ifdef WOLFSSL_STSAFE + +/* ========================================================================== */ +/* Internal Implementation (when NOT using external stsafe_interface.h) */ +/* ========================================================================== */ + +/* When WOLFSSL_STSAFE_INTERFACE_EXTERNAL is defined, all internal + * implementation is skipped and the customer provides their own + * stsafe_interface.h with custom implementations. This maintains + * backwards compatibility with older integration approaches. 
*/ +#ifndef WOLFSSL_STSAFE_INTERFACE_EXTERNAL + +/* ========================================================================== */ +/* SDK-Specific Includes */ +/* ========================================================================== */ + +#ifdef WOLFSSL_STSAFEA120 + /* STSELib includes for A120 */ + #include "stselib.h" +#else /* WOLFSSL_STSAFEA100 */ + /* Legacy STSAFE-A1xx SDK includes */ + #include + #include + #include + #include + #include + #include + #include + #include +#endif + +/* ========================================================================== */ +/* Global State */ +/* ========================================================================== */ + +#ifdef WOLFSSL_STSAFEA120 + /* STSELib handler */ + static stse_Handler_t g_stse_handler; + static int g_stse_initialized = 0; +#else /* WOLFSSL_STSAFEA100 */ + /* Legacy SDK handle */ + static void* g_stsafe_handle = NULL; + + /* Host MAC and Cipher Keys for secure communication */ + /* NOTE: These are example keys + * - real implementations should store securely */ + #ifndef STSAFE_HOST_KEY_MAC + static const uint8_t g_host_mac_key[16] = { + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF + }; + #endif + #ifndef STSAFE_HOST_KEY_CIPHER + static const uint8_t g_host_cipher_key[16] = { + 0x11, 0x11, 0x22, 0x22, 0x33, 0x33, 0x44, 0x44, + 0x55, 0x55, 0x66, 0x66, 0x77, 0x77, 0x88, 0x88 + }; + #endif +#endif + +/* Current curve mode for signing operations */ +static stsafe_curve_id_t g_stsafe_curve_mode = STSAFE_DEFAULT_CURVE; + + +/* ========================================================================== */ +/* Internal Helper Functions */ +/* ========================================================================== */ + +/** + * \brief Helper macros to store/retrieve slot number in devCtx + * \details Slot number is stored directly in devCtx as void* to avoid + * dynamic memory allocation. 
Slot values are small (0, 1, 0xFF) + * so safe to cast to/from void*. + */ +#define STSAFE_SLOT_TO_DEVCXT(slot) ((void*)(uintptr_t)(slot)) +#define STSAFE_DEVCXT_TO_SLOT(devCtx) ((stsafe_slot_t)(uintptr_t)(devCtx)) + + +/** + * \brief Get key size in bytes for a given curve + */ +static int stsafe_get_key_size(stsafe_curve_id_t curve_id) { - int err; + switch (curve_id) { + case STSAFE_ECC_CURVE_P256: + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_256) + case STSAFE_ECC_CURVE_BP256: + #endif + return 32; + case STSAFE_ECC_CURVE_P384: + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_384) + case STSAFE_ECC_CURVE_BP384: + #endif + return 48; + default: + break; + } + return 0; +} - if (pRawCertificate == NULL || pRawCertificateLen == NULL) { +/** + * \brief Convert wolfSSL ECC curve ID to STSAFE curve ID + */ +static stsafe_curve_id_t stsafe_get_ecc_curve_id(int ecc_curve) +{ + switch (ecc_curve) { + case ECC_SECP256R1: + return STSAFE_ECC_CURVE_P256; + case ECC_SECP384R1: + return STSAFE_ECC_CURVE_P384; + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_256) + case ECC_BRAINPOOLP256R1: + return STSAFE_ECC_CURVE_BP256; + #endif + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_384) + case ECC_BRAINPOOLP384R1: + return STSAFE_ECC_CURVE_BP384; + #endif + default: + break; + } + return STSAFE_DEFAULT_CURVE; +} + +/** + * \brief Convert STSAFE curve ID to wolfSSL ECC curve ID + */ +#if !defined(WOLFCRYPT_ONLY) && defined(HAVE_PK_CALLBACKS) +static int stsafe_get_ecc_curve(stsafe_curve_id_t curve_id) +{ + switch (curve_id) { + case STSAFE_ECC_CURVE_P256: + return ECC_SECP256R1; + case STSAFE_ECC_CURVE_P384: + return ECC_SECP384R1; + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_256) + case STSAFE_ECC_CURVE_BP256: + return ECC_BRAINPOOLP256R1; + #endif + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_384) + case STSAFE_ECC_CURVE_BP384: + return 
ECC_BRAINPOOLP384R1; + #endif + default: + break; + } + return ECC_SECP256R1; +} +#endif + +/** + * \brief Get current curve mode for signing + */ +static stsafe_curve_id_t stsafe_get_curve_mode(void) +{ + return g_stsafe_curve_mode; +} + +/** + * \brief Set current curve mode for signing + */ +static int stsafe_set_curve_mode(stsafe_curve_id_t curve_id) +{ + g_stsafe_curve_mode = curve_id; + return 0; +} + +/* Unused function workaround for some compilers */ +#ifdef __GNUC__ +__attribute__((unused)) +#endif +static void stsafe_unused_funcs(void) +{ +#if !defined(WOLFCRYPT_ONLY) && defined(HAVE_PK_CALLBACKS) + (void)stsafe_get_ecc_curve; +#endif + (void)stsafe_set_curve_mode; +} + +/* ========================================================================== */ +/* Internal Interface Functions - SDK Specific Implementations */ +/* ========================================================================== */ + +#ifdef WOLFSSL_STSAFEA120 +/* -------------------------------------------------------------------------- */ +/* STSELib (A120) Implementation */ +/* -------------------------------------------------------------------------- */ + +/** + * \brief Initialize STSAFE-A120 device using STSELib + */ +int stsafe_interface_init(void) +{ + int rc = 0; + stse_ReturnCode_t ret; + + if (g_stse_initialized) { + return 0; /* Already initialized */ + } + + /* Set default handler values */ + ret = stse_set_default_handler_value(&g_stse_handler); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_set_default_handler_value error: %d\n", + ret); + rc = -1; + } + + if (rc == 0) { + /* Configure for STSAFE-A120 on I2C bus 1 */ + g_stse_handler.device_type = STSAFE_A120; + #ifdef STSAFE_I2C_BUS + g_stse_handler.io.busID = STSAFE_I2C_BUS; + #else + g_stse_handler.io.busID = 1; + #endif + g_stse_handler.io.BusSpeed = 400; /* 400 kHz */ + + /* Initialize STSELib - this sets up I2C communication */ + ret = stse_init(&g_stse_handler); + if (ret != STSE_OK) { + 
STSAFE_INTERFACE_PRINTF("stse_init error: %d\n", ret); + rc = -1; + } + } + + if (rc == 0) { + g_stse_initialized = 1; + #ifdef USE_STSAFE_VERBOSE + WOLFSSL_MSG("STSAFE-A120 (STSELib) initialized"); + #endif + } + + return rc; +} + +/** + * \brief Generate ECC key pair on STSAFE-A120 + * \details Uses dedicated key slot (slot 1) for persistent keys. + * For ephemeral ECDHE keys, use stsafe_create_ecdhe_key() instead. + */ +static int stsafe_create_key(stsafe_slot_t slot, stsafe_curve_id_t curve_id, + uint8_t* pPubKeyRaw) +{ + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + + if (pPubKeyRaw == NULL) { return BAD_FUNC_ARG; } -#ifdef USE_STSAFE_VERBOSE - WOLFSSL_MSG("SSL_STSAFE_LoadDeviceCertificate"); + /* Generate key pair - public key is X||Y concatenated + * Note: stse_generate_ecc_key_pair expects stse_ecc_key_type_t, + * but stsafe_curve_id_t values match stse_ecc_key_type_t enum values */ + ret = stse_generate_ecc_key_pair(&g_stse_handler, slot, + (stse_ecc_key_type_t)curve_id, + STSAFE_PERSISTENT_KEY_USAGE_LIMIT, + pPubKeyRaw); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_generate_ecc_key_pair error: %d\n", ret); + rc = (int)ret; + } + + return rc; +} + +/** + * \brief Generate ECDHE ephemeral key pair on STSAFE-A120 + * \details Uses stse_generate_ECDHE_key_pair() which generates truly + * ephemeral keys (not stored in slots). The private key remains + * in STSE internal memory for use with shared secret computation. + * Public key is returned in X||Y format (same as stse_generate_ecc_key_pair). 
+ */ +static int stsafe_create_ecdhe_key(stsafe_curve_id_t curve_id, + uint8_t* pPubKeyRaw) +{ + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + + if (pPubKeyRaw == NULL) { + return BAD_FUNC_ARG; + } + + /* Generate ECDHE ephemeral key pair - public key returned as X||Y */ + ret = stse_generate_ECDHE_key_pair(&g_stse_handler, + (stse_ecc_key_type_t)curve_id, pPubKeyRaw); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_generate_ECDHE_key_pair error: %d\n", ret); + rc = (int)ret; + } + + return rc; +} + +/** + * \brief ECDSA sign using STSAFE-A120 + */ +static int stsafe_sign(stsafe_slot_t slot, stsafe_curve_id_t curve_id, + uint8_t* pHash, uint8_t* pSigRS) +{ + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + int key_sz = stsafe_get_key_size(curve_id); + + if (pHash == NULL || pSigRS == NULL) { + return BAD_FUNC_ARG; + } + + /* Sign hash - output is R || S concatenated */ + ret = stse_ecc_generate_signature(&g_stse_handler, slot, curve_id, + pHash, (uint16_t)key_sz, pSigRS); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_ecc_generate_signature error: %d\n", ret); + rc = (int)ret; + } + + return rc; +} + +/** + * \brief ECDSA verify using STSAFE-A120 + */ +static int stsafe_verify(stsafe_curve_id_t curve_id, uint8_t* pHash, + uint8_t* pSigRS, uint8_t* pPubKeyX, uint8_t* pPubKeyY, + int32_t* pResult) +{ + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + int key_sz = stsafe_get_key_size(curve_id); + uint8_t pubKey[STSAFE_MAX_PUBKEY_RAW_LEN]; + uint8_t validity = 0; + + if (pHash == NULL || pSigRS == NULL || pPubKeyX == NULL || + pPubKeyY == NULL || pResult == NULL) { + return BAD_FUNC_ARG; + } + + /* Combine X and Y into single buffer (X||Y) */ + XMEMCPY(pubKey, pPubKeyX, key_sz); + XMEMCPY(pubKey + key_sz, pPubKeyY, key_sz); + + /* Verify signature - pMessage is the hash, pSignature is R||S */ + ret = stse_ecc_verify_signature(&g_stse_handler, curve_id, + pubKey, /* public key X||Y */ + pSigRS, /* signature R||S */ + pHash, /* message (hash) */ + 
(uint16_t)key_sz, /* message length */ + 0, /* eddsa_variant (0 for non-EdDSA) */ + &validity); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_ecc_verify_signature error: %d\n", ret); + *pResult = 0; + rc = (int)ret; + } + + if (rc == STSAFE_A_OK) { + *pResult = (validity != 0) ? 1 : 0; + } + + return rc; +} + +/** + * \brief ECDH shared secret using STSAFE-A120 + */ +static int stsafe_shared_secret(stsafe_slot_t slot, stsafe_curve_id_t curve_id, + uint8_t* pPubKeyX, uint8_t* pPubKeyY, + uint8_t* pSharedSecret, + int32_t* pSharedSecretLen) +{ + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + int key_sz = stsafe_get_key_size(curve_id); +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + uint8_t* peerPubKey = NULL; +#else + uint8_t peerPubKey[STSAFE_MAX_PUBKEY_RAW_LEN]; #endif - /* Try reading device certificate from ST-SAFE Zone 0 */ - err = stsafe_interface_read_device_certificate_raw( - pRawCertificate, (uint32_t*)pRawCertificateLen); - if (err == STSAFE_A_OK) { - #if 0 - /* example for loading into WOLFSSL_CTX */ - err = wolfSSL_CTX_use_certificate_buffer(ctx, - *pRawCertificate, *pRawCertificateLen, SSL_FILETYPE_ASN1); - if (err != WOLFSSL_SUCCESS) { - /* failed */ + if (pPubKeyX == NULL || pPubKeyY == NULL || pSharedSecret == NULL || + pSharedSecretLen == NULL) { + return BAD_FUNC_ARG; + } + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + peerPubKey = (uint8_t*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (peerPubKey == NULL) { + return MEMORY_E; + } +#endif + + /* Combine peer X and Y (X||Y format) */ + XMEMCPY(peerPubKey, pPubKeyX, key_sz); + XMEMCPY(peerPubKey + key_sz, pPubKeyY, key_sz); + + /* Compute shared secret + * Note: stse_ecc_establish_shared_secret expects stse_ecc_key_type_t. 
+ * For STSAFE-A120, stsafe_curve_id_t values match stse_ecc_key_type_t enum values: + * STSAFE_ECC_CURVE_P256 (0) = STSE_ECC_KT_NIST_P_256 (0) + * STSAFE_ECC_CURVE_P384 (1) = STSE_ECC_KT_NIST_P_384 (1) */ + ret = stse_ecc_establish_shared_secret(&g_stse_handler, slot, + (stse_ecc_key_type_t)curve_id, peerPubKey, pSharedSecret); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_ecc_establish_shared_secret error: %d (slot: %d, curve_id: %d)\n", + ret, slot, curve_id); + rc = (int)ret; + } + + if (rc == STSAFE_A_OK) { + *pSharedSecretLen = (int32_t)key_sz; + } + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(peerPubKey, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + + return rc; +} + +/** + * \brief Read device certificate from STSAFE-A120 + */ +static int stsafe_read_certificate(uint8_t** ppCert, uint32_t* pCertLen) +{ +#ifdef WOLFSSL_NO_MALLOC + /* Certificate reading requires dynamic allocation */ + (void)ppCert; + (void)pCertLen; + return NOT_COMPILED_IN; +#else + int rc = STSAFE_A_OK; + stse_ReturnCode_t ret; + uint16_t certLen = 0; + uint8_t certZone = 0; /* Certificate zone 0 */ + + /* First, get certificate size */ + ret = stse_get_device_certificate_size(&g_stse_handler, certZone, &certLen); + if (ret != STSE_OK) { + STSAFE_INTERFACE_PRINTF("stse_get_device_certificate_size error: %d\n", + ret); + rc = (int)ret; + } + else if (certLen == 0) { + /* Certificate size is 0 - invalid certificate data */ + STSAFE_INTERFACE_PRINTF("stse_get_device_certificate_size returned zero length\n"); + rc = ASN_PARSE_E; + } + + /* Allocate buffer */ + if (rc == STSAFE_A_OK) { + *ppCert = (uint8_t*)XMALLOC(certLen, NULL, DYNAMIC_TYPE_TMP_BUFFER); + if (*ppCert == NULL) { + rc = MEMORY_E; } - /* can free now */ - XFREE(*pRawCertificate, NULL, DYNAMIC_TEMP_BUFFER); - *pRawCertificate = NULL; - #endif + } + + /* Read certificate */ + if (rc == STSAFE_A_OK) { + ret = stse_get_device_certificate(&g_stse_handler, certZone, certLen, + *ppCert); + if (ret != 
STSE_OK) { + XFREE(*ppCert, NULL, DYNAMIC_TYPE_TMP_BUFFER); + *ppCert = NULL; + STSAFE_INTERFACE_PRINTF("stse_get_device_certificate error: %d\n", + ret); + rc = (int)ret; + } + } + + if (rc == STSAFE_A_OK) { + *pCertLen = certLen; + } + + return rc; +#endif /* WOLFSSL_NO_MALLOC */ +} + +#if !defined(WC_NO_RNG) && defined(USE_STSAFE_RNG_SEED) +/** + * \brief Get random bytes from STSAFE-A120 + */ +static int stsafe_get_random(uint8_t* pRandom, uint32_t size) +{ + int rc; + stse_ReturnCode_t ret; + uint16_t len = (size > 0xFFFF) ? 0xFFFF : (uint16_t)size; + + ret = stse_generate_random(&g_stse_handler, pRandom, len); + if (ret != STSE_OK) { + rc = -1; } else { - err = WC_HW_E; + rc = (int)len; + } + + return rc; +} +#endif + +#else /* WOLFSSL_STSAFEA100 */ +/* -------------------------------------------------------------------------- */ +/* Legacy STSAFE-A1xx SDK (A100/A110) Implementation */ +/* -------------------------------------------------------------------------- */ + +/** + * \brief Set host keys for secure communication + */ +static void stsafe_set_host_keys(void* handle) +{ + StSafeA_SetHostMacKey(handle, g_host_mac_key); + StSafeA_SetHostCipherKey(handle, g_host_cipher_key); +} + +/** + * \brief Check and initialize host keys + */ +static int stsafe_check_host_keys(void* handle) +{ + uint8_t status_code; + StSafeA_HostKeySlotBuffer* pHostKeySlot; + + status_code = StSafeA_HostKeySlotQuery(handle, &pHostKeySlot, + STSAFE_A_NO_MAC); + + if (status_code == STSAFE_A_OK && !pHostKeySlot->HostKeyPresenceFlag) { + /* Host keys not set, initialize them */ + uint8_t hostKeys[32]; + XMEMCPY(hostKeys, g_host_mac_key, 16); + XMEMCPY(hostKeys + 16, g_host_cipher_key, 16); + + status_code = StSafeA_PutAttribute(handle, STSAFE_A_HOST_KEY_SLOT_TAG, + hostKeys, sizeof(hostKeys), STSAFE_A_NO_MAC); + } + + return status_code; +} + +/** + * \brief Initialize STSAFE-A100/A110 device + */ +int stsafe_interface_init(void) +{ + int rc = 0; + uint8_t status_code; + const uint8_t 
echo_data[3] = {0x01, 0x02, 0x03}; + StSafeA_EchoBuffer* echo_resp = NULL; + + if (g_stsafe_handle != NULL) { + return 0; /* Already initialized */ + } + + /* Create handle */ + status_code = StSafeA_CreateHandle(&g_stsafe_handle, STSAFE_I2C_ADDR); + if (status_code != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("StSafeA_CreateHandle error: %d\n", + status_code); + rc = -1; + } + + /* Echo test to verify communication */ + if (rc == 0) { + status_code = StSafeA_Echo(g_stsafe_handle, (uint8_t*)echo_data, 3, + &echo_resp, STSAFE_A_NO_MAC); + if (status_code != STSAFE_A_OK || + XMEMCMP(echo_data, echo_resp->Data, 3) != 0) { + STSAFE_INTERFACE_PRINTF("StSafeA_Echo error: %d\n", status_code); + rc = -1; + } + XFREE(echo_resp, NULL, DYNAMIC_TYPE_TMP_BUFFER); + } + + /* Check/initialize host keys */ + if (rc == 0) { + status_code = stsafe_check_host_keys(g_stsafe_handle); + if (status_code != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_check_host_keys error: %d\n", + status_code); + rc = -1; + } + } + +#ifdef USE_STSAFE_VERBOSE + if (rc == 0) { + WOLFSSL_MSG("STSAFE-A100/A110 initialized"); + } +#endif + + return rc; +} + +/** + * \brief Generate ECC key pair on STSAFE-A100/A110 + */ +static int stsafe_create_key(stsafe_slot_t* pSlot, stsafe_curve_id_t curve_id, + uint8_t* pPubKeyRaw) +{ + int rc; + uint8_t status_code; + int key_sz = stsafe_get_key_size(curve_id); + stsafe_slot_t slot = STSAFE_KEY_SLOT_1; + StSafeA_CoordinateBuffer* pubX = NULL; + StSafeA_CoordinateBuffer* pubY = NULL; + uint8_t* pointRepId = NULL; + + stsafe_set_host_keys(g_stsafe_handle); + + status_code = StSafeA_GenerateKeyPair(g_stsafe_handle, slot, 0xFFFF, 1, + (StSafeA_KeyUsageAuthorizationFlags)( + STSAFE_A_COMMAND_RESPONSE_SIGNATURE | + STSAFE_A_MESSAGE_DIGEST_SIGNATURE | + STSAFE_A_KEY_ESTABLISHMENT), + curve_id, &pointRepId, &pubX, &pubY, STSAFE_A_HOST_C_MAC); + + if (status_code == STSAFE_A_OK && pointRepId != NULL && + *pointRepId == STSAFE_A_POINT_REPRESENTATION_ID) { + 
XMEMCPY(pPubKeyRaw, pubX->Data, pubX->Length); + XMEMCPY(pPubKeyRaw + key_sz, pubY->Data, pubY->Length); + rc = STSAFE_A_OK; + } + else { + rc = (int)(uint8_t)-1; + } + + /* Free SDK-allocated buffers */ + XFREE(pubX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubY, NULL, DYNAMIC_TYPE_TMP_BUFFER); + + if (rc == STSAFE_A_OK && pSlot != NULL) { + *pSlot = slot; + } + + return rc; +} + +/** + * \brief ECDSA sign using STSAFE-A100/A110 + */ +static int stsafe_sign(stsafe_slot_t slot, stsafe_curve_id_t curve_id, + uint8_t* pHash, uint8_t* pSigRS) +{ + int rc; + uint8_t status_code; + int key_sz = stsafe_get_key_size(curve_id); + StSafeA_SignatureBuffer* signature = NULL; + StSafeA_HashTypes hashType; + size_t r_length, s_length; + + hashType = (curve_id == STSAFE_ECC_CURVE_P384 || + curve_id == STSAFE_ECC_CURVE_BP384) ? + STSAFE_HASH_SHA384 : STSAFE_HASH_SHA256; + + status_code = StSafeA_GenerateSignature(g_stsafe_handle, slot, pHash, + hashType, &signature, STSAFE_A_NO_MAC); + + if (status_code == STSAFE_A_OK && signature != NULL) { + /* Parse signature - format is: len(2) || R || len(2) || S */ + r_length = ((uint16_t)signature->Data[0] << 8) | signature->Data[1]; + + /* Bounds check: r_length must be valid and fit within signature buffer */ + if (r_length > key_sz || r_length == 0 || + (size_t)(2 + r_length + 2) > signature->Length) { + rc = ASN_PARSE_E; + } + else { + s_length = ((uint16_t)signature->Data[2 + r_length] << 8) | + signature->Data[3 + r_length]; + + /* Bounds check: s_length must be valid and fit within signature buffer */ + if (s_length > key_sz || s_length == 0 || + (size_t)(4 + r_length + s_length) > signature->Length) { + rc = ASN_PARSE_E; + } + else { + /* Copy R and S to output (zero-padded) */ + XMEMSET(pSigRS, 0, key_sz * 2); + XMEMCPY(pSigRS + (key_sz - r_length), &signature->Data[2], r_length); + XMEMCPY(pSigRS + key_sz + (key_sz - s_length), + &signature->Data[4 + r_length], s_length); + rc = STSAFE_A_OK; + } + } + } + else { + rc = 
(int)status_code; + } + + /* Free SDK-allocated buffer */ + XFREE(signature, NULL, DYNAMIC_TYPE_TMP_BUFFER); + + return rc; +} + +/** + * \brief ECDSA verify using STSAFE-A100/A110 + */ +static int stsafe_verify(stsafe_curve_id_t curve_id, uint8_t* pHash, + uint8_t* pSigRS, uint8_t* pPubKeyX, uint8_t* pPubKeyY, + int32_t* pResult) +{ + int rc = (int)(uint8_t)-1; + uint8_t status_code; + int key_sz = stsafe_get_key_size(curve_id); +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + StSafeA_CoordinateBuffer* X = NULL; + StSafeA_CoordinateBuffer* Y = NULL; + StSafeA_SignatureBuffer* R = NULL; + StSafeA_SignatureBuffer* S = NULL; + StSafeA_SignatureBuffer* Hash = NULL; +#else + /* Stack buffers: 2 bytes for Length + STSAFE_MAX_KEY_LEN for Data */ + byte R_buf[2 + STSAFE_MAX_KEY_LEN]; + byte S_buf[2 + STSAFE_MAX_KEY_LEN]; + byte Hash_buf[2 + STSAFE_MAX_KEY_LEN]; + byte X_buf[2 + STSAFE_MAX_KEY_LEN]; + byte Y_buf[2 + STSAFE_MAX_KEY_LEN]; + StSafeA_SignatureBuffer* R = (StSafeA_SignatureBuffer*)R_buf; + StSafeA_SignatureBuffer* S = (StSafeA_SignatureBuffer*)S_buf; + StSafeA_SignatureBuffer* Hash = (StSafeA_SignatureBuffer*)Hash_buf; + StSafeA_CoordinateBuffer* X = (StSafeA_CoordinateBuffer*)X_buf; + StSafeA_CoordinateBuffer* Y = (StSafeA_CoordinateBuffer*)Y_buf; +#endif + StSafeA_VerifySignatureBuffer* Verif = NULL; + + *pResult = 0; + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + /* Allocate buffers */ + R = (StSafeA_SignatureBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + S = (StSafeA_SignatureBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + Hash = (StSafeA_SignatureBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + X = (StSafeA_CoordinateBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + Y = (StSafeA_CoordinateBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + + if (X == NULL || Y == NULL || R == NULL || S == NULL || Hash == NULL) { + XFREE(R, NULL, 
DYNAMIC_TYPE_TMP_BUFFER); + XFREE(S, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(Hash, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(X, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(Y, NULL, DYNAMIC_TYPE_TMP_BUFFER); + return MEMORY_E; + } +#endif + + R->Length = key_sz; + S->Length = key_sz; + Hash->Length = key_sz; + X->Length = key_sz; + Y->Length = key_sz; + + XMEMCPY(R->Data, pSigRS, key_sz); + XMEMCPY(S->Data, pSigRS + key_sz, key_sz); + XMEMCPY(Hash->Data, pHash, key_sz); + XMEMCPY(X->Data, pPubKeyX, key_sz); + XMEMCPY(Y->Data, pPubKeyY, key_sz); + + status_code = StSafeA_VerifyMessageSignature(g_stsafe_handle, + curve_id, X, Y, R, S, Hash, &Verif, STSAFE_A_NO_MAC); + + if (status_code == STSAFE_A_OK && Verif != NULL) { + *pResult = Verif->SignatureValidity ? 1 : 0; + if (Verif->SignatureValidity) { + rc = STSAFE_A_OK; + } + } +#ifndef WOLFSSL_NO_MALLOC + /* Free SDK-allocated buffer */ + XFREE(Verif, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(R, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(S, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(Hash, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(X, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(Y, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + + return rc; +} + +/** + * \brief ECDH shared secret using STSAFE-A100/A110 + */ +static int stsafe_shared_secret(stsafe_slot_t slot, stsafe_curve_id_t curve_id, + uint8_t* pPubKeyX, uint8_t* pPubKeyY, + uint8_t* pSharedSecret, + int32_t* pSharedSecretLen) +{ + int rc = (int)(uint8_t)-1; + uint8_t status_code; + int key_sz = stsafe_get_key_size(curve_id); +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + StSafeA_CoordinateBuffer* peerX = NULL; + StSafeA_CoordinateBuffer* peerY = NULL; +#else + /* Stack buffers: 2 bytes for Length + STSAFE_MAX_KEY_LEN for Data */ + byte peerX_buf[2 + STSAFE_MAX_KEY_LEN]; + byte peerY_buf[2 + STSAFE_MAX_KEY_LEN]; + StSafeA_CoordinateBuffer* peerX = (StSafeA_CoordinateBuffer*)peerX_buf; + 
StSafeA_CoordinateBuffer* peerY = (StSafeA_CoordinateBuffer*)peerY_buf; +#endif + StSafeA_SharedSecretBuffer* sharedSecret = NULL; + + stsafe_set_host_keys(g_stsafe_handle); + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + peerX = (StSafeA_CoordinateBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + peerY = (StSafeA_CoordinateBuffer*)XMALLOC(key_sz + 2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + + if (peerX == NULL || peerY == NULL) { + XFREE(peerX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(peerY, NULL, DYNAMIC_TYPE_TMP_BUFFER); + return MEMORY_E; + } +#endif + + peerX->Length = key_sz; + peerY->Length = key_sz; + XMEMCPY(peerX->Data, pPubKeyX, key_sz); + XMEMCPY(peerY->Data, pPubKeyY, key_sz); + + status_code = StSafeA_EstablishKey(g_stsafe_handle, slot, + peerX, peerY, &sharedSecret, STSAFE_A_HOST_C_MAC); + + if (status_code == STSAFE_A_OK && sharedSecret != NULL) { + *pSharedSecretLen = sharedSecret->SharedSecret.Length; + XMEMCPY(pSharedSecret, sharedSecret->SharedSecret.Data, + sharedSecret->SharedSecret.Length); + rc = STSAFE_A_OK; + } +#ifndef WOLFSSL_NO_MALLOC + /* Free SDK-allocated buffer */ + XFREE(sharedSecret, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(peerX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(peerY, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + + return rc; +} + +/** + * \brief Read device certificate from STSAFE-A100/A110 + */ +static int stsafe_read_certificate(uint8_t** ppCert, uint32_t* pCertLen) +{ +#ifdef WOLFSSL_NO_MALLOC + /* Certificate reading requires dynamic allocation */ + (void)ppCert; + (void)pCertLen; + return NOT_COMPILED_IN; +#else + int rc = STSAFE_A_OK; + uint8_t status_code; + StSafeA_ReadBuffer* readBuf = NULL; + struct stsafe_a* stsafe_a = (struct stsafe_a*)g_stsafe_handle; + uint8_t step; + uint16_t i; + + *pCertLen = 0; + + /* Read first 4 bytes to determine certificate length */ + status_code = StSafeA_Read(g_stsafe_handle, 0, 0, 
STSAFE_A_ALWAYS, + 0, 0, 4, &readBuf, STSAFE_A_NO_MAC); + + if (status_code == STSAFE_A_OK && readBuf->Length == 4) { + /* Parse ASN.1 DER certificate header */ + /* 0x30 = ASN_SEQUENCE | ASN_CONSTRUCTED (certificate is a SEQUENCE) */ + if (readBuf->Data[0] == (ASN_SEQUENCE | ASN_CONSTRUCTED)) { + /* Parse ASN.1 length encoding */ + switch (readBuf->Data[1]) { + case (ASN_LONG_LENGTH | 0x01): /* Length encoded in 1 byte */ + *pCertLen = readBuf->Data[2] + 3; + break; + case (ASN_LONG_LENGTH | 0x02): /* Length encoded in 2 bytes */ + *pCertLen = ((uint16_t)readBuf->Data[2] << 8) + + readBuf->Data[3] + 4; + break; + default: + /* Short form: length < 128, encoded directly */ + if (readBuf->Data[1] < ASN_LONG_LENGTH) { + *pCertLen = readBuf->Data[1] + 2; + } + break; + } + /* Check if length parsing succeeded */ + if (*pCertLen == 0) { + rc = ASN_PARSE_E; + } + } + else { + /* Invalid ASN.1 header - expected SEQUENCE tag */ + rc = ASN_PARSE_E; + } + } + else { + rc = (int)status_code; + } + XFREE(readBuf, NULL, DYNAMIC_TYPE_TMP_BUFFER); + readBuf = NULL; + + if (rc == STSAFE_A_OK && *pCertLen > 0) { + *ppCert = (uint8_t*)XMALLOC(*pCertLen, NULL, DYNAMIC_TYPE_TMP_BUFFER); + if (*ppCert == NULL) { + rc = (int)(uint8_t)-1; + } + } + + if (rc == STSAFE_A_OK && *pCertLen > 0) { + /* STSAFE-A100/A110 maximum read size is 225 bytes per command. + * When CRC is supported, 2 bytes are used for CRC, leaving 223 bytes + * for data. Without CRC, we can read up to 225 bytes, but use 223 + * for consistency and to leave room for protocol overhead. */ + step = 223 - (stsafe_a->CrcSupport ? 
2 : 0); + + for (i = 0; rc == STSAFE_A_OK && i < *pCertLen / step; i++) { + status_code = StSafeA_Read(g_stsafe_handle, 0, 0, + STSAFE_A_ALWAYS, 0, i * step, step, &readBuf, + STSAFE_A_NO_MAC); + if (status_code == STSAFE_A_OK) { + XMEMCPY(*ppCert + (i * step), readBuf->Data, readBuf->Length); + } + else { + rc = (int)status_code; + } + XFREE(readBuf, NULL, DYNAMIC_TYPE_TMP_BUFFER); + readBuf = NULL; + } + + if (rc == STSAFE_A_OK && (*pCertLen % step)) { + status_code = StSafeA_Read(g_stsafe_handle, 0, 0, + STSAFE_A_ALWAYS, 0, i * step, *pCertLen % step, + &readBuf, STSAFE_A_NO_MAC); + if (status_code == STSAFE_A_OK) { + XMEMCPY(*ppCert + (i * step), readBuf->Data, readBuf->Length); + } + else { + rc = (int)status_code; + } + XFREE(readBuf, NULL, DYNAMIC_TYPE_TMP_BUFFER); + readBuf = NULL; + } + } + + return rc; +#endif /* WOLFSSL_NO_MALLOC */ +} + +#if !defined(WC_NO_RNG) && defined(USE_STSAFE_RNG_SEED) +/** + * \brief Get random bytes from STSAFE-A100/A110 + */ +static int stsafe_get_random(uint8_t* pRandom, uint32_t size) +{ + int rc; + uint8_t status_code; + StSafeA_GenerateRandomBuffer* rndBuf = NULL; + uint8_t reqSize = (size > 255) ? 
255 : (uint8_t)size; + + status_code = StSafeA_GenerateRandom(g_stsafe_handle, STSAFE_A_EPHEMERAL, + reqSize, &rndBuf, STSAFE_A_NO_MAC); + + if (status_code == STSAFE_A_OK && rndBuf != NULL) { + rc = (int)rndBuf->Length; + XMEMCPY(pRandom, rndBuf->Data, rndBuf->Length); + } + else { + rc = -1; + } + + XFREE(rndBuf, NULL, DYNAMIC_TYPE_TMP_BUFFER); + + return rc; +} +#endif + +#endif /* WOLFSSL_STSAFEA120 */ + +#endif /* !WOLFSSL_STSAFE_INTERFACE_EXTERNAL */ + + +/* ========================================================================== */ +/* Public API Functions */ +/* ========================================================================== */ + +/** + * \brief Load device certificate from STSAFE + */ +int SSL_STSAFE_LoadDeviceCertificate(byte** pRawCertificate, + word32* pRawCertificateLen) +{ + int err = 0; + + if (pRawCertificate == NULL || pRawCertificateLen == NULL) { + err = BAD_FUNC_ARG; + } + +#ifdef USE_STSAFE_VERBOSE + if (err == 0) { + WOLFSSL_MSG("SSL_STSAFE_LoadDeviceCertificate"); + } +#endif + + if (err == 0) { + err = stsafe_read_certificate(pRawCertificate, pRawCertificateLen); + if (err != STSAFE_A_OK) { + err = WC_HW_E; + } } return err; } -#ifdef HAVE_PK_CALLBACKS + +/* ========================================================================== */ +/* PK Callbacks */ +/* ========================================================================== */ + +#if !defined(WOLFCRYPT_ONLY) && defined(HAVE_PK_CALLBACKS) /** - * \brief Key Gen Callback (used by TLS server) + * \brief Key Gen Callback (used by TLS server for ECDHE) */ int SSL_STSAFE_CreateKeyCb(WOLFSSL* ssl, ecc_key* key, word32 keySz, int ecc_curve, void* ctx) { - int err; + int err = 0; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* pubKeyRaw = NULL; +#else byte pubKeyRaw[STSAFE_MAX_PUBKEY_RAW_LEN]; - StSafeA_KeySlotNumber slot; - StSafeA_CurveId curve_id; +#endif + stsafe_slot_t slot; + stsafe_curve_id_t curve_id; (void)ssl; (void)ctx; #ifdef 
USE_STSAFE_VERBOSE - WOLFSSL_MSG("CreateKeyCb: STSAFE"); + WOLFSSL_MSG("CreateKeyCb: STSAFE (ECDHE)"); #endif - /* get curve */ - curve_id = stsafe_get_ecc_curve_id(ecc_curve); +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + pubKeyRaw = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (pubKeyRaw == NULL) { + err = MEMORY_E; + } +#endif - /* generate new ephemeral key on device */ - err = stsafe_interface_create_key(&slot, curve_id, (uint8_t*)&pubKeyRaw[0]); - if (err != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_create_key error: %d\n", err); - #endif - err = WC_HW_E; - return err; + if (err == 0) { + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + +#ifdef WOLFSSL_STSAFEA120 + /* Use ECDHE ephemeral key generation for A120 */ + err = stsafe_create_ecdhe_key(curve_id, pubKeyRaw); + if (err != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_create_ecdhe_key error: %d\n", err); + err = WC_HW_E; + } + /* For ECDHE, slot is not used (ephemeral key stored internally) */ + slot = STSAFE_KEY_SLOT_EPHEMERAL; +#else + /* Legacy A100/A110 uses slot-based key generation */ + err = stsafe_create_key(&slot, curve_id, pubKeyRaw); + if (err != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_create_key error: %d\n", err); + err = WC_HW_E; + } +#endif } - /* load generated public key into key, used by wolfSSL */ - err = wc_ecc_import_unsigned(key, &pubKeyRaw[0], &pubKeyRaw[keySz], - NULL, ecc_curve); + if (err == 0) { + err = wc_ecc_import_unsigned(key, pubKeyRaw, &pubKeyRaw[keySz], + NULL, ecc_curve); + } + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(pubKeyRaw, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + + (void)slot; /* May be unused for A120 ECDHE */ return err; } /** - * \brief Verify Peer Cert Callback. 
+ * \brief Verify Peer Cert Callback */ int SSL_STSAFE_VerifyPeerCertCb(WOLFSSL* ssl, const unsigned char* sig, unsigned int sigSz, @@ -118,42 +1140,63 @@ int SSL_STSAFE_VerifyPeerCertCb(WOLFSSL* ssl, const unsigned char* keyDer, unsigned int keySz, int* result, void* ctx) { - int err; + int err = 0; + int eccKeyInit = 0; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* sigRS = NULL; + byte* pubKeyX = NULL; + byte* pubKeyY = NULL; +#else byte sigRS[STSAFE_MAX_SIG_LEN]; - byte *r = NULL, *s = NULL; - word32 r_len = STSAFE_MAX_SIG_LEN/2, s_len = STSAFE_MAX_SIG_LEN/2; byte pubKeyX[STSAFE_MAX_PUBKEY_RAW_LEN/2]; byte pubKeyY[STSAFE_MAX_PUBKEY_RAW_LEN/2]; - word32 pubKeyX_len = sizeof(pubKeyX); - word32 pubKeyY_len = sizeof(pubKeyY); - ecc_key key; +#endif + byte* r = NULL; + byte* s = NULL; + word32 r_len = STSAFE_MAX_SIG_LEN/2, s_len = STSAFE_MAX_SIG_LEN/2; + word32 pubKeyX_len = STSAFE_MAX_PUBKEY_RAW_LEN/2; + word32 pubKeyY_len = STSAFE_MAX_PUBKEY_RAW_LEN/2; + ecc_key eccKey; word32 inOutIdx = 0; - StSafeA_CurveId curve_id = STSAFE_A_NIST_P_256; + stsafe_curve_id_t curve_id = STSAFE_ECC_CURVE_P256; int ecc_curve; int key_sz = 0; (void)ssl; (void)ctx; + (void)hashSz; #ifdef USE_STSAFE_VERBOSE WOLFSSL_MSG("VerifyPeerCertCB: STSAFE"); #endif - err = wc_ecc_init(&key); - if (err != 0) { - return err; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + sigRS = (byte*)XMALLOC(STSAFE_MAX_SIG_LEN, NULL, DYNAMIC_TYPE_TMP_BUFFER); + pubKeyX = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN/2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + pubKeyY = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN/2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (sigRS == NULL || pubKeyX == NULL || pubKeyY == NULL) { + err = MEMORY_E; + } +#endif + + if (err == 0) { + err = wc_ecc_init(&eccKey); + if (err == 0) { + eccKeyInit = 1; + } } - /* Decode the public key */ - err = wc_EccPublicKeyDecode(keyDer, &inOutIdx, &key, keySz); if (err == 0) { - /* Extract Raw X and Y coordinates of the 
public key */ - err = wc_ecc_export_public_raw(&key, pubKeyX, &pubKeyX_len, + err = wc_EccPublicKeyDecode(keyDer, &inOutIdx, &eccKey, keySz); + } + if (err == 0) { + err = wc_ecc_export_public_raw(&eccKey, pubKeyX, &pubKeyX_len, pubKeyY, &pubKeyY_len); } if (err == 0) { - /* determine curve */ - ecc_curve = key.dp->id; + ecc_curve = eccKey.dp->id; curve_id = stsafe_get_ecc_curve_id(ecc_curve); key_sz = stsafe_get_key_size(curve_id); if (key_sz <= 0 || key_sz > STSAFE_MAX_KEY_LEN) { @@ -161,113 +1204,139 @@ int SSL_STSAFE_VerifyPeerCertCb(WOLFSSL* ssl, } } if (err == 0) { - /* Extract R and S from signature */ - XMEMSET(sigRS, 0, sizeof(sigRS)); + XMEMSET(sigRS, 0, STSAFE_MAX_SIG_LEN); r = &sigRS[0]; s = &sigRS[key_sz]; err = wc_ecc_sig_to_rs(sig, sigSz, r, &r_len, s, &s_len); } if (err == 0) { - /* make sure R and S are not too large */ - if (r_len > key_sz || s_len > key_sz) { + if ((int)r_len > key_sz || (int)s_len > key_sz) { err = BAD_FUNC_ARG; } } if (err == 0) { - /* make sure R and S are zero padded on front */ - XMEMMOVE(&sigRS[key_sz-r_len], r, r_len); - XMEMSET(&sigRS[0], 0, key_sz-r_len); - XMEMMOVE(&sigRS[key_sz + (key_sz-s_len)], s, s_len); - XMEMSET(&sigRS[key_sz], 0, key_sz-s_len); + /* Zero-pad R and S */ + XMEMMOVE(&sigRS[key_sz - r_len], r, r_len); + XMEMSET(&sigRS[0], 0, key_sz - r_len); + XMEMMOVE(&sigRS[key_sz + (key_sz - s_len)], s, s_len); + XMEMSET(&sigRS[key_sz], 0, key_sz - s_len); - /* Verify signature */ - err = stsafe_interface_verify(curve_id, (uint8_t*)hash, sigRS, + err = stsafe_verify(curve_id, (uint8_t*)hash, sigRS, pubKeyX, pubKeyY, (int32_t*)result); if (err != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_verify error: %d\n", err); - #endif - err = -err; + STSAFE_INTERFACE_PRINTF("stsafe_verify error: %d\n", err); + err = WC_HW_E; } } - wc_ecc_free(&key); + if (eccKeyInit) { + wc_ecc_free(&eccKey); + } + +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(sigRS, NULL, 
DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubKeyX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubKeyY, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + return err; } /** - * \brief Sign Certificate Callback. + * \brief Sign Certificate Callback */ int SSL_STSAFE_SignCertificateCb(WOLFSSL* ssl, const byte* in, word32 inSz, byte* out, word32* outSz, const byte* key, word32 keySz, void* ctx) { - int err; + int err = 0; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* digest = NULL; + byte* sigRS = NULL; +#else byte digest[STSAFE_MAX_KEY_LEN]; byte sigRS[STSAFE_MAX_SIG_LEN]; - byte *r, *s; - StSafeA_CurveId curve_id; +#endif + byte* r; + byte* s; + stsafe_curve_id_t curve_id; int key_sz; (void)ssl; (void)ctx; + (void)key; + (void)keySz; #ifdef USE_STSAFE_VERBOSE WOLFSSL_MSG("SignCertificateCb: STSAFE"); #endif - curve_id = stsafe_get_curve_mode(); - key_sz = stsafe_get_key_size(curve_id); +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + digest = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, DYNAMIC_TYPE_TMP_BUFFER); + sigRS = (byte*)XMALLOC(STSAFE_MAX_SIG_LEN, NULL, DYNAMIC_TYPE_TMP_BUFFER); + if (digest == NULL || sigRS == NULL) { + err = MEMORY_E; + } +#endif - /* Build input digest */ - if (inSz > key_sz) - inSz = key_sz; - XMEMSET(&digest[0], 0, sizeof(digest)); - XMEMCPY(&digest[key_sz - inSz], in, inSz); + if (err == 0) { + curve_id = stsafe_get_curve_mode(); + key_sz = stsafe_get_key_size(curve_id); - /* Sign using slot 0: Result is R then S */ - /* Sign will always use the curve type in slot 0 (the TLS curve needs to match) */ - XMEMSET(sigRS, 0, sizeof(sigRS)); - err = stsafe_interface_sign(STSAFE_A_SLOT_0, curve_id, digest, sigRS); - if (err != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_sign error: %d\n", err); - #endif - err = WC_HW_E; - return err; + if ((int)inSz > key_sz) + inSz = key_sz; + + XMEMSET(digest, 0, STSAFE_MAX_KEY_LEN); + XMEMCPY(&digest[key_sz - inSz], in, inSz); + + 
XMEMSET(sigRS, 0, STSAFE_MAX_SIG_LEN); + err = stsafe_sign(STSAFE_KEY_SLOT_0, curve_id, digest, sigRS); + if (err != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_sign error: %d\n", err); + err = WC_HW_E; + } } - /* Convert R and S to signature */ - r = &sigRS[0]; - s = &sigRS[key_sz]; - err = wc_ecc_rs_raw_to_sig((const byte*)r, key_sz, (const byte*)s, key_sz, - out, outSz); - if (err != 0) { - #ifdef USE_STSAFE_VERBOSE - WOLFSSL_MSG("Error converting RS to Signature"); - #endif + if (err == 0) { + r = &sigRS[0]; + s = &sigRS[key_sz]; + err = wc_ecc_rs_raw_to_sig(r, key_sz, s, key_sz, out, outSz); + if (err != 0) { + WOLFSSL_MSG("Error converting RS to Signature"); + } } +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(digest, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(sigRS, NULL, DYNAMIC_TYPE_TMP_BUFFER); +#endif + return err; } - /** - * \brief Create pre master secret using peer's public key and self private key. + * \brief Shared Secret Callback (ECDHE) */ int SSL_STSAFE_SharedSecretCb(WOLFSSL* ssl, ecc_key* otherKey, unsigned char* pubKeyDer, unsigned int* pubKeySz, unsigned char* out, unsigned int* outlen, int side, void* ctx) { - int err; + int err = 0; + int tmpKeyInit = 0; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* otherKeyX = NULL; + byte* otherKeyY = NULL; + byte* pubKeyRaw = NULL; +#else byte otherKeyX[STSAFE_MAX_KEY_LEN]; byte otherKeyY[STSAFE_MAX_KEY_LEN]; - word32 otherKeyX_len = sizeof(otherKeyX); - word32 otherKeyY_len = sizeof(otherKeyY); byte pubKeyRaw[STSAFE_MAX_PUBKEY_RAW_LEN]; - StSafeA_KeySlotNumber slot = STSAFE_A_SLOT_0; - StSafeA_CurveId curve_id; +#endif + word32 otherKeyX_len = STSAFE_MAX_KEY_LEN; + word32 otherKeyY_len = STSAFE_MAX_KEY_LEN; + stsafe_slot_t slot = STSAFE_KEY_SLOT_0; + stsafe_curve_id_t curve_id; ecc_key tmpKey; int ecc_curve; int key_sz; @@ -276,95 +1345,117 @@ int SSL_STSAFE_SharedSecretCb(WOLFSSL* ssl, ecc_key* otherKey, (void)ctx; #ifdef USE_STSAFE_VERBOSE - 
WOLFSSL_MSG("SharedSecretCb: STSAFE"); + WOLFSSL_MSG("SharedSecretCb: STSAFE (ECDHE)"); #endif - err = wc_ecc_init(&tmpKey); - if (err != 0) { - return err; +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + otherKeyX = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + otherKeyY = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + pubKeyRaw = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (otherKeyX == NULL || otherKeyY == NULL || pubKeyRaw == NULL) { + err = MEMORY_E; } +#endif - /* set curve */ - ecc_curve = otherKey->dp->id; - curve_id = stsafe_get_ecc_curve_id(ecc_curve); - key_sz = stsafe_get_key_size(curve_id); - - /* for client: create and export public key */ - if (side == WOLFSSL_CLIENT_END) { - /* Export otherKey raw X and Y */ - err = wc_ecc_export_public_raw(otherKey, - &otherKeyX[0], (word32*)&otherKeyX_len, - &otherKeyY[0], (word32*)&otherKeyY_len); - if (err != 0) { - return err; - } - - err = stsafe_interface_create_key(&slot, curve_id, (uint8_t*)&pubKeyRaw[0]); - if (err != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_create_key error: %d\n", err); - #endif - err = WC_HW_E; - return err; - } - - /* convert raw unsigned public key to X.963 format for TLS */ + if (err == 0) { err = wc_ecc_init(&tmpKey); if (err == 0) { - err = wc_ecc_import_unsigned(&tmpKey, &pubKeyRaw[0], &pubKeyRaw[key_sz], - NULL, ecc_curve); + tmpKeyInit = 1; + } + } + + if (err == 0) { + ecc_curve = otherKey->dp->id; + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + key_sz = stsafe_get_key_size(curve_id); + + if (side == WOLFSSL_CLIENT_END) { + err = wc_ecc_export_public_raw(otherKey, otherKeyX, &otherKeyX_len, + otherKeyY, &otherKeyY_len); + + if (err == 0) { +#ifdef WOLFSSL_STSAFEA120 + /* Use ECDHE ephemeral key generation for A120 */ + err = stsafe_create_ecdhe_key(curve_id, pubKeyRaw); + if (err != STSAFE_A_OK) { + 
STSAFE_INTERFACE_PRINTF("stsafe_create_ecdhe_key error: %d\n", + err); + err = WC_HW_E; + } + slot = STSAFE_KEY_SLOT_EPHEMERAL; +#else + /* Legacy A100/A110 uses slot-based key generation */ + err = stsafe_create_key(&slot, curve_id, pubKeyRaw); + if (err != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_create_key error: %d\n", + err); + err = WC_HW_E; + } +#endif + } + + if (err == 0) { + err = wc_ecc_import_unsigned(&tmpKey, pubKeyRaw, + &pubKeyRaw[key_sz], NULL, ecc_curve); + } if (err == 0) { err = wc_ecc_export_x963(&tmpKey, pubKeyDer, pubKeySz); } - wc_ecc_free(&tmpKey); + } + else if (side == WOLFSSL_SERVER_END) { + err = wc_ecc_import_x963_ex(pubKeyDer, *pubKeySz, &tmpKey, + ecc_curve); + if (err == 0) { + err = wc_ecc_export_public_raw(&tmpKey, otherKeyX, + &otherKeyX_len, otherKeyY, &otherKeyY_len); + } + } + else { + err = BAD_FUNC_ARG; } } - /* for server: import public key */ - else if (side == WOLFSSL_SERVER_END) { - /* import peer's key and export as raw unsigned for hardware */ - err = wc_ecc_import_x963_ex(pubKeyDer, *pubKeySz, &tmpKey, ecc_curve); - if (err == 0) { - err = wc_ecc_export_public_raw(&tmpKey, otherKeyX, &otherKeyX_len, - otherKeyY, &otherKeyY_len); + + if (err == 0) { + err = stsafe_shared_secret(slot, curve_id, otherKeyX, otherKeyY, + out, (int32_t*)outlen); + if (err != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_shared_secret error: %d\n", err); + err = WC_HW_E; } } - else { - err = BAD_FUNC_ARG; + + if (tmpKeyInit) { + wc_ecc_free(&tmpKey); } - wc_ecc_free(&tmpKey); - - if (err != 0) { - return err; - } - - /* Compute shared secret */ - err = stsafe_interface_shared_secret( -#ifdef WOLFSSL_STSAFE_TAKES_SLOT - slot, +#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(otherKeyX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(otherKeyY, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubKeyRaw, NULL, DYNAMIC_TYPE_TMP_BUFFER); #endif - curve_id, &otherKeyX[0], &otherKeyY[0], - out, (int32_t*)outlen); - if (err != 
STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_shared_secret error: %d\n", err); - #endif - err = WC_HW_E; - } return err; } +/** + * \brief Setup PK callbacks for STSAFE + */ int SSL_STSAFE_SetupPkCallbacks(WOLFSSL_CTX* ctx) { wolfSSL_CTX_SetEccKeyGenCb(ctx, SSL_STSAFE_CreateKeyCb); wolfSSL_CTX_SetEccSignCb(ctx, SSL_STSAFE_SignCertificateCb); wolfSSL_CTX_SetEccVerifyCb(ctx, SSL_STSAFE_VerifyPeerCertCb); wolfSSL_CTX_SetEccSharedSecretCb(ctx, SSL_STSAFE_SharedSecretCb); - wolfSSL_CTX_SetDevId(ctx, 0); /* enables wolfCrypt `wc_ecc_*` ST-Safe use */ + wolfSSL_CTX_SetDevId(ctx, 0); return 0; } +/** + * \brief Setup PK callback context + */ int SSL_STSAFE_SetupPkCallbackCtx(WOLFSSL* ssl, void* user_ctx) { wolfSSL_SetEccKeyGenCtx(ssl, user_ctx); @@ -374,9 +1465,13 @@ int SSL_STSAFE_SetupPkCallbackCtx(WOLFSSL* ssl, void* user_ctx) return 0; } - #endif /* HAVE_PK_CALLBACKS */ + +/* ========================================================================== */ +/* Crypto Callbacks */ +/* ========================================================================== */ + #ifdef WOLF_CRYPTO_CB int wolfSSL_STSAFE_CryptoDevCb(int devId, wc_CryptoInfo* info, void* ctx) @@ -384,212 +1479,376 @@ int wolfSSL_STSAFE_CryptoDevCb(int devId, wc_CryptoInfo* info, void* ctx) int rc = CRYPTOCB_UNAVAILABLE; wolfSTSAFE_CryptoCb_Ctx* stsCtx = (wolfSTSAFE_CryptoCb_Ctx*)ctx; - if (info == NULL || ctx == NULL) - return BAD_FUNC_ARG; + if (info == NULL || ctx == NULL) { + rc = BAD_FUNC_ARG; + } (void)devId; (void)stsCtx; - if (info->algo_type == WC_ALGO_TYPE_SEED) { - /* use the STSAFE hardware for RNG seed */ + if (rc != BAD_FUNC_ARG && info->algo_type == WC_ALGO_TYPE_SEED) { #if !defined(WC_NO_RNG) && defined(USE_STSAFE_RNG_SEED) - while (info->seed.sz > 0) { - rc = stsafe_interface_getrandom(info->seed.seed, info->seed.sz); - if (rc < 0) { - return rc; - } - info->seed.seed += rc; - info->seed.sz -= rc; - } rc = 0; + while (rc == 0 && info->seed.sz > 0) { + 
int len = stsafe_get_random(info->seed.seed, info->seed.sz); + if (len < 0) { + rc = len; + } + else { + info->seed.seed += len; + info->seed.sz -= len; + } + } #else rc = CRYPTOCB_UNAVAILABLE; #endif } #ifdef HAVE_ECC - else if (info->algo_type == WC_ALGO_TYPE_PK) { + else if (rc != BAD_FUNC_ARG && info->algo_type == WC_ALGO_TYPE_PK) { #ifdef USE_STSAFE_VERBOSE STSAFE_INTERFACE_PRINTF("STSAFE Pk: Type %d\n", info->pk.type); #endif if (info->pk.type == WC_PK_TYPE_EC_KEYGEN) { + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* pubKeyRaw = NULL; + #else byte pubKeyRaw[STSAFE_MAX_PUBKEY_RAW_LEN]; - StSafeA_KeySlotNumber slot; - StSafeA_CurveId curve_id; + #endif + stsafe_slot_t slot; + stsafe_curve_id_t curve_id; int ecc_curve, key_sz; WOLFSSL_MSG("STSAFE: ECC KeyGen"); - /* get curve */ - ecc_curve = info->pk.eckg.curveId; - curve_id = stsafe_get_ecc_curve_id(ecc_curve); - key_sz = stsafe_get_key_size(curve_id); + rc = 0; + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + pubKeyRaw = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (pubKeyRaw == NULL) { + rc = MEMORY_E; + } + #endif - /* generate new ephemeral key on device */ - rc = stsafe_interface_create_key(&slot, curve_id, - (uint8_t*)pubKeyRaw); - if (rc != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_create_key error: %d\n", rc); - #endif - rc = WC_HW_E; - return rc; + if (rc == 0) { + ecc_curve = info->pk.eckg.curveId; + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + key_sz = stsafe_get_key_size(curve_id); + + /* For A120, generate keys in slot 1 (persistent slot) by default for ECDSA signing. + * For ECDH operations, the key slot from devCtx will be used directly. + * If ECDH is required, keys should be generated in the ephemeral slot from the start. 
*/ +#ifdef WOLFSSL_STSAFEA120 + /* Retrieve slot from devCtx if available, otherwise use default */ + slot = STSAFE_KEY_SLOT_1; /* Default fallback */ + if (info->pk.eckg.key != NULL && info->pk.eckg.key->devCtx != NULL) { + slot = STSAFE_DEVCXT_TO_SLOT(info->pk.eckg.key->devCtx); + } + + STSAFE_INTERFACE_PRINTF("STSAFE: KeyGen slot %d, curve_id %d\n", + slot, curve_id); + + if (slot == STSAFE_KEY_SLOT_EPHEMERAL) { + rc = stsafe_create_ecdhe_key(curve_id, pubKeyRaw); + } else { + rc = stsafe_create_key(slot, curve_id, pubKeyRaw); + } + if (rc != STSE_OK) { + STSAFE_INTERFACE_PRINTF("STSAFE: KeyGen (slot %d) error: %d\n", slot, rc); + rc = WC_HW_E; + } else { + rc = STSAFE_A_OK; + } +#else + /* Legacy A100/A110 uses slot-based key generation */ + rc = stsafe_create_key(&slot, curve_id, pubKeyRaw); + if (rc != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_create_key error: %d\n", + rc); + rc = WC_HW_E; + } +#endif } - /* load generated public key into key, used by wolfSSL */ - rc = wc_ecc_import_unsigned(info->pk.eckg.key, pubKeyRaw, - &pubKeyRaw[key_sz], NULL, ecc_curve); + if (rc == 0) { + /* Import public key */ + rc = wc_ecc_import_unsigned(info->pk.eckg.key, pubKeyRaw, + &pubKeyRaw[key_sz], NULL, ecc_curve); + } + + if (rc == 0) { + /* Store slot number directly in devCtx */ + info->pk.eckg.key->devCtx = STSAFE_SLOT_TO_DEVCXT(slot); + } + + + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(pubKeyRaw, NULL, DYNAMIC_TYPE_TMP_BUFFER); + #endif } else if (info->pk.type == WC_PK_TYPE_ECDSA_SIGN) { + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* digest = NULL; + byte* sigRS = NULL; + #else byte digest[STSAFE_MAX_KEY_LEN]; byte sigRS[STSAFE_MAX_SIG_LEN]; - byte *r, *s; - StSafeA_CurveId curve_id; + #endif + byte* r; + byte* s; + stsafe_slot_t slot; + stsafe_curve_id_t curve_id; + int ecc_curve; word32 inSz = info->pk.eccsign.inlen; int key_sz; WOLFSSL_MSG("STSAFE: ECC Sign"); - curve_id = stsafe_get_curve_mode(); - 
key_sz = stsafe_get_key_size(curve_id); + rc = 0; + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + digest = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + sigRS = (byte*)XMALLOC(STSAFE_MAX_SIG_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (digest == NULL || sigRS == NULL) { + rc = MEMORY_E; + } + #endif - /* truncate input to match key size */ - if (inSz > key_sz) - inSz = key_sz; + if (rc == 0) { + /* Get curve from signing key */ + if (info->pk.eccsign.key != NULL && + info->pk.eccsign.key->dp != NULL) { + ecc_curve = info->pk.eccsign.key->dp->id; + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + } else { + curve_id = stsafe_get_curve_mode(); + } + key_sz = stsafe_get_key_size(curve_id); - /* Build input digest */ - XMEMSET(&digest[0], 0, sizeof(digest)); - XMEMCPY(&digest[key_sz - inSz], info->pk.eccsign.in, inSz); + if ((int)inSz > key_sz) + inSz = key_sz; - /* Sign using slot 0: Result is R then S */ - /* Sign will always use the curve type in slot 0 - (the TLS curve needs to match) */ - XMEMSET(sigRS, 0, sizeof(sigRS)); - rc = stsafe_interface_sign(STSAFE_A_SLOT_0, curve_id, - (uint8_t*)info->pk.eccsign.in, sigRS); - if (rc != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_sign error: %d\n", rc); - #endif - rc = WC_HW_E; - return rc; + XMEMSET(digest, 0, STSAFE_MAX_KEY_LEN); + XMEMCPY(&digest[key_sz - inSz], info->pk.eccsign.in, inSz); + + XMEMSET(sigRS, 0, STSAFE_MAX_SIG_LEN); + /* Retrieve slot from devCtx if available, otherwise use default */ + slot = STSAFE_KEY_SLOT_1; /* Default fallback */ + if (info->pk.eccsign.key != NULL && info->pk.eccsign.key->devCtx != NULL) { + slot = STSAFE_DEVCXT_TO_SLOT(info->pk.eccsign.key->devCtx); + STSAFE_INTERFACE_PRINTF("STSAFE: Sign using slot %d\n", slot); + } else { + WOLFSSL_MSG("STSAFE: Sign using default slot 1"); + } + rc = stsafe_sign(slot, curve_id, digest, sigRS); + if (rc != STSAFE_A_OK) { + STSAFE_INTERFACE_PRINTF("stsafe_sign 
error: %d\n", rc); + rc = WC_HW_E; + } } - /* Convert R and S to signature */ - r = &sigRS[0]; - s = &sigRS[key_sz]; - rc = wc_ecc_rs_raw_to_sig((const byte*)r, key_sz, (const byte*)s, - key_sz, info->pk.eccsign.out, info->pk.eccsign.outlen); - if (rc != 0) { - WOLFSSL_MSG("Error converting RS to Signature"); + if (rc == 0) { + r = &sigRS[0]; + s = &sigRS[key_sz]; + rc = wc_ecc_rs_raw_to_sig(r, key_sz, s, key_sz, + info->pk.eccsign.out, info->pk.eccsign.outlen); + if (rc != 0) { + WOLFSSL_MSG("Error converting RS to Signature"); + } } + + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(digest, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(sigRS, NULL, DYNAMIC_TYPE_TMP_BUFFER); + #endif } else if (info->pk.type == WC_PK_TYPE_ECDSA_VERIFY) { + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* sigRS = NULL; + byte* pubKeyX = NULL; + byte* pubKeyY = NULL; + #else byte sigRS[STSAFE_MAX_SIG_LEN]; - byte *r = NULL, *s = NULL; - word32 r_len = STSAFE_MAX_SIG_LEN/2, s_len = STSAFE_MAX_SIG_LEN/2; byte pubKeyX[STSAFE_MAX_PUBKEY_RAW_LEN/2]; byte pubKeyY[STSAFE_MAX_PUBKEY_RAW_LEN/2]; - word32 pubKeyX_len = sizeof(pubKeyX); - word32 pubKeyY_len = sizeof(pubKeyY); - StSafeA_CurveId curve_id; + #endif + byte* r = NULL; + byte* s = NULL; + word32 r_len = STSAFE_MAX_SIG_LEN/2, s_len = STSAFE_MAX_SIG_LEN/2; + word32 pubKeyX_len = STSAFE_MAX_PUBKEY_RAW_LEN/2; + word32 pubKeyY_len = STSAFE_MAX_PUBKEY_RAW_LEN/2; + stsafe_curve_id_t curve_id; int ecc_curve, key_sz; WOLFSSL_MSG("STSAFE: ECC Verify"); + rc = 0; if (info->pk.eccverify.key == NULL || info->pk.eccverify.key->dp == NULL) { - return BAD_FUNC_ARG; + rc = BAD_FUNC_ARG; } - /* determine curve */ - ecc_curve = info->pk.eccverify.key->dp->id; - curve_id = stsafe_get_ecc_curve_id(ecc_curve); - key_sz = stsafe_get_key_size(curve_id); - if (key_sz <= 0 || key_sz > STSAFE_MAX_KEY_LEN) { - return BAD_FUNC_ARG; - } - - /* Extract Raw X and Y coordinates of the public key */ - rc = 
wc_ecc_export_public_raw(info->pk.eccverify.key, - pubKeyX, &pubKeyX_len, - pubKeyY, &pubKeyY_len); + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) if (rc == 0) { - /* Extract R and S from signature */ - XMEMSET(sigRS, 0, sizeof(sigRS)); + sigRS = (byte*)XMALLOC(STSAFE_MAX_SIG_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + pubKeyX = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN/2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + pubKeyY = (byte*)XMALLOC(STSAFE_MAX_PUBKEY_RAW_LEN/2, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (sigRS == NULL || pubKeyX == NULL || pubKeyY == NULL) { + rc = MEMORY_E; + } + } + #endif + + if (rc == 0) { + ecc_curve = info->pk.eccverify.key->dp->id; + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + key_sz = stsafe_get_key_size(curve_id); + if (key_sz <= 0 || key_sz > STSAFE_MAX_KEY_LEN) { + rc = BAD_FUNC_ARG; + } + } + + if (rc == 0) { + rc = wc_ecc_export_public_raw(info->pk.eccverify.key, + pubKeyX, &pubKeyX_len, pubKeyY, &pubKeyY_len); + } + if (rc == 0) { + XMEMSET(sigRS, 0, STSAFE_MAX_SIG_LEN); r = &sigRS[0]; s = &sigRS[key_sz]; rc = wc_ecc_sig_to_rs(info->pk.eccverify.sig, info->pk.eccverify.siglen, r, &r_len, s, &s_len); } if (rc == 0) { - /* make sure R and S are not too large */ - if (r_len > key_sz || s_len > key_sz) { + if ((int)r_len > key_sz || (int)s_len > key_sz) { rc = BAD_FUNC_ARG; } } if (rc == 0) { - /* make sure R and S are zero padded on front */ - XMEMMOVE(&sigRS[key_sz-r_len], r, r_len); - XMEMSET(&sigRS[0], 0, key_sz-r_len); - XMEMMOVE(&sigRS[key_sz + (key_sz-s_len)], s, s_len); - XMEMSET(&sigRS[key_sz], 0, key_sz-s_len); + XMEMMOVE(&sigRS[key_sz - r_len], r, r_len); + XMEMSET(&sigRS[0], 0, key_sz - r_len); + XMEMMOVE(&sigRS[key_sz + (key_sz - s_len)], s, s_len); + XMEMSET(&sigRS[key_sz], 0, key_sz - s_len); - /* Verify signature */ - rc = stsafe_interface_verify(curve_id, - (uint8_t*)info->pk.eccverify.hash, sigRS, pubKeyX, pubKeyY, - (int32_t*)info->pk.eccverify.res); + rc = stsafe_verify(curve_id, 
(uint8_t*)info->pk.eccverify.hash, + sigRS, pubKeyX, pubKeyY, (int32_t*)info->pk.eccverify.res); if (rc != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_verify error: %d\n", rc); - #endif - rc = -rc; - } - } - } - else if (info->pk.type == WC_PK_TYPE_ECDH) { - byte otherKeyX[STSAFE_MAX_KEY_LEN]; - byte otherKeyY[STSAFE_MAX_KEY_LEN]; - word32 otherKeyX_len = sizeof(otherKeyX); - word32 otherKeyY_len = sizeof(otherKeyY); - StSafeA_CurveId curve_id; - int ecc_curve; - - WOLFSSL_MSG("STSAFE: PMS"); - - if (info->pk.ecdh.public_key == NULL) - return BAD_FUNC_ARG; - - /* get curve */ - ecc_curve = info->pk.ecdh.public_key->dp->id; - curve_id = stsafe_get_ecc_curve_id(ecc_curve); - - /* Export otherKey raw X and Y */ - rc = wc_ecc_export_public_raw(info->pk.ecdh.public_key, - &otherKeyX[0], (word32*)&otherKeyX_len, - &otherKeyY[0], (word32*)&otherKeyY_len); - if (rc == 0) { - /* Compute shared secret */ - *info->pk.ecdh.outlen = 0; - rc = stsafe_interface_shared_secret( - #ifdef WOLFSSL_STSAFE_TAKES_SLOT - STSAFE_A_SLOT_0, - #endif - curve_id, - otherKeyX, otherKeyY, - info->pk.ecdh.out, (int32_t*)info->pk.ecdh.outlen); - if (rc != STSAFE_A_OK) { - #ifdef USE_STSAFE_VERBOSE - STSAFE_INTERFACE_PRINTF("stsafe_interface_shared_secret error: %d\n", rc); - #endif + STSAFE_INTERFACE_PRINTF("stsafe_verify error: %d\n", rc); rc = WC_HW_E; } } + + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(sigRS, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubKeyX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(pubKeyY, NULL, DYNAMIC_TYPE_TMP_BUFFER); + #endif + } + else if (info->pk.type == WC_PK_TYPE_ECDH) { + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + byte* otherKeyX = NULL; + byte* otherKeyY = NULL; + #else + byte otherKeyX[STSAFE_MAX_KEY_LEN]; + byte otherKeyY[STSAFE_MAX_KEY_LEN]; + #endif + word32 otherKeyX_len = STSAFE_MAX_KEY_LEN; + word32 otherKeyY_len = STSAFE_MAX_KEY_LEN; + stsafe_curve_id_t curve_id; + 
stsafe_slot_t slot; + int ecc_curve; + + WOLFSSL_MSG("STSAFE: ECDH"); + + rc = 0; + if (info->pk.ecdh.public_key == NULL) { + rc = BAD_FUNC_ARG; + } + + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + if (rc == 0) { + otherKeyX = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + otherKeyY = (byte*)XMALLOC(STSAFE_MAX_KEY_LEN, NULL, + DYNAMIC_TYPE_TMP_BUFFER); + if (otherKeyX == NULL || otherKeyY == NULL) { + rc = MEMORY_E; + } + } + #endif + + if (rc == 0) { + /* Get curve from private_key (hardware key), not public_key (peer key) */ + if (info->pk.ecdh.private_key != NULL && + info->pk.ecdh.private_key->dp != NULL) { + ecc_curve = info->pk.ecdh.private_key->dp->id; + } else if (info->pk.ecdh.public_key != NULL && + info->pk.ecdh.public_key->dp != NULL) { + /* Fallback to public_key if private_key not available */ + ecc_curve = info->pk.ecdh.public_key->dp->id; + } else { + rc = BAD_FUNC_ARG; + } + if (rc == 0) { + curve_id = stsafe_get_ecc_curve_id(ecc_curve); + /* Note: STSAFE_ECC_CURVE_P256 is 0, so we can't use STSAFE_DEFAULT_CURVE check. + * Instead, verify the curve_id is valid by checking it's one of the supported curves */ + if (curve_id != STSAFE_ECC_CURVE_P256 && curve_id != STSAFE_ECC_CURVE_P384 + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_256) + && curve_id != STSAFE_ECC_CURVE_BP256 + #endif + #if defined(HAVE_ECC_BRAINPOOL) && defined(STSE_CONF_ECC_BRAINPOOL_P_384) + && curve_id != STSAFE_ECC_CURVE_BP384 + #endif + ) { + rc = BAD_FUNC_ARG; + } + } + + if (rc == 0) { + rc = wc_ecc_export_public_raw(info->pk.ecdh.public_key, + otherKeyX, &otherKeyX_len, otherKeyY, &otherKeyY_len); + } + } + if (rc == 0) { + *info->pk.ecdh.outlen = 0; + + if (rc == 0) { + /* For ECDH operations, use the slot from devCtx. 
*/ + slot = STSAFE_KEY_SLOT_EPHEMERAL; + if (info->pk.ecdh.private_key != NULL && + info->pk.ecdh.private_key->devCtx != NULL) { + slot = STSAFE_DEVCXT_TO_SLOT(info->pk.ecdh.private_key->devCtx); + } + + STSAFE_INTERFACE_PRINTF("STSAFE: ECDH with slot %d, curve_id %d\n", + slot, curve_id); + + rc = stsafe_shared_secret(slot, curve_id, + otherKeyX, otherKeyY, + info->pk.ecdh.out, (int32_t*)info->pk.ecdh.outlen); + if (rc != STSAFE_A_OK) { + WOLFSSL_MSG("STSAFE: stsafe_shared_secret failed"); + STSAFE_INTERFACE_PRINTF("stsafe_shared_secret " + "error: %d (slot: %d, curve_id: %d)\n", + rc, slot, curve_id); + rc = WC_HW_E; + } + } + } + + #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_NO_MALLOC) + XFREE(otherKeyX, NULL, DYNAMIC_TYPE_TMP_BUFFER); + XFREE(otherKeyY, NULL, DYNAMIC_TYPE_TMP_BUFFER); + #endif } } #endif /* HAVE_ECC */ - /* need to return negative here for error */ if (rc != 0 && rc != WC_NO_ERR_TRACE(CRYPTOCB_UNAVAILABLE)) { WOLFSSL_MSG("STSAFE: CryptoCb failed"); #ifdef USE_STSAFE_VERBOSE @@ -603,4 +1862,4 @@ int wolfSSL_STSAFE_CryptoDevCb(int devId, wc_CryptoInfo* info, void* ctx) #endif /* WOLF_CRYPTO_CB */ -#endif /* WOLFSSL_STSAFEA100 */ +#endif /* WOLFSSL_STSAFE */ diff --git a/wolfcrypt/src/random.c b/wolfcrypt/src/random.c index 9b92e9f88..71acbc4b4 100644 --- a/wolfcrypt/src/random.c +++ b/wolfcrypt/src/random.c @@ -68,6 +68,9 @@ This library contains implementation for the random number generator. #include +#ifdef WC_RNG_BANK_SUPPORT + #include +#endif #include #ifndef WC_NO_RNG /* if not FIPS and RNG is disabled then do not compile */ @@ -369,6 +372,12 @@ static int Hash_df(DRBG_internal* drbg, byte* out, word32 outSz, byte type, XFREE(digest, drbg->heap, DYNAMIC_TYPE_DIGEST); #endif +#ifdef WC_VERBOSE_RNG + if (ret != 0) + WOLFSSL_DEBUG_PRINTF("ERROR: %s failed with err = %d", __FUNCTION__, + ret); +#endif + return (ret == 0) ? 
DRBG_SUCCESS : DRBG_FAILURE; } @@ -406,6 +415,13 @@ static int Hash_DRBG_Reseed(DRBG_internal* drbg, const byte* seed, word32 seedSz #ifndef WOLFSSL_SMALL_STACK_CACHE WC_FREE_VAR_EX(newV, drbg->heap, DYNAMIC_TYPE_TMP_BUFFER); #endif + + #ifdef WC_VERBOSE_RNG + if (ret != 0) + WOLFSSL_DEBUG_PRINTF("ERROR: Hash_DRBG_Reseed failed with err %d.", + ret); + #endif + return ret; } @@ -525,6 +541,19 @@ static int Hash_gen(DRBG_internal* drbg, byte* out, word32 outSz, const byte* V) WC_FREE_VAR_EX(data, drbg->heap, DYNAMIC_TYPE_TMP_BUFFER); #endif + #ifdef WC_VERBOSE_RNG + if ((ret != DRBG_SUCCESS) && (ret != DRBG_FAILURE)) { + /* Note, if we're just going to return DRBG_FAILURE to the caller, then + * there's no point printing it out here because (1) the lower-level + * code that was remapped to DRBG_FAILURE already got printed before the + * remapping, so a DRBG_FAILURE message would just be spamming the log, + * and (2) the caller will actually see the DRBG_FAILURE code, and is + * free to (and probably will) log it itself. + */ + WOLFSSL_DEBUG_PRINTF("ERROR: Hash_gen failed with err %d.", ret); + } + #endif + return (ret == 0) ? DRBG_SUCCESS : DRBG_FAILURE; } @@ -635,6 +664,14 @@ static int Hash_DRBG_Generate(DRBG_internal* drbg, byte* out, word32 outSz) #endif } + #ifdef WC_VERBOSE_RNG + if ((ret != DRBG_SUCCESS) && (ret != DRBG_FAILURE)) { + /* see note above regarding log spam reduction */ + WOLFSSL_DEBUG_PRINTF("ERROR: Hash_DRBG_Generate failed with err %d.", + ret); + } + #endif + return (ret == 0) ? DRBG_SUCCESS : DRBG_FAILURE; } @@ -715,7 +752,12 @@ int wc_RNG_TestSeed(const byte* seed, word32 seedSz) /* Check the seed for duplicate words. 
*/ word32 seedIdx = 0; - word32 scratchSz = min(SEED_BLOCK_SZ, seedSz - SEED_BLOCK_SZ); + word32 scratchSz = 0; + + if (seed == NULL || seedSz < SEED_BLOCK_SZ) + return BAD_FUNC_ARG; + + scratchSz = min(SEED_BLOCK_SZ, seedSz - SEED_BLOCK_SZ); while (seedIdx < seedSz - SEED_BLOCK_SZ) { if (ConstantCompare(seed + seedIdx, @@ -825,6 +867,11 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, } #endif +#if defined(WOLFSSL_KEEP_RNG_SEED_FD_OPEN) && !defined(USE_WINDOWS_API) + if (!rng->seed.seedFdOpen) + rng->seed.fd = XBADFD; +#endif + #ifdef CUSTOM_RAND_GENERATE_BLOCK ret = 0; /* success */ #else @@ -926,6 +973,11 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, else { ret = seedCb(&rng->seed, seed, seedSz); if (ret != 0) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "ERROR: seedCb in _InitRng() failed with err = %d", + ret); +#endif ret = DRBG_FAILURE; } } @@ -935,6 +987,10 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, if (ret != 0) { #if defined(DEBUG_WOLFSSL) WOLFSSL_MSG_EX("Seed generation failed... %d", ret); + #elif defined(WC_VERBOSE_RNG) + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_GenerateSeed() in _InitRng() failed with err %d", + ret); #endif ret = DRBG_FAILURE; rng->status = DRBG_FAILED; @@ -946,7 +1002,14 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, if (ret != 0) { WOLFSSL_MSG_EX("wc_RNG_TestSeed failed... 
%d", ret); } + #elif defined(WC_VERBOSE_RNG) + if (ret != DRBG_SUCCESS) { + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_RNG_TestSeed() in _InitRng() returned err %d.", + ret); + } #endif + if (ret == DRBG_SUCCESS) ret = Hash_DRBG_Instantiate((DRBG_internal *)rng->drbg, #if defined(HAVE_FIPS) || !defined(WOLFSSL_RNG_USE_FULL_SEED) @@ -1120,19 +1183,35 @@ static int PollAndReSeed(WC_RNG* rng) else { ret = seedCb(&rng->seed, newSeed, SEED_SZ + SEED_BLOCK_SZ); if (ret != 0) { + #ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF("ERROR: seedCb() in PollAndReSeed() " + "failed with err %d", ret); + #endif ret = DRBG_FAILURE; } } #else ret = wc_GenerateSeed(&rng->seed, newSeed, SEED_SZ + SEED_BLOCK_SZ); - #endif - if (ret != 0) + if (ret != 0) { + #ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_GenerateSeed() in PollAndReSeed() failed with " + "err %d", ret); + #endif ret = DRBG_FAILURE; + } + #endif } - if (ret == DRBG_SUCCESS) + if (ret == DRBG_SUCCESS) { ret = wc_RNG_TestSeed(newSeed, SEED_SZ + SEED_BLOCK_SZ); - + #ifdef WC_VERBOSE_RNG + if (ret != DRBG_SUCCESS) + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_RNG_TestSeed() in PollAndReSeed() returned " + "err %d.", ret); + #endif + } if (ret == DRBG_SUCCESS) ret = Hash_DRBG_Reseed((DRBG_internal *)rng->drbg, newSeed + SEED_BLOCK_SZ, SEED_SZ); @@ -1154,8 +1233,12 @@ static int PollAndReSeed(WC_RNG* rng) #endif /* place a generated block in output */ +#ifdef WC_RNG_BANK_SUPPORT +static int wc_local_RNG_GenerateBlock(WC_RNG* rng, byte* output, word32 sz) +#else WOLFSSL_ABI int wc_RNG_GenerateBlock(WC_RNG* rng, byte* output, word32 sz) +#endif { int ret; @@ -1202,6 +1285,11 @@ int wc_RNG_GenerateBlock(WC_RNG* rng, byte* output, word32 sz) #ifdef CUSTOM_RAND_GENERATE_BLOCK XMEMSET(output, 0, sz); ret = (int)CUSTOM_RAND_GENERATE_BLOCK(output, sz); + #ifdef WC_VERBOSE_RNG + if (ret != 0) + WOLFSSL_DEBUG_PRINTF( + "ERROR: CUSTOM_RAND_GENERATE_BLOCK failed with err %d.", ret); + #endif #else #ifdef HAVE_HASHDRBG @@ -1251,6 +1339,42 @@ int 
wc_RNG_GenerateBlock(WC_RNG* rng, byte* output, word32 sz) return ret; } +#ifdef WC_RNG_BANK_SUPPORT +WOLFSSL_ABI +int wc_RNG_GenerateBlock(WC_RNG* rng, byte* output, word32 sz) +{ + if (rng == NULL) + return BAD_FUNC_ARG; + + if (rng->status == WC_DRBG_BANKREF) { + int ret; + struct wc_rng_bank_inst *bank_inst = NULL; + + ret = wc_local_rng_bank_checkout_for_bankref(rng->bankref, &bank_inst); + if (ret != 0) + return ret; + if (bank_inst == NULL) + return BAD_STATE_E; + ret = wc_local_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(bank_inst), + output, sz); + { + int checkin_ret = wc_rng_bank_checkin(rng->bankref, &bank_inst); + if (checkin_ret != 0) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_RNG_GenerateBlock() wc_rng_bank_checkin() " + "failed with err %d.", checkin_ret); +#endif + if (ret == 0) + ret = checkin_ret; + } + } + return ret; + } + else + return wc_local_RNG_GenerateBlock(rng, output, sz); +} +#endif int wc_RNG_GenerateByte(WC_RNG* rng, byte* b) { @@ -1265,6 +1389,11 @@ int wc_FreeRng(WC_RNG* rng) if (rng == NULL) return BAD_FUNC_ARG; +#ifdef WC_RNG_BANK_SUPPORT + if (rng->status == WC_DRBG_BANKREF) + return wc_BankRef_Release(rng); +#endif /* WC_RNG_BANK_SUPPORT */ + #if defined(WOLFSSL_ASYNC_CRYPT) wolfAsync_DevCtxFree(&rng->asyncDev, WOLFSSL_ASYNC_MARKER_RNG); #endif @@ -1304,6 +1433,15 @@ int wc_FreeRng(WC_RNG* rng) ret = WC_HW_E; #endif +#if defined(WOLFSSL_KEEP_RNG_SEED_FD_OPEN) && defined(XCLOSE) && \ + !defined(USE_WINDOWS_API) + if(rng->seed.seedFdOpen && rng->seed.fd != XBADFD) { + XCLOSE(rng->seed.fd); + rng->seed.fd = XBADFD; + rng->seed.seedFdOpen = 0; + } +#endif + return ret; } @@ -1803,12 +1941,49 @@ static int wc_GenerateSeed_IntelRD(OS_Seed* os, byte* output, word32 sz) { int ret; word64 rndTmp; + static int rdseed_sanity_status = 0; (void)os; if (!IS_INTEL_RDSEED(intel_flags)) return -1; + /* Note, access to rdseed_sanity_status is benignly racey on multithreaded + * targets. 
+ */ + if (rdseed_sanity_status == 0) { + word64 sanity_word1 = 0, sanity_word2 = 0; + + ret = IntelRDseed64_r(&sanity_word1); + if (ret != 0) + return ret; + + ret = IntelRDseed64_r(&sanity_word2); + if (ret != 0) + return ret; + + if (sanity_word1 == sanity_word2) { + ret = IntelRDseed64_r(&sanity_word1); + if (ret != 0) + return ret; + + if (sanity_word1 == sanity_word2) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: disabling RDSEED due to repeating word 0x%lx -- " + "check CPU microcode version.", sanity_word2); +#endif + rdseed_sanity_status = -1; + return -1; + } + } + + rdseed_sanity_status = 1; + } + else if (rdseed_sanity_status < 0) { + return -1; + } + for (; (sz / sizeof(word64)) > 0; sz -= sizeof(word64), output += sizeof(word64)) { ret = IntelRDseed64_r((word64*)output); @@ -3002,32 +3177,10 @@ int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) #elif defined(WOLFSSL_LINUXKM) - /* When registering the kernel default DRBG with a native/intrinsic entropy - * source, fallback to get_random_bytes() isn't allowed because we replace - * it with our DRBG. 
- */ + #ifndef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT + #include + #endif - #if defined(HAVE_ENTROPY_MEMUSE) && \ - defined(LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT) - - int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) - { - (void)os; - return wc_Entropy_Get(MAX_ENTROPY_BITS, output, sz); - } - - #elif (defined(HAVE_INTEL_RDSEED) || defined(HAVE_AMD_RDSEED)) && \ - defined(LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT) - - int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) - { - (void)os; - return wc_GenerateSeed_IntelRD(NULL, output, sz); - } - - #else /* !((HAVE_ENTROPY_MEMUSE || HAVE_*_RDSEED) && LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT) */ - - #include int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) { (void)os; @@ -3035,11 +3188,9 @@ int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) #ifdef HAVE_ENTROPY_MEMUSE ret = wc_Entropy_Get(MAX_ENTROPY_BITS, output, sz); - if (ret == 0) { + if (ret == 0) return 0; - } #ifdef ENTROPY_MEMUSE_FORCE_FAILURE - /* Don't fallback to /dev/urandom. */ return ret; #endif #endif @@ -3047,23 +3198,30 @@ int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) #if defined(HAVE_INTEL_RDSEED) || defined(HAVE_AMD_RDSEED) if (IS_INTEL_RDSEED(intel_flags)) { ret = wc_GenerateSeed_IntelRD(NULL, output, sz); - #ifndef FORCE_FAILURE_RDSEED if (ret == 0) - #endif - { - return ret; - } + return 0; + #ifdef FORCE_FAILURE_RDSEED + return ret; + #endif } #endif /* HAVE_INTEL_RDSEED || HAVE_AMD_RDSEED */ + #ifdef LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT + #if !defined(HAVE_ENTROPY_MEMUSE) && \ + !defined(HAVE_INTEL_RDSEED) && \ + !defined(HAVE_AMD_RDSEED) + #error LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT requires an intrinsic entropy source. 
+ #else + return ret; + #endif + #else (void)ret; get_random_bytes(output, sz); return 0; + #endif } - #endif /* !(HAVE_*_RDSEED && LINUXKM_LKCAPI_REGISTER_HASH_DRBG_DEFAULT) */ - #elif defined(WOLFSSL_BSDKM) #include int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) @@ -3497,30 +3655,56 @@ int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) } #endif - #endif /* (!HAVE_INTEL_RDSEED && !HAVE_AMD_RDSEED) || !FORCE_FAILURE_RDSEED */ - - #endif /*!HAVE_ENTROPY_MEMUSE || !ENTROPY_MEMUSE_FORCE_FAILURE */ - #ifndef NO_FILESYSTEM - #ifndef NO_DEV_URANDOM /* way to disable use of /dev/urandom */ + #ifdef WOLFSSL_KEEP_RNG_SEED_FD_OPEN + if (!os->seedFdOpen) + { + #ifndef NO_DEV_URANDOM /* way to disable use of /dev/urandom */ + os->fd = open("/dev/urandom", O_RDONLY); + #if defined(DEBUG_WOLFSSL) + WOLFSSL_MSG("opened /dev/urandom."); + #endif /* DEBUG_WOLFSSL */ + if (os->fd == XBADFD) + #endif /* NO_DEV_URANDOM */ + { + /* may still have /dev/random */ + os->fd = open("/dev/random", O_RDONLY); + #if defined(DEBUG_WOLFSSL) + WOLFSSL_MSG("opened /dev/random."); + #endif /* DEBUG_WOLFSSL */ + if (os->fd == XBADFD) + return OPEN_RAN_E; + else { + os->keepSeedFdOpen = 0; + os->seedFdOpen = 1; + } + } + else { + os->keepSeedFdOpen = 1; + os->seedFdOpen = 1; + } + } + #else /* WOLFSSL_KEEP_RNG_SEED_FD_OPEN */ + #ifndef NO_DEV_URANDOM /* way to disable use of /dev/urandom */ os->fd = open("/dev/urandom", O_RDONLY); #if defined(DEBUG_WOLFSSL) WOLFSSL_MSG("opened /dev/urandom."); - #endif - if (os->fd == -1) - #endif + #endif /* DEBUG_WOLFSSL */ + if (os->fd == XBADFD) + #endif /* !NO_DEV_URANDOM */ { /* may still have /dev/random */ os->fd = open("/dev/random", O_RDONLY); - #if defined(DEBUG_WOLFSSL) + #if defined(DEBUG_WOLFSSL) WOLFSSL_MSG("opened /dev/random."); - #endif - if (os->fd == -1) + #endif /* DEBUG_WOLFSSL */ + if (os->fd == XBADFD) return OPEN_RAN_E; } + #endif /* WOLFSSL_KEEP_RNG_SEED_FD_OPEN */ #if defined(DEBUG_WOLFSSL) WOLFSSL_MSG("rnd read..."); - 
#endif + #endif /* DEBUG_WOLFSSL */ while (sz) { int len = (int)read(os->fd, output, sz); if (len == -1) { @@ -3537,17 +3721,31 @@ int wc_GenerateSeed(OS_Seed* os, byte* output, word32 sz) #else ret = RAN_BLOCK_E; break; - #endif + #endif /* BLOCKING || WC_RNG_BLOCKING */ } } + #ifdef WOLFSSL_KEEP_RNG_SEED_FD_OPEN + if (!os->keepSeedFdOpen && os->seedFdOpen) + { + close(os->fd); + os->fd = -1; + os->seedFdOpen = 0; + } + #else close(os->fd); -#else + #endif /* WOLFSSL_KEEP_RNG_SEED_FD_OPEN */ +#else /* NO_FILESYSTEM */ (void)output; (void)sz; ret = NOT_COMPILED_IN; #endif /* NO_FILESYSTEM */ return ret; + + #endif /* (!HAVE_INTEL_RDSEED && !HAVE_AMD_RDSEED) || !FORCE_FAILURE_RDSEED */ + + #endif /*!HAVE_ENTROPY_MEMUSE || !ENTROPY_MEMUSE_FORCE_FAILURE */ + } #endif diff --git a/wolfcrypt/src/rng_bank.c b/wolfcrypt/src/rng_bank.c new file mode 100644 index 000000000..5bb5d9d35 --- /dev/null +++ b/wolfcrypt/src/rng_bank.c @@ -0,0 +1,739 @@ +/* rng_bank.c + * + * Copyright (C) 2006-2026 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#include + +#ifdef WC_RNG_BANK_SUPPORT + +#include +#include + +WOLFSSL_API int wc_rng_bank_init( + struct wc_rng_bank *ctx, + int n_rngs, + word32 flags, + int timeout_secs, + void *heap, + int devId) +{ + int i; + int ret; + int need_reenable_vec = 0; + + if ((ctx == NULL) || (n_rngs <= 0)) + return BAD_FUNC_ARG; + + XMEMSET(ctx, 0, sizeof(*ctx)); + + wolfSSL_RefInit(&ctx->refcount, &ret); + if (ret != 0) + return ret; + + ctx->flags = flags | WC_RNG_BANK_FLAG_INITED; + ctx->heap = heap; + +#ifdef WC_RNG_BANK_STATIC + if (n_rngs > WC_RNG_BANK_STATIC_SIZE) + return BAD_LENGTH_E; +#else + ctx->rngs = (struct wc_rng_bank_inst *) + XMALLOC(sizeof(*ctx->rngs) * (size_t)n_rngs, + heap, DYNAMIC_TYPE_RNG); + if (! ctx->rngs) + ret = MEMORY_E; +#endif + + if (ret == 0) { + XMEMSET(ctx->rngs, 0, sizeof(*ctx->rngs) * (size_t)n_rngs); + ctx->n_rngs = n_rngs; + + for (i = 0; i < n_rngs; ++i) { +#ifdef WC_VERBOSE_RNG + int nretries = 0; +#endif + time_t ts1 = XTIME(0); + for (;;) { + time_t ts2; + + if (flags & WC_RNG_BANK_FLAG_NO_VECTOR_OPS) + need_reenable_vec = (DISABLE_VECTOR_REGISTERS() == 0); + ret = wc_InitRngNonce_ex( + WC_RNG_BANK_INST_TO_RNG(ctx->rngs + i), + (byte *)&ctx->rngs[i], sizeof(byte *), heap, devId); + + if (need_reenable_vec) + REENABLE_VECTOR_REGISTERS(); + /* if we're allowed to sleep, relax the loop between each inner + * iteration even on success, assuring relaxation of the outer + * iterations. + */ + WC_RELAX_LONG_LOOP(); + if (ret == 0) + break; + /* Allow interrupt only if we're stuck spinning retries -- i.e., + * don't allow an untimely user signal to derail an + * initialization that is proceeding expeditiously. 
+ */ + ret = WC_CHECK_FOR_INTR_SIGNALS(); + if (ret == WC_NO_ERR_TRACE(INTERRUPTED_E)) + break; + ts2 = XTIME(0); + if (ts2 - ts1 > timeout_secs) { + ret = WC_TIMEOUT_E; + break; + } +#ifdef WC_VERBOSE_RNG + ++nretries; +#endif + } + if (ret != 0) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_InitRng returned %d after %d retries.\n", ret, + nretries); +#endif + break; + } + } + } + + if (ret != 0) + (void)wc_rng_bank_fini(ctx); + + return ret; +} + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_bank_new( + struct wc_rng_bank **ctx, + int n_rngs, + word32 flags, + int timeout_secs, + void *heap, + int devId) +{ + int ret; + + if ((ctx == NULL) || (n_rngs <= 0)) + return BAD_FUNC_ARG; + + *ctx = (struct wc_rng_bank *)XMALLOC(sizeof(struct wc_rng_bank), heap, DYNAMIC_TYPE_RNG); + if (*ctx == NULL) + return MEMORY_E; + + ret = wc_rng_bank_init(*ctx, n_rngs, flags, timeout_secs, heap, devId); + + if (ret != 0) { + XFREE(*ctx, heap, DYNAMIC_TYPE_RNG); + *ctx = NULL; + } + + return ret; +} +#endif /* !WC_RNG_BANK_STATIC */ + +WOLFSSL_API int wc_rng_bank_set_affinity_handlers( + struct wc_rng_bank *ctx, + wc_affinity_lock_fn_t affinity_lock_cb, + wc_affinity_get_id_fn_t affinity_get_id_cb, + wc_affinity_unlock_fn_t affinity_unlock_cb, + void *cb_arg) +{ + if ((ctx == NULL) || + (! (ctx->flags & WC_RNG_BANK_FLAG_INITED))) + { + return BAD_FUNC_ARG; + } + if ((affinity_lock_cb == NULL) ^ (affinity_unlock_cb == NULL)) + return BAD_FUNC_ARG; + if (wolfSSL_RefCur(ctx->refcount) != 1) + return BUSY_E; + ctx->affinity_lock_cb = affinity_lock_cb; + ctx->affinity_get_id_cb = affinity_get_id_cb; + ctx->affinity_unlock_cb = affinity_unlock_cb; + ctx->cb_arg = cb_arg; + return 0; +} + +WOLFSSL_API int wc_rng_bank_fini(struct wc_rng_bank *ctx) { + int i; + + if (ctx == NULL) + return BAD_FUNC_ARG; + + if (ctx->flags == WC_RNG_BANK_FLAG_NONE) + return 0; + + if (! 
(ctx->flags & WC_RNG_BANK_FLAG_INITED)) + return BAD_FUNC_ARG; + + if (wolfSSL_RefCur(ctx->refcount) > 1) + return BUSY_E; + +#ifndef WC_RNG_BANK_STATIC + if (ctx->rngs) +#endif + { + for (i = 0; i < ctx->n_rngs; ++i) { + if (ctx->rngs[i].lock != 0) { + /* better to leak than to crash. */ +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "BUG: wc_rng_bank_fini() called with RNG #%d still " + "locked.\n", i); +#endif + return BUSY_E; + } + } + + for (i = 0; i < ctx->n_rngs; ++i) { + wc_FreeRng(&ctx->rngs[i].rng); + } + +#ifndef WC_RNG_BANK_STATIC + XFREE(ctx->rngs, ctx->heap, DYNAMIC_TYPE_RNG); + ctx->rngs = NULL; +#endif + ctx->n_rngs = 0; + } + + wolfSSL_RefFree(&ctx->refcount); + + ctx->flags = WC_RNG_BANK_FLAG_NONE; + ctx->cb_arg = NULL; + + return 0; +} + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_bank_free(struct wc_rng_bank **ctx) { + int ret; + void *heap; + + if (ctx == NULL) + return BAD_FUNC_ARG; + + if (*ctx == NULL) + return 0; + + heap = (*ctx)->heap; + + ret = wc_rng_bank_fini(*ctx); + + if (ret == 0) { + XFREE(*ctx, heap, DYNAMIC_TYPE_RNG); + *ctx = NULL; + } + + return ret; +} +#endif /* !WC_RNG_BANK_STATIC */ + +/* wc_rng_bank_checkout() uses atomic operations to get exclusive ownership of a + * DRBG without delay. It expects to be called in uninterruptible context, + * though works fine in any context. When _PREFER_AFFINITY_INST, it starts by + * trying the DRBG matching the local DRBG (usually the current CPU ID, returned + * by bank->affinity_get_id_cb()), and if that doesn't immediately succeed, and + * _CAN_FAIL_OVER_INST, it iterates upward until one succeeds. The first + * attempt will always succeed, even under intense load, unless there is or has + * recently been a reseed or mix-in operation competing with generators. 
+ */ +WOLFSSL_API int wc_rng_bank_checkout( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst, + int preferred_inst_offset, + int timeout_secs, + word32 flags) +{ + int new_lock_value = WC_RNG_BANK_INST_LOCK_HELD; + int ret = 0; + time_t ts1, ts2; + int n_rngs_tried = 0; + + if ((bank == NULL) || + (! (bank->flags & WC_RNG_BANK_FLAG_INITED)) || + (rng_inst == NULL)) + { + return BAD_FUNC_ARG; + } + + if ((flags & WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST) && + (bank->affinity_get_id_cb == NULL)) + { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "BUG: wc_rng_bank_checkout() called with _PREFER_AFFINITY_INST but " + "no _get_id_cb.\n"); +#endif + return BAD_FUNC_ARG; + } + + if (flags & WC_RNG_BANK_FLAG_AFFINITY_LOCK) { + if ((bank->affinity_lock_cb == NULL) || + (bank->affinity_unlock_cb == NULL)) + { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "BUG: wc_rng_bank_checkout() called with _AFFINITY_LOCK but " + "missing _lock_cb.\n"); +#endif + return BAD_FUNC_ARG; + } + ret = bank->affinity_lock_cb(bank->cb_arg); + if (ret == 0) + new_lock_value |= WC_RNG_BANK_INST_LOCK_AFFINITY_LOCKED; + else if (ret != WC_NO_ERR_TRACE(ALREADY_E)) + return ret; + } + + if (flags & WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST) { + preferred_inst_offset = -1; + ret = bank->affinity_get_id_cb(bank->cb_arg, &preferred_inst_offset); + if (ret != 0) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "BUG: bank->affinity_get_id_cb() returned err %d.\n", ret); +#endif + } + else if (((preferred_inst_offset < 0) || + (preferred_inst_offset >= bank->n_rngs))) + { + ret = BAD_INDEX_E; + } + } + else { + if ((preferred_inst_offset < 0) || + (preferred_inst_offset >= bank->n_rngs)) + { + ret = BAD_INDEX_E; + } + } + + if ((timeout_secs > 0) && (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) + ts1 = XTIME(0); + else + ts1 = 0; /* mollify -Wmaybe-uninitialized... 
*/ + + for (; ret == 0;) { + int expected = 0; + + if (wolfSSL_Atomic_Int_CompareExchange( + &bank->rngs[preferred_inst_offset].lock, + &expected, + new_lock_value)) + { + *rng_inst = &bank->rngs[preferred_inst_offset]; + + if ((! (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) && + (((struct DRBG_internal *)(*rng_inst)->rng.drbg)->reseedCtr >= + WC_RESEED_INTERVAL) && + (flags & WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST) && + (n_rngs_tried < bank->n_rngs)) + { + WOLFSSL_ATOMIC_STORE((*rng_inst)->lock, WC_RNG_BANK_INST_LOCK_FREE); + } + else { +#ifdef WC_VERBOSE_RNG + if ((! (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) && + (((struct DRBG_internal *)(*rng_inst)->rng.drbg)->reseedCtr >= + WC_RESEED_INTERVAL)) + { + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_checkout() returning RNG ID %d, " + "currently marked for reseed, to !_CAN_WAIT caller.\n", + preferred_inst_offset); + } + + /* Note that a caller can still encounter a PollAndReSeed() via + * wc_RNG_GenerateBlock() if a call bumps reseedCtr up to + * WC_RESEED_INTERVAL. In kernel mode, the default interval is + * the SP 800-90A max of 2.81E+14, which is unlikely to be + * reached in practice. + */ +#endif + + if ((flags | bank->flags) & WC_RNG_BANK_FLAG_NO_VECTOR_OPS) { + if (DISABLE_VECTOR_REGISTERS() == 0) + WOLFSSL_ATOMIC_STORE((*rng_inst)->lock, new_lock_value | + WC_RNG_BANK_INST_LOCK_VEC_OPS_INH); + } + + return 0; /* Short-circuit return, holding onto RNG and affinity + * locks and vector register inhibition. + */ + } + } + + if (flags & WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST) { + if ((! (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) && + (n_rngs_tried >= bank->n_rngs)) + { + ret = BUSY_E; + break; /* jump to cleanup. */ + } + ++preferred_inst_offset; + if (preferred_inst_offset >= bank->n_rngs) + preferred_inst_offset = 0; + ++n_rngs_tried; + } + else { + if (! (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) { + ret = BUSY_E; + break; /* jump to cleanup. 
*/ + } + } + + if (flags & WC_RNG_BANK_FLAG_AFFINITY_LOCK) + (void)bank->affinity_unlock_cb(bank->cb_arg); + + ret = WC_CHECK_FOR_INTR_SIGNALS(); + if (ret == WC_NO_ERR_TRACE(INTERRUPTED_E)) + return ret; /* immediate return -- no locks held */ + + if (timeout_secs > 0) { + ts2 = XTIME(0); + if (ts2 - ts1 >= timeout_secs) + return WC_TIMEOUT_E; /* immediate return -- no locks held */ + } + WC_RELAX_LONG_LOOP(); + + if (flags & WC_RNG_BANK_FLAG_AFFINITY_LOCK) { + ret = bank->affinity_lock_cb(bank->cb_arg); + if (ret) + return ret; /* immediate return -- no locks held */ + } + + /* Note that we may have been migrated at this point, but it doesn't + * matter -- we only reach this point if we have to retry/iterate. + */ + } + + if (flags & WC_RNG_BANK_FLAG_AFFINITY_LOCK) + (void)bank->affinity_unlock_cb(bank->cb_arg); + + return ret; +} + +#ifdef WC_DRBG_BANKREF +WOLFSSL_LOCAL int wc_local_rng_bank_checkout_for_bankref( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst) +{ + return wc_rng_bank_checkout( + bank, rng_inst, 0, 0, + WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST | + WC_RNG_BANK_FLAG_CAN_WAIT | + ((bank->affinity_get_id_cb != NULL) ? WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST : 0) | + ((bank->affinity_lock_cb != NULL) ? 
WC_RNG_BANK_FLAG_AFFINITY_LOCK : 0)); +} +#endif /* WC_DRBG_BANKREF */ + +WOLFSSL_API int wc_rng_bank_checkin( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst) +{ + int lockval; + + if ((bank == NULL) || (rng_inst == NULL) || (*rng_inst == NULL)) + return BAD_FUNC_ARG; + + lockval = (int)WOLFSSL_ATOMIC_LOAD((*rng_inst)->lock); + + WOLFSSL_ATOMIC_STORE((*rng_inst)->lock, WC_RNG_BANK_INST_LOCK_FREE); + + *rng_inst = NULL; + + if (lockval & WC_RNG_BANK_INST_LOCK_VEC_OPS_INH) + REENABLE_VECTOR_REGISTERS(); + + if (lockval & WC_RNG_BANK_INST_LOCK_AFFINITY_LOCKED) + return bank->affinity_unlock_cb(bank->cb_arg); + else + return 0; +} + +/* note the rng_inst passed to wc_rng_bank_inst_reinit() must have been obtained + * via wc_rng_bank_checkout() to assure that the caller holds the proper locks. + */ +WOLFSSL_API int wc_rng_bank_inst_reinit( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst *rng_inst, + int timeout_secs, + word32 flags) +{ + int ret; + time_t ts1 = 0; + int devId; + + if ((rng_inst == NULL) || + (rng_inst->rng.drbg == NULL)) + { + return BAD_FUNC_ARG; + } + + if ((timeout_secs > 0) && (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) + ts1 = XTIME(0); + +#if defined(WOLFSSL_ASYNC_CRYPT) || defined(WOLF_CRYPTO_CB) + devId = rng_inst->rng.devId; +#else + devId = INVALID_DEVID; +#endif + + wc_FreeRng(&rng_inst->rng); + + for (;;) { + ret = wc_InitRngNonce_ex(WC_RNG_BANK_INST_TO_RNG(rng_inst), + (byte *)&rng_inst, sizeof(byte *), + bank->heap, devId); + if (ret == 0) + break; + if (! 
(flags & WC_RNG_BANK_FLAG_CAN_WAIT)) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_inst_reinit() returning err %d.\n", ret); +#endif + break; + } + + if (timeout_secs > 0) { + time_t ts2 = XTIME(0); + if (ts2 - ts1 >= timeout_secs) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_inst_reinit() timed out, err %d.\n", + ret); +#endif + break; + } + } + } + + return ret; +} + +WOLFSSL_API int wc_rng_bank_seed(struct wc_rng_bank *bank, + const byte* seed, word32 seedSz, + int timeout_secs, + word32 flags) +{ + int ret = 0; + int n; + + if ((bank == NULL) || + (! (bank->flags & WC_RNG_BANK_FLAG_INITED))) + { + return BAD_FUNC_ARG; + } + + if (seedSz == 0) + return 0; + + /* this iteration counts down, whereas the iteration in get_drbg() counts + * up, to assure they can't possibly phase-lock to each other. + */ + for (n = bank->n_rngs - 1; n >= 0; --n) { + struct wc_rng_bank_inst *drbg; + ret = wc_rng_bank_checkout(bank, &drbg, n, timeout_secs, flags); + if (ret != 0) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_seed(): wc_rng_bank_checkout() for " + "inst#%d returned err %d.\n", n, ret); +#endif + break; + } + else if (drbg->rng.drbg == NULL) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_seed(): inst#%d has null .drbg.\n", n); +#endif + ret = BAD_STATE_E; + } + else if ((ret = wc_RNG_DRBG_Reseed(WC_RNG_BANK_INST_TO_RNG(drbg), seed, + seedSz)) != 0) + { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "WARNING: wc_rng_bank_seed(): Hash_DRBG_Reseed() for inst#%d " + "returned %d\n", n, ret); +#endif + } + + (void)wc_rng_bank_checkin(bank, &drbg); + + if (ret != 0) + break; + } + + return ret; +} + +WOLFSSL_API int wc_rng_bank_reseed(struct wc_rng_bank *bank, + int timeout_secs, + word32 flags) +{ + int n; + int ret; + time_t ts1 = 0; + + if (! 
bank) + return BAD_FUNC_ARG; + + if (flags & (WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST | + WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST)) + return BAD_FUNC_ARG; + + if ((timeout_secs > 0) && (flags & WC_RNG_BANK_FLAG_CAN_WAIT)) + ts1 = XTIME(0); + + for (n = bank->n_rngs - 1; n >= 0; --n) { + struct wc_rng_bank_inst *drbg; + + ret = wc_rng_bank_checkout(bank, &drbg, n, timeout_secs, flags); + if (ret != 0) + return ret; + + ((struct DRBG_internal *)drbg->rng.drbg)->reseedCtr = + WC_RESEED_INTERVAL; + + if (flags & WC_RNG_BANK_FLAG_CAN_WAIT) { + byte scratch[4]; + for (;;) { + time_t ts2; + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(drbg), scratch, + (word32)sizeof(scratch)); + if (ret == 0) + break; + if ((timeout_secs <= 0) || + (! (flags & WC_RNG_BANK_FLAG_CAN_WAIT))) + { + break; + } + ts2 = XTIME(0); + if (ts2 - ts1 > timeout_secs) { +#ifdef WC_VERBOSE_RNG + WOLFSSL_DEBUG_PRINTF( + "ERROR: timeout after attempted reseed by " + "wc_RNG_GenerateBlock() for DRBG #%d, err %d.", n, ret); +#endif + ret = WC_TIMEOUT_E; + break; + } + } +#ifdef WC_VERBOSE_RNG + if ((ret != 0) && (ret != WC_NO_ERR_TRACE(WC_TIMEOUT_E))) + WOLFSSL_DEBUG_PRINTF( + "ERROR: wc_crng_reseed() wc_RNG_GenerateBlock() " + "for DRBG #%d returned %d.", n, ret); +#endif + (void)wc_rng_bank_checkin(bank, &drbg); + if (ret == WC_NO_ERR_TRACE(WC_TIMEOUT_E)) + return ret; + ret = WC_CHECK_FOR_INTR_SIGNALS(); + if (ret == WC_NO_ERR_TRACE(INTERRUPTED_E)) + return ret; + WC_RELAX_LONG_LOOP(); + } + else { + (void)wc_rng_bank_checkin(bank, &drbg); + } + } + + return 0; +} + +#ifdef WC_DRBG_BANKREF + +WOLFSSL_API int wc_InitRng_BankRef(struct wc_rng_bank *bank, WC_RNG *rng) +{ + int ret; + + if ((bank == NULL) || + (! 
(bank->flags & WC_RNG_BANK_FLAG_INITED)) || + (rng == NULL)) + { + return BAD_FUNC_ARG; + } + + XMEMSET(rng, 0, sizeof(*rng)); + + wolfSSL_RefInc(&bank->refcount, &ret); + + if (ret != 0) + return ret; + + rng->heap = bank->heap; + rng->status = WC_DRBG_BANKREF; + rng->bankref = bank; + + return 0; +} + +WOLFSSL_API int wc_BankRef_Release(WC_RNG *rng) +{ + int isZero = 0; + int ret = 0; + if (rng->bankref == NULL) + return BAD_FUNC_ARG; + wolfSSL_RefDec(&rng->bankref->refcount, &isZero, &ret); +#ifdef WC_VERBOSE_RNG + if (isZero) + WOLFSSL_DEBUG_PRINTF( + "BUG: wc_BankRef_Release() popped refcount to zero.\n"); +#else + (void)isZero; +#endif + rng->heap = NULL; + rng->status = WC_DRBG_NOT_INIT; + rng->bankref = NULL; + return ret; +} + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_new_bankref(struct wc_rng_bank *bank, WC_RNG **rng) { + int ret; + + if ((bank == NULL) || + (! (bank->flags & WC_RNG_BANK_FLAG_INITED)) || + (rng == NULL)) + { + return BAD_FUNC_ARG; + } + + *rng = (WC_RNG*)XMALLOC(sizeof(WC_RNG), bank->heap, DYNAMIC_TYPE_RNG); + if (*rng == NULL) { + return MEMORY_E; + } + + ret = wc_InitRng_BankRef(bank, *rng); + if (ret != 0) { + XFREE(*rng, bank->heap, DYNAMIC_TYPE_RNG); + *rng = NULL; + } + + return ret; +} +#endif /* !WC_RNG_BANK_STATIC */ + +#endif /* WC_DRBG_BANKREF */ + +#endif /* WC_RNG_BANK_SUPPORT */ diff --git a/wolfcrypt/src/rsa.c b/wolfcrypt/src/rsa.c index 392a93f74..6767231c1 100644 --- a/wolfcrypt/src/rsa.c +++ b/wolfcrypt/src/rsa.c @@ -3115,8 +3115,7 @@ int cc310_RsaSSL_Verify(const byte* in, word32 inLen, byte* sig, #endif /* WOLFSSL_CRYPTOCELL */ #ifndef WOLF_CRYPTO_CB_ONLY_RSA -#if !defined(WOLFSSL_RSA_VERIFY_ONLY) && !defined(TEST_UNPAD_CONSTANT_TIME) && \ - !defined(NO_RSA_BOUNDS_CHECK) +#if !defined(NO_RSA_BOUNDS_CHECK) /* Check that 1 < in < n-1. (Requirement of 800-56B.) 
*/ int RsaFunctionCheckIn(const byte* in, word32 inLen, RsaKey* key, int checkSmallCt) @@ -3158,8 +3157,7 @@ int RsaFunctionCheckIn(const byte* in, word32 inLen, RsaKey* key, return ret; } -#endif /* !WOLFSSL_RSA_VERIFY_ONLY && !TEST_UNPAD_CONSTANT_TIME && - * !NO_RSA_BOUNDS_CHECK */ +#endif /* !NO_RSA_BOUNDS_CHECK */ #endif /* WOLF_CRYPTO_CB_ONLY_RSA */ static int wc_RsaFunction_ex(const byte* in, word32 inLen, byte* out, @@ -3228,6 +3226,17 @@ static int wc_RsaFunction_ex(const byte* in, word32 inLen, byte* out, } #endif /* !WOLFSSL_RSA_VERIFY_ONLY && !TEST_UNPAD_CONSTANT_TIME && \ * !NO_RSA_BOUNDS_CHECK */ +#if !defined(NO_RSA_BOUNDS_CHECK) + if (type == RSA_PUBLIC_DECRYPT && + key->state == RSA_STATE_DECRYPT_EXPTMOD) { + + ret = RsaFunctionCheckIn(in, inLen, key, checkSmallCt); + if (ret != 0) { + RESTORE_VECTOR_REGISTERS(); + return ret; + } + } +#endif #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_RSA) if (key->asyncDev.marker == WOLFSSL_ASYNC_MARKER_RSA && @@ -3783,7 +3792,7 @@ int wc_RsaPrivateDecryptInline(byte* in, word32 inLen, byte** out, RsaKey* key) { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -3807,7 +3816,7 @@ int wc_RsaPrivateDecryptInline_ex(byte* in, word32 inLen, byte** out, { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -3830,7 +3839,7 @@ int wc_RsaPrivateDecrypt(const byte* in, word32 inLen, byte* out, { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -3854,7 +3863,7 @@ int wc_RsaPrivateDecrypt_ex(const byte* in, word32 inLen, byte* out, { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -3877,7 +3886,7 @@ int wc_RsaSSL_VerifyInline(byte* in, 
word32 inLen, byte** out, RsaKey* key) { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -3922,7 +3931,7 @@ int wc_RsaSSL_Verify_ex2(const byte* in, word32 inLen, byte* out, word32 outLen return BAD_FUNC_ARG; } -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) rng = key->rng; #else rng = NULL; @@ -3988,7 +3997,7 @@ int wc_RsaPSS_VerifyInline_ex(byte* in, word32 inLen, byte** out, { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } @@ -4046,7 +4055,7 @@ int wc_RsaPSS_Verify_ex(const byte* in, word32 inLen, byte* out, word32 outLen, { WC_RNG* rng; int ret; -#ifdef WC_RSA_BLINDING +#if defined(WC_RSA_BLINDING) && !defined(WC_NO_RNG) if (key == NULL) { return BAD_FUNC_ARG; } diff --git a/wolfcrypt/src/sha512.c b/wolfcrypt/src/sha512.c index 2b777beb7..6564b438d 100644 --- a/wolfcrypt/src/sha512.c +++ b/wolfcrypt/src/sha512.c @@ -867,6 +867,7 @@ static int InitSha512_Family(wc_Sha512* sha512, void* heap, int devId, return BAD_FUNC_ARG; } + XMEMSET(sha512, 0, sizeof(*sha512)); sha512->heap = heap; #ifdef WOLFSSL_SMALL_STACK_CACHE @@ -884,26 +885,33 @@ static int InitSha512_Family(wc_Sha512* sha512, void* heap, int devId, sha512->devCtx = NULL; #endif - /* call the initialization function pointed to by initfp */ - ret = initfp(sha512); - if (ret != 0) - return ret; - #ifdef WOLFSSL_HASH_KEEP sha512->msg = NULL; - sha512->len = 0; - sha512->used = 0; #endif + /* call the initialization function pointed to by initfp */ + ret = initfp(sha512); + #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA512) - ret = wolfAsync_DevCtxInit(&sha512->asyncDev, + if (ret == 0) { + ret = wolfAsync_DevCtxInit(&sha512->asyncDev, WOLFSSL_ASYNC_MARKER_SHA512, sha512->heap, devId); + } #else (void)devId; #endif /* WOLFSSL_ASYNC_CRYPT */ #ifdef WOLFSSL_IMXRT1170_CAAM - ret 
= wc_CAAM_HashInit(&sha512->hndl, &sha512->ctx, WC_HASH_TYPE_SHA512); + if (ret == 0) + ret = wc_CAAM_HashInit(&sha512->hndl, &sha512->ctx, WC_HASH_TYPE_SHA512); #endif + +#ifdef WOLFSSL_SMALL_STACK_CACHE + if (ret != 0) { + XFREE(sha512->W, sha512->heap, DYNAMIC_TYPE_DIGEST); + sha512->W = NULL; + } +#endif + return ret; } /* InitSha512_Family */ diff --git a/wolfcrypt/src/sp_arm32.c b/wolfcrypt/src/sp_arm32.c index 168168f09..411b69370 100644 --- a/wolfcrypt/src/sp_arm32.c +++ b/wolfcrypt/src/sp_arm32.c @@ -75318,7 +75318,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -75330,9 +75330,9 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_256_mont_sqr_8(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_8(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -75363,6 +75363,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_8(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_8(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -93332,7 +93333,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -93344,9 +93345,9 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, sp_384_mont_sqr_12(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_12(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -93377,6 +93378,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_12(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_12(y, y, t1, 
p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -120444,7 +120446,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -120456,9 +120458,9 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_521_mont_sqr_17(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_17(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -120489,6 +120491,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_17(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_17(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -150311,7 +150314,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -150323,9 +150326,9 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_1024_mont_sqr_32(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_32(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -150356,6 +150359,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_32(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_32(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_arm64.c b/wolfcrypt/src/sp_arm64.c index 71a31716d..82ce529bd 100644 --- a/wolfcrypt/src/sp_arm64.c +++ b/wolfcrypt/src/sp_arm64.c @@ -23280,7 +23280,7 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = 
p->x; y = p->y; @@ -23292,9 +23292,9 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, sp_256_mont_sqr_4(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_4(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -23323,6 +23323,7 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_4(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_4(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -43892,7 +43893,7 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -43904,9 +43905,9 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, sp_384_mont_sqr_6(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_6(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -43937,6 +43938,7 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_6(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_6(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -71836,7 +71838,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -71848,9 +71850,9 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_521_mont_sqr_9(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_9(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -71881,6 +71883,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ 
sp_521_mont_mul_9(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_9(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -115498,7 +115501,7 @@ static void sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -115510,9 +115513,9 @@ static void sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, sp_1024_mont_sqr_16(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_16(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -115543,6 +115546,7 @@ static void sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_16(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_16(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_armthumb.c b/wolfcrypt/src/sp_armthumb.c index 98516b901..efcdca728 100644 --- a/wolfcrypt/src/sp_armthumb.c +++ b/wolfcrypt/src/sp_armthumb.c @@ -100770,7 +100770,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -100782,9 +100782,9 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_256_mont_sqr_8(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_8(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -100815,6 +100815,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_8(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_8(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -111184,7 +111185,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, 
sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -111196,9 +111197,9 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, sp_384_mont_sqr_12(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_12(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -111229,6 +111230,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_12(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_12(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -124383,7 +124385,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -124395,9 +124397,9 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_521_mont_sqr_17(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_17(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -124428,6 +124430,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_17(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_17(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -208692,7 +208695,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -208704,9 +208707,9 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_1024_mont_sqr_32(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_32(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 
- W) */ @@ -208737,6 +208740,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_32(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_32(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_c32.c b/wolfcrypt/src/sp_c32.c index f52cf4485..5caa829c5 100644 --- a/wolfcrypt/src/sp_c32.c +++ b/wolfcrypt/src/sp_c32.c @@ -20536,7 +20536,7 @@ static void sp_256_proj_point_dbl_n_9(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -20548,9 +20548,9 @@ static void sp_256_proj_point_dbl_n_9(sp_point_256* p, int i, sp_256_mont_sqr_9(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_9(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -20581,6 +20581,7 @@ static void sp_256_proj_point_dbl_n_9(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_9(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_9(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -27592,7 +27593,7 @@ static void sp_384_proj_point_dbl_n_15(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -27604,9 +27605,9 @@ static void sp_384_proj_point_dbl_n_15(sp_point_384* p, int i, sp_384_mont_sqr_15(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_15(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -27637,6 +27638,7 @@ static void sp_384_proj_point_dbl_n_15(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_15(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_15(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ 
-34710,7 +34712,7 @@ static void sp_521_proj_point_dbl_n_21(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -34722,9 +34724,9 @@ static void sp_521_proj_point_dbl_n_21(sp_point_521* p, int i, sp_521_mont_sqr_21(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_21(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -34755,6 +34757,7 @@ static void sp_521_proj_point_dbl_n_21(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_21(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_21(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -43099,7 +43102,7 @@ static void sp_1024_proj_point_dbl_n_42(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -43111,9 +43114,9 @@ static void sp_1024_proj_point_dbl_n_42(sp_point_1024* p, int i, sp_1024_mont_sqr_42(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_42(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -43144,6 +43147,7 @@ static void sp_1024_proj_point_dbl_n_42(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_42(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_42(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_c64.c b/wolfcrypt/src/sp_c64.c index 21473732a..7ce2ae398 100644 --- a/wolfcrypt/src/sp_c64.c +++ b/wolfcrypt/src/sp_c64.c @@ -21130,7 +21130,7 @@ static void sp_256_proj_point_dbl_n_5(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -21142,9 +21142,9 @@ static void sp_256_proj_point_dbl_n_5(sp_point_256* p, 
int i, sp_256_mont_sqr_5(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_5(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -21175,6 +21175,7 @@ static void sp_256_proj_point_dbl_n_5(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_5(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_5(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -27653,7 +27654,7 @@ static void sp_384_proj_point_dbl_n_7(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -27665,9 +27666,9 @@ static void sp_384_proj_point_dbl_n_7(sp_point_384* p, int i, sp_384_mont_sqr_7(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_7(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -27698,6 +27699,7 @@ static void sp_384_proj_point_dbl_n_7(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_7(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_7(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -34649,7 +34651,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -34661,9 +34663,9 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_521_mont_sqr_9(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_9(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -34694,6 +34696,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_9(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_9(y, y, t1, p521_mod); + n = n - 1; } 
#ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -42100,7 +42103,7 @@ static void sp_1024_proj_point_dbl_n_18(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -42112,9 +42115,9 @@ static void sp_1024_proj_point_dbl_n_18(sp_point_1024* p, int i, sp_1024_mont_sqr_18(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_18(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -42145,6 +42148,7 @@ static void sp_1024_proj_point_dbl_n_18(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_18(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_18(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_cortexm.c b/wolfcrypt/src/sp_cortexm.c index 801978d12..1d4842f5a 100644 --- a/wolfcrypt/src/sp_cortexm.c +++ b/wolfcrypt/src/sp_cortexm.c @@ -36708,7 +36708,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -36720,9 +36720,9 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, sp_256_mont_sqr_8(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_8(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -36753,6 +36753,7 @@ static void sp_256_proj_point_dbl_n_8(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_8(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_8(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -46642,7 +46643,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -46654,9 
+46655,9 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, sp_384_mont_sqr_12(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_12(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -46687,6 +46688,7 @@ static void sp_384_proj_point_dbl_n_12(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_12(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_12(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -58466,7 +58468,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -58478,9 +58480,9 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, sp_521_mont_sqr_17(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_17(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -58511,6 +58513,7 @@ static void sp_521_proj_point_dbl_n_17(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_17(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_17(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -72851,7 +72854,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -72863,9 +72866,9 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, sp_1024_mont_sqr_32(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_32(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -72896,6 +72899,7 @@ static void sp_1024_proj_point_dbl_n_32(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ 
sp_1024_mont_mul_32(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_32(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/sp_int.c b/wolfcrypt/src/sp_int.c index 0890ffb84..7e72759cf 100644 --- a/wolfcrypt/src/sp_int.c +++ b/wolfcrypt/src/sp_int.c @@ -5579,7 +5579,7 @@ int sp_abs(const sp_int* a, sp_int* r) #endif /* WOLFSSL_SP_INT_NEGATIVE */ #if defined(WOLFSSL_SP_MATH_ALL) || !defined(NO_DH) || defined(HAVE_ECC) || \ - (!defined(NO_RSA) && !defined(WOLFSSL_RSA_VERIFY_ONLY)) + !defined(NO_RSA) /* Compare absolute value of two multi-precision numbers. * * @param [in] a SP integer. @@ -5662,9 +5662,7 @@ int sp_cmp_mag(const sp_int* a, const sp_int* b) #endif #if defined(WOLFSSL_SP_MATH_ALL) || defined(HAVE_ECC) || !defined(NO_DSA) || \ - defined(OPENSSL_EXTRA) || !defined(NO_DH) || \ - (!defined(NO_RSA) && (!defined(WOLFSSL_RSA_VERIFY_ONLY) || \ - defined(WOLFSSL_KEY_GEN))) + defined(OPENSSL_EXTRA) || !defined(NO_DH) || !defined(NO_RSA) /* Compare two multi-precision numbers. * * Assumes a and b are not NULL. @@ -5706,9 +5704,8 @@ static int _sp_cmp(const sp_int* a, const sp_int* b) } #endif -#if (!defined(NO_RSA) && !defined(WOLFSSL_RSA_VERIFY_ONLY)) || \ - !defined(NO_DSA) || defined(HAVE_ECC) || !defined(NO_DH) || \ - defined(WOLFSSL_SP_MATH_ALL) +#if !defined(NO_RSA) || !defined(NO_DSA) || defined(HAVE_ECC) || \ + !defined(NO_DH) || defined(WOLFSSL_SP_MATH_ALL) /* Compare two multi-precision numbers. * * Pointers are compared such that NULL is less than not NULL. @@ -6197,9 +6194,8 @@ int sp_set_int(sp_int* a, unsigned long n) } #endif /* WOLFSSL_SP_MATH_ALL || !NO_RSA */ -#if defined(WOLFSSL_SP_MATH_ALL) || \ - (!defined(NO_RSA) && !defined(WOLFSSL_RSA_VERIFY_ONLY)) || \ - !defined(NO_DH) || defined(HAVE_ECC) +#if defined(WOLFSSL_SP_MATH_ALL) || !defined(NO_RSA) || !defined(NO_DH) || \ + defined(HAVE_ECC) /* Compare a one digit number with a multi-precision number. 
* * When a is NULL, MP_LT is returned. @@ -18258,14 +18254,14 @@ int sp_to_unsigned_bin_len_ct(const sp_int* a, byte* out, int outSz) /* Start at the end of the buffer - least significant byte. */ int j; unsigned int i; - volatile sp_int_digit mask = (sp_int_digit)-1; + byte mask = (byte)-1; sp_int_digit d; /* Put each digit in. */ i = 0; for (j = outSz - 1; j >= 0; ) { unsigned int b; - volatile unsigned int notFull = (i < (unsigned int)a->used - 1); + volatile byte notFull = ctMaskLT((int)i, (int)a->used - 1); d = a->dp[i]; /* Place each byte of a digit into the buffer. */ @@ -18273,7 +18269,7 @@ int sp_to_unsigned_bin_len_ct(const sp_int* a, byte* out, int outSz) out[j--] = (byte)(d & mask); d >>= 8; } - mask &= (sp_int_digit)(-(int)notFull); + mask &= notFull; i += (unsigned int)(1 & mask); } } diff --git a/wolfcrypt/src/sp_x86_64.c b/wolfcrypt/src/sp_x86_64.c index b79217597..dcefe798f 100644 --- a/wolfcrypt/src/sp_x86_64.c +++ b/wolfcrypt/src/sp_x86_64.c @@ -8296,7 +8296,7 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -8308,9 +8308,9 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, sp_256_mont_sqr_4(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_4(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -8339,6 +8339,7 @@ static void sp_256_proj_point_dbl_n_4(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_4(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_4(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -9420,7 +9421,7 @@ static void sp_256_proj_point_dbl_n_avx2_4(sp_point_256* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -9432,9 +9433,9 @@ static void 
sp_256_proj_point_dbl_n_avx2_4(sp_point_256* p, int i, sp_256_mont_sqr_avx2_4(w, z, p256_mod, p256_mp_mod); sp_256_mont_sqr_avx2_4(w, w, p256_mod, p256_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -9463,6 +9464,7 @@ static void sp_256_proj_point_dbl_n_avx2_4(sp_point_256* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_256_mont_mul_avx2_4(y, b, a, p256_mod, p256_mp_mod); sp_256_mont_sub_avx2_4(y, y, t1, p256_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -26979,7 +26981,7 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -26991,9 +26993,9 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, sp_384_mont_sqr_6(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_6(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -27024,6 +27026,7 @@ static void sp_384_proj_point_dbl_n_6(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_384_mont_mul_6(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_6(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -28156,7 +28159,7 @@ static void sp_384_proj_point_dbl_n_avx2_6(sp_point_384* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -28168,9 +28171,9 @@ static void sp_384_proj_point_dbl_n_avx2_6(sp_point_384* p, int i, sp_384_mont_sqr_avx2_6(w, z, p384_mod, p384_mp_mod); sp_384_mont_sqr_avx2_6(w, w, p384_mod, p384_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -28201,6 +28204,7 @@ static void sp_384_proj_point_dbl_n_avx2_6(sp_point_384* p, int i, /* y = 2*A*(B - X) - Y^4 */ 
sp_384_mont_mul_avx2_6(y, b, a, p384_mod, p384_mp_mod); sp_384_mont_sub_avx2_6(y, y, t1, p384_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -51452,7 +51456,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -51464,9 +51468,9 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, sp_521_mont_sqr_9(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_9(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -51497,6 +51501,7 @@ static void sp_521_proj_point_dbl_n_9(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_9(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_9(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -52608,7 +52613,7 @@ static void sp_521_proj_point_dbl_n_avx2_9(sp_point_521* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -52620,9 +52625,9 @@ static void sp_521_proj_point_dbl_n_avx2_9(sp_point_521* p, int i, sp_521_mont_sqr_avx2_9(w, z, p521_mod, p521_mp_mod); sp_521_mont_sqr_avx2_9(w, w, p521_mod, p521_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -52653,6 +52658,7 @@ static void sp_521_proj_point_dbl_n_avx2_9(sp_point_521* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_521_mont_mul_avx2_9(y, b, a, p521_mod, p521_mp_mod); sp_521_mont_sub_avx2_9(y, y, t1, p521_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -92277,7 +92283,7 @@ static void sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -92289,9 +92295,9 @@ static void 
sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, sp_1024_mont_sqr_16(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_16(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -92322,6 +92328,7 @@ static void sp_1024_proj_point_dbl_n_16(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_16(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_16(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ @@ -93406,7 +93413,7 @@ static void sp_1024_proj_point_dbl_n_avx2_16(sp_point_1024* p, int i, sp_digit* x; sp_digit* y; sp_digit* z; - volatile int n = i; + volatile int n = i - 1; x = p->x; y = p->y; @@ -93418,9 +93425,9 @@ static void sp_1024_proj_point_dbl_n_avx2_16(sp_point_1024* p, int i, sp_1024_mont_sqr_avx2_16(w, z, p1024_mod, p1024_mp_mod); sp_1024_mont_sqr_avx2_16(w, w, p1024_mod, p1024_mp_mod); #ifndef WOLFSSL_SP_SMALL - while (--n > 0) + while (n > 0) #else - while (--n >= 0) + while (n >= 0) #endif { /* A = 3*(X^2 - W) */ @@ -93451,6 +93458,7 @@ static void sp_1024_proj_point_dbl_n_avx2_16(sp_point_1024* p, int i, /* y = 2*A*(B - X) - Y^4 */ sp_1024_mont_mul_avx2_16(y, b, a, p1024_mod, p1024_mp_mod); sp_1024_mont_sub_avx2_16(y, y, t1, p1024_mod); + n = n - 1; } #ifndef WOLFSSL_SP_SMALL /* A = 3*(X^2 - W) */ diff --git a/wolfcrypt/src/wc_mlkem.c b/wolfcrypt/src/wc_mlkem.c index 67f34013f..6cd325888 100644 --- a/wolfcrypt/src/wc_mlkem.c +++ b/wolfcrypt/src/wc_mlkem.c @@ -1793,7 +1793,9 @@ int wc_MlKemKey_DecodePublicKey(MlKemKey* key, const unsigned char* in, if (ret == 0) { mlkemkey_decode_public(key->pub, key->pubSeed, p, k); - + ret = mlkem_check_public(key->pub, k); + } + if (ret == 0) { /* Calculate public hash. 
*/ ret = MLKEM_HASH_H(&key->hash, in, len, key->h); } diff --git a/wolfcrypt/src/wc_mlkem_poly.c b/wolfcrypt/src/wc_mlkem_poly.c index b13d9305a..6e8ce95f7 100644 --- a/wolfcrypt/src/wc_mlkem_poly.c +++ b/wolfcrypt/src/wc_mlkem_poly.c @@ -6074,4 +6074,27 @@ void mlkem_to_bytes(byte* b, sword16* p, int k) } } +/** + * Check the public key values are smaller than the modulus. + * + * @param [in] pub Public key - vector. + * @param [in] k Number of polynomials in vector. + * @return 0 when all values are in range. + * @return PUBLIC_KEY_E when at least one value is out of range. + */ +int mlkem_check_public(sword16* pub, int k) +{ + int ret = 0; + int i; + + for (i = 0; i < k * MLKEM_N; i++) { + if (pub[i] >= MLKEM_Q) { + ret = PUBLIC_KEY_E; + break; + } + } + + return ret; +} + #endif /* WOLFSSL_WC_MLKEM */ diff --git a/wolfcrypt/src/wc_port.c b/wolfcrypt/src/wc_port.c index 462e85a6c..4bc487e75 100644 --- a/wolfcrypt/src/wc_port.c +++ b/wolfcrypt/src/wc_port.c @@ -68,7 +68,7 @@ #if defined(WOLFSSL_RENESAS_RX64_HASH) #include #endif -#if defined(WOLFSSL_STSAFEA100) +#ifdef WOLFSSL_STSAFE #include #endif @@ -151,7 +151,11 @@ #endif /* prevent multiple mutex initializations */ -static volatile int initRefCount = 0; +#ifdef WOLFSSL_ATOMIC_OPS + wolfSSL_Atomic_Int initRefCount = WOLFSSL_ATOMIC_INITIALIZER(0); +#else + static int initRefCount = 0; +#endif #if defined(__aarch64__) && defined(WOLFSSL_ARMASM_BARRIER_DETECT) int aarch64_use_sb = 0; @@ -164,7 +168,8 @@ WOLFSSL_ABI int wolfCrypt_Init(void) { int ret = 0; - if (initRefCount == 0) { + int my_initRefCount = wolfSSL_Atomic_Int_FetchAdd(&initRefCount, 1); + if (my_initRefCount == 0) { WOLFSSL_ENTER("wolfCrypt_Init"); #if defined(__aarch64__) && defined(WOLFSSL_ARMASM_BARRIER_DETECT) @@ -298,8 +303,12 @@ int wolfCrypt_Init(void) return ret; } #endif - #if defined(WOLFSSL_STSAFEA100) - stsafe_interface_init(); + #ifdef WOLFSSL_STSAFE + ret = stsafe_interface_init(); + if (ret != 0) { + WOLFSSL_MSG("STSAFE init 
failed"); + return ret; + } #endif #if defined(WOLFSSL_TROPIC01) ret = Tropic01_Init(); @@ -444,8 +453,16 @@ int wolfCrypt_Init(void) return ret; } #endif + + /* increment to 2, to signify successful initialization: */ + (void)wolfSSL_Atomic_Int_FetchAdd(&initRefCount, 1); + } + else { + if (my_initRefCount < 2) { + (void)wolfSSL_Atomic_Int_FetchSub(&initRefCount, 1); + ret = BUSY_E; + } } - initRefCount++; return ret; } @@ -469,12 +486,9 @@ WOLFSSL_ABI int wolfCrypt_Cleanup(void) { int ret = 0; + int my_initRefCount = wolfSSL_Atomic_Int_SubFetch(&initRefCount, 1); - initRefCount--; - if (initRefCount < 0) - initRefCount = 0; - - if (initRefCount == 0) { + if (my_initRefCount == 1) { WOLFSSL_ENTER("wolfCrypt_Cleanup"); #ifdef HAVE_ECC @@ -564,11 +578,18 @@ int wolfCrypt_Cleanup(void) * must be freed. */ wc_MemZero_Free(); #endif - } + + (void)wolfSSL_Atomic_Int_SubFetch(&initRefCount, 1); #if defined(HAVE_LIBOQS) - wolfSSL_liboqsClose(); + wolfSSL_liboqsClose(); #endif + } + else if (my_initRefCount < 0) { + (void)wolfSSL_Atomic_Int_AddFetch(&initRefCount, 1); + WOLFSSL_MSG("wolfCrypt_Cleanup() called with initRefCount <= 0."); + ret = ALREADY_E; + } return ret; } @@ -1462,9 +1483,17 @@ int wolfSSL_Atomic_Ptr_CompareExchange( * atomic_compare_exchange_strong_explicit(), to sidestep _Atomic type * requirements. 
*/ - return __atomic_compare_exchange_n( - c, expected_ptr, new_ptr, 0 /* weak */, - __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE); + if (__atomic_compare_exchange_n( + c, expected_ptr, new_ptr, +#ifdef WOLF_C89 + 0 /* weak */, +#else + (_Bool)0 /* weak */, +#endif + __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE)) + return 1; + else + return 0; } #elif defined(__GNUC__) && defined(__ATOMIC_RELAXED) @@ -1698,6 +1727,7 @@ void wolfSSL_RefWithMutexFree(wolfSSL_RefWithMutex* ref) if (wc_FreeMutex(&ref->mutex) != 0) { WOLFSSL_MSG("Failed to free mutex of reference counting!"); } + ref->count = 0; } void wolfSSL_RefWithMutexInc(wolfSSL_RefWithMutex* ref, int* err) diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index c87ab9c53..037ef6b94 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -325,6 +325,9 @@ static const byte const_byte_array[] = "A+Gd\0\0\0"; #include #if !defined(WC_NO_RNG) #include + #ifdef WC_RNG_BANK_SUPPORT + #include + #endif #endif #include #include @@ -687,6 +690,9 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t dsa_test(void); WOLFSSL_TEST_SUBROUTINE wc_test_ret_t srp_test(void); #ifndef WC_NO_RNG WOLFSSL_TEST_SUBROUTINE wc_test_ret_t random_test(void); +#ifdef WC_RNG_BANK_SUPPORT +WOLFSSL_TEST_SUBROUTINE wc_test_ret_t random_bank_test(void); +#endif #endif /* WC_NO_RNG */ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t pwdbased_test(void); #if defined(USE_CERT_BUFFERS_2048) && \ @@ -770,9 +776,11 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t scrypt_test(void); #endif #ifdef HAVE_BLAKE2 WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2b_test(void); + WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2b_hmac_test(void); #endif #ifdef HAVE_BLAKE2S WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2s_test(void); + WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2s_hmac_test(void); #endif #ifdef HAVE_LIBZ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t compress_test(void); @@ -2133,6 +2141,12 @@ options: [-s max_relative_stack_bytes] [-m max_relative_heap_memory_bytes]\n\ TEST_FAIL("RANDOM test 
failed!\n", ret); else TEST_PASS("RANDOM test passed!\n"); +#ifdef WC_RNG_BANK_SUPPORT + if ((ret = random_bank_test()) != 0) + TEST_FAIL("RNGBANK test failed!\n", ret); + else + TEST_PASS("RNGBANK test passed!\n"); +#endif #endif /* WC_NO_RNG */ #ifdef WOLFSSL_SHAKE128 @@ -2175,12 +2189,20 @@ options: [-s max_relative_stack_bytes] [-m max_relative_heap_memory_bytes]\n\ TEST_FAIL("BLAKE2b test failed!\n", ret); else TEST_PASS("BLAKE2b test passed!\n"); + if ( (ret = blake2b_hmac_test()) != 0) + TEST_FAIL("HMAC-BLAKE2b test failed!\n", ret); + else + TEST_PASS("HMAC-BLAKE2b test passed!\n"); #endif #ifdef HAVE_BLAKE2S if ( (ret = blake2s_test()) != 0) TEST_FAIL("BLAKE2s test failed!\n", ret); else TEST_PASS("BLAKE2s test passed!\n"); + if ( (ret = blake2s_hmac_test()) != 0) + TEST_FAIL("HMAC-BLAKE2s test failed!\n", ret); + else + TEST_PASS("HMAC-BLAKE2s test passed!\n"); #endif #ifndef NO_HMAC @@ -4569,7 +4591,6 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t ripemd_test(void) #ifdef HAVE_BLAKE2 - #define BLAKE2B_TESTS 3 static const byte blake2b_vec[BLAKE2B_TESTS][BLAKE2B_OUTBYTES] = @@ -4639,10 +4660,97 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2b_test(void) return 0; } + +WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2b_hmac_test(void) +{ + static const byte key1[] = {0x41, 0x42, 0x43, 0x44}; /* ABCD */ + static const byte message1[] = {0x48, 0x65, 0x6c, 0x6c, 0x6f}; /* Hello */ + static const byte expected1[] = { + 0x46, 0x76, 0xbb, 0x0e, 0xf8, 0xa1, 0x56, 0x33, + 0xde, 0xdc, 0x44, 0xe3, 0x2b, 0xf3, 0xee, 0x5b, + 0x5f, 0x7f, 0x04, 0x00, 0x2c, 0xaa, 0xd4, 0x93, + 0xc6, 0xa6, 0xb4, 0xf3, 0x14, 0x8d, 0x6d, 0x9c, + 0x6a, 0x12, 0x02, 0x85, 0x66, 0xed, 0x9b, 0x5d, + 0x8d, 0x0e, 0x3d, 0xf4, 0x78, 0xee, 0x5a, 0xf6, + 0x2f, 0x97, 0xa5, 0x77, 0x88, 0x8c, 0xc4, 0x66, + 0x46, 0xb1, 0xba, 0x51, 0x29, 0x19, 0xd7, 0xaa, + }; + static const byte key2[] = { + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, 
0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, + 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33 + }; /* 0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123 */ + static const byte message2[] = { + 0x61, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, 0x63, 0x64, 0x65, 0x66, + 0x64, 0x65, 0x66, 0x67, 0x65, 0x66, 0x67, 0x68, 0x66, 0x67, 0x68, 0x69, + 0x67, 0x68, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, 0x69, 0x6a, 0x6b, 0x6c, + 0x6a, 0x6b, 0x6c, 0x6d, 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6d, 0x6e, 0x6f, 0x70, 0x6e, 0x6f, 0x70, 0x71 + }; /* abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq */ + static const byte expected2[] = { + 0x2a, 0xda, 0xf6, 0x94, 0x79, 0xce, 0xe2, 0xd2, + 0x5d, 0x89, 0x8b, 0xd7, 0x0d, 0xbc, 0x11, 0x1f, + 0x98, 0x99, 0xe0, 0x17, 0x7c, 0x5b, 0x8f, 0x94, + 0xf5, 0x95, 0xbc, 0x1b, 0xb1, 0x95, 0xe8, 0x60, + 0xbb, 0x29, 0xa4, 0xd9, 0x27, 0x2e, 0x00, 0xea, + 0xba, 0xc3, 0x3e, 0xe6, 0x9c, 0xc7, 0xd7, 0x8d, + 0x69, 0xc7, 0xb4, 0xf7, 0x31, 0x4a, 0xb1, 0xf0, + 0x3c, 0xed, 0x06, 0x49, 0x6f, 0x46, 0x99, 0xea, + }; + + byte out[BLAKE2B_OUTBYTES]; + int ret; + Blake2b b2b; + + ret = wc_Blake2bHmac(message1, sizeof(message1), + key1, sizeof(key1), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + if (XMEMCMP(out, expected1, sizeof(out)) != 0) + return WC_TEST_RET_ENC_NC; + + ret = wc_Blake2bHmac(message2, sizeof(message2), + key2, sizeof(key2), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + if (XMEMCMP(out, expected2, sizeof(out)) != 0) + return WC_TEST_RET_ENC_NC; + + ret = wc_Blake2bHmacInit(&b2b, key1, sizeof(key1)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2bHmacUpdate(&b2b, message1, sizeof(message1) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = 
wc_Blake2bHmacUpdate(&b2b, &message1[sizeof(message1) / 2U], sizeof(message1) - sizeof(message1) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2bHmacFinal(&b2b, key1, sizeof(key1), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + + ret = wc_Blake2bHmacInit(&b2b, key2, sizeof(key2)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2bHmacUpdate(&b2b, message2, sizeof(message2) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2bHmacUpdate(&b2b, &message2[sizeof(message2) / 2U], sizeof(message2) - sizeof(message2) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2bHmacFinal(&b2b, key2, sizeof(key2), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + + return 0; +} #endif /* HAVE_BLAKE2 */ -#ifdef HAVE_BLAKE2S +#ifdef HAVE_BLAKE2S #define BLAKE2S_TESTS 3 @@ -4701,6 +4809,82 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2s_test(void) return 0; } + +WOLFSSL_TEST_SUBROUTINE wc_test_ret_t blake2s_hmac_test(void) +{ + static const byte key1[] = {0x41, 0x42, 0x43, 0x44}; /* ABCD */ + static const byte message1[] = {0x48, 0x65, 0x6c, 0x6c, 0x6f}; /* Hello */ + static const byte expected1[] = { + 0x96, 0xca, 0x1d, 0xaa, 0x9a, 0x33, 0x97, 0x3d, + 0xc5, 0x95, 0x3e, 0xce, 0x49, 0x93, 0x75, 0xc1, + 0x2a, 0x7c, 0x8f, 0x5b, 0xf0, 0x28, 0xef, 0xc3, + 0xfb, 0xc5, 0x97, 0xcd, 0xcc, 0x74, 0x44, 0x68, + }; + static const byte key2[] = { + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33 + }; /* 0123456789ABCDEF0123456789ABCDEF0123 */ + static const byte message2[] = { + 0x61, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, 0x63, 0x64, 0x65, 0x66, + 0x64, 0x65, 0x66, 0x67, 0x65, 0x66, 0x67, 0x68, 0x66, 0x67, 0x68, 0x69, + 0x67, 0x68, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, 0x69, 0x6a, 0x6b, 0x6c, + 
0x6a, 0x6b, 0x6c, 0x6d, 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6d, 0x6e, 0x6f, 0x70, 0x6e, 0x6f, 0x70, 0x71 + }; /* abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq */ + static const byte expected2[] = { + 0xc4, 0x63, 0xdb, 0x28, 0x97, 0x60, 0x6a, 0xa7, + 0x1e, 0xe6, 0xcf, 0x93, 0x85, 0x3c, 0x90, 0x71, + 0xea, 0x76, 0x7f, 0x6a, 0xa7, 0x20, 0x80, 0x35, + 0xe1, 0x68, 0x95, 0xfe, 0x65, 0x65, 0x43, 0x76, + }; + + byte out[BLAKE2S_OUTBYTES]; + int ret; + Blake2s b2s; + + ret = wc_Blake2sHmac(message1, sizeof(message1), + key1, sizeof(key1), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + if (XMEMCMP(out, expected1, sizeof(out)) != 0) + return WC_TEST_RET_ENC_NC; + + ret = wc_Blake2sHmac(message2, sizeof(message2), + key2, sizeof(key2), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + if (XMEMCMP(out, expected2, sizeof(out)) != 0) + return WC_TEST_RET_ENC_NC; + + ret = wc_Blake2sHmacInit(&b2s, key1, sizeof(key1)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacUpdate(&b2s, message1, sizeof(message1) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacUpdate(&b2s, &message1[sizeof(message1) / 2U], sizeof(message1) - sizeof(message1) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacFinal(&b2s, key1, sizeof(key1), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + + ret = wc_Blake2sHmacInit(&b2s, key2, sizeof(key2)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacUpdate(&b2s, message2, sizeof(message2) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacUpdate(&b2s, &message2[sizeof(message2) / 2U], sizeof(message2) - sizeof(message2) / 2U); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + ret = wc_Blake2sHmacFinal(&b2s, key2, sizeof(key2), out, sizeof(out)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + + return 0; +} #endif /* HAVE_BLAKE2S */ @@ 
-4851,6 +5035,9 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t sha256_test(void) #endif XMEMSET(&shaCopy, 0, sizeof(shaCopy)); +#ifndef NO_WOLFSSL_SHA256_INTERLEAVE + XMEMSET(&i_shaCopy, 0, sizeof(i_shaCopy)); +#endif for (i = 0; i < times; ++i) { ret = wc_Sha256Update(&sha, (byte*)test_sha[i].input, @@ -7442,7 +7629,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_md5_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -7587,7 +7774,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -7738,7 +7925,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha224_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -7895,7 +8082,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha256_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -8061,7 +8248,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha384_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && 
(!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -8216,7 +8403,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha512_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -8428,7 +8615,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) - WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, + WC_CALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); #endif @@ -20133,7 +20320,453 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t random_test(void) } #endif /* !HAVE_HASHDRBG || CUSTOM_RAND_GENERATE_BLOCK || HAVE_INTEL_RDRAND */ -#endif /* WC_NO_RNG */ + +#ifdef WC_RNG_BANK_SUPPORT + +static char *rng_bank_affinity_lock_lock; +static int rng_bank_affinity_lock(void *arg) { + rng_bank_affinity_lock_lock = (char *)arg; + return 0; +} + +static int rng_bank_affinity_get_id_id; +static int rng_bank_affinity_get_id(void *arg, int *id) { + if (rng_bank_affinity_lock_lock != (char *)arg) + return BAD_STATE_E; + rng_bank_affinity_lock_lock = (char *)arg + 1; + *id = rng_bank_affinity_get_id_id; + return 0; +} + +static int rng_bank_affinity_unlock(void *arg) { + rng_bank_affinity_lock_lock = (char *)arg + 2; + return 0; +} + +WOLFSSL_TEST_SUBROUTINE wc_test_ret_t random_bank_test(void) +{ + int ret; + WC_DECLARE_VAR(bank, struct wc_rng_bank, 1, HEAP_HINT); + struct wc_rng_bank_inst *rng_inst = NULL; +#ifdef WC_DRBG_BANKREF + 
WC_DECLARE_VAR(rng, WC_RNG, 1, HEAP_HINT); +#endif +#ifndef WC_RNG_BANK_STATIC + struct wc_rng_bank *bank2 = NULL; + struct wc_rng_bank_inst *rng_inst2 = NULL; +#ifdef WC_DRBG_BANKREF + WC_RNG *rng2 = NULL; +#endif +#endif /* !WC_RNG_BANK_STATIC */ + static const char bank_arg[] = "hi"; + byte outbuf1[16], outbuf2[16]; + int i; + + WC_CALLOC_VAR_EX(bank, struct wc_rng_bank, 1, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER, + return WC_TEST_RET_ENC_EC(MEMORY_E)); + +#ifdef WC_DRBG_BANKREF + WC_ALLOC_VAR_EX(rng, WC_RNG, 1, HEAP_HINT, + DYNAMIC_TYPE_TMP_BUFFER, + return WC_TEST_RET_ENC_EC(MEMORY_E)); + XMEMSET(rng, 0, sizeof(*rng)); +#endif + + ret = wc_rng_bank_init(NULL, WC_RNG_BANK_STATIC_SIZE, WC_RNG_BANK_FLAG_CAN_WAIT, 10, HEAP_HINT, INVALID_DEVID); + if (ret != WC_NO_ERR_TRACE(BAD_FUNC_ARG)) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + +#ifdef WC_RNG_BANK_STATIC + ret = wc_rng_bank_init(bank, WC_RNG_BANK_STATIC_SIZE + 1, WC_RNG_BANK_FLAG_CAN_WAIT, 10, HEAP_HINT, INVALID_DEVID); + if (ret != WC_NO_ERR_TRACE(BAD_LENGTH_E)) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); +#endif + + ret = wc_rng_bank_init(bank, WC_RNG_BANK_STATIC_SIZE, + WC_RNG_BANK_FLAG_NO_VECTOR_OPS | + WC_RNG_BANK_FLAG_CAN_WAIT, + 10, HEAP_HINT, INVALID_DEVID); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_set_affinity_handlers( + bank, + rng_bank_affinity_lock, + rng_bank_affinity_get_id, + rng_bank_affinity_unlock, + (char *)bank_arg); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 4; + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != WC_NO_ERR_TRACE(BAD_INDEX_E)) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 2; + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst 
!= bank->rngs + 2) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + if (rng_bank_affinity_lock_lock != bank_arg + 1) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + /* if we can, confirm that WC_RNG_BANK_FLAG_NO_VECTOR_OPS worked. */ +#if defined(USE_INTEL_SPEEDUP) && \ + defined(WOLFSSL_KERNEL_MODE) && \ + defined(WOLFSSL_SMALL_STACK_CACHE) && \ + defined(WC_C_DYNAMIC_FALLBACK) && \ + defined(HAVE_HASHDRBG) && \ + defined(WC_NO_INTERNAL_FUNCTION_POINTERS) + if (((struct DRBG_internal *)rng_inst->rng.drbg)->sha256.sha_method != 7 /* SHA256_C */) + ERROR_OUT(WC_TEST_RET_ENC_I(((struct DRBG_internal *)rng_inst->rng.drbg)->sha256.sha_method), out); +#endif + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst != NULL) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + if (rng_bank_affinity_lock_lock != bank_arg + 2) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_checkout(bank, &rng_inst, 3, 10, WC_RNG_BANK_FLAG_NONE); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst != bank->rngs + 3) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 3; + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst != bank->rngs + 3) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf2, sizeof(outbuf2)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst != NULL) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + if 
(XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + +#ifdef WC_DRBG_BANKREF + ret = wc_InitRng_BankRef(bank, rng); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 1; + ret = wc_RNG_GenerateBlock(rng, outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); +#endif + + ret = wc_rng_bank_reseed(bank, 10, WC_RNG_BANK_FLAG_NONE); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + for (i = 0; i < bank->n_rngs; ++i) { + if (((struct DRBG_internal *)bank->rngs[i].rng.drbg) + ->reseedCtr != WC_RESEED_INTERVAL) + { + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); + } + } + + rng_bank_affinity_get_id_id = 0; + /* WC_RNG_BANK_FLAG_CAN_WAIT needed to avoiding warning message that the + * instance needs reseed. + */ + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_CAN_WAIT | WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 1; + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_CAN_WAIT | WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf2, sizeof(outbuf2)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + 
ret = wc_rng_bank_seed(bank, (byte *)bank_arg, (word32)sizeof(bank_arg), 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = 0; + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + /* can't wc_rng_bank_seed() while holding an inst (deadlock/timeout) -- + * check in then check back out. + */ + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_seed(bank, (byte *)bank_arg, (word32)sizeof(bank_arg), 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_checkout(bank, &rng_inst, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf2, sizeof(outbuf2)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + /* even though we passed in the same seed, the state is different, because + * Hash_DRBG_Reseed() chains in the previous state, and also churns in the + * "type" only on reseed. 
+ */ + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_inst_reinit(bank, rng_inst, 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_checkin(bank, &rng_inst); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + +#ifdef WC_DRBG_BANKREF + if (wolfSSL_RefCur(bank->refcount) != 2) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_fini(bank); + if (ret != WC_NO_ERR_TRACE(BUSY_E)) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + wc_FreeRng(rng); + + if (wolfSSL_RefCur(bank->refcount) != 1) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); +#endif + +#ifdef WC_RNG_BANK_STATIC + + ret = 0; + +#else /* !WC_RNG_BANK_STATIC */ + + ret = wc_rng_bank_new(&bank2, WC_RNG_BANK_STATIC_SIZE + 1, WC_RNG_BANK_FLAG_NONE, 10, HEAP_HINT, INVALID_DEVID); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_set_affinity_handlers( + bank2, + rng_bank_affinity_lock, + rng_bank_affinity_get_id, + rng_bank_affinity_unlock, + (char *)bank_arg); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + rng_bank_affinity_get_id_id = WC_RNG_BANK_STATIC_SIZE; + ret = wc_rng_bank_checkout(bank2, &rng_inst2, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng_inst2 != bank2->rngs + WC_RNG_BANK_STATIC_SIZE) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst2), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + /* can't wc_rng_bank_seed() while holding an inst (deadlock/timeout) -- + * check in then check back out. 
+ */ + ret = wc_rng_bank_checkin(bank2, &rng_inst2); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_seed(bank2, (byte *)bank_arg, (word32)sizeof(bank_arg), 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_checkout(bank2, &rng_inst2, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst2), outbuf2, sizeof(outbuf2)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_checkin(bank2, &rng_inst2); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_seed(bank2, (byte *)bank_arg, (word32)sizeof(bank_arg), 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_rng_bank_checkout(bank2, &rng_inst2, -1, 10, WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST | WC_RNG_BANK_FLAG_AFFINITY_LOCK); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst2), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + /* even though we passed in the same seed, the state is different, because + * Hash_DRBG_Reseed() chains in the previous state, and also churns in the + * "type" only on reseed. 
+ */ + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_inst_reinit(bank2, rng_inst2, 10, WC_RNG_BANK_FLAG_CAN_WAIT); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + ret = wc_RNG_GenerateBlock(WC_RNG_BANK_INST_TO_RNG(rng_inst2), outbuf1, sizeof(outbuf1)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_checkin(bank2, &rng_inst2); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + +#ifdef WC_DRBG_BANKREF + ret = wc_rng_new_bankref(bank2, &rng2); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (rng2 == NULL) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + rng_bank_affinity_get_id_id = 1; + ret = wc_RNG_GenerateBlock(rng2, outbuf2, sizeof(outbuf2)); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + + if (XMEMCMP(outbuf1, outbuf2, sizeof(outbuf1)) == 0) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + if (wolfSSL_RefCur(bank2->refcount) != 2) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + ret = wc_rng_bank_free(&bank2); + if (ret != WC_NO_ERR_TRACE(BUSY_E)) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + + wc_rng_free(rng2); + rng2 = NULL; + + if (wolfSSL_RefCur(bank2->refcount) != 1) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + +#endif /* WC_DRBG_BANKREF */ + + ret = wc_rng_bank_free(&bank2); + if (ret != 0) + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + if (bank2 != NULL) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); + +#endif /* !WC_RNG_BANK_STATIC */ + +out: + + { + int cleanup_ret; + +#ifdef WC_DRBG_BANKREF + cleanup_ret = wc_FreeRng(rng); + if ((cleanup_ret != 0) && (ret == 0)) + ret = WC_TEST_RET_ENC_EC(cleanup_ret); + WC_FREE_VAR_EX(rng, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); +#endif /* WC_DRBG_BANKREF */ + if (rng_inst) { + cleanup_ret = wc_rng_bank_checkin(bank, &rng_inst); + if ((cleanup_ret != 0) && (ret == 0)) + ret = WC_TEST_RET_ENC_EC(cleanup_ret); + if 
((rng_inst != NULL) && (ret == 0)) + ret = WC_TEST_RET_ENC_NC; + } + cleanup_ret = wc_rng_bank_fini(bank); + if ((cleanup_ret != 0) && (ret == 0)) + ret = WC_TEST_RET_ENC_EC(cleanup_ret); + WC_FREE_VAR_EX(bank, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + +#ifndef WC_RNG_BANK_STATIC +#ifdef WC_DRBG_BANKREF + if (rng2) + wc_rng_free(rng2); +#endif + if (rng_inst2) { + cleanup_ret = wc_rng_bank_checkin(bank2, &rng_inst2); + if ((cleanup_ret != 0) && (ret == 0)) + ret = WC_TEST_RET_ENC_EC(cleanup_ret); + if ((rng_inst2 != NULL) && (ret == 0)) + ret = WC_TEST_RET_ENC_NC; + } + cleanup_ret = wc_rng_bank_free(&bank2); + if ((cleanup_ret != 0) && (ret == 0)) + ret = WC_TEST_RET_ENC_EC(cleanup_ret); + if ((bank2 != NULL) && (ret == 0)) + ret = WC_TEST_RET_ENC_NC; +#endif /* !WC_RNG_BANK_STATIC */ + } + + return ret; +} + +#endif /* WC_RNG_BANK_SUPPORT */ + +#endif /* !WC_NO_RNG */ #ifndef MEM_TEST_SZ #define MEM_TEST_SZ 1024 @@ -31983,7 +32616,7 @@ static wc_test_ret_t ecc_test_deterministic_k(WC_RNG* rng) #endif WC_ALLOC_VAR_EX(key, ecc_key, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, - return MEMORY_E); + return WC_TEST_RET_ENC_EC(MEMORY_E)); ret = wc_ecc_init_ex(key, HEAP_HINT, devId); if (ret != 0) @@ -36140,7 +36773,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t ecc_test(void) ECC_MIN_KEY_SZ <= 521 ret = ecc521_test_deterministic_k(&rng); if (ret != 0) { - printf("ecc512_test_deterministic_k failed!\n"); + printf("ecc521_test_deterministic_k failed!\n"); goto done; } #endif @@ -48953,13 +49586,13 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t dilithium_test(void) #ifndef WOLFSSL_DILITHIUM_NO_VERIFY ret = dilithium_param_44_vfy_test(); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif #ifndef WOLFSSL_DILITHIUM_NO_MAKE_KEY ret = dilithium_param_test(WC_ML_DSA_44, &rng); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif #ifndef WOLFSSL_NO_ML_DSA_65 @@ -48967,13 +49600,13 @@ WOLFSSL_TEST_SUBROUTINE 
wc_test_ret_t dilithium_test(void) #ifndef WOLFSSL_DILITHIUM_NO_VERIFY ret = dilithium_param_65_vfy_test(); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif #ifndef WOLFSSL_DILITHIUM_NO_MAKE_KEY ret = dilithium_param_test(WC_ML_DSA_65, &rng); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif #ifndef WOLFSSL_NO_ML_DSA_87 @@ -48981,13 +49614,13 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t dilithium_test(void) #ifndef WOLFSSL_DILITHIUM_NO_VERIFY ret = dilithium_param_87_vfy_test(); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif #ifndef WOLFSSL_DILITHIUM_NO_MAKE_KEY ret = dilithium_param_test(WC_ML_DSA_87, &rng); if (ret != 0) - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); #endif #endif @@ -48997,7 +49630,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t dilithium_test(void) !defined(WOLFSSL_DILITHIUM_NO_VERIFY)) ret = dilithium_decode_test(); if (ret != 0) { - ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); + ERROR_OUT(ret, out); } #endif /* (WOLFSSL_DILITHIUM_PUBLIC_KEY && !WOLFSSL_DILITHIUM_NO_VERIFY) || * (WOLFSSL_DILITHIUM_PRIVATE_KEY && !WOLFSSL_DILITHIUM_NO_SIGN) */ @@ -49044,8 +49677,13 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t xmss_test(void) word32 skSz = 0; word32 sigSz = 0; word32 bufSz = 0; - unsigned char * sk = NULL; - unsigned char * old_sk = NULL; +#ifdef WOLFSSL_NO_MALLOC + static byte sk[2048]; + static byte old_sk[2048]; +#else + byte * sk = NULL; + byte * old_sk = NULL; +#endif const char * msg = "XMSS post quantum signature test"; word32 msgSz = (word32) XSTRLEN(msg); #if WOLFSSL_XMSS_MIN_HEIGHT <= 10 @@ -49057,7 +49695,11 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t xmss_test(void) #else const char * param = "XMSSMT-SHA2_60/12_256"; #endif +#ifdef WOLFSSL_NO_MALLOC + static byte sig[4096]; +#else byte * sig = NULL; +#endif int ret2 = -1; int ret = WC_TEST_RET_ENC_NC; WOLFSSL_ENTER("xmss_test"); @@ -49094,8 +49736,13 
@@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t xmss_test(void) if (ret != 0) { ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); } /* Allocate signature array. */ +#ifdef WOLFSSL_NO_MALLOC + if (sigSz > sizeof(sig)) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); +#else sig = (byte *)XMALLOC(sigSz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); if (sig == NULL) { ERROR_OUT(WC_TEST_RET_ENC_ERRNO, out); } +#endif bufSz = sigSz; @@ -49107,11 +49754,16 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t xmss_test(void) #endif /* Allocate current and old secret keys.*/ +#ifdef WOLFSSL_NO_MALLOC + if (skSz > sizeof(sk)) + ERROR_OUT(WC_TEST_RET_ENC_NC, out); +#else sk = (unsigned char *)XMALLOC(skSz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); if (sk == NULL) { ERROR_OUT(WC_TEST_RET_ENC_ERRNO, out); } old_sk = (unsigned char *)XMALLOC(skSz, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); if (old_sk == NULL) { ERROR_OUT(WC_TEST_RET_ENC_ERRNO, out); } +#endif XMEMSET(sk, 0, skSz); XMEMSET(old_sk, 0, skSz); @@ -49172,6 +49824,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t xmss_test(void) out: /* Cleanup everything. 
*/ +#ifndef WOLFSSL_NO_MALLOC XFREE(sig, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); sig = NULL; @@ -49180,6 +49833,7 @@ out: XFREE(old_sk, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); old_sk = NULL; +#endif /* !WOLFSSL_NO_MALLOC */ wc_XmssKey_Free(&signingKey); wc_XmssKey_Free(&verifyKey); @@ -49694,6 +50348,9 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t lms_test(void) int sigsLeft = 0; LmsKey signingKey; LmsKey verifyKey; +#if defined(WOLFSSL_NO_MALLOC) && defined(NO_WOLFSSL_MEMORY) + static byte signingKey_priv_data[4096]; +#endif WC_RNG rng; word32 sigSz = 0; const char * msg = "LMS HSS post quantum signature test"; @@ -49749,6 +50406,9 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t lms_test(void) ret = wc_LmsKey_Init(&signingKey, NULL, devId); if (ret != 0) { ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); } +#if defined(WOLFSSL_NO_MALLOC) && defined(NO_WOLFSSL_MEMORY) + signingKey.priv_data = signingKey_priv_data; +#endif ret = wc_LmsKey_Init(&verifyKey, NULL, devId); if (ret != 0) { ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); } @@ -49858,6 +50518,9 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t lms_test(void) out: +#if defined(WOLFSSL_NO_MALLOC) && defined(NO_WOLFSSL_MEMORY) + signingKey.priv_data = NULL; +#endif wc_LmsKey_Free(&signingKey); wc_LmsKey_Free(&verifyKey); @@ -51839,44 +52502,67 @@ static wc_test_ret_t sakke_kat_derive_test(SakkeKey* key, ecc_point* rsk) return WC_TEST_RET_ENC_EC(ret); if (iTableLen != 0) { iTable = (byte*)XMALLOC(iTableLen, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - if (iTable == NULL) - return WC_TEST_RET_ENC_ERRNO; + if (iTable == NULL) { + ret = WC_TEST_RET_ENC_ERRNO; + goto out; + } ret = wc_GenerateSakkePointITable(key, iTable, &iTableLen); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + if (ret != 0) { + ret = WC_TEST_RET_ENC_EC(ret); + goto out; + } } len = 0; ret = wc_GenerateSakkeRskTable(key, rsk, NULL, &len); - if (ret != WC_NO_ERR_TRACE(LENGTH_ONLY_E)) - return WC_TEST_RET_ENC_EC(ret); + if (ret != WC_NO_ERR_TRACE(LENGTH_ONLY_E)) { + ret = 
WC_TEST_RET_ENC_EC(ret); + goto out; + } if (len > 0) { table = (byte*)XMALLOC(len, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); - if (table == NULL) - return WC_TEST_RET_ENC_ERRNO; + if (table == NULL) { + ret = WC_TEST_RET_ENC_ERRNO; + goto out; + } ret = wc_GenerateSakkeRskTable(key, rsk, table, &len); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + if (ret != 0) { + ret = WC_TEST_RET_ENC_EC(ret); + goto out; + } } ret = wc_SetSakkeRsk(key, rsk, table, len); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + if (ret != 0) { + ret = WC_TEST_RET_ENC_EC(ret); + goto out; + } XMEMCPY(tmpSsv, encSsv, sizeof(encSsv)); ret = wc_DeriveSakkeSSV(key, WC_HASH_TYPE_SHA256, tmpSsv, sizeof(tmpSsv), auth, sizeof(auth)); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); - if (XMEMCMP(tmpSsv, ssv, sizeof(ssv)) != 0) - return WC_TEST_RET_ENC_NC; + if (ret != 0) { + ret = WC_TEST_RET_ENC_EC(ret); + goto out; + } + if (XMEMCMP(tmpSsv, ssv, sizeof(ssv)) != 0) { + ret = WC_TEST_RET_ENC_NC; + goto out; + } /* Don't reference table that is about to be freed. */ ret = wc_ClearSakkePointITable(key); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + if (ret != 0) { + ret = WC_TEST_RET_ENC_EC(ret); + } + +out: /* Dispose of tables */ XFREE(iTable, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); XFREE(table, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); + /* return error code if encountered */ + if (ret != 0) { + return ret; + } /* Make sure the key public key is exportable - convert to Montgomery form * in Validation. diff --git a/wolfssl/internal.h b/wolfssl/internal.h index 4603db3bc..b6869090d 100644 --- a/wolfssl/internal.h +++ b/wolfssl/internal.h @@ -1526,15 +1526,10 @@ enum Misc { MAXEARLYDATASZ_LEN = 4, /* maxEarlyDataSz size in ticket */ #endif #endif -#if defined(HAVE_FALCON) || defined(HAVE_DILITHIUM) - ENCRYPT_LEN = 5120, /* Allow 5k byte buffer for dilithium and - * hybridization with other algs. 
*/ -#else #ifndef NO_PSK ENCRYPT_LEN = (ENCRYPT_BASE_BITS / 8) + MAX_PSK_KEY_LEN + 2, #else ENCRYPT_LEN = (ENCRYPT_BASE_BITS / 8), -#endif #endif SIZEOF_SENDER = 4, /* clnt or srvr */ FINISHED_SZ = 36, /* WC_MD5_DIGEST_SIZE + WC_SHA_DIGEST_SIZE */ @@ -1673,6 +1668,12 @@ enum Misc { MAX_REQUEST_SZ = 256, /* Maximum cert req len (no auth yet */ SESSION_FLUSH_COUNT = 256, /* Flush session cache unless user turns off */ TLS_MAX_PAD_SZ = 255, /* Max padding in TLS */ + MAX_EXT_DATA_LEN = 65535, + /* Max extension data length <0..2^16-1> RFC 8446 + * Section 4.2 */ + MAX_SV_EXT_LEN = 255, + /* Max supported_versions extension length + * <2..254> RFC 8446 Section 4.2.1.*/ #if defined(HAVE_NULL_CIPHER) && defined(WOLFSSL_TLS13) #if defined(WOLFSSL_SHA384) && WC_MAX_SYM_KEY_SIZE < 48 @@ -2189,9 +2190,9 @@ WOLFSSL_LOCAL WARN_UNUSED_RESULT DerBuffer *wolfssl_priv_der_unblind( const DerBuffer* key, const DerBuffer* mask); WOLFSSL_LOCAL void wolfssl_priv_der_unblind_free(DerBuffer* key); #endif -WOLFSSL_LOCAL int DecodePrivateKey(WOLFSSL *ssl, word32* length); +WOLFSSL_LOCAL int DecodePrivateKey(WOLFSSL *ssl, word32* sigLen); #ifdef WOLFSSL_DUAL_ALG_CERTS -WOLFSSL_LOCAL int DecodeAltPrivateKey(WOLFSSL *ssl, word32* length); +WOLFSSL_LOCAL int DecodeAltPrivateKey(WOLFSSL *ssl, word32* sigLen); #endif #if defined(WOLF_PRIVATE_KEY_ID) || defined(HAVE_PK_CALLBACKS) WOLFSSL_LOCAL int GetPrivateKeySigSize(WOLFSSL* ssl); @@ -2504,7 +2505,7 @@ struct CRL_Entry { /* DupCRL_Entry copies data after the `verifyMutex` member. Using the mutex * as the marker because clang-tidy doesn't like taking the sizeof a * pointer. 
*/ - byte crlNumber[CRL_MAX_NUM_SZ]; /* CRL number extension */ + char crlNumber[CRL_MAX_NUM_HEX_STR_SZ]; /* CRL number extension */ byte issuerHash[CRL_DIGEST_SIZE]; /* issuer hash */ /* byte crlHash[CRL_DIGEST_SIZE]; raw crl data hash */ /* restore the hash here if needed for optimized comparisons */ @@ -5773,7 +5774,11 @@ typedef struct Dtls13Rtx { Dtls13RtxRecord *rtxRecords; Dtls13RtxRecord **rtxRecordTailPtr; Dtls13RecordNumber *seenRecords; +#ifdef WOLFSSL_32BIT_MILLI_TIME word32 lastRtx; +#else + sword64 lastRtx; +#endif byte triggeredRtxs; /* Unused? */ byte sendAcks; byte retransmit; @@ -6829,7 +6834,8 @@ WOLFSSL_LOCAL word32 MacSize(const WOLFSSL* ssl); WOLFSSL_LOCAL void WriteSEQ(WOLFSSL* ssl, int verifyOrder, byte* out); -#if defined(WOLFSSL_TLS13) && (defined(HAVE_SESSION_TICKET) || !defined(NO_PSK)) +#if defined(WOLFSSL_TLS13) && (defined(HAVE_SESSION_TICKET) || \ + !defined(NO_PSK) || defined(WOLFSSL_DTLS13)) #ifdef WOLFSSL_32BIT_MILLI_TIME WOLFSSL_LOCAL word32 TimeNowInMilliseconds(void); #else diff --git a/wolfssl/openssl/conf.h b/wolfssl/openssl/conf.h index d059c5975..1007b64a1 100644 --- a/wolfssl/openssl/conf.h +++ b/wolfssl/openssl/conf.h @@ -24,22 +24,30 @@ #ifndef WOLFSSL_conf_H_ #define WOLFSSL_conf_H_ +#include +#include + #ifdef __cplusplus extern "C" { #endif -#include -#include - typedef struct WOLFSSL_CONF_VALUE { char *section; char *name; char *value; } WOLFSSL_CONF_VALUE; +#ifdef __cplusplus +} /* extern "C" */ +#endif + /* ssl.h requires WOLFSSL_CONF_VALUE */ #include +#ifdef __cplusplus + extern "C" { +#endif + typedef struct WOLFSSL_CONF { void *meth_data; WOLF_LHASH_OF(WOLFSSL_CONF_VALUE) *data; diff --git a/wolfssl/openssl/obj_mac.h b/wolfssl/openssl/obj_mac.h index d0eb3d350..6865779d6 100644 --- a/wolfssl/openssl/obj_mac.h +++ b/wolfssl/openssl/obj_mac.h @@ -23,6 +23,9 @@ #ifndef WOLFSSL_OBJ_MAC_H_ #define WOLFSSL_OBJ_MAC_H_ +/* include SN_xxx definitions from asn.h */ +#include + #ifdef __cplusplus extern "C" { #endif @@ 
-82,8 +85,6 @@ #endif /* !OPENSSL_COEXIST */ -/* the definition is for Qt Unit test */ -#define SN_jurisdictionCountryName "jurisdictionC" #ifdef __cplusplus } /* extern "C" */ #endif diff --git a/wolfssl/openssl/x509.h b/wolfssl/openssl/x509.h index c61fbbb24..23ad0789e 100644 --- a/wolfssl/openssl/x509.h +++ b/wolfssl/openssl/x509.h @@ -68,9 +68,6 @@ #define WOLFSSL_XN_FLAG_MULTILINE 0xFFFF #define WOLFSSL_XN_FLAG_ONELINE (WOLFSSL_XN_FLAG_SEP_CPLUS_SPC | WOLFSSL_XN_FLAG_SPC_EQ | WOLFSSL_XN_FLAG_FN_SN) -#define WOLFSSL_X509_V_ERR_CRL_HAS_EXPIRED 12 -#define WOLFSSL_X509_V_ERR_UNABLE_TO_GET_CRL 3 - #ifndef OPENSSL_COEXIST /* wolfSSL_X509_print_ex flags */ diff --git a/wolfssl/ssl.h b/wolfssl/ssl.h index 888139af9..9c95b21cd 100644 --- a/wolfssl/ssl.h +++ b/wolfssl/ssl.h @@ -270,6 +270,10 @@ typedef struct WOLFSSL_BY_DIR WOLFSSL_BY_DIR; /* redeclare guard */ #define WOLFSSL_TYPES_DEFINED +#ifdef __cplusplus + } /* extern "C" */ +#endif + #include /* The WOLFSSL_RSA type is required in all build configurations. 
*/ @@ -277,6 +281,10 @@ typedef struct WOLFSSL_BY_DIR WOLFSSL_BY_DIR; #include #endif +#ifdef __cplusplus + extern "C" { +#endif + #ifndef WC_RNG_TYPE_DEFINED /* guard on redeclaration */ typedef struct WC_RNG WC_RNG; #define WC_RNG_TYPE_DEFINED @@ -2158,12 +2166,6 @@ WOLFSSL_API int wolfSSL_BIO_set_mem_buf(WOLFSSL_BIO* bio, WOLFSSL_BUF_MEM* bufMe #endif WOLFSSL_API int wolfSSL_BIO_get_len(WOLFSSL_BIO *bio); -#ifdef WOLFSSL_HAVE_BIO_ADDR -WOLFSSL_API WOLFSSL_BIO_ADDR *wolfSSL_BIO_ADDR_new(void); -WOLFSSL_API void wolfSSL_BIO_ADDR_free(WOLFSSL_BIO_ADDR *addr); -WOLFSSL_API void wolfSSL_BIO_ADDR_clear(WOLFSSL_BIO_ADDR *addr); -#endif /* WOLFSSL_HAVE_BIO_ADDR */ - #endif /* !NO_BIO */ WOLFSSL_API void wolfSSL_RAND_screen(void); @@ -2612,9 +2614,11 @@ WOLFSSL_API void* wolfSSL_get_app_data( const WOLFSSL *ssl); */ enum { WOLFSSL_X509_V_OK = 0, + WOLFSSL_X509_V_ERR_UNABLE_TO_GET_CRL = 3, WOLFSSL_X509_V_ERR_CERT_SIGNATURE_FAILURE = 7, WOLFSSL_X509_V_ERR_CERT_NOT_YET_VALID = 9, WOLFSSL_X509_V_ERR_CERT_HAS_EXPIRED = 10, + WOLFSSL_X509_V_ERR_CRL_HAS_EXPIRED = 12, WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD = 13, WOLFSSL_X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD = 14, WOLFSSL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT = 18, @@ -2626,6 +2630,7 @@ enum { WOLFSSL_X509_V_ERR_PATH_LENGTH_EXCEEDED = 25, WOLFSSL_X509_V_ERR_CERT_REJECTED = 28, WOLFSSL_X509_V_ERR_SUBJECT_ISSUER_MISMATCH = 29, + WC_OSSL_V509_V_ERR_MAX = 30, #ifdef HAVE_OCSP /* OCSP Flags */ @@ -3795,7 +3800,7 @@ typedef int (*CbCrlIO)(WOLFSSL_CRL* crl, const char* url, int urlSz); #ifdef HAVE_CRL_UPDATE_CB typedef struct CrlInfo { - byte crlNumber[CRL_MAX_NUM_SZ]; + char crlNumber[CRL_MAX_NUM_HEX_STR_SZ]; byte *issuerHash; word32 issuerHashLen; byte *lastDate; diff --git a/wolfssl/wolfcrypt/asn.h b/wolfssl/wolfcrypt/asn.h index 84ad93f1b..3b3779cb3 100644 --- a/wolfssl/wolfcrypt/asn.h +++ b/wolfssl/wolfcrypt/asn.h @@ -37,7 +37,8 @@ that can be serialized and deserialized in a cross-platform way. 
#include #if !defined(NO_ASN) || !defined(NO_PWDBASED) - +/* included openssl/obj_mac.h directly for SN_xxx definitions */ +#if !defined(WOLFSSL_OBJ_MAC_H_) #if !defined(NO_ASN_TIME) && defined(NO_TIME_H) #define NO_ASN_TIME /* backwards compatibility with NO_TIME_H */ #endif @@ -880,8 +881,10 @@ extern const WOLFSSL_ObjectInfo wolfssl_object_info[]; #else #define WC_MAX_CERT_VERIFY_SZ 1024 /* max default */ #endif - -#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) +#endif /* !NO_ASN */ +#endif /* !WOLFSSL_OBJ_MAC_H_ */ +#if defined(WOLFSSL_OBJ_MAC_H_) || \ + defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) /* short names */ #define WC_SN_md4 "MD4" #define WC_SN_md5 "MD5" @@ -946,29 +949,91 @@ extern const WOLFSSL_ObjectInfo wolfssl_object_info[]; #define WC_NID_tlsfeature TLS_FEATURE_OID /* id-pe 24 */ #define WC_NID_buildingName 1494 +#define WC_SN_dnQualifier "dnQualifier" +#define WC_LN_dnQualifier "dnQualifier" #define WC_NID_dnQualifier 174 /* 2.5.4.46 */ + +#define WC_SN_commonName "CN" +#define WC_LN_commonName "commonName" #define WC_NID_commonName 14 /* CN Changed to not conflict - * with PBE_SHA1_DES3 */ + * with PBE_SHA1_DES3 */ +#define WC_LN_name "name" +#define WC_SN_name "name" #define WC_NID_name 173 /* N , OID = 2.5.4.41 */ + +#define WC_LN_surname "surname" +#define WC_SN_surname "SN" #define WC_NID_surname 0x04 /* SN */ + +#define WC_LN_serialNumber "serialNumber" #define WC_NID_serialNumber 0x05 /* serialNumber */ + +#define WC_LN_countryName "countryName" +#define WC_SN_countryName "C" #define WC_NID_countryName 0x06 /* C */ + +#define WC_LN_localityName "localityName" +#define WC_SN_localityName "L" #define WC_NID_localityName 0x07 /* L */ + +#define WC_LN_stateOrProvinceName "stateOrProvinceName" +#define WC_SN_stateOrProvinceName "ST" #define WC_NID_stateOrProvinceName 0x08 /* ST */ + +#define WC_LN_streetAddress "streetAddress" +#define WC_SN_streetAddress "street" #define WC_NID_streetAddress ASN_STREET_ADDR /* 
street */ + +#define WC_LN_organizationName "organizationName" +#define WC_SN_organizationName "O" #define WC_NID_organizationName 0x0a /* O */ + +#define WC_LN_organizationalUnitName "organizationalUnitName" +#define WC_SN_organizationalUnitName "OU" #define WC_NID_organizationalUnitName 0x0b /* OU */ + +#define WC_LN_jurisdictionCountryName "jurisdictionCountryName" +#define WC_SN_jurisdictionCountryName "jurisdictionC" #define WC_NID_jurisdictionCountryName 0xc + +#define WC_LN_jurisdictionStateOrProvinceName "jurisdictionStateOrProvinceName" +#define WC_SN_jurisdictionStateOrProvinceName "jurisdictionST" #define WC_NID_jurisdictionStateOrProvinceName 0xd + +#define WC_LN_businessCategory "businessCategory" #define WC_NID_businessCategory ASN_BUS_CAT + +#define WC_SN_domainComponent "DC" +#define WC_LN_domainComponent "domainComponent" #define WC_NID_domainComponent ASN_DOMAIN_COMPONENT + +#define WC_LN_postalCode "postalCode" #define WC_NID_postalCode ASN_POSTAL_CODE /* postalCode */ + +#define WC_SN_rfc822Mailbox "mail" +#define WC_LN_rfc822Mailbox "rfc822Mailbox" #define WC_NID_rfc822Mailbox 460 + +#define WC_SN_favouriteDrink "coldBrew" +#define WC_LN_favouriteDrink "favouriteDrink" #define WC_NID_favouriteDrink 462 + +#define WC_SN_userId "UID" +#define WC_LN_userId "userId" #define WC_NID_userId 458 + +#define WC_LN_registeredAddress "registeredAddress" #define WC_NID_registeredAddress 870 + +#define WC_LN_emailAddress "emailAddress" #define WC_NID_emailAddress 0x30 /* emailAddress */ + +#define WC_SN_id_on_dnsSRV "id-on-dnsSRV" +#define WC_LN_id_on_dnsSRV "SRVName" #define WC_NID_id_on_dnsSRV 82 /* 1.3.6.1.5.5.7.8.7 */ + +#define WC_SN_ms_upn "msUPN" +#define WC_LN_ms_upn "Microsoft User Principal Name" #define WC_NID_ms_upn UPN_OID /* 1.3.6.1.4.1.311.20.2.3 */ #define WC_NID_X9_62_prime_field 406 /* 1.2.840.10045.1.1 */ @@ -1021,27 +1086,91 @@ extern const WOLFSSL_ObjectInfo wolfssl_object_info[]; #define NID_tlsfeature WC_NID_tlsfeature #define 
NID_buildingName WC_NID_buildingName +#define SN_dnQualifier WC_SN_dnQualifier +#define LN_dnQualifier WC_LN_dnQualifier #define NID_dnQualifier WC_NID_dnQualifier + +#define SN_commonName WC_SN_commonName +#define LN_commonName WC_LN_commonName #define NID_commonName WC_NID_commonName + +#define LN_name WC_LN_name +#define SN_name WC_SN_name #define NID_name WC_NID_name + +#define LN_surname WC_LN_surname +#define SN_surname WC_SN_surname #define NID_surname WC_NID_surname + +#define LN_serialNumber WC_LN_serialNumber #define NID_serialNumber WC_NID_serialNumber + +#define LN_countryName WC_LN_countryName +#define SN_countryName WC_SN_countryName #define NID_countryName WC_NID_countryName + +#define LN_localityName WC_LN_localityName +#define SN_localityName WC_SN_localityName #define NID_localityName WC_NID_localityName + +#define LN_stateOrProvinceName WC_LN_stateOrProvinceName +#define SN_stateOrProvinceName WC_SN_stateOrProvinceName #define NID_stateOrProvinceName WC_NID_stateOrProvinceName + +#define LN_streetAddress WC_LN_streetAddress +#define SN_streetAddress WC_SN_streetAddress #define NID_streetAddress WC_NID_streetAddress + +#define LN_organizationName WC_LN_organizationName +#define SN_organizationName WC_SN_organizationName #define NID_organizationName WC_NID_organizationName + +#define LN_organizationalUnitName WC_LN_organizationalUnitName +#define SN_organizationalUnitName WC_SN_organizationalUnitName #define NID_organizationalUnitName WC_NID_organizationalUnitName + +#define LN_jurisdictionCountryName WC_LN_jurisdictionCountryName +#define SN_jurisdictionCountryName WC_SN_jurisdictionCountryName #define NID_jurisdictionCountryName WC_NID_jurisdictionCountryName + +#define LN_jurisdictionStateOrProvinceName WC_LN_jurisdictionStateOrProvinceName +#define SN_jurisdictionStateOrProvinceName WC_SN_jurisdictionStateOrProvinceName #define NID_jurisdictionStateOrProvinceName WC_NID_jurisdictionStateOrProvinceName + +#define LN_businessCategory 
WC_LN_businessCategory #define NID_businessCategory WC_NID_businessCategory + +#define SN_domainComponent WC_SN_domainComponent +#define LN_domainComponent WC_LN_domainComponent #define NID_domainComponent WC_NID_domainComponent + +#define LN_postalCode WC_LN_postalCode #define NID_postalCode WC_NID_postalCode + +#define SN_rfc822Mailbox WC_SN_rfc822Mailbox +#define LN_rfc822Mailbox WC_LN_rfc822Mailbox #define NID_rfc822Mailbox WC_NID_rfc822Mailbox + +#define SN_favouriteDrink WC_SN_favouriteDrink +#define LN_favouriteDrink WC_LN_favouriteDrink #define NID_favouriteDrink WC_NID_favouriteDrink + +#define SN_userId WC_SN_userId +#define LN_userId WC_LN_userId #define NID_userId WC_NID_userId + +#define LN_registeredAddress WC_LN_registeredAddress +#define NID_registeredAddress WC_NID_registeredAddress + +#define LN_emailAddress WC_LN_emailAddress #define NID_emailAddress WC_NID_emailAddress + +#define SN_id_on_dnsSRV WC_SN_id_on_dnsSRV +#define LN_id_on_dnsSRV WC_LN_id_on_dnsSRV #define NID_id_on_dnsSRV WC_NID_id_on_dnsSRV + +#define SN_ms_upn WC_SN_ms_upn +#define LN_ms_upn WC_LN_ms_upn #define NID_ms_upn WC_NID_ms_upn #define NID_X9_62_prime_field WC_NID_X9_62_prime_field @@ -1052,8 +1181,9 @@ extern const WOLFSSL_ObjectInfo wolfssl_object_info[]; #endif /* !OPENSSL_COEXIST */ -#endif /* OPENSSL_EXTRA || OPENSSL_EXTRA_X509_SMALL */ - +#endif /* WOLFSSL_OBJ_MAC_H_ || OPENSSL_EXTRA || OPENSSL_EXTRA_X509_SMALL */ +#if !defined(WOLFSSL_OBJ_MAC_H_) +#if !defined(NO_ASN) enum ECC_TYPES { ECC_PREFIX_0 = 160, @@ -2244,7 +2374,8 @@ WOLFSSL_LOCAL int wc_GetKeyOID(byte* key, word32 keySz, const byte** curveOID, typedef struct tm wolfssl_tm; #ifdef WOLFSSL_ASN_TIME_STRING -WOLFSSL_LOCAL int GetTimeString(byte* date, int format, char* buf, int len); +WOLFSSL_LOCAL int GetTimeString(byte* date, int format, char* buf, int len, + int dateLen); #endif #if !defined(NO_ASN_TIME) && !defined(USER_TIME) && \ !defined(TIME_OVERRIDES) && (defined(OPENSSL_EXTRA) || defined(HAVE_PKCS7)) @@ 
-2252,12 +2383,13 @@ WOLFSSL_LOCAL int GetFormattedTime(void* currTime, byte* buf, word32 len); WOLFSSL_LOCAL int GetAsnTimeString(void* currTime, byte* buf, word32 len); #endif WOLFSSL_LOCAL int ExtractDate(const unsigned char* date, unsigned char format, - wolfssl_tm* certTime, int* idx); + wolfssl_tm* certTime, int* idx, int len); WOLFSSL_LOCAL int DateGreaterThan(const struct tm* a, const struct tm* b); -WOLFSSL_LOCAL int wc_ValidateDate(const byte* date, byte format, int dateType); +WOLFSSL_LOCAL int wc_ValidateDate(const byte* date, byte format, int dateType, + int len); #ifndef NO_ASN_TIME WOLFSSL_LOCAL int wc_ValidateDateWithTime(const byte* date, byte format, - int dateType, time_t checkTime); + int dateType, time_t checkTime, int len); #endif WOLFSSL_TEST_VIS int wc_AsnSetSkipDateCheck(int skip_p); WOLFSSL_LOCAL int wc_AsnGetSkipDateCheck(void); @@ -2691,6 +2823,8 @@ struct RevokedCert { #define CRL_MAX_NUM_SZ 20 /* RFC5280 states that CRL number can be up to 20 */ #endif /* octets long */ +#define CRL_MAX_NUM_HEX_STR_SZ (CRL_MAX_NUM_SZ * 2 + 1) +#define CRL_MAX_NUM_SZ_BITS (CRL_MAX_NUM_SZ * CHAR_BIT) typedef struct DecodedCRL DecodedCRL; @@ -2704,7 +2838,7 @@ struct DecodedCRL { word32 sigParamsLength; /* length of signature parameters */ #endif byte* signature; /* pointer into raw source, not owned */ - byte crlNumber[CRL_MAX_NUM_SZ]; /* CRL number extension */ + char crlNumber[CRL_MAX_NUM_HEX_STR_SZ]; /* CRL number extension */ byte issuerHash[SIGNER_DIGEST_SIZE]; /* issuer name hash */ byte crlHash[SIGNER_DIGEST_SIZE]; /* raw crl data hash */ byte lastDate[MAX_DATE_SIZE]; /* last date updated */ @@ -2787,6 +2921,12 @@ WOLFSSL_LOCAL int VerifyX509Acert(const byte* cert, word32 certSz, #endif /* WOLFSSL_ACERT */ +#ifndef IGNORE_NAME_CONSTRAINTS +WOLFSSL_TEST_VIS int wolfssl_local_MatchBaseName(int type, const char* name, + int nameSz, const char* base, + int baseSz); +#endif + #if ((defined(HAVE_ED25519) && defined(HAVE_ED25519_KEY_IMPORT)) \ || 
(defined(HAVE_CURVE25519) && defined(HAVE_CURVE25519_KEY_IMPORT)) \ || (defined(HAVE_ED448) && defined(HAVE_ED448_KEY_IMPORT)) \ @@ -2862,6 +3002,7 @@ enum PKCSTypes { } /* extern "C" */ #endif +#endif /* WOLFSSL_OBJ_MAC_H_ */ #endif /* !NO_ASN || !NO_PWDBASED */ #endif /* WOLF_CRYPT_ASN_H */ diff --git a/wolfssl/wolfcrypt/async.h b/wolfssl/wolfcrypt/async.h index 8eac4aa9c..d6ab016a4 100644 --- a/wolfssl/wolfcrypt/async.h +++ b/wolfssl/wolfcrypt/async.h @@ -186,7 +186,7 @@ struct WC_ASYNC_DEV; typedef struct WC_ASYNC_SW { void* ctx; - #if HAVE_ANONYMOUS_INLINE_AGGREGATES + #ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES union { #endif #ifdef HAVE_ECC @@ -211,7 +211,7 @@ struct WC_ASYNC_DEV; #ifndef NO_DES3 struct AsyncCryptSwDes des; #endif /* !NO_DES3 */ - #if HAVE_ANONYMOUS_INLINE_AGGREGATES + #ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES }; /* union */ #endif byte type; /* enum WC_ASYNC_SW_TYPE */ diff --git a/wolfssl/wolfcrypt/blake2.h b/wolfssl/wolfcrypt/blake2.h index da66d9ba9..06d312472 100644 --- a/wolfssl/wolfcrypt/blake2.h +++ b/wolfssl/wolfcrypt/blake2.h @@ -88,6 +88,16 @@ WOLFSSL_API int wc_InitBlake2b_WithKey(Blake2b* b2b, word32 digestSz, const byte *key, word32 keylen); WOLFSSL_API int wc_Blake2bUpdate(Blake2b* b2b, const byte* data, word32 sz); WOLFSSL_API int wc_Blake2bFinal(Blake2b* b2b, byte* final, word32 requestSz); +WOLFSSL_API int wc_Blake2bHmacInit(Blake2b* b2b, + const byte* key, size_t key_len); +WOLFSSL_API int wc_Blake2bHmacUpdate(Blake2b* b2b, + const byte* in, size_t in_len); +WOLFSSL_API int wc_Blake2bHmacFinal(Blake2b* b2b, + const byte* key, size_t key_len, + byte* out, size_t out_len); +WOLFSSL_API int wc_Blake2bHmac(const byte* in, size_t in_len, + const byte* key, size_t key_len, + byte* out, size_t out_len); #endif #ifdef HAVE_BLAKE2S @@ -96,6 +106,16 @@ WOLFSSL_API int wc_InitBlake2s_WithKey(Blake2s* b2s, word32 digestSz, const byte *key, word32 keylen); WOLFSSL_API int wc_Blake2sUpdate(Blake2s* b2s, const byte* data, word32 sz); 
WOLFSSL_API int wc_Blake2sFinal(Blake2s* b2s, byte* final, word32 requestSz); +WOLFSSL_API int wc_Blake2sHmacInit(Blake2s* b2s, + const byte* key, size_t key_len); +WOLFSSL_API int wc_Blake2sHmacUpdate(Blake2s* b2s, + const byte* in, size_t in_len); +WOLFSSL_API int wc_Blake2sHmacFinal(Blake2s* b2s, + const byte* key, size_t key_len, + byte* out, size_t out_len); +WOLFSSL_API int wc_Blake2sHmac(const byte* in, size_t in_len, + const byte* key, size_t key_len, + byte* out, size_t out_len); #endif diff --git a/wolfssl/wolfcrypt/chacha20_poly1305.h b/wolfssl/wolfcrypt/chacha20_poly1305.h index ca5754543..3af10755b 100644 --- a/wolfssl/wolfcrypt/chacha20_poly1305.h +++ b/wolfssl/wolfcrypt/chacha20_poly1305.h @@ -129,8 +129,8 @@ WOLFSSL_API WARN_UNUSED_RESULT int wc_ChaCha20Poly1305_Final(ChaChaPoly_Aead* ae WOLFSSL_API int wc_XChaCha20Poly1305_Init( ChaChaPoly_Aead* aead, const byte *ad, word32 ad_len, - const byte *inKey, word32 inKeySz, - const byte *inIV, word32 inIVSz, + const byte *nonce, word32 nonce_len, + const byte *key, word32 key_len, int isEncrypt); WOLFSSL_API int wc_XChaCha20Poly1305_Encrypt( diff --git a/wolfssl/wolfcrypt/dilithium.h b/wolfssl/wolfcrypt/dilithium.h index abded5ce2..3ebaa49a1 100644 --- a/wolfssl/wolfcrypt/dilithium.h +++ b/wolfssl/wolfcrypt/dilithium.h @@ -200,23 +200,23 @@ /* Number of dropped bits. */ #define DILITHIUM_D 13 /* Maximum value of dropped bits. */ -#define DILITHIUM_D_MAX (1 << DILITHIUM_D) +#define DILITHIUM_D_MAX ((sword32)1 << DILITHIUM_D) /* Half maximum value. */ -#define DILITHIUM_D_MAX_HALF (1 << (DILITHIUM_D - 1)) +#define DILITHIUM_D_MAX_HALF ((sword32)1 << (DILITHIUM_D - 1)) /* Number of undropped bits. */ #define DILITHIUM_U (DILITHIUM_Q_BITS - DILITHIUM_D) /* Bits in coefficient range of y, GAMMA1, of 2^17 is 17. */ #define DILITHIUM_GAMMA1_BITS_17 17 /* Coefficient range of y, GAMMA1, of 2^17. 
*/ -#define DILITHIUM_GAMMA1_17 (1 << 17) +#define DILITHIUM_GAMMA1_17 ((sword32)1 << 17) /* # encoding bits of y is GAMMA1 + 1. */ #define DILITHIUM_GAMMA1_17_ENC_BITS 18 /* Coefficient range of y, GAMMA1, of 2^17. */ /* Bits in coefficient range of y, GAMMA1, of 2^19 is 19. */ #define DILITHIUM_GAMMA1_BITS_19 19 /* Coefficient range of y, GAMMA1, of 2^19. */ -#define DILITHIUM_GAMMA1_19 (1 << 19) +#define DILITHIUM_GAMMA1_19 ((sword32)1 << 19) /* # encoding bits of y is GAMMA1 + 1. */ #define DILITHIUM_GAMMA1_19_ENC_BITS 20 @@ -265,13 +265,14 @@ #define PARAMS_ML_DSA_44_TAU 39 /* BETA = TAU * ETA for ML-DSA-44. */ #define PARAMS_ML_DSA_44_BETA \ - (PARAMS_ML_DSA_44_TAU * PARAMS_ML_DSA_44_ETA) +(PARAMS_ML_DSA_44_TAU * PARAMS_ML_DSA_44_ETA) /* Max # 1's in the hint h, OMEGA, for ML-DSA-44. */ #define PARAMS_ML_DSA_44_OMEGA 80 /* Bits in coefficient range of y, GAMMA1, for ML-DSA-44. */ #define PARAMS_ML_DSA_44_GAMMA1_BITS DILITHIUM_GAMMA1_BITS_17 /* Ccoefficient range of y, GAMMA1, for ML-DSA-44. */ -#define PARAMS_ML_DSA_44_GAMMA1 (1 << PARAMS_ML_DSA_44_GAMMA1_BITS) +#define PARAMS_ML_DSA_44_GAMMA1 \ + ((sword32)1 << PARAMS_ML_DSA_44_GAMMA1_BITS) /* Low-order rounding range, GAMMA2, for ML-DSA-44. */ #define PARAMS_ML_DSA_44_GAMMA2 DILITHIUM_Q_LOW_88 /* Bits in high-order rounding range, GAMMA2, for ML-DSA-44. */ @@ -331,8 +332,9 @@ #define PARAMS_ML_DSA_65_OMEGA 55 /* Bits in coefficient range of y, GAMMA1, for ML-DSA-65. */ #define PARAMS_ML_DSA_65_GAMMA1_BITS DILITHIUM_GAMMA1_BITS_19 -/* Ccoefficient range of y, GAMMA1, for ML-DSA-65. */ -#define PARAMS_ML_DSA_65_GAMMA1 (1 << PARAMS_ML_DSA_65_GAMMA1_BITS) +/* Coefficient range of y, GAMMA1, for ML-DSA-65. */ +#define PARAMS_ML_DSA_65_GAMMA1 \ + ((sword32)1 << PARAMS_ML_DSA_65_GAMMA1_BITS) /* Low-order rounding range, GAMMA2, for ML-DSA-65. */ #define PARAMS_ML_DSA_65_GAMMA2 DILITHIUM_Q_LOW_32 /* Bits in high-order rounding range, GAMMA2, for ML-DSA-65. 
*/ @@ -393,7 +395,8 @@ /* Bits in coefficient range of y, GAMMA1, for ML-DSA-87. */ #define PARAMS_ML_DSA_87_GAMMA1_BITS DILITHIUM_GAMMA1_BITS_19 /* Ccoefficient range of y, GAMMA1, for ML-DSA-87. */ -#define PARAMS_ML_DSA_87_GAMMA1 (1 << PARAMS_ML_DSA_87_GAMMA1_BITS) +#define PARAMS_ML_DSA_87_GAMMA1 \ + ((sword32)1 << PARAMS_ML_DSA_87_GAMMA1_BITS) /* Low-order rounding range, GAMMA2, for ML-DSA-87. */ #define PARAMS_ML_DSA_87_GAMMA2 DILITHIUM_Q_LOW_32 /* Bits in high-order rounding range, GAMMA2, for ML-DSA-87. */ @@ -538,6 +541,50 @@ #endif /* LITTLE_ENDIAN_ORDER && WOLFSSL_DILITHIUM_ALIGNMENT == 0 */ #endif +#ifndef WOLFSSL_NO_ML_DSA_87 + +#define DILITHIUM_MAX_KEY_SIZE DILITHIUM_LEVEL5_KEY_SIZE +#define DILITHIUM_MAX_SIG_SIZE DILITHIUM_LEVEL5_SIG_SIZE +#define DILITHIUM_MAX_PUB_KEY_SIZE DILITHIUM_LEVEL5_PUB_KEY_SIZE +#define DILITHIUM_MAX_PRV_KEY_SIZE DILITHIUM_LEVEL5_PRV_KEY_SIZE +/* Buffer sizes large enough to store exported DER encoded keys */ +#define DILITHIUM_MAX_PUB_KEY_DER_SIZE DILITHIUM_LEVEL5_PUB_KEY_DER_SIZE +#define DILITHIUM_MAX_PRV_KEY_DER_SIZE DILITHIUM_LEVEL5_PRV_KEY_DER_SIZE +#define DILITHIUM_MAX_BOTH_KEY_DER_SIZE DILITHIUM_LEVEL5_BOTH_KEY_DER_SIZE +/* PEM size with the header "-----BEGIN ML_DSA_LEVEL5 PRIVATE KEY-----" and + * the footer "-----END ML_DSA_LEVEL5 PRIVATE KEY-----" */ +#define DILITHIUM_MAX_BOTH_KEY_PEM_SIZE DILITHIUM_LEVEL5_BOTH_KEY_PEM_SIZE + +#elif !defined(WOLFSSL_NO_ML_DSA_65) + +#define DILITHIUM_MAX_KEY_SIZE DILITHIUM_LEVEL3_KEY_SIZE +#define DILITHIUM_MAX_SIG_SIZE DILITHIUM_LEVEL3_SIG_SIZE +#define DILITHIUM_MAX_PUB_KEY_SIZE DILITHIUM_LEVEL3_PUB_KEY_SIZE +#define DILITHIUM_MAX_PRV_KEY_SIZE DILITHIUM_LEVEL3_PRV_KEY_SIZE +/* Buffer sizes large enough to store exported DER encoded keys */ +#define DILITHIUM_MAX_PUB_KEY_DER_SIZE DILITHIUM_LEVEL3_PUB_KEY_DER_SIZE +#define DILITHIUM_MAX_PRV_KEY_DER_SIZE DILITHIUM_LEVEL3_PRV_KEY_DER_SIZE +#define DILITHIUM_MAX_BOTH_KEY_DER_SIZE DILITHIUM_LEVEL3_BOTH_KEY_DER_SIZE +/* PEM size 
with the header "-----BEGIN ML_DSA_LEVEL5 PRIVATE KEY-----" and + * the footer "-----END ML_DSA_LEVEL5 PRIVATE KEY-----" */ +#define DILITHIUM_MAX_BOTH_KEY_PEM_SIZE DILITHIUM_LEVEL3_BOTH_KEY_PEM_SIZE + +#else + +#define DILITHIUM_MAX_KEY_SIZE DILITHIUM_LEVEL2_KEY_SIZE +#define DILITHIUM_MAX_SIG_SIZE DILITHIUM_LEVEL2_SIG_SIZE +#define DILITHIUM_MAX_PUB_KEY_SIZE DILITHIUM_LEVEL2_PUB_KEY_SIZE +#define DILITHIUM_MAX_PRV_KEY_SIZE DILITHIUM_LEVEL2_PRV_KEY_SIZE +/* Buffer sizes large enough to store exported DER encoded keys */ +#define DILITHIUM_MAX_PUB_KEY_DER_SIZE DILITHIUM_LEVEL2_PUB_KEY_DER_SIZE +#define DILITHIUM_MAX_PRV_KEY_DER_SIZE DILITHIUM_LEVEL2_PRV_KEY_DER_SIZE +#define DILITHIUM_MAX_BOTH_KEY_DER_SIZE DILITHIUM_LEVEL2_BOTH_KEY_DER_SIZE +/* PEM size with the header "-----BEGIN ML_DSA_LEVEL5 PRIVATE KEY-----" and + * the footer "-----END ML_DSA_LEVEL5 PRIVATE KEY-----" */ +#define DILITHIUM_MAX_BOTH_KEY_PEM_SIZE DILITHIUM_LEVEL2_BOTH_KEY_PEM_SIZE + +#endif + #elif defined(HAVE_LIBOQS) #define DILITHIUM_LEVEL2_KEY_SIZE OQS_SIG_ml_dsa_44_ipd_length_secret_key @@ -618,8 +665,6 @@ * the footer "-----END ML_DSA_LEVEL5 PRIVATE KEY-----" */ #define ML_DSA_LEVEL5_BOTH_KEY_PEM_SIZE DILITHIUM_LEVEL5_BOTH_KEY_PEM_SIZE -#endif - #define DILITHIUM_MAX_KEY_SIZE DILITHIUM_LEVEL5_KEY_SIZE #define DILITHIUM_MAX_SIG_SIZE DILITHIUM_LEVEL5_SIG_SIZE #define DILITHIUM_MAX_PUB_KEY_SIZE DILITHIUM_LEVEL5_PUB_KEY_SIZE @@ -631,6 +676,8 @@ /* PEM size with the header "-----BEGIN ML_DSA_LEVEL5 PRIVATE KEY-----" and * the footer "-----END ML_DSA_LEVEL5 PRIVATE KEY-----" */ #define DILITHIUM_MAX_BOTH_KEY_PEM_SIZE DILITHIUM_LEVEL5_BOTH_KEY_PEM_SIZE +#endif + #ifdef WOLF_PRIVATE_KEY_ID @@ -794,10 +841,12 @@ int wc_dilithium_verify_ctx_hash(const byte* sig, word32 sigLen, const byte* ctx, word32 ctxLen, int hashAlg, const byte* hash, word32 hashLen, int* res, dilithium_key* key); +#ifndef WC_NO_CONSTRUCTORS WOLFSSL_API dilithium_key* wc_dilithium_new(void* heap, int devId); WOLFSSL_API int 
wc_dilithium_delete(dilithium_key* key, dilithium_key** key_p); +#endif /* !WC_NO_CONSTRUCTORS */ WOLFSSL_API int wc_dilithium_init(dilithium_key* key); @@ -1009,33 +1058,37 @@ WOLFSSL_LOCAL void wc_mldsa_poly_make_pos_avx2(sword32* a); #define MlDsaKey dilithium_key -#define wc_MlDsaKey_Init(key, heap, devId) \ +#define wc_MlDsaKey_Init(key, heap, devId) \ wc_dilithium_init_ex(key, heap, devId) -#define wc_MlDsaKey_SetParams(key, id) \ +#define wc_MlDsaKey_SetParams(key, id) \ wc_dilithium_set_level(key, id) -#define wc_MlDsaKey_GetParams(key, id) \ +#define wc_MlDsaKey_GetParams(key, id) \ wc_dilithium_get_level(key, id) -#define wc_MlDsaKey_MakeKey(key, rng) \ +#define wc_MlDsaKey_MakeKey(key, rng) \ wc_dilithium_make_key(key, rng) -#define wc_MlDsaKey_ExportPrivRaw(key, out, outLen) \ +#define wc_MlDsaKey_ExportPrivRaw(key, out, outLen) \ wc_dilithium_export_private_only(key, out, outLen) -#define wc_MlDsaKey_ImportPrivRaw(key, in, inLen) \ +#define wc_MlDsaKey_ImportPrivRaw(key, in, inLen) \ wc_dilithium_import_private_only(in, inLen, key) -#define wc_MlDsaKey_Sign(key, sig, sigSz, msg, msgSz, rng) \ +#define wc_MlDsaKey_Sign(key, sig, sigSz, msg, msgSz, rng) \ wc_dilithium_sign_msg(msg, msgSz, sig, sigSz, key, rng) -#define wc_MlDsaKey_Free(key) \ +#define wc_MlDsaKey_SignCtx(key, ctx, ctxSz, sig, sigSz, msg, msgSz, rng) \ + wc_dilithium_sign_ctx_msg(ctx, ctxSz, msg, msgSz, sig, sigSz, key, rng) +#define wc_MlDsaKey_Free(key) \ wc_dilithium_free(key) -#define wc_MlDsaKey_ExportPubRaw(key, out, outLen) \ +#define wc_MlDsaKey_ExportPubRaw(key, out, outLen) \ wc_dilithium_export_public(key, out, outLen) -#define wc_MlDsaKey_ImportPubRaw(key, in, inLen) \ +#define wc_MlDsaKey_ImportPubRaw(key, in, inLen) \ wc_dilithium_import_public(in, inLen, key) -#define wc_MlDsaKey_Verify(key, sig, sigSz, msg, msgSz, res) \ +#define wc_MlDsaKey_Verify(key, sig, sigSz, msg, msgSz, res) \ wc_dilithium_verify_msg(sig, sigSz, msg, msgSz, res, key) +#define 
wc_MlDsaKey_VerifyCtx(key, sig, sigSz, ctx, ctxSz, msg, msgSz, res) \ + wc_dilithium_verify_msg_ctx(sig, sigSz, ctx, ctxSz, msg, msgSz, res, key) -#define wc_MlDsaKey_PublicKeyToDer(key, output, len, withAlg) \ +#define wc_MlDsaKey_PublicKeyToDer(key, output, len, withAlg) \ wc_Dilithium_PublicKeyToDer(key, output, len, withAlg) -#define wc_MlDsaKey_PrivateKeyToDer(key, output, len) \ +#define wc_MlDsaKey_PrivateKeyToDer(key, output, len) \ wc_Dilithium_PrivateKeyToDer(key, output, len) diff --git a/wolfssl/wolfcrypt/error-crypt.h b/wolfssl/wolfcrypt/error-crypt.h index 0e7d21cfb..e4db03cd6 100644 --- a/wolfssl/wolfcrypt/error-crypt.h +++ b/wolfssl/wolfcrypt/error-crypt.h @@ -309,9 +309,11 @@ enum wolfCrypt_ErrorCodes { INTERRUPTED_E = -1004, /* Process interrupted */ MLKEM_PUB_HASH_E = -1005, /* Encoded public key in decapsulation key does * not match stored hash*/ + BUSY_E = -1006, /* Object is busy */ + ALREADY_E = -1007, /* Operation was redundant or preempted */ - WC_SPAN2_LAST_E = -1005, /* Update to indicate last used error code */ - WC_LAST_E = -1005, /* the last code used either here or in + WC_SPAN2_LAST_E = -1007, /* Update to indicate last used error code */ + WC_LAST_E = -1007, /* the last code used either here or in * error-ssl.h */ WC_SPAN2_MIN_CODE_E = -1999, /* Last usable code in span 2 */ diff --git a/wolfssl/wolfcrypt/falcon.h b/wolfssl/wolfcrypt/falcon.h index 904a68c23..dd0079dbe 100644 --- a/wolfssl/wolfcrypt/falcon.h +++ b/wolfssl/wolfcrypt/falcon.h @@ -74,8 +74,8 @@ /* Structs */ struct falcon_key { - bool pubKeySet; - bool prvKeySet; + WC_BITFIELD pubKeySet:1; + WC_BITFIELD prvKeySet:1; byte level; #ifdef WOLF_CRYPTO_CB diff --git a/wolfssl/wolfcrypt/fe_operations.h b/wolfssl/wolfcrypt/fe_operations.h index 4959cf2b8..02490e246 100644 --- a/wolfssl/wolfcrypt/fe_operations.h +++ b/wolfssl/wolfcrypt/fe_operations.h @@ -49,7 +49,8 @@ #endif #if (defined(CURVED25519_ASM_64BIT) || defined(HAVE_ED25519)) && \ - 
!defined(WOLFSSL_CURVE25519_BLINDING) + !defined(WOLFSSL_CURVE25519_BLINDING) && \ + !defined(WOLFSSL_CURVE25519_NOT_USE_ED25519) #undef WOLFSSL_CURVE25519_USE_ED25519 #define WOLFSSL_CURVE25519_USE_ED25519 #endif @@ -133,6 +134,8 @@ WOLFSSL_LOCAL void fe_pow22523(fe out,const fe z); #ifdef CURVED25519_ASM WOLFSSL_LOCAL void fe_cmov_table(fe* r, fe* base, signed char b); + +WOLFSSL_LOCAL void fe_invert_nct(fe r, const fe a); #endif /* CURVED25519_ASM */ #endif /* !CURVE25519_SMALL || !ED25519_SMALL */ diff --git a/wolfssl/wolfcrypt/ge_operations.h b/wolfssl/wolfcrypt/ge_operations.h index 6b3d24405..62074fdf2 100644 --- a/wolfssl/wolfcrypt/ge_operations.h +++ b/wolfssl/wolfcrypt/ge_operations.h @@ -85,6 +85,11 @@ WOLFSSL_LOCAL void sc_reduce(byte* s); WOLFSSL_LOCAL void sc_muladd(byte* s, const byte* a, const byte* b, const byte* c); WOLFSSL_LOCAL void ge_tobytes(unsigned char *s,const ge_p2 *h); +#ifndef ED25519_SMALL +WOLFSSL_LOCAL void ge_tobytes_nct(unsigned char *s,const ge_p2 *h); +#else +#define ge_tobytes_nct ge_tobytes +#endif #ifndef GE_P3_TOBYTES_IMPL #define ge_p3_tobytes(s, h) ge_tobytes((s), (const ge_p2 *)(h)) #else diff --git a/wolfssl/wolfcrypt/include.am b/wolfssl/wolfcrypt/include.am index ff1dc50ca..fd23791b9 100644 --- a/wolfssl/wolfcrypt/include.am +++ b/wolfssl/wolfcrypt/include.am @@ -48,6 +48,7 @@ nobase_include_HEADERS+= \ wolfssl/wolfcrypt/chacha20_poly1305.h \ wolfssl/wolfcrypt/random.h \ wolfssl/wolfcrypt/wolfentropy.h \ + wolfssl/wolfcrypt/rng_bank.h \ wolfssl/wolfcrypt/ripemd.h \ wolfssl/wolfcrypt/rsa.h \ wolfssl/wolfcrypt/rc2.h \ diff --git a/wolfssl/wolfcrypt/mem_track.h b/wolfssl/wolfcrypt/mem_track.h index 8dcfee8b3..9ae999f8f 100644 --- a/wolfssl/wolfcrypt/mem_track.h +++ b/wolfssl/wolfcrypt/mem_track.h @@ -79,7 +79,8 @@ !defined(WOLFSSL_STATIC_MEMORY) #define DO_MEM_STATS -#if (defined(__linux__) && !defined(WOLFSSL_KERNEL_MODE)) || defined(__MACH__) +#if (defined(__linux__) && !defined(WOLFSSL_KERNEL_MODE)) || \ + 
defined(__MACH__) || defined(__ZEPHYR__) #define DO_MEM_LIST #endif diff --git a/wolfssl/wolfcrypt/port/st/stsafe.h b/wolfssl/wolfcrypt/port/st/stsafe.h index 08e39f08e..6e19233e1 100644 --- a/wolfssl/wolfcrypt/port/st/stsafe.h +++ b/wolfssl/wolfcrypt/port/st/stsafe.h @@ -23,6 +23,7 @@ #define _WOLFPORT_STSAFE_H_ #include +#include #include #include @@ -34,22 +35,135 @@ #include #endif -#ifdef WOLFSSL_STSAFEA100 +#ifdef WOLFSSL_STSAFE -/* The wolf STSAFE interface layer */ -/* Please contact wolfSSL for the STSAFE port files */ -#include "stsafe_interface.h" +/* -------------------------------------------------------------------------- */ +/* External Interface Support (Backwards Compatibility) */ +/* -------------------------------------------------------------------------- */ + +/* Define WOLFSSL_STSAFE_INTERFACE_EXTERNAL to use an external stsafe_ + * interface.h file that provides customer-specific implementations. + * This maintains backwards compatibility with older integrations that + * used a separate interface file. + * + * When NOT set (the default): All code is self-contained in stsafe.c using + * the appropriate SDK (STSELib for A120, STSAFE-A1xx SDK for A100/A110). 
+ * + * When defined: Include customer-provided stsafe_interface.h which must define: + * - stsafe_curve_id_t, stsafe_slot_t types + * - STSAFE_ECC_CURVE_P256, STSAFE_ECC_CURVE_P384 macros + * - STSAFE_KEY_SLOT_0, STSAFE_KEY_SLOT_1, STSAFE_KEY_SLOT_EPHEMERAL macros + * - STSAFE_A_OK return code macro + * - STSAFE_MAX_KEY_LEN, STSAFE_MAX_PUBKEY_RAW_LEN, STSAFE_MAX_SIG_LEN macros + * - Function prototypes for interface functions (see stsafe.c) + */ +#ifdef WOLFSSL_STSAFE_INTERFACE_EXTERNAL + #include "stsafe_interface.h" +#else + +/* -------------------------------------------------------------------------- */ +/* STSAFE SDK Type Abstractions */ +/* -------------------------------------------------------------------------- */ + +#ifdef WOLFSSL_STSAFEA120 + /* STSAFE-A120 uses STSELib (open source BSD-3) */ + /* Note: stselib.h is included in stsafe.c to avoid warnings in headers */ + + /* Type mappings for STSELib - using byte for curve ID to avoid + * including full STSELib headers which have strict-prototype warnings */ + typedef byte stsafe_curve_id_t; + typedef byte stsafe_slot_t; + + /* Curve ID mappings - values depend on stse_conf.h settings! + * With only NIST P-256 and P-384 enabled: + * STSE_ECC_KT_NIST_P_256 = 0, STSE_ECC_KT_NIST_P_384 = 1 + * NOTE: If other curves are enabled, these values change! + * + * Compile-time static assertions and runtime checks in stsafe_interface_init() + * verify that these constants match the actual STSE_ECC_KT enum values. 
*/ + #define STSAFE_ECC_CURVE_P256 0 /* STSE_ECC_KT_NIST_P_256 */ + #define STSAFE_ECC_CURVE_P384 1 /* STSE_ECC_KT_NIST_P_384 */ + #define STSAFE_ECC_CURVE_BP256 2 /* STSE_ECC_KT_BP_P_256 */ + #define STSAFE_ECC_CURVE_BP384 3 /* STSE_ECC_KT_BP_P_384 */ + + /* Slot mappings */ + #define STSAFE_KEY_SLOT_0 0 + #define STSAFE_KEY_SLOT_1 1 + #define STSAFE_KEY_SLOT_EPHEMERAL 0xFF + + /* Return codes */ + #define STSAFE_A_OK 0 /* STSE_OK */ + + /* Key usage limits */ + #define STSAFE_PERSISTENT_KEY_USAGE_LIMIT 255 /* Usage limit for persistent keys in slot 1 */ + #define STSAFE_EPHEMERAL_KEY_USAGE_LIMIT 1 /* Usage limit for ephemeral keys in slot 0xFF */ + + /* Hash types - must match stse_hash_algorithm_t values in STSELib */ + #define STSAFE_HASH_SHA256 0 /* STSE_SHA_256 */ + #define STSAFE_HASH_SHA384 1 /* STSE_SHA_384 */ + +#else /* WOLFSSL_STSAFEA100 */ + /* STSAFE-A100/A110 uses legacy ST STSAFE-A1xx SDK */ + /* User must provide path to STSAFE-A1xx SDK headers */ + #include + + /* Type mappings for legacy SDK */ + typedef StSafeA_CurveId stsafe_curve_id_t; + typedef StSafeA_KeySlotNumber stsafe_slot_t; + + /* Curve ID mappings */ + #define STSAFE_ECC_CURVE_P256 STSAFE_A_NIST_P_256 + #define STSAFE_ECC_CURVE_P384 STSAFE_A_NIST_P_384 + #define STSAFE_ECC_CURVE_BP256 STSAFE_A_BRAINPOOL_P_256 + #define STSAFE_ECC_CURVE_BP384 STSAFE_A_BRAINPOOL_P_384 + + /* Slot mappings */ + #define STSAFE_KEY_SLOT_0 STSAFE_A_SLOT_0 + #define STSAFE_KEY_SLOT_1 STSAFE_A_SLOT_1 + #define STSAFE_KEY_SLOT_EPHEMERAL STSAFE_A_SLOT_EPHEMERAL + + /* Return codes - STSAFE_A_OK already defined in SDK */ + + /* Hash types */ + #define STSAFE_HASH_SHA256 STSAFE_A_SHA_256 + #define STSAFE_HASH_SHA384 STSAFE_A_SHA_384 + +#endif /* WOLFSSL_STSAFEA120 */ + +/* -------------------------------------------------------------------------- */ +/* Common Definitions */ +/* -------------------------------------------------------------------------- */ #ifndef STSAFE_MAX_KEY_LEN - #define STSAFE_MAX_KEY_LEN 
((uint32_t)48) /* for up to 384-bit keys */ + #define STSAFE_MAX_KEY_LEN 48 /* for up to 384-bit keys */ #endif #ifndef STSAFE_MAX_PUBKEY_RAW_LEN - #define STSAFE_MAX_PUBKEY_RAW_LEN ((uint32_t)STSAFE_MAX_KEY_LEN * 2) /* x/y */ + #define STSAFE_MAX_PUBKEY_RAW_LEN (STSAFE_MAX_KEY_LEN * 2) /* x/y */ #endif #ifndef STSAFE_MAX_SIG_LEN - #define STSAFE_MAX_SIG_LEN ((uint32_t)STSAFE_MAX_KEY_LEN * 2) /* r/s */ + #define STSAFE_MAX_SIG_LEN (STSAFE_MAX_KEY_LEN * 2) /* r/s */ #endif +/* Default I2C address */ +#ifndef STSAFE_I2C_ADDR + #define STSAFE_I2C_ADDR 0x20 +#endif + +/* Default curve mode (for signing operations) */ +#ifndef STSAFE_DEFAULT_CURVE + #define STSAFE_DEFAULT_CURVE STSAFE_ECC_CURVE_P256 +#endif + +#endif /* !WOLFSSL_STSAFE_INTERFACE_EXTERNAL */ + +/* -------------------------------------------------------------------------- */ +/* Public API Functions */ +/* -------------------------------------------------------------------------- */ + +/* Initialize STSAFE device - called automatically by wolfCrypt_Init() */ +WOLFSSL_API int stsafe_interface_init(void); + +/* Load device certificate from STSAFE secure storage */ WOLFSSL_API int SSL_STSAFE_LoadDeviceCertificate(byte** pRawCertificate, word32* pRawCertificateLen); @@ -94,6 +208,6 @@ WOLFSSL_API int wolfSSL_STSAFE_CryptoDevCb(int devId, wc_CryptoInfo* info, #endif /* WOLF_CRYPTO_CB */ -#endif /* WOLFSSL_STSAFEA100 */ +#endif /* WOLFSSL_STSAFE */ #endif /* _WOLFPORT_STSAFE_H_ */ diff --git a/wolfssl/wolfcrypt/random.h b/wolfssl/wolfcrypt/random.h index 29ad26d3e..6381c2f7a 100644 --- a/wolfssl/wolfcrypt/random.h +++ b/wolfssl/wolfcrypt/random.h @@ -156,6 +156,10 @@ struct OS_Seed { ProviderHandle handle; #else int fd; + #if defined(WOLFSSL_KEEP_RNG_SEED_FD_OPEN) + WC_BITFIELD seedFdOpen:1; + WC_BITFIELD keepSeedFdOpen:1; + #endif #endif #if defined(WOLF_CRYPTO_CB) int devId; @@ -243,12 +247,6 @@ struct OS_Seed { #define RNG_HEALTH_TEST_CHECK_SIZE (WC_SHA256_DIGEST_SIZE * 4) -/* RNG health states */ -#define 
WC_DRBG_NOT_INIT 0 -#define WC_DRBG_OK 1 -#define WC_DRBG_FAILED 2 -#define WC_DRBG_CONT_FAILED 3 - struct DRBG_internal { #ifdef WORD64_AVAILABLE word64 reseedCtr; @@ -267,26 +265,61 @@ struct DRBG_internal { byte digest_scratch[WC_SHA256_DIGEST_SIZE]; #endif }; +#endif /* HAVE_HASHDRBG */ + +/* RNG health states */ +#define WC_DRBG_NOT_INIT 0 +#define WC_DRBG_OK 1 +#define WC_DRBG_FAILED 2 +#define WC_DRBG_CONT_FAILED 3 +#ifdef WC_RNG_BANK_SUPPORT + #define WC_DRBG_BANKREF 4 /* Marks the WC_RNG as a ref to a wc_rng_bank, + * with no usable DRBG of its own. + */ #endif /* RNG context */ struct WC_RNG { struct OS_Seed seed; void* heap; -#ifdef HAVE_HASHDRBG - /* Hash-based Deterministic Random Bit Generator */ - struct DRBG* drbg; -#if defined(WOLFSSL_NO_MALLOC) && !defined(WOLFSSL_STATIC_MEMORY) - struct DRBG_internal drbg_data; -#endif -#ifdef WOLFSSL_SMALL_STACK_CACHE - /* Scratch buffer slots -- everything is preallocated by _InitRng(). */ - struct DRBG_internal *drbg_scratch; - byte *health_check_scratch; - byte *newSeed_buf; -#endif byte status; -#endif /* HAVE_HASHDRBG */ + +#if defined(WC_RNG_BANK_SUPPORT) || defined(HAVE_HASHDRBG) + +#ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES + union { +#endif + + #ifdef WC_RNG_BANK_SUPPORT + struct wc_rng_bank *bankref; + #endif + + #ifdef HAVE_HASHDRBG + #ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES + struct { + #endif + /* Hash-based Deterministic Random Bit Generator */ + struct DRBG* drbg; + #if defined(WOLFSSL_NO_MALLOC) && !defined(WOLFSSL_STATIC_MEMORY) + struct DRBG_internal drbg_data; + #endif + #ifdef WOLFSSL_SMALL_STACK_CACHE + /* Scratch buffers -- all preallocated by _InitRng(). 
*/ + struct DRBG_internal *drbg_scratch; + byte *health_check_scratch; + byte *newSeed_buf; + #endif + #ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES + }; + #endif + #endif /* HAVE_HASHDRBG */ + +#ifdef HAVE_ANONYMOUS_INLINE_AGGREGATES + }; +#endif + +#endif /* WC_RNG_BANK_SUPPORT || HAVE_HASHDRBG */ + #if defined(HAVE_GETPID) && !defined(WOLFSSL_NO_GETPID) pid_t pid; #endif diff --git a/wolfssl/wolfcrypt/rng_bank.h b/wolfssl/wolfcrypt/rng_bank.h new file mode 100644 index 000000000..f8b502620 --- /dev/null +++ b/wolfssl/wolfcrypt/rng_bank.h @@ -0,0 +1,168 @@ +/* rng_bank.h + * + * Copyright (C) 2006-2026 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +/*! + \file wolfssl/wolfcrypt/rng_bank.h +*/ + +/* This facility allocates and manages a bank of persistent RNGs with thread + * safety and provisions for automatic affinity. It is typically used in kernel + * applications. + */ + +#ifndef WOLF_CRYPT_RNG_BANK_H +#define WOLF_CRYPT_RNG_BANK_H + +#include + +#ifdef WC_RNG_BANK_SUPPORT + +#ifdef WC_NO_RNG + #error WC_RNG_BANK_SUPPORT requires RNG support. 
+#endif + +#define WC_RNG_BANK_FLAG_NONE 0 +#define WC_RNG_BANK_FLAG_INITED (1<<0) +#define WC_RNG_BANK_FLAG_CAN_FAIL_OVER_INST (1<<1) +#define WC_RNG_BANK_FLAG_CAN_WAIT (1<<2) +#define WC_RNG_BANK_FLAG_NO_VECTOR_OPS (1<<3) +#define WC_RNG_BANK_FLAG_PREFER_AFFINITY_INST (1<<4) +#define WC_RNG_BANK_FLAG_AFFINITY_LOCK (1<<5) + +#define WC_RNG_BANK_INST_LOCK_FREE 0 +#define WC_RNG_BANK_INST_LOCK_HELD (1<<0) +#define WC_RNG_BANK_INST_LOCK_AFFINITY_LOCKED (1<<1) +#define WC_RNG_BANK_INST_LOCK_VEC_OPS_INH (1<<2) + +typedef int (*wc_affinity_lock_fn_t)(void *arg); +typedef int (*wc_affinity_get_id_fn_t)(void *arg, int *id); +typedef int (*wc_affinity_unlock_fn_t)(void *arg); + +struct wc_rng_bank_inst { +#ifdef WOLFSSL_NO_ATOMICS + int lock; +#else + wolfSSL_Atomic_Int lock; +#endif + WC_RNG rng; +}; + +#if defined(WOLFSSL_NO_MALLOC) && defined(NO_WOLFSSL_MEMORY) && \ + !defined(WC_RNG_BANK_STATIC) + #define WC_RNG_BANK_STATIC +#endif + +#ifndef WC_RNG_BANK_STATIC_SIZE + #define WC_RNG_BANK_STATIC_SIZE 4 +#endif + +struct wc_rng_bank { + wolfSSL_Ref refcount; + void *heap; + word32 flags; + wc_affinity_lock_fn_t affinity_lock_cb; + wc_affinity_get_id_fn_t affinity_get_id_cb; + wc_affinity_unlock_fn_t affinity_unlock_cb; + void *cb_arg; /* if mutable, caller is responsible for thread safety. 
*/ + int n_rngs; +#ifdef WC_RNG_BANK_STATIC + struct wc_rng_bank_inst rngs[WC_RNG_BANK_STATIC_SIZE]; +#else + struct wc_rng_bank_inst *rngs; /* typically one per CPU ID, plus a few */ +#endif +}; + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_bank_new( + struct wc_rng_bank **ctx, + int n_rngs, + word32 flags, + int timeout_secs, + void *heap, + int devId); +#endif + +WOLFSSL_API int wc_rng_bank_init( + struct wc_rng_bank *ctx, + int n_rngs, + word32 flags, + int timeout_secs, + void *heap, + int devId); + +WOLFSSL_API int wc_rng_bank_set_affinity_handlers( + struct wc_rng_bank *ctx, + wc_affinity_lock_fn_t affinity_lock_cb, + wc_affinity_get_id_fn_t affinity_get_id_cb, + wc_affinity_unlock_fn_t affinity_unlock_cb, + void *cb_arg); + +WOLFSSL_API int wc_rng_bank_fini(struct wc_rng_bank *ctx); + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_bank_free(struct wc_rng_bank **ctx); +#endif + +WOLFSSL_API int wc_rng_bank_checkout( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst, + int preferred_inst_offset, + int timeout_secs, + word32 flags); + +WOLFSSL_LOCAL int wc_local_rng_bank_checkout_for_bankref( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst); + +WOLFSSL_API int wc_rng_bank_checkin( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst **rng_inst); + +WOLFSSL_API int wc_rng_bank_inst_reinit( + struct wc_rng_bank *bank, + struct wc_rng_bank_inst *rng_inst, + int timeout_secs, + word32 flags); + +WOLFSSL_API int wc_rng_bank_seed(struct wc_rng_bank *bank, + const byte* seed, word32 seedSz, + int timeout_secs, + word32 flags); + +WOLFSSL_API int wc_rng_bank_reseed(struct wc_rng_bank *bank, + int timeout_secs, + word32 flags); + +#ifdef WC_DRBG_BANKREF +WOLFSSL_API int wc_InitRng_BankRef(struct wc_rng_bank *bank, WC_RNG *rng); + +WOLFSSL_API int wc_BankRef_Release(WC_RNG *rng); + +#ifndef WC_RNG_BANK_STATIC +WOLFSSL_API int wc_rng_new_bankref(struct wc_rng_bank *bank, WC_RNG **rng); +#endif +#endif /* WC_DRBG_BANKREF */ + 
+#define WC_RNG_BANK_INST_TO_RNG(rng_inst) (&(rng_inst)->rng) + +#endif /* WC_RNG_BANK_SUPPORT */ + +#endif /* WOLF_CRYPT_RNG_BANK_H */ diff --git a/wolfssl/wolfcrypt/settings.h b/wolfssl/wolfcrypt/settings.h index fdb526afa..58ae0c41a 100644 --- a/wolfssl/wolfcrypt/settings.h +++ b/wolfssl/wolfcrypt/settings.h @@ -369,12 +369,22 @@ #warning "No configuration for wolfSSL detected, check header order" #endif -/* Ensure WOLFSSL_DEBUG_CERTS is always set when DEBUG_WOLFSSL is enabled */ -#ifdef DEBUG_WOLFSSL - #undef WOLFSSL_DEBUG_CERTS +/* Ensure WOLFSSL_DEBUG_CERTS is set when DEBUG_WOLFSSL is enabled, unless + * expressly requested otherwise. + */ +#if defined(DEBUG_WOLFSSL) && !defined(WOLFSSL_NO_DEBUG_CERTS) && \ + !defined(WOLFSSL_DEBUG_CERTS) #define WOLFSSL_DEBUG_CERTS #endif +/* Ensure WC_VERBOSE_RNG is set when DEBUG_WOLFSSL is enabled, unless expressly + * requested otherwise. Relies on a working WOLFSSL_DEBUG_PRINTF. + */ +#if defined(DEBUG_WOLFSSL) && defined(WOLFSSL_DEBUG_PRINTF) && \ + !defined(WC_NO_VERBOSE_RNG) && !defined(WC_VERBOSE_RNG) + #define WC_VERBOSE_RNG +#endif + #include /*------------------------------------------------------------*/ @@ -431,6 +441,8 @@ (WOLFSSL_FIPS_VERSION2_CODE >= WOLFSSL_MAKE_FIPS_VERSION(major,minor)) #define FIPS_VERSION_GT(major,minor) \ (WOLFSSL_FIPS_VERSION2_CODE > WOLFSSL_MAKE_FIPS_VERSION(major,minor)) +#define FIPS_VERSION_NE(major,minor) \ + (WOLFSSL_FIPS_VERSION2_CODE != WOLFSSL_MAKE_FIPS_VERSION(major,minor)) #define FIPS_VERSION3_LT(major,minor,patch) \ (WOLFSSL_FIPS_VERSION_CODE < WOLFSSL_MAKE_FIPS_VERSION3(major,minor,patch)) @@ -2137,6 +2149,12 @@ extern void uITRON4_free(void *p) ; #endif /* WOLFSSL_MAXQ1065 || WOLFSSL_MAXQ108X */ +/* Combined STSAFE macro - enables when either A100 or A120 is defined */ +#if defined(WOLFSSL_STSAFEA100) || defined(WOLFSSL_STSAFEA120) + #undef WOLFSSL_STSAFE + #define WOLFSSL_STSAFE +#endif + #if defined(WOLFSSL_STM32F2) || defined(WOLFSSL_STM32F4) || \ 
defined(WOLFSSL_STM32F7) || defined(WOLFSSL_STM32F1) || \ defined(WOLFSSL_STM32L4) || defined(WOLFSSL_STM32L5) || \ @@ -2592,6 +2610,10 @@ extern void uITRON4_free(void *p) ; #endif /*(WOLFSSL_APACHE_MYNEWT)*/ #ifdef WOLFSSL_ZEPHYR + #ifdef __cplusplus + } /* extern "C" */ + #endif + #include #if KERNEL_VERSION_NUMBER >= 0x30100 #include @@ -2604,6 +2626,10 @@ extern void uITRON4_free(void *p) ; #endif #include + #ifdef __cplusplus + extern "C" { + #endif + #define WOLFSSL_DH_CONST #define NO_WRITEV #define NO_STDLIB_ISASCII @@ -2942,8 +2968,7 @@ extern void uITRON4_free(void *p) ; /* Determine when mp_add_d is required. */ #if !defined(NO_PWDBASED) || defined(WOLFSSL_KEY_GEN) || !defined(NO_DH) || \ !defined(NO_DSA) || defined(HAVE_ECC) || \ - (!defined(NO_RSA) && !defined(WOLFSSL_RSA_VERIFY_ONLY)) || \ - defined(OPENSSL_EXTRA) + !defined(NO_RSA) || defined(OPENSSL_EXTRA) #define WOLFSSL_SP_ADD_D #endif @@ -3324,7 +3349,8 @@ extern void uITRON4_free(void *p) ; #endif /* if desktop type system and fastmath increase default max bits */ -#if defined(WOLFSSL_X86_64_BUILD) || defined(WOLFSSL_AARCH64_BUILD) +#if defined(WOLFSSL_X86_64_BUILD) || defined(WOLFSSL_AARCH64_BUILD) || \ + defined(OPENSSL_EXTRA) #if defined(USE_FAST_MATH) && !defined(FP_MAX_BITS) #if MIN_FFDHE_FP_MAX_BITS <= 8192 #define FP_MAX_BITS 8192 @@ -3681,9 +3707,6 @@ extern void uITRON4_free(void *p) ; #ifndef WOLFSSL_KERNEL_MODE #define WOLFSSL_KERNEL_MODE #endif - #ifndef WOLFSSL_API_PREFIX_MAP - #define WOLFSSL_API_PREFIX_MAP - #endif #if defined(WOLFSSL_LINUXKM_VERBOSE_DEBUG) && \ !defined(WOLFSSL_KERNEL_VERBOSE_DEBUG) #define WOLFSSL_KERNEL_VERBOSE_DEBUG @@ -3716,21 +3739,11 @@ extern void uITRON4_free(void *p) ; #ifndef USE_WOLF_STRTOK #define USE_WOLF_STRTOK #endif - #ifndef WOLFSSL_OLD_PRIME_CHECK - #define WOLFSSL_OLD_PRIME_CHECK - #endif #ifdef LINUXKM_LKCAPI_REGISTER #ifndef WC_TEST_EXPORT_SUBTESTS #define WC_TEST_EXPORT_SUBTESTS #endif #endif - #ifndef WOLFSSL_TEST_SUBROUTINE - #ifdef 
WC_TEST_EXPORT_SUBTESTS - #define WOLFSSL_TEST_SUBROUTINE - #else - #define WOLFSSL_TEST_SUBROUTINE static - #endif - #endif #undef HAVE_PTHREAD /* linuxkm uses linux/string.h, included by linuxkm_wc_port.h. */ #undef HAVE_STRINGS_H @@ -3791,21 +3804,6 @@ extern void uITRON4_free(void *p) ; #undef HAVE_PUBLIC_FFDHE #endif - #ifndef NO_OLD_WC_NAMES - #define NO_OLD_WC_NAMES - #endif - #ifndef NO_OLD_SHA_NAMES - #define NO_OLD_SHA_NAMES - #endif - #ifndef NO_OLD_MD5_NAME - #define NO_OLD_MD5_NAME - #endif - #ifndef OPENSSL_COEXIST - #define OPENSSL_COEXIST - #endif - #ifndef NO_OLD_SSL_NAMES - #define NO_OLD_SSL_NAMES - #endif #undef WOLFSSL_MIN_AUTH_TAG_SZ #define WOLFSSL_MIN_AUTH_TAG_SZ 4 @@ -3815,22 +3813,10 @@ extern void uITRON4_free(void *p) ; */ #define WOLFSSL_ASN_INT_LEAD_0_ANY #endif - - #if !defined(WC_RESEED_INTERVAL) && defined(LINUXKM_LKCAPI_REGISTER) - /* If installing handlers, use the maximum reseed interval allowed by - * NIST SP 800-90A Rev. 1, to avoid unnecessary delays in DRBG - * generation. - */ - #if defined(HAVE_FIPS) && FIPS_VERSION_LT(6,0) - #define WC_RESEED_INTERVAL UINT_MAX - #else - #define WC_RESEED_INTERVAL (((word64)1UL)<<48UL) - #endif - #endif #if defined(__aarch64__) && !defined(WOLFSSL_AARCH64_PRIVILEGE_MODE) #define WOLFSSL_AARCH64_PRIVILEGE_MODE #endif -#endif +#endif /* WOLFSSL_LINUXKM */ /* FreeBSD Kernel Module */ #ifdef WOLFSSL_BSDKM @@ -3870,16 +3856,6 @@ extern void uITRON4_free(void *p) ; #ifndef USE_WOLF_STRTOK #define USE_WOLF_STRTOK #endif - #ifndef WOLFSSL_OLD_PRIME_CHECK - #define WOLFSSL_OLD_PRIME_CHECK - #endif - #ifndef WOLFSSL_TEST_SUBROUTINE - #ifndef NO_CRYPT_TEST - #define WOLFSSL_TEST_SUBROUTINE - #else - #define WOLFSSL_TEST_SUBROUTINE static - #endif - #endif /* bsdkm uses kernel headers, included in bsdkm_wc_port.h. 
*/ #undef HAVE_PTHREAD #undef HAVE_STRINGS_H @@ -3910,6 +3886,19 @@ extern void uITRON4_free(void *p) ; #define WOLFSSL_SP_DIV_WORD_HALF #endif + /* FreeBSD kernel defines its own min, max functions in sys/libkern.h */ + #undef WOLFSSL_HAVE_MIN + #define WOLFSSL_HAVE_MIN + + #undef WOLFSSL_HAVE_MAX + #define WOLFSSL_HAVE_MAX +#endif /* WOLFSSL_BSDKM */ + +/* Common setup for kernel mode builds */ +#ifdef WOLFSSL_KERNEL_MODE + #ifndef WOLFSSL_API_PREFIX_MAP + #define WOLFSSL_API_PREFIX_MAP + #endif #ifndef NO_OLD_WC_NAMES #define NO_OLD_WC_NAMES #endif @@ -3926,13 +3915,36 @@ extern void uITRON4_free(void *p) ; #define NO_OLD_SSL_NAMES #endif - /* FreeBSD kernel defines its own min, max functions in sys/libkern.h */ - #undef WOLFSSL_HAVE_MIN - #define WOLFSSL_HAVE_MIN + #ifndef WOLFSSL_TEST_SUBROUTINE + #ifdef WC_TEST_EXPORT_SUBTESTS + #define WOLFSSL_TEST_SUBROUTINE + #else + #define WOLFSSL_TEST_SUBROUTINE static + #endif + #endif - #undef WOLFSSL_HAVE_MAX - #define WOLFSSL_HAVE_MAX -#endif + #if !defined(WOLFSSL_OLD_PRIME_CHECK) && \ + !defined(WOLFSSL_NEW_PRIME_CHECK) && !defined(HAVE_FIPS) + #define WOLFSSL_OLD_PRIME_CHECK + #endif + + #ifndef WC_RESEED_INTERVAL + /* In kernel mode, use the maximum reseed interval allowed by + * NIST SP 800-90A Rev. 1, to avoid unnecessary delays in DRBG + * generation. 
+ */ + #if defined(HAVE_FIPS) && \ + FIPS_VERSION_LT(6,0) && FIPS_VERSION3_NE(5,2,4) + #define WC_RESEED_INTERVAL UINT_MAX + #else + #define WC_RESEED_INTERVAL (((word64)1UL)<<48UL) + #endif + #endif + + #if !defined(WC_NO_VERBOSE_RNG) && !defined(WC_VERBOSE_RNG) + #define WC_VERBOSE_RNG + #endif +#endif /* WOLFSSL_KERNEL_MODE */ #if defined(WC_SYM_RELOC_TABLES) && defined(HAVE_FIPS) && \ !defined(WC_PIE_RELOC_TABLES) diff --git a/wolfssl/wolfcrypt/sphincs.h b/wolfssl/wolfcrypt/sphincs.h index 06928aa22..59fac81fe 100644 --- a/wolfssl/wolfcrypt/sphincs.h +++ b/wolfssl/wolfcrypt/sphincs.h @@ -83,8 +83,8 @@ /* Structs */ struct sphincs_key { - bool pubKeySet; - bool prvKeySet; + WC_BITFIELD pubKeySet:1; + WC_BITFIELD prvKeySet:1; byte level; /* 1,3 or 5 */ byte optim; /* FAST_VARIANT or SMALL_VARIANT */ byte p[SPHINCS_MAX_PUB_KEY_SIZE]; diff --git a/wolfssl/wolfcrypt/types.h b/wolfssl/wolfcrypt/types.h index feded367e..2c048d44b 100644 --- a/wolfssl/wolfcrypt/types.h +++ b/wolfssl/wolfcrypt/types.h @@ -125,7 +125,7 @@ typedef const char wcchar[]; /* if a version is available, pivot on the version, otherwise guess it's * disallowed, subject to override. */ - #if !defined(WOLF_C89) && (!defined(__STDC__) \ + #if !defined(WOLF_C89) && !defined(_MSC_VER) && (!defined(__STDC__) \ || (!defined(__STDC_VERSION__) && !defined(__cplusplus)) \ || (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201101L)) \ || (defined(__cplusplus) && (__cplusplus >= 201103L))) @@ -307,8 +307,11 @@ typedef const char wcchar[]; #endif #if defined(WORD64_AVAILABLE) && !defined(WC_16BIT_CPU) - /* These platforms have 64-bit CPU registers. */ - #if (defined(__alpha__) || defined(__ia64__) || defined(_ARCH_PPC64) || \ + #if defined(WC_64BIT_CPU) + /* explicitly configured for 64 bit. */ + #elif defined(WC_32BIT_CPU) + /* explicitly configured for 32 bit. 
*/ + #elif (defined(__alpha__) || defined(__ia64__) || defined(_ARCH_PPC64) || \ (defined(__mips64) && \ ((defined(_ABI64) && (_MIPS_SIM == _ABI64)) || \ (defined(_ABIO64) && (_MIPS_SIM == _ABIO64)))) || \ @@ -317,6 +320,7 @@ typedef const char wcchar[]; (defined(__riscv_xlen) && (__riscv_xlen == 64)) || defined(_M_ARM64) || \ defined(__aarch64__) || defined(__ppc64__) || \ (defined(__DCC__) && (defined(__LP64) || defined(__LP64__))) + /* The above platforms have 64-bit CPU registers. */ #define WC_64BIT_CPU #elif (defined(sun) || defined(__sun)) && \ (defined(LP64) || defined(_LP64)) @@ -876,6 +880,13 @@ enum { ONFAIL; \ } \ } while (0) + #define WC_CALLOC_VAR_EX(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP, TY, ONFAIL)\ + do { \ + WC_ALLOC_VAR_EX(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP, TY, ONFAIL);\ + if ((VAR_NAME) != NULL) { \ + XMEMSET(VAR_NAME, 0, sizeof(VAR_TYPE) * (VAR_SIZE)); \ + } \ + } while (0) #define WC_CALLOC_VAR(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP) \ do { \ WC_ALLOC_VAR(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP); \ @@ -905,7 +916,9 @@ enum { WC_DO_NOTHING #define WC_VAR_OK(VAR_NAME) 1 #define WC_CALLOC_VAR(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP) \ - XMEMSET(VAR_NAME, 0, sizeof(var)) + XMEMSET(VAR_NAME, 0, sizeof(VAR_TYPE)) + #define WC_CALLOC_VAR_EX(VAR_NAME, VAR_TYPE, VAR_SIZE, HEAP, TY, ONFAIL)\ + XMEMSET(VAR_NAME, 0, sizeof(VAR_TYPE)) #define WC_FREE_VAR(VAR_NAME, HEAP) WC_DO_NOTHING \ /* nothing to free, its stack */ #define WC_FREE_VAR_EX(VAR_NAME, HEAP, TYPE) WC_DO_NOTHING diff --git a/wolfssl/wolfcrypt/wc_mlkem.h b/wolfssl/wolfcrypt/wc_mlkem.h index 91e015f36..460b13ee3 100644 --- a/wolfssl/wolfcrypt/wc_mlkem.h +++ b/wolfssl/wolfcrypt/wc_mlkem.h @@ -238,6 +238,8 @@ WOLFSSL_LOCAL void mlkem_from_bytes(sword16* p, const byte* b, int k); WOLFSSL_LOCAL void mlkem_to_bytes(byte* b, sword16* p, int k); +WOLFSSL_LOCAL +int mlkem_check_public(sword16* p, int k); #ifdef USE_INTEL_SPEEDUP WOLFSSL_LOCAL diff --git a/wolfssl/wolfcrypt/wc_port.h b/wolfssl/wolfcrypt/wc_port.h index 
d9d97f308..384edab03 100644 --- a/wolfssl/wolfcrypt/wc_port.h +++ b/wolfssl/wolfcrypt/wc_port.h @@ -283,8 +283,6 @@ #elif defined(WOLFSSL_APACHE_MYNEWT) /* do nothing */ #elif defined(WOLFSSL_ZEPHYR) - /* Zephyr SDK can use a cpp compiler which will cause - * problems with extern "C" linkage if not handled */ #ifdef __cplusplus } /* extern "C" */ #endif @@ -694,13 +692,15 @@ typedef struct wolfSSL_RefWithMutex { #endif int count; } wolfSSL_RefWithMutex; - +#define wolfSSL_RefWithMutexCur(ref) ((ref).count) #if defined(WOLFSSL_ATOMIC_OPS) && !defined(SINGLE_THREADED) typedef struct wolfSSL_Ref { wolfSSL_Atomic_Int count; } wolfSSL_Ref; +#define wolfSSL_RefCur(ref) WOLFSSL_ATOMIC_LOAD((ref).count) #else typedef struct wolfSSL_RefWithMutex wolfSSL_Ref; +#define wolfSSL_RefCur(ref) wolfSSL_RefWithMutexCur(ref) #endif #if defined(SINGLE_THREADED) || defined(WOLFSSL_ATOMIC_OPS) @@ -710,7 +710,10 @@ typedef struct wolfSSL_RefWithMutex wolfSSL_Ref; wolfSSL_Atomic_Int_Init(&(ref)->count, 1); \ *(err) = 0; \ } while(0) -#define wolfSSL_RefFree(ref) WC_DO_NOTHING +#define wolfSSL_RefFree(ref) \ + do { \ + wolfSSL_Atomic_Int_Init(&(ref)->count, 0); \ + } while(0) #define wolfSSL_RefInc(ref, err) \ do { \ (void)wolfSSL_Atomic_Int_FetchAdd(&(ref)->count, 1); \ @@ -989,8 +992,16 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #define XFGETS(b,s,f) -2 /* Not ported yet */ #elif defined(WOLFSSL_ZEPHYR) + #ifdef __cplusplus + } /* extern "C" */ + #endif + #include + #ifdef __cplusplus + extern "C" { + #endif + #define XFILE struct fs_file_t* /* These are our wrappers for opening and closing files to @@ -1010,6 +1021,7 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #define XSEEK_SET FS_SEEK_SET #define XSEEK_END FS_SEEK_END #define XBADFILE NULL + #define XBADFD (-1) #define XFGETS(b,s,f) -2 /* Not ported yet */ #define XSTAT fs_stat @@ -1119,6 +1131,7 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #define XSEEK_SET SEEK_SET #define XSEEK_END SEEK_END 
#define XBADFILE NULL + #define XBADFD (-1) #define XFGETS fgets #define XFPRINTF fprintf #define XFFLUSH fflush @@ -1474,6 +1487,10 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #define USE_WOLF_TIME_T #elif defined(WOLFSSL_ZEPHYR) + #ifdef __cplusplus + } /* extern "C" */ + #endif + #include #ifndef _POSIX_C_SOURCE #if KERNEL_VERSION_NUMBER >= 0x30100 @@ -1493,6 +1510,10 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #endif #endif + #ifdef __cplusplus + extern "C" { + #endif + time_t z_time(time_t *timer); #define XTIME(tl) z_time((tl)) @@ -1586,7 +1607,7 @@ WOLFSSL_ABI WOLFSSL_API int wolfCrypt_Cleanup(void); #endif #if !defined(XVALIDATE_DATE) && !defined(HAVE_VALIDATE_DATE) #define USE_WOLF_VALIDDATE - #define XVALIDATE_DATE(d, f, t) wc_ValidateDate((d), (f), (t)) + #define XVALIDATE_DATE(d, f, t, l) wc_ValidateDate((d), (f), (t), (l)) #endif /* wolf struct tm and time_t */ diff --git a/wolfssl/wolfio.h b/wolfssl/wolfio.h index 2ddd830ef..8430fdf44 100644 --- a/wolfssl/wolfio.h +++ b/wolfssl/wolfio.h @@ -176,6 +176,10 @@ #include #include #elif defined(WOLFSSL_ZEPHYR) + #ifdef __cplusplus + } /* extern "C" */ + #endif + #include #if KERNEL_VERSION_NUMBER >= 0x30100 #include @@ -188,6 +192,10 @@ #include #endif #endif + + #ifdef __cplusplus + extern "C" { + #endif #elif defined(MICROCHIP_PIC32) #include #elif defined(HAVE_NETX) @@ -533,7 +541,11 @@ typedef struct hostent HOSTENT; #endif /* HAVE_SOCKADDR */ - #if defined(HAVE_GETADDRINFO) + #if defined(WOLFSSL_ZEPHYR) + typedef struct zsock_addrinfo ADDRINFO; + #define getaddrinfo zsock_getaddrinfo + #define freeaddrinfo zsock_freeaddrinfo + #elif defined(HAVE_GETADDRINFO) typedef struct addrinfo ADDRINFO; #endif #endif /* WOLFSSL_NO_SOCK */ @@ -573,6 +585,10 @@ union WOLFSSL_BIO_ADDR { typedef union WOLFSSL_BIO_ADDR WOLFSSL_BIO_ADDR; +WOLFSSL_API WOLFSSL_BIO_ADDR *wolfSSL_BIO_ADDR_new(void); +WOLFSSL_API void wolfSSL_BIO_ADDR_free(WOLFSSL_BIO_ADDR *addr); +WOLFSSL_API void 
wolfSSL_BIO_ADDR_clear(WOLFSSL_BIO_ADDR *addr); + #if defined(WOLFSSL_DTLS) && defined(OPENSSL_EXTRA) WOLFSSL_API int wolfIO_SendTo(SOCKET_T sd, WOLFSSL_BIO_ADDR *addr, char *buf, int sz, int wrFlags); WOLFSSL_API int wolfIO_RecvFrom(SOCKET_T sd, WOLFSSL_BIO_ADDR *addr, char *buf, int sz, int rdFlags); @@ -994,6 +1010,8 @@ WOLFSSL_API void wolfSSL_SetIOWriteFlags(WOLFSSL* ssl, int flags); #endif #elif defined(FREESCALE_MQX) #define XINET_PTON(a,b,c,d) inet_pton((a),(b),(c),(d)) + #elif defined(WOLFSSL_ZEPHYR) + #define XINET_PTON(a,b,c) zsock_inet_pton((a),(b),(c)) #else #define XINET_PTON(a,b,c) inet_pton((a),(b),(c)) #endif diff --git a/wrapper/rust/include.am b/wrapper/rust/include.am index 5b5338e31..3d2ce03d6 100644 --- a/wrapper/rust/include.am +++ b/wrapper/rust/include.am @@ -4,6 +4,7 @@ EXTRA_DIST += wrapper/rust/Makefile EXTRA_DIST += wrapper/rust/README.md +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/CHANGELOG.md EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/Cargo.lock EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/Cargo.toml EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/Makefile @@ -11,7 +12,10 @@ EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/README.md EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/build.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/headers.h EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/aes.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/blake2.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/chacha20_poly1305.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/cmac.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/curve25519.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/dh.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/ecc.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/ed25519.rs @@ -26,7 +30,10 @@ EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/rsa.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/sha.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/src/sys.rs EXTRA_DIST += 
wrapper/rust/wolfssl-wolfcrypt/tests/test_aes.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_blake2.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_chacha20_poly1305.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_cmac.rs +EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_curve25519.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_dh.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_ecc.rs EXTRA_DIST += wrapper/rust/wolfssl-wolfcrypt/tests/test_ed25519.rs diff --git a/wrapper/rust/wolfssl-wolfcrypt/README.md b/wrapper/rust/wolfssl-wolfcrypt/README.md index 87db93864..20c0c93c8 100644 --- a/wrapper/rust/wolfssl-wolfcrypt/README.md +++ b/wrapper/rust/wolfssl-wolfcrypt/README.md @@ -24,18 +24,21 @@ functionality: * AES * CBC, CCM, CFB, CTR, EAX, ECB, GCM, OFB, XTS + * BLAKE2 * CMAC + * ChaCha20-Poly1305 + * Curve25519 * DH * ECC - * Ed448 * Ed25519 + * Ed448 * HKDF * HMAC * PBKDF2 * PKCS #12 PBKDF * PRF - * RSA * RNG + * RSA * SHA * SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA3-224, SHA3-256, SHA3-384, SHA3-512, SHAKE128, SHAKE256 diff --git a/wrapper/rust/wolfssl-wolfcrypt/build.rs b/wrapper/rust/wolfssl-wolfcrypt/build.rs index a60acf182..608c100e2 100644 --- a/wrapper/rust/wolfssl-wolfcrypt/build.rs +++ b/wrapper/rust/wolfssl-wolfcrypt/build.rs @@ -126,9 +126,23 @@ fn scan_cfg() -> Result<()> { check_cfg(&binding, "wc_AesXtsInit", "aes_xts"); check_cfg(&binding, "wc_AesXtsEncryptInit", "aes_xts_stream"); + /* blake2 */ + check_cfg(&binding, "wc_InitBlake2b", "blake2b"); + check_cfg(&binding, "wc_Blake2bHmac", "blake2b_hmac"); + check_cfg(&binding, "wc_InitBlake2s", "blake2s"); + check_cfg(&binding, "wc_Blake2sHmac", "blake2s_hmac"); + + /* chacha20_poly1305 */ + check_cfg(&binding, "wc_ChaCha20Poly1305_Encrypt", "chacha20_poly1305"); + check_cfg(&binding, "wc_XChaCha20Poly1305_Encrypt", "xchacha20_poly1305"); + /* cmac */ check_cfg(&binding, "wc_InitCmac", "cmac"); + /* curve25519 */ + 
check_cfg(&binding, "wc_curve25519_make_pub", "curve25519"); + check_cfg(&binding, "wc_curve25519_make_pub_blind", "curve25519_blinding"); + /* dh */ check_cfg(&binding, "wc_InitDhKey", "dh"); check_cfg(&binding, "wc_DhGenerateParams", "dh_keygen"); diff --git a/wrapper/rust/wolfssl-wolfcrypt/src/blake2.rs b/wrapper/rust/wolfssl-wolfcrypt/src/blake2.rs new file mode 100644 index 000000000..4b8daeb39 --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/src/blake2.rs @@ -0,0 +1,582 @@ +/* + * Copyright (C) 2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +/*! +This module provides a Rust wrapper for the wolfCrypt library's BLAKE2 +functionality. +*/ + +#![cfg(any(blake2b, blake2s))] + +use crate::sys; +use std::mem::MaybeUninit; + +/// Context for BLAKE2b computation. +#[cfg(blake2b)] +pub struct BLAKE2b { + wc_blake2b: sys::Blake2b, +} + +#[cfg(blake2b)] +impl BLAKE2b { + /// Build a new BLAKE2b instance. + /// + /// # Parameters + /// + /// * `digest_size`: Length of the blake 2 digest to implement. + /// + /// # Returns + /// + /// Returns either Ok(blake2b) containing the BLAKE2b struct instance or + /// Err(e) containing the wolfSSL library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2b; + /// let blake2b = BLAKE2b::new(64).expect("Error with new()"); + /// ``` + pub fn new(digest_size: usize) -> Result { + let digest_size = digest_size as u32; + let mut wc_blake2b: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_InitBlake2b(wc_blake2b.as_mut_ptr(), digest_size) + }; + if rc != 0 { + return Err(rc); + } + let wc_blake2b = unsafe { wc_blake2b.assume_init() }; + let blake2b = BLAKE2b { wc_blake2b }; + Ok(blake2b) + } + + /// Build a new BLAKE2b instance. + /// + /// # Parameters + /// + /// * `digest_size`: Length of the blake 2 digest to implement. + /// * `key`: Key to use for BLAKE2b operation. + /// + /// # Returns + /// + /// Returns either Ok(blake2b) containing the BLAKE2b struct instance or + /// Err(e) containing the wolfSSL library error code value. + /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2b; + /// let key = [42u8; 32]; + /// let blake2b = BLAKE2b::new_with_key(64, &key).expect("Error with new()"); + /// ``` + pub fn new_with_key(digest_size: usize, key: &[u8]) -> Result { + let digest_size = digest_size as u32; + let mut wc_blake2b: MaybeUninit = MaybeUninit::uninit(); + let key_size = key.len() as u32; + let rc = unsafe { + sys::wc_InitBlake2b_WithKey(wc_blake2b.as_mut_ptr(), digest_size, + key.as_ptr(), key_size) + }; + if rc != 0 { + return Err(rc); + } + let wc_blake2b = unsafe { wc_blake2b.assume_init() }; + let blake2b = BLAKE2b { wc_blake2b }; + Ok(blake2b) + } + + /// Update the BLAKE2b hash with the input data. + /// + /// This method may be called several times and then the finalize() + /// method should be called to retrieve the final hash. + /// + /// # Parameters + /// + /// * `data`: Input data to hash. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2b; + /// let mut blake2b = BLAKE2b::new(64).expect("Error with new()"); + /// blake2b.update(&[0u8; 16]).expect("Error with update()"); + /// ``` + pub fn update(&mut self, data: &[u8]) -> Result<(), i32> { + let data_size = data.len() as u32; + let rc = unsafe { + sys::wc_Blake2bUpdate(&mut self.wc_blake2b, data.as_ptr(), data_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute and retrieve the final BLAKE2b hash value. + /// + /// # Parameters + /// + /// * `hash`: Output buffer in which to store the computed BLAKE2b hash + /// value. It can be any length. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2b; + /// let mut blake2b = BLAKE2b::new(64).expect("Error with new()"); + /// blake2b.update(&[0u8; 16]).expect("Error with update()"); + /// let mut hash = [0u8; 64]; + /// blake2b.finalize(&mut hash).expect("Error with finalize()"); + /// ``` + pub fn finalize(&mut self, hash: &mut [u8]) -> Result<(), i32> { + let hash_size = hash.len() as u32; + let rc = unsafe { + sys::wc_Blake2bFinal(&mut self.wc_blake2b, hash.as_mut_ptr(), hash_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} + + +/// Context for HMAC-BLAKE2b computation. +#[cfg(blake2b_hmac)] +pub struct BLAKE2bHmac { + wc_blake2b: sys::Blake2b, +} + +#[cfg(blake2b_hmac)] +impl BLAKE2bHmac { + /// HMAC-BLAKE2b digest size. + pub const DIGEST_SIZE: usize = sys::WC_BLAKE2B_DIGEST_SIZE as usize; + + /// Build a new BLAKE2bHmac instance. + /// + /// # Parameters + /// + /// * `key`: Key to use for HMAC-BLAKE2b computation. + /// + /// # Returns + /// + /// Returns either Ok(hmac_blake2b) or Err(e) containing the wolfSSL + /// library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2bHmac; + /// let key = [42u8, 43, 44]; + /// let hmac_blake2b = BLAKE2bHmac::new(&key).expect("Error with new()"); + /// ``` + pub fn new(key: &[u8]) -> Result { + let mut wc_blake2b: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_Blake2bHmacInit(wc_blake2b.as_mut_ptr(), key.as_ptr(), key.len()) + }; + if rc != 0 { + return Err(rc); + } + let wc_blake2b = unsafe { wc_blake2b.assume_init() }; + let hmac_blake2b = BLAKE2bHmac { wc_blake2b }; + Ok(hmac_blake2b) + } + + /// Update the HMAC-BLAKE2b computation with the input data. + /// + /// This method may be called several times and then the finalize() + /// method should be called to retrieve the final MAC. + /// + /// # Parameters + /// + /// * `data`: Input data to hash. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2bHmac; + /// let key = [42u8, 43, 44]; + /// let mut hmac_blake2b = BLAKE2bHmac::new(&key).expect("Error with new()"); + /// let data = [33u8, 34, 35]; + /// hmac_blake2b.update(&data).expect("Error with update()"); + /// ``` + pub fn update(&mut self, data: &[u8]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2bHmacUpdate(&mut self.wc_blake2b, data.as_ptr(), data.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute and retrieve the final HMAC-BLAKE2b MAC. + /// + /// # Parameters + /// + /// * `key`: Key to use for HMAC-BLAKE2b computation. + /// * `mac`: Output buffer in which to store the computed HMAC-BLAKE2b MAC. + /// It must be 64 bytes long. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2bHmac; + /// let key = [42u8, 43, 44]; + /// let mut hmac_blake2b = BLAKE2bHmac::new(&key).expect("Error with new()"); + /// let data = [33u8, 34, 35]; + /// hmac_blake2b.update(&data).expect("Error with update()"); + /// let mut mac = [0u8; 64]; + /// hmac_blake2b.finalize(&key, &mut mac).expect("Error with finalize()"); + /// ``` + pub fn finalize(&mut self, key: &[u8], mac: &mut [u8; Self::DIGEST_SIZE]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2bHmacFinal(&mut self.wc_blake2b, + key.as_ptr(), key.len(), mac.as_mut_ptr(), mac.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute the HMAC-BLAKE2b message authentication code of the given + /// input data using the given key (one-shot API). + /// + /// # Parameters + /// + /// * `data`: Input data to create MAC from. + /// * `key`: Key to use for MAC creation. + /// * `out`: Buffer in which to store the computed MAC. It must be 64 bytes + /// long. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn hmac(data: &[u8], key: &[u8], out: &mut [u8; Self::DIGEST_SIZE]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2bHmac(data.as_ptr(), data.len(), key.as_ptr(), + key.len(), out.as_mut_ptr(), out.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} + + +/// Context for BLAKE2s computation. +#[cfg(blake2s)] +pub struct BLAKE2s { + wc_blake2s: sys::Blake2s, +} + +#[cfg(blake2s)] +impl BLAKE2s { + /// Build a new BLAKE2s instance. + /// + /// # Parameters + /// + /// * `digest_size`: Length of the blake 2 digest to implement. + /// + /// # Returns + /// + /// Returns either Ok(blake2s) containing the BLAKE2s struct instance or + /// Err(e) containing the wolfSSL library error code value. 
+    ///
+    /// # Example
+    ///
+    /// ```rust
+    /// use wolfssl_wolfcrypt::blake2::BLAKE2s;
+    /// let blake2s = BLAKE2s::new(32).expect("Error with new()");
+    /// ```
+    pub fn new(digest_size: usize) -> Result<Self, i32> {
+        let digest_size = digest_size as u32;
+        let mut wc_blake2s: MaybeUninit<sys::Blake2s> = MaybeUninit::uninit();
+        let rc = unsafe {
+            sys::wc_InitBlake2s(wc_blake2s.as_mut_ptr(), digest_size)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        let wc_blake2s = unsafe { wc_blake2s.assume_init() };
+        let blake2s = BLAKE2s { wc_blake2s };
+        Ok(blake2s)
+    }
+
+    /// Build a new BLAKE2s instance.
+    ///
+    /// # Parameters
+    ///
+    /// * `digest_size`: Length of the BLAKE2s digest to implement.
+    /// * `key`: Key to use for BLAKE2s operation.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(blake2s) containing the BLAKE2s struct instance or
+    /// Err(e) containing the wolfSSL library error code value.
+    ///
+    /// # Example
+    ///
+    /// ```rust
+    /// use wolfssl_wolfcrypt::blake2::BLAKE2s;
+    /// let key = [42u8; 32];
+    /// let blake2s = BLAKE2s::new_with_key(32, &key).expect("Error with new()");
+    /// ```
+    pub fn new_with_key(digest_size: usize, key: &[u8]) -> Result<Self, i32> {
+        let digest_size = digest_size as u32;
+        let mut wc_blake2s: MaybeUninit<sys::Blake2s> = MaybeUninit::uninit();
+        let key_size = key.len() as u32;
+        let rc = unsafe {
+            sys::wc_InitBlake2s_WithKey(wc_blake2s.as_mut_ptr(), digest_size,
+                key.as_ptr(), key_size)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        let wc_blake2s = unsafe { wc_blake2s.assume_init() };
+        let blake2s = BLAKE2s { wc_blake2s };
+        Ok(blake2s)
+    }
+
+    /// Update the BLAKE2s hash with the input data.
+    ///
+    /// This method may be called several times and then the finalize()
+    /// method should be called to retrieve the final hash.
+    ///
+    /// # Parameters
+    ///
+    /// * `data`: Input data to hash.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(()) on success or Err(e) containing the wolfSSL
+    /// library error code value.
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2s; + /// let mut blake2s = BLAKE2s::new(32).expect("Error with new()"); + /// blake2s.update(&[0u8; 16]).expect("Error with update()"); + /// ``` + pub fn update(&mut self, data: &[u8]) -> Result<(), i32> { + let data_size = data.len() as u32; + let rc = unsafe { + sys::wc_Blake2sUpdate(&mut self.wc_blake2s, data.as_ptr(), data_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute and retrieve the final BLAKE2s hash value. + /// + /// # Parameters + /// + /// * `hash`: Output buffer in which to store the computed BLAKE2s hash + /// value. It can be any length. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2s; + /// let mut blake2s = BLAKE2s::new(32).expect("Error with new()"); + /// blake2s.update(&[0u8; 16]).expect("Error with update()"); + /// let mut hash = [0u8; 32]; + /// blake2s.finalize(&mut hash).expect("Error with finalize()"); + /// ``` + pub fn finalize(&mut self, hash: &mut [u8]) -> Result<(), i32> { + let hash_size = hash.len() as u32; + let rc = unsafe { + sys::wc_Blake2sFinal(&mut self.wc_blake2s, hash.as_mut_ptr(), hash_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} + + +/// Context for HMAC-BLAKE2s computation. +#[cfg(blake2s_hmac)] +pub struct BLAKE2sHmac { + wc_blake2s: sys::Blake2s, +} + +#[cfg(blake2s_hmac)] +impl BLAKE2sHmac { + /// HMAC-BLAKE2s digest size. + pub const DIGEST_SIZE: usize = sys::WC_BLAKE2S_DIGEST_SIZE as usize; + + /// Build a new BLAKE2sHmac instance. + /// + /// # Parameters + /// + /// * `key`: Key to use for HMAC-BLAKE2s computation. + /// + /// # Returns + /// + /// Returns either Ok(hmac_blake2s) or Err(e) containing the wolfSSL + /// library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2sHmac; + /// let key = [42u8, 43, 44]; + /// let hmac_blake2s = BLAKE2sHmac::new(&key).expect("Error with new()"); + /// ``` + pub fn new(key: &[u8]) -> Result { + let mut wc_blake2s: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_Blake2sHmacInit(wc_blake2s.as_mut_ptr(), key.as_ptr(), key.len()) + }; + if rc != 0 { + return Err(rc); + } + let wc_blake2s = unsafe { wc_blake2s.assume_init() }; + let hmac_blake2s = BLAKE2sHmac { wc_blake2s }; + Ok(hmac_blake2s) + } + + /// Update the HMAC-BLAKE2s computation with the input data. + /// + /// This method may be called several times and then the finalize() + /// method should be called to retrieve the final MAC. + /// + /// # Parameters + /// + /// * `data`: Input data to hash. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2sHmac; + /// let key = [42u8, 43, 44]; + /// let mut hmac_blake2s = BLAKE2sHmac::new(&key).expect("Error with new()"); + /// let data = [33u8, 34, 35]; + /// hmac_blake2s.update(&data).expect("Error with update()"); + /// ``` + pub fn update(&mut self, data: &[u8]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2sHmacUpdate(&mut self.wc_blake2s, data.as_ptr(), data.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute and retrieve the final HMAC-BLAKE2s MAC. + /// + /// # Parameters + /// + /// * `key`: Key to use for HMAC-BLAKE2s computation. + /// * `mac`: Output buffer in which to store the computed HMAC-BLAKE2s MAC. + /// It must be 32 bytes long. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ /// + /// # Example + /// + /// ```rust + /// use wolfssl_wolfcrypt::blake2::BLAKE2sHmac; + /// let key = [42u8, 43, 44]; + /// let mut hmac_blake2s = BLAKE2sHmac::new(&key).expect("Error with new()"); + /// let data = [33u8, 34, 35]; + /// hmac_blake2s.update(&data).expect("Error with update()"); + /// let mut mac = [0u8; 32]; + /// hmac_blake2s.finalize(&key, &mut mac).expect("Error with finalize()"); + /// ``` + pub fn finalize(&mut self, key: &[u8], mac: &mut [u8; Self::DIGEST_SIZE]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2sHmacFinal(&mut self.wc_blake2s, + key.as_ptr(), key.len(), mac.as_mut_ptr(), mac.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute the HMAC-BLAKE2s message authentication code of the given + /// input data using the given key (one-shot API). + /// + /// # Parameters + /// + /// * `data`: Input data to create MAC from. + /// * `key`: Key to use for MAC creation. + /// * `out`: Buffer in which to store the computed MAC. It must be 32 bytes + /// long. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn hmac(data: &[u8], key: &[u8], out: &mut [u8; Self::DIGEST_SIZE]) -> Result<(), i32> { + let rc = unsafe { + sys::wc_Blake2sHmac(data.as_ptr(), data.len(), key.as_ptr(), + key.len(), out.as_mut_ptr(), out.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/src/chacha20_poly1305.rs b/wrapper/rust/wolfssl-wolfcrypt/src/chacha20_poly1305.rs new file mode 100644 index 000000000..a9221b89e --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/src/chacha20_poly1305.rs @@ -0,0 +1,342 @@ +/* + * Copyright (C) 2025 wolfSSL Inc. + * + * This file is part of wolfSSL. 
+ * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +/*! +This module provides a Rust wrapper for the wolfCrypt library's +ChaCha20-Poly1305 functionality. +*/ + +#![cfg(chacha20_poly1305)] + +use crate::sys; +use std::mem::MaybeUninit; + +pub struct ChaCha20Poly1305 { + wc_ccp: sys::ChaChaPoly_Aead, +} + +impl ChaCha20Poly1305 { + /// Key size for ChaCha20-Poly1305 stream cipher. + pub const KEYSIZE: usize = sys::CHACHA20_POLY1305_AEAD_KEYSIZE as usize; + /// IV size for ChaCha20-Poly1305 stream cipher. + pub const IV_SIZE: usize = sys::CHACHA20_POLY1305_AEAD_IV_SIZE as usize; + /// Authentication tag size for ChaCha20-Poly1305 stream cipher. + pub const AUTH_TAG_SIZE: usize = sys::CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE as usize; + + /// Decrypt an input message from `ciphertext` using the ChaCha20 stream + /// cipher into the `plaintext` output buffer. It also performs Poly-1305 + /// authentication, comparing the given `auth_tag` to an authentication + /// generated with the `aad` (additional authentication data). If Err is + /// returned, the output data, `plaintext` is undefined. However, callers + /// must unconditionally zeroize the output buffer to guard against + /// leakage of cleartext data. + /// + /// # Parameters + /// + /// * `key`: Encryption key (must be 32 bytes). 
+ /// * `iv`: Initialization Vector (must be 12 bytes). + /// * `aad`: Additional authenticated data (can be any length). + /// * `ciphertext`: Input buffer containing encrypted cipher text. + /// * `auth_tag`: Input buffer containing authentication tag (must be 16 + /// bytes). + /// * `plaintext`: Output buffer containing decrypted plain text. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn decrypt(key: &[u8], iv: &[u8], aad: &[u8], ciphertext: &[u8], + auth_tag: &[u8], plaintext: &mut [u8]) -> Result<(), i32> { + if key.len() != Self::KEYSIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + if iv.len() != Self::IV_SIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + if auth_tag.len() != Self::AUTH_TAG_SIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + let aad_size = aad.len() as u32; + let ciphertext_size = ciphertext.len() as u32; + let rc = unsafe { + sys::wc_ChaCha20Poly1305_Decrypt(key.as_ptr(), iv.as_ptr(), + aad.as_ptr(), aad_size, ciphertext.as_ptr(), + ciphertext_size, auth_tag.as_ptr(), plaintext.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Encrypt an input message from `plaintext` using the ChaCha20 stream + /// cipher into the `ciphertext` output buffer performing Poly-1305 + /// authentication on the cipher text and storing the generated + /// authentication tag in the `auth_tag` output buffer. + /// + /// # Parameters + /// + /// * `key`: Encryption key (must be 32 bytes). + /// * `iv`: Initialization Vector (must be 12 bytes). + /// * `aad`: Additional authenticated data (can be any length). + /// * `plaintext`: Input plain text to encrypt. + /// * `ciphertext`: Output buffer for encrypted cipher text. + /// * `auth_tag`: Output buffer for authentication tag (must be 16 bytes). 
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(()) on success or Err(e) containing the wolfSSL
+    /// library error code value.
+    pub fn encrypt(key: &[u8], iv: &[u8], aad: &[u8], plaintext: &[u8],
+        ciphertext: &mut [u8], auth_tag: &mut [u8]) -> Result<(), i32> {
+        if key.len() != Self::KEYSIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        if iv.len() != Self::IV_SIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        if auth_tag.len() != Self::AUTH_TAG_SIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        let aad_size = aad.len() as u32;
+        let plaintext_size = plaintext.len() as u32;
+        let rc = unsafe {
+            sys::wc_ChaCha20Poly1305_Encrypt(key.as_ptr(), iv.as_ptr(),
+                aad.as_ptr(), aad_size, plaintext.as_ptr(), plaintext_size,
+                ciphertext.as_mut_ptr(), auth_tag.as_mut_ptr())
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(())
+    }
+
+    /// Create a new ChaCha20Poly1305 instance.
+    ///
+    /// # Parameters
+    ///
+    /// * `key`: Encryption key (must be 32 bytes).
+    /// * `iv`: Initialization Vector (must be 12 bytes).
+    /// * `encrypt`: Whether the instance will be used to encrypt (true) or
+    ///   decrypt (false).
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(chacha20poly1305) on success or Err(e) containing the
+    /// wolfSSL library error code value.
+    pub fn new(key: &[u8], iv: &[u8], encrypt: bool) -> Result<Self, i32> {
+        if key.len() != Self::KEYSIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        if iv.len() != Self::IV_SIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        let mut wc_ccp: MaybeUninit<sys::ChaChaPoly_Aead> = MaybeUninit::uninit();
+        let rc = unsafe {
+            sys::wc_ChaCha20Poly1305_Init(wc_ccp.as_mut_ptr(), key.as_ptr(),
+                iv.as_ptr(), if encrypt {1} else {0})
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        let wc_ccp = unsafe { wc_ccp.assume_init() };
+        let chacha20poly1305 = ChaCha20Poly1305 { wc_ccp };
+        Ok(chacha20poly1305)
+    }
+
+    /// Update AAD (additional authenticated data).
+    ///
+    /// This function should be called before `update_data()`.
+ /// + /// # Parameters + /// + /// * `aad`: Additional authenticated data. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn update_aad(&mut self, aad: &[u8]) -> Result<(), i32> { + let aad_size = aad.len() as u32; + let rc = unsafe { + sys::wc_ChaCha20Poly1305_UpdateAad(&mut self.wc_ccp, + aad.as_ptr(), aad_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Update data (add additional input data to decrypt or encrypt). + /// + /// This function can be called multiple times. If AAD is used, the + /// `update_aad()` function must be called before this function. The + /// `finalize()` function should be called after adding all input data to + /// finalize the operation and compute the authentication tag. + /// + /// # Parameters + /// + /// * `din`: Additional input data to decrypt or encrypt. + /// * `dout`: Buffer in which to store output data (must be the same length + /// as the input buffer). + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn update_data(&mut self, din: &[u8], dout: &mut [u8]) -> Result<(), i32> { + if din.len() != dout.len() { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + let din_size = din.len() as u32; + let rc = unsafe { + sys::wc_ChaCha20Poly1305_UpdateData(&mut self.wc_ccp, + din.as_ptr(), dout.as_mut_ptr(), din_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Finalize the decrypt/encrypt operation. + /// + /// This function consumes the `ChaCha20Poly1305` instance. The + /// `update_data()` function must be called before calling this function to + /// add all input data. + /// + /// # Parameters + /// + /// * `auth_tag`: Output buffer for authentication tag (must be 16 bytes). + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ pub fn finalize(mut self, auth_tag: &mut [u8]) -> Result<(), i32> { + if auth_tag.len() != Self::AUTH_TAG_SIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + let rc = unsafe { + sys::wc_ChaCha20Poly1305_Final(&mut self.wc_ccp, + auth_tag.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} + +#[cfg(xchacha20_poly1305)] +pub struct XChaCha20Poly1305 { +} + +#[cfg(xchacha20_poly1305)] +impl XChaCha20Poly1305 { + /// Key size for XChaCha20-Poly1305 stream cipher. + pub const KEYSIZE: usize = sys::CHACHA20_POLY1305_AEAD_KEYSIZE as usize; + /// IV size for XChaCha20-Poly1305 stream cipher. + pub const IV_SIZE: usize = sys::XCHACHA20_POLY1305_AEAD_NONCE_SIZE as usize; + /// Authentication tag size for XChaCha20-Poly1305 stream cipher. + pub const AUTH_TAG_SIZE: usize = sys::CHACHA20_POLY1305_AEAD_AUTHTAG_SIZE as usize; + + /// Decrypt an input message from `ciphertext` using the XChaCha20 stream + /// cipher into the `plaintext` output buffer. It also performs Poly-1305 + /// authentication. The authentication tag is expected to be located in the + /// last 16 bytes of the `ciphertext` buffer. + /// If Err is returned, the output data, `plaintext` is undefined. + /// However, callers must unconditionally zeroize the output buffer to + /// guard against leakage of cleartext data. + /// + /// # Parameters + /// + /// * `key`: Encryption key (must be 32 bytes). + /// * `iv`: Initialization Vector (must be 24 bytes). + /// * `aad`: Additional authenticated data (can be any length). + /// * `ciphertext`: Input buffer containing encrypted cipher text. + /// * `plaintext`: Output buffer containing decrypted plain text. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ pub fn decrypt(key: &[u8], iv: &[u8], aad: &[u8], ciphertext: &[u8], + plaintext: &mut [u8]) -> Result<(), i32> { + if key.len() != Self::KEYSIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + if iv.len() != Self::IV_SIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + let rc = unsafe { + sys::wc_XChaCha20Poly1305_Decrypt( + plaintext.as_mut_ptr(), plaintext.len(), + ciphertext.as_ptr(), ciphertext.len(), + aad.as_ptr(), aad.len(), + iv.as_ptr(), iv.len(), + key.as_ptr(), key.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Encrypt an input message from `plaintext` using the XChaCha20 stream + /// cipher into the `ciphertext` output buffer performing Poly-1305 + /// authentication on the cipher text. + /// The authentication tag is stored in the last 16 bytes of the + /// `ciphertext` buffer, so the `ciphertext` buffer must be large enough + /// for both the cipher text and authentication tag. + /// + /// # Parameters + /// + /// * `key`: Encryption key (must be 32 bytes). + /// * `iv`: Initialization Vector (must be 24 bytes). + /// * `aad`: Additional authenticated data (can be any length). + /// * `plaintext`: Input plain text to encrypt. + /// * `ciphertext`: Output buffer for encrypted cipher text. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ pub fn encrypt(key: &[u8], iv: &[u8], aad: &[u8], plaintext: &[u8], + ciphertext: &mut [u8]) -> Result<(), i32> { + if key.len() != Self::KEYSIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + if iv.len() != Self::IV_SIZE { + return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E); + } + let rc = unsafe { + sys::wc_XChaCha20Poly1305_Encrypt( + ciphertext.as_mut_ptr(), ciphertext.len(), + plaintext.as_ptr(), plaintext.len(), + aad.as_ptr(), aad.len(), + iv.as_ptr(), iv.len(), + key.as_ptr(), key.len()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/src/curve25519.rs b/wrapper/rust/wolfssl-wolfcrypt/src/curve25519.rs new file mode 100644 index 000000000..1629ec21b --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/src/curve25519.rs @@ -0,0 +1,668 @@ +/* + * Copyright (C) 2025 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +/*! +This module provides a Rust wrapper for the wolfCrypt library's Curve25519 +functionality. +*/ + +#![cfg(curve25519)] + +#[cfg(random)] +use crate::random::RNG; +use crate::sys; +use std::mem::MaybeUninit; + +pub struct Curve25519Key { + wc_key: sys::curve25519_key, +} + +impl Curve25519Key { + /// Curve 25519 key size (32 bytes). 
+    pub const KEYSIZE: usize = sys::CURVE25519_KEYSIZE as usize;
+
+    /// Check that a public key buffer holds a valid Curve25519 key value
+    /// given the endian ordering.
+    ///
+    /// # Parameters
+    ///
+    /// * `public`: Buffer containing the Curve25519 public key to check.
+    /// * `big_endian`: True for big-endian, false for little-endian.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(()) on success or Err(e) containing the wolfSSL
+    /// library error code value.
+    pub fn check_public(public: &[u8], big_endian: bool) -> Result<(), i32> {
+        let public_size = public.len() as u32;
+        let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN};
+        let rc = unsafe {
+            sys::wc_curve25519_check_public(public.as_ptr(), public_size,
+                endian as i32)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(())
+    }
+
+    /// Generate a new private key.
+    ///
+    /// # Parameters
+    ///
+    /// * `rng`: Random number generator struct to use for key generation.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(curve25519key) on success or Err(e) containing the
+    /// wolfSSL library error code value.
+    #[cfg(random)]
+    pub fn generate(rng: &mut RNG) -> Result<Self, i32> {
+        let mut wc_key: MaybeUninit<sys::curve25519_key> = MaybeUninit::uninit();
+        let rc = unsafe {
+            sys::wc_curve25519_init(wc_key.as_mut_ptr())
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        let wc_key = unsafe { wc_key.assume_init() };
+        let mut curve25519key = Curve25519Key { wc_key };
+        let rc = unsafe {
+            sys::wc_curve25519_make_key(&mut rng.wc_rng, Self::KEYSIZE as i32,
+                &mut curve25519key.wc_key)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(curve25519key)
+    }
+
+    /// Generate a new private key as a bare vector.
+    ///
+    /// # Parameters
+    ///
+    /// * `rng`: Random number generator struct to use for key generation.
+    /// * `out`: Output buffer in which to store the generated private key
+    ///   (must be 32 bytes).
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(()) on success or Err(e) containing the wolfSSL
+    /// library error code value.
+    #[cfg(random)]
+    pub fn generate_priv(rng: &mut RNG, out: &mut [u8]) -> Result<(), i32> {
+        if out.len() != Self::KEYSIZE {
+            return Err(sys::wolfCrypt_ErrorCodes_BUFFER_E);
+        }
+        let rc = unsafe {
+            sys::wc_curve25519_make_priv(&mut rng.wc_rng, Self::KEYSIZE as i32, out.as_mut_ptr())
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(())
+    }
+
+    /// Import a Curve25519 private key only (big-endian only).
+    ///
+    /// # Parameters
+    ///
+    /// * `private`: Buffer containing the Curve25519 private key.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(curve25519key) on success or Err(e) containing the
+    /// wolfSSL library error code value.
+    pub fn import_private(private: &[u8]) -> Result<Self, i32> {
+        let mut wc_key: MaybeUninit<sys::curve25519_key> = MaybeUninit::uninit();
+        let rc = unsafe {
+            sys::wc_curve25519_init(wc_key.as_mut_ptr())
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        let wc_key = unsafe { wc_key.assume_init() };
+        let mut curve25519key = Curve25519Key { wc_key };
+        let private_size = private.len() as u32;
+        let rc = unsafe {
+            sys::wc_curve25519_import_private(private.as_ptr(), private_size,
+                &mut curve25519key.wc_key)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(curve25519key)
+    }
+
+    /// Import a Curve25519 private key only (big or little endian).
+    ///
+    /// # Parameters
+    ///
+    /// * `private`: Buffer containing the Curve25519 private key.
+    /// * `big_endian`: True for big-endian, false for little-endian.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(curve25519key) on success or Err(e) containing the
+    /// wolfSSL library error code value.
+ pub fn import_private_ex(private: &[u8], big_endian: bool) -> Result { + let mut wc_key: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_curve25519_init(wc_key.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + let wc_key = unsafe { wc_key.assume_init() }; + let mut curve25519key = Curve25519Key { wc_key }; + let private_size = private.len() as u32; + let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN}; + let rc = unsafe { + sys::wc_curve25519_import_private_ex(private.as_ptr(), + private_size, &mut curve25519key.wc_key, endian as i32) + }; + if rc != 0 { + return Err(rc); + } + Ok(curve25519key) + } + + /// Import a Curve25519 public/private key pair (big-endian only). + /// + /// # Parameters + /// + /// * `private`: Buffer containing the Curve25519 private key. + /// * `public`: Buffer containing the Curve25519 public key. + /// + /// # Returns + /// + /// Returns either Ok(curve25519key) on success or Err(e) containing the + /// wolfSSL library error code value. + pub fn import_private_raw(private: &[u8], public: &[u8]) -> Result { + let mut wc_key: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_curve25519_init(wc_key.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + let wc_key = unsafe { wc_key.assume_init() }; + let mut curve25519key = Curve25519Key { wc_key }; + let private_size = private.len() as u32; + let public_size = public.len() as u32; + let rc = unsafe { + sys::wc_curve25519_import_private_raw(private.as_ptr(), + private_size, public.as_ptr(), public_size, + &mut curve25519key.wc_key) + }; + if rc != 0 { + return Err(rc); + } + Ok(curve25519key) + } + + /// Import a Curve25519 public/private key pair (big or little endian). + /// + /// # Parameters + /// + /// * `private`: Buffer containing the Curve25519 private key. + /// * `public`: Buffer containing the Curve25519 public key. + /// * `big_endian`: True for big-endian, false for little-endian. 
+ /// + /// # Returns + /// + /// Returns either Ok(curve25519key) on success or Err(e) containing the + /// wolfSSL library error code value. + pub fn import_private_raw_ex(private: &[u8], public: &[u8], big_endian: bool) -> Result { + let mut wc_key: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_curve25519_init(wc_key.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + let wc_key = unsafe { wc_key.assume_init() }; + let mut curve25519key = Curve25519Key { wc_key }; + let private_size = private.len() as u32; + let public_size = public.len() as u32; + let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN}; + let rc = unsafe { + sys::wc_curve25519_import_private_raw_ex(private.as_ptr(), + private_size, public.as_ptr(), public_size, + &mut curve25519key.wc_key, endian as i32) + }; + if rc != 0 { + return Err(rc); + } + Ok(curve25519key) + } + + /// Import a Curve25519 public key (big-endian only). + /// + /// # Parameters + /// + /// * `public`: Buffer containing the Curve25519 public key. + /// + /// # Returns + /// + /// Returns either Ok(curve25519key) on success or Err(e) containing the + /// wolfSSL library error code value. + pub fn import_public(public: &[u8]) -> Result { + let mut wc_key: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_curve25519_init(wc_key.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + let wc_key = unsafe { wc_key.assume_init() }; + let mut curve25519key = Curve25519Key { wc_key }; + let public_size = public.len() as u32; + let rc = unsafe { + sys::wc_curve25519_import_public(public.as_ptr(), public_size, + &mut curve25519key.wc_key) + }; + if rc != 0 { + return Err(rc); + } + Ok(curve25519key) + } + + /// Import a Curve25519 public key (big or little endian). + /// + /// # Parameters + /// + /// * `public`: Buffer containing the Curve25519 public key. + /// * `big_endian`: True for big-endian, false for little-endian. 
+ /// + /// # Returns + /// + /// Returns either Ok(curve25519key) on success or Err(e) containing the + /// wolfSSL library error code value. + pub fn import_public_ex(public: &[u8], big_endian: bool) -> Result { + let mut wc_key: MaybeUninit = MaybeUninit::uninit(); + let rc = unsafe { + sys::wc_curve25519_init(wc_key.as_mut_ptr()) + }; + if rc != 0 { + return Err(rc); + } + let wc_key = unsafe { wc_key.assume_init() }; + let mut curve25519key = Curve25519Key { wc_key }; + let public_size = public.len() as u32; + let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN}; + let rc = unsafe { + sys::wc_curve25519_import_public_ex(public.as_ptr(), public_size, + &mut curve25519key.wc_key, endian as i32) + }; + if rc != 0 { + return Err(rc); + } + Ok(curve25519key) + } + + /// Compute the public key from an existing private key using bare vectors. + /// + /// # Parameters + /// + /// * `private`: Private key (input). + /// * `public`: Buffer in which to store the computed public key. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn make_pub(private: &[u8], public: &mut [u8]) -> Result<(), i32> { + let private_size = private.len() as i32; + let public_size = public.len() as i32; + let rc = unsafe { + sys::wc_curve25519_make_pub(public_size, public.as_mut_ptr(), + private_size, private.as_ptr()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute the public key from an existing private key using bare vectors + /// with blinding. + /// + /// # Parameters + /// + /// * `private`: Private key (input). + /// * `public`: Buffer in which to store the computed public key. + /// * `rng`: Random number generator struct to use for blinding operation. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+ #[cfg(all(curve25519_blinding, random))] + pub fn make_pub_blind(private: &[u8], public: &mut [u8], rng: &mut RNG) -> Result<(), i32> { + let private_size = private.len() as i32; + let public_size = public.len() as i32; + let rc = unsafe { + sys::wc_curve25519_make_pub_blind(public_size, public.as_mut_ptr(), + private_size, private.as_ptr(), &mut rng.wc_rng) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute the public key from an existing private key with supplied + /// basepoint, using bare vectors. + /// + /// # Parameters + /// + /// * `private`: Private key (input). + /// * `public`: Buffer in which to store the computed public key. + /// * `basepoint`: Basepoint value to use. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn make_pub_generic(private: &[u8], public: &mut [u8], basepoint: &[u8]) -> Result<(), i32> { + let private_size = private.len() as i32; + let public_size = public.len() as i32; + let basepoint_size = basepoint.len() as i32; + let rc = unsafe { + sys::wc_curve25519_generic(public_size, public.as_mut_ptr(), + private_size, private.as_ptr(), basepoint_size, basepoint.as_ptr()) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Compute the public key from an existing private key with supplied + /// basepoint, using bare vectors. + /// + /// # Parameters + /// + /// * `private`: Private key (input). + /// * `public`: Buffer in which to store the computed public key. + /// * `basepoint`: Basepoint value to use. + /// * `rng`: Random number generator struct to use for blinding operation. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. 
+    #[cfg(all(curve25519_blinding, random))]
+    pub fn make_pub_generic_blind(private: &[u8], public: &mut [u8], basepoint: &[u8], rng: &mut RNG) -> Result<(), i32> {
+        let private_size = private.len() as i32;
+        let public_size = public.len() as i32;
+        let basepoint_size = basepoint.len() as i32;
+        let rc = unsafe {
+            sys::wc_curve25519_generic_blind(public_size, public.as_mut_ptr(),
+                private_size, private.as_ptr(), basepoint_size, basepoint.as_ptr(),
+                &mut rng.wc_rng)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(())
+    }
+
+    /// Compute a shared secret key given a secret private key and a received
+    /// public key. It stores the generated secret key in the buffer out and
+    /// returns the generated key size. Only supports big endian.
+    ///
+    /// # Parameters
+    ///
+    /// * `private_key`: Curve25519Key struct holding the user's private key.
+    /// * `public_key`: Curve25519Key struct holding the received public key.
+    /// * `out`: Output buffer in which to store the generated secret key.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(size) containing the number of bytes written to `out`
+    /// on success or Err(e) containing the wolfSSL library error code value.
+    pub fn shared_secret(private_key: &mut Curve25519Key, public_key: &mut Curve25519Key, out: &mut [u8]) -> Result<usize, i32> {
+        let mut outlen = out.len() as u32;
+        let rc = unsafe {
+            sys::wc_curve25519_shared_secret(&mut private_key.wc_key,
+                &mut public_key.wc_key, out.as_mut_ptr(), &mut outlen)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(outlen as usize)
+    }
+
+    /// Associates a `RNG` instance with this `Curve25519Key` instance.
+    ///
+    /// This is necessary when generating a shared secret if wolfSSL is built
+    /// with the `WOLFSSL_CURVE25519_BLINDING` build option enabled.
+    ///
+    /// # Parameters
+    ///
+    /// * `rng`: The `RNG` struct instance to associate with this
+    ///   `Curve25519Key` instance. The `RNG` struct should not be moved in
+    ///   memory after calling this method.
+    ///
+    /// # Returns
+    ///
+    /// Returns Ok(()) on success or Err(e) containing the wolfSSL library
+    /// error code value.
+    #[cfg(all(curve25519_blinding, random))]
+    pub fn set_rng(&mut self, rng: &mut RNG) -> Result<(), i32> {
+        let rc = unsafe {
+            sys::wc_curve25519_set_rng(&mut self.wc_key, &mut rng.wc_rng)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(())
+    }
+
+    /// Compute a shared secret key given a secret private key and a received
+    /// public key. It stores the generated secret key in the buffer out and
+    /// returns the generated key size. Supports big or little endian.
+    ///
+    /// # Parameters
+    ///
+    /// * `private_key`: Curve25519Key struct holding the user's private key.
+    /// * `public_key`: Curve25519Key struct holding the received public key.
+    /// * `out`: Output buffer in which to store the generated secret key.
+    /// * `big_endian`: True for big-endian, false for little-endian.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(size) containing the number of bytes written to `out`
+    /// on success or Err(e) containing the wolfSSL library error code value.
+    pub fn shared_secret_ex(private_key: &mut Curve25519Key, public_key: &mut Curve25519Key, out: &mut [u8], big_endian: bool) -> Result<usize, i32> {
+        let mut outlen = out.len() as u32;
+        let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN};
+        let rc = unsafe {
+            sys::wc_curve25519_shared_secret_ex(&mut private_key.wc_key,
+                &mut public_key.wc_key, out.as_mut_ptr(), &mut outlen, endian as i32)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(outlen as usize)
+    }
+
+    /// Export public and private keys from Curve25519Key struct to raw buffers
+    /// (big-endian only).
+    ///
+    /// # Parameters
+    ///
+    /// * `private`: Buffer in which to store the raw private key.
+    /// * `public`: Buffer in which to store the raw public key.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(()) on success or Err(e) containing the wolfSSL
+    /// library error code value.
+ pub fn export_key_raw(&mut self, private: &mut [u8], public: &mut [u8]) -> Result<(), i32> { + let mut private_size = private.len() as u32; + let mut public_size = public.len() as u32; + let rc = unsafe { + sys::wc_curve25519_export_key_raw(&mut self.wc_key, + private.as_mut_ptr(), &mut private_size, + public.as_mut_ptr(), &mut public_size) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Export public and private keys from Curve25519Key struct to raw buffers + /// (big or little endian). + /// + /// # Parameters + /// + /// * `private`: Buffer in which to store the raw private key. + /// * `public`: Buffer in which to store the raw public key. + /// * `big_endian`: True for big-endian, false for little-endian. + /// + /// # Returns + /// + /// Returns either Ok(()) on success or Err(e) containing the wolfSSL + /// library error code value. + pub fn export_key_raw_ex(&mut self, private: &mut [u8], public: &mut [u8], big_endian: bool) -> Result<(), i32> { + let mut private_size = private.len() as u32; + let mut public_size = public.len() as u32; + let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN}; + let rc = unsafe { + sys::wc_curve25519_export_key_raw_ex(&mut self.wc_key, + private.as_mut_ptr(), &mut private_size, + public.as_mut_ptr(), &mut public_size, endian as i32) + }; + if rc != 0 { + return Err(rc); + } + Ok(()) + } + + /// Export private key from Curve25519Key struct to a raw buffer + /// (big-endian only). + /// + /// # Parameters + /// + /// * `out`: Buffer in which to store the raw private key. + /// + /// # Returns + /// + /// Returns either Ok(size) containing the number of bytes written to `out` + /// on success or Err(e) containing the wolfSSL library error code value. 
+    pub fn export_private_raw(&mut self, out: &mut [u8]) -> Result<usize, i32> {
+        let mut outlen = out.len() as u32;
+        let rc = unsafe {
+            sys::wc_curve25519_export_private_raw(&mut self.wc_key,
+                out.as_mut_ptr(), &mut outlen)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(outlen as usize)
+    }
+
+    /// Export private key from Curve25519Key struct to a raw buffer
+    /// (big or little endian).
+    ///
+    /// # Parameters
+    ///
+    /// * `out`: Buffer in which to store the raw private key.
+    /// * `big_endian`: True for big-endian, false for little-endian.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(size) containing the number of bytes written to `out`
+    /// on success or Err(e) containing the wolfSSL library error code value.
+    pub fn export_private_raw_ex(&mut self, out: &mut [u8], big_endian: bool) -> Result<usize, i32> {
+        let mut outlen = out.len() as u32;
+        let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN};
+        let rc = unsafe {
+            sys::wc_curve25519_export_private_raw_ex(&mut self.wc_key,
+                out.as_mut_ptr(), &mut outlen, endian as i32)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(outlen as usize)
+    }
+
+    /// Export public key from Curve25519Key struct to a raw buffer
+    /// (big-endian only).
+    ///
+    /// # Parameters
+    ///
+    /// * `out`: Buffer in which to store the raw public key.
+    ///
+    /// # Returns
+    ///
+    /// Returns either Ok(size) containing the number of bytes written to `out`
+    /// on success or Err(e) containing the wolfSSL library error code value.
+    pub fn export_public(&mut self, out: &mut [u8]) -> Result<usize, i32> {
+        let mut outlen = out.len() as u32;
+        let rc = unsafe {
+            sys::wc_curve25519_export_public(&mut self.wc_key,
+                out.as_mut_ptr(), &mut outlen)
+        };
+        if rc != 0 {
+            return Err(rc);
+        }
+        Ok(outlen as usize)
+    }
+
+    /// Export public key from Curve25519Key struct to a raw buffer
+    /// (big or little endian).
+    ///
+    /// # Parameters
+    ///
+    /// * `out`: Buffer in which to store the raw public key.
+ /// * `big_endian`: True for big-endian, false for little-endian. + /// + /// # Returns + /// + /// Returns either Ok(size) containing the number of bytes written to `out` + /// on success or Err(e) containing the wolfSSL library error code value. + pub fn export_public_ex(&mut self, out: &mut [u8], big_endian: bool) -> Result { + let mut outlen = out.len() as u32; + let endian = if big_endian {sys::EC25519_BIG_ENDIAN} else {sys::EC25519_LITTLE_ENDIAN}; + let rc = unsafe { + sys::wc_curve25519_export_public_ex(&mut self.wc_key, + out.as_mut_ptr(), &mut outlen, endian as i32) + }; + if rc != 0 { + return Err(rc); + } + Ok(outlen as usize) + } +} + +impl Drop for Curve25519Key { + /// Safely free the underlying wolfSSL Curve25519Key context. + /// + /// This calls the `wc_curve25519_free` wolfssl library function. + /// + /// The Rust Drop trait guarantees that this method is called when the + /// struct goes out of scope, automatically cleaning up resources and + /// preventing memory leaks. + fn drop(&mut self) { + unsafe { sys::wc_curve25519_free(&mut self.wc_key); } + } +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/src/lib.rs b/wrapper/rust/wolfssl-wolfcrypt/src/lib.rs index d7c96fd66..dfeef39b7 100644 --- a/wrapper/rust/wolfssl-wolfcrypt/src/lib.rs +++ b/wrapper/rust/wolfssl-wolfcrypt/src/lib.rs @@ -22,7 +22,10 @@ pub mod sys; pub mod aes; +pub mod blake2; +pub mod chacha20_poly1305; pub mod cmac; +pub mod curve25519; pub mod dh; pub mod ecc; pub mod ed25519; @@ -47,6 +50,8 @@ pub mod sha; /// ```rust /// use wolfssl_wolfcrypt::*; /// wolfcrypt_init().expect("Error with wolfcrypt_init()"); +/// // ... use the library ... +/// wolfcrypt_cleanup().expect("wolfCrypt_Cleanup failed"); /// ``` pub fn wolfcrypt_init() -> Result<(), i32> { let rc = unsafe { sys::wolfCrypt_Init() }; @@ -63,12 +68,7 @@ pub fn wolfcrypt_init() -> Result<(), i32> { /// Returns either Ok(()) on success or Err(e) containing the wolfSSL /// library error code value. 
/// -/// # Example -/// -/// ```rust -/// use wolfssl_wolfcrypt::*; -/// wolfcrypt_cleanup().expect("Error with wolfcrypt_cleanup()"); -/// ``` +/// See also: [`wolfcrypt_init`] pub fn wolfcrypt_cleanup() -> Result<(), i32> { let rc = unsafe { sys::wolfCrypt_Cleanup() }; if rc != 0 { diff --git a/wrapper/rust/wolfssl-wolfcrypt/tests/test_blake2.rs b/wrapper/rust/wolfssl-wolfcrypt/tests/test_blake2.rs new file mode 100644 index 000000000..4b8bff27f --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/tests/test_blake2.rs @@ -0,0 +1,205 @@ +#[cfg(any(blake2b, blake2s))] +use wolfssl_wolfcrypt::blake2::*; + +#[test] +#[cfg(blake2b)] +fn test_blake2b() { + let expected_hashes: [&[u8]; 3] = [ + &[ + 0x78, 0x6A, 0x02, 0xF7, 0x42, 0x01, 0x59, 0x03, + 0xC6, 0xC6, 0xFD, 0x85, 0x25, 0x52, 0xD2, 0x72, + 0x91, 0x2F, 0x47, 0x40, 0xE1, 0x58, 0x47, 0x61, + 0x8A, 0x86, 0xE2, 0x17, 0xF7, 0x1F, 0x54, 0x19, + 0xD2, 0x5E, 0x10, 0x31, 0xAF, 0xEE, 0x58, 0x53, + 0x13, 0x89, 0x64, 0x44, 0x93, 0x4E, 0xB0, 0x4B, + 0x90, 0x3A, 0x68, 0x5B, 0x14, 0x48, 0xB7, 0x55, + 0xD5, 0x6F, 0x70, 0x1A, 0xFE, 0x9B, 0xE2, 0xCE + ], + &[ + 0x2F, 0xA3, 0xF6, 0x86, 0xDF, 0x87, 0x69, 0x95, + 0x16, 0x7E, 0x7C, 0x2E, 0x5D, 0x74, 0xC4, 0xC7, + 0xB6, 0xE4, 0x8F, 0x80, 0x68, 0xFE, 0x0E, 0x44, + 0x20, 0x83, 0x44, 0xD4, 0x80, 0xF7, 0x90, 0x4C, + 0x36, 0x96, 0x3E, 0x44, 0x11, 0x5F, 0xE3, 0xEB, + 0x2A, 0x3A, 0xC8, 0x69, 0x4C, 0x28, 0xBC, 0xB4, + 0xF5, 0xA0, 0xF3, 0x27, 0x6F, 0x2E, 0x79, 0x48, + 0x7D, 0x82, 0x19, 0x05, 0x7A, 0x50, 0x6E, 0x4B + ], + &[ + 0x1C, 0x08, 0x79, 0x8D, 0xC6, 0x41, 0xAB, 0xA9, + 0xDE, 0xE4, 0x35, 0xE2, 0x25, 0x19, 0xA4, 0x72, + 0x9A, 0x09, 0xB2, 0xBF, 0xE0, 0xFF, 0x00, 0xEF, + 0x2D, 0xCD, 0x8E, 0xD6, 0xF8, 0xA0, 0x7D, 0x15, + 0xEA, 0xF4, 0xAE, 0xE5, 0x2B, 0xBF, 0x18, 0xAB, + 0x56, 0x08, 0xA6, 0x19, 0x0F, 0x70, 0xB9, 0x04, + 0x86, 0xC8, 0xA7, 0xD4, 0x87, 0x37, 0x10, 0xB1, + 0x11, 0x5D, 0x3D, 0xEB, 0xBB, 0x43, 0x27, 0xB5 + ], + ]; + + for (i, expected_hash) in expected_hashes.iter().enumerate() { + let 
mut blake2b = BLAKE2b::new(expected_hash.len()).expect("Error with new()"); + let mut input = vec![0u8; i]; + for idx in 0..input.len() { + input[idx] = idx as u8; + } + blake2b.update(&input).expect("Error with update()"); + let mut hash = [0u8; 64]; + blake2b.finalize(&mut hash).expect("error with finalize()"); + assert_eq!(hash, *expected_hash); + } +} + +#[test] +#[cfg(blake2b_hmac)] +fn test_blake2b_hmac() { + let key1 = [0x41u8, 0x42, 0x43, 0x44]; + let message1 = [0x48u8, 0x65, 0x6c, 0x6c, 0x6f]; + let expected1 = [ + 0x46u8, 0x76, 0xbb, 0x0e, 0xf8, 0xa1, 0x56, 0x33, + 0xde, 0xdc, 0x44, 0xe3, 0x2b, 0xf3, 0xee, 0x5b, + 0x5f, 0x7f, 0x04, 0x00, 0x2c, 0xaa, 0xd4, 0x93, + 0xc6, 0xa6, 0xb4, 0xf3, 0x14, 0x8d, 0x6d, 0x9c, + 0x6a, 0x12, 0x02, 0x85, 0x66, 0xed, 0x9b, 0x5d, + 0x8d, 0x0e, 0x3d, 0xf4, 0x78, 0xee, 0x5a, 0xf6, + 0x2f, 0x97, 0xa5, 0x77, 0x88, 0x8c, 0xc4, 0x66, + 0x46, 0xb1, 0xba, 0x51, 0x29, 0x19, 0xd7, 0xaa, + ]; + let key2 = [ + 0x30u8, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, + 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33 + ]; + let message2 = [ + 0x61u8, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, 0x63, 0x64, 0x65, 0x66, + 0x64, 0x65, 0x66, 0x67, 0x65, 0x66, 0x67, 0x68, 0x66, 0x67, 0x68, 0x69, + 0x67, 0x68, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, 0x69, 0x6a, 0x6b, 0x6c, + 0x6a, 0x6b, 0x6c, 0x6d, 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6d, 0x6e, 0x6f, 0x70, 0x6e, 0x6f, 0x70, 0x71 + ]; + let expected2 = [ + 0x2au8, 0xda, 0xf6, 0x94, 0x79, 0xce, 0xe2, 0xd2, + 0x5d, 0x89, 0x8b, 0xd7, 0x0d, 0xbc, 0x11, 0x1f, + 0x98, 0x99, 0xe0, 0x17, 0x7c, 0x5b, 0x8f, 0x94, + 0xf5, 0x95, 0xbc, 0x1b, 0xb1, 0x95, 0xe8, 0x60, + 0xbb, 0x29, 0xa4, 0xd9, 
0x27, 0x2e, 0x00, 0xea, + 0xba, 0xc3, 0x3e, 0xe6, 0x9c, 0xc7, 0xd7, 0x8d, + 0x69, 0xc7, 0xb4, 0xf7, 0x31, 0x4a, 0xb1, 0xf0, + 0x3c, 0xed, 0x06, 0x49, 0x6f, 0x46, 0x99, 0xea, + ]; + + let mut out1 = [0u8; 64]; + BLAKE2bHmac::hmac(&message1, &key1, &mut out1).expect("Error with hmac()"); + assert_eq!(out1, expected1); + + let mut out2 = [0u8; 64]; + BLAKE2bHmac::hmac(&message2, &key2, &mut out2).expect("Error with hmac()"); + assert_eq!(out2, expected2); + + let mut hmac_blake2b = BLAKE2bHmac::new(&key1).expect("Error with new()"); + hmac_blake2b.update(&message1[0..4]).expect("Error with update()"); + hmac_blake2b.update(&message1[4..]).expect("Error with update()"); + let mut out1 = [0u8; 64]; + hmac_blake2b.finalize(&key1, &mut out1).expect("Error with finalize()"); + assert_eq!(out1, expected1); + + let mut hmac_blake2b = BLAKE2bHmac::new(&key2).expect("Error with new()"); + hmac_blake2b.update(&message2[0..48]).expect("Error with update()"); + hmac_blake2b.update(&message2[48..]).expect("Error with update()"); + let mut out2 = [0u8; 64]; + hmac_blake2b.finalize(&key2, &mut out2).expect("Error with finalize()"); + assert_eq!(out2, expected2); +} + +#[test] +#[cfg(blake2s)] +fn test_blake2s() { + let expected_hashes: [&[u8]; 3] = [ + &[ + 0x69, 0x21, 0x7a, 0x30, 0x79, 0x90, 0x80, 0x94, + 0xe1, 0x11, 0x21, 0xd0, 0x42, 0x35, 0x4a, 0x7c, + 0x1f, 0x55, 0xb6, 0x48, 0x2c, 0xa1, 0xa5, 0x1e, + 0x1b, 0x25, 0x0d, 0xfd, 0x1e, 0xd0, 0xee, 0xf9, + ], + &[ + 0xe3, 0x4d, 0x74, 0xdb, 0xaf, 0x4f, 0xf4, 0xc6, + 0xab, 0xd8, 0x71, 0xcc, 0x22, 0x04, 0x51, 0xd2, + 0xea, 0x26, 0x48, 0x84, 0x6c, 0x77, 0x57, 0xfb, + 0xaa, 0xc8, 0x2f, 0xe5, 0x1a, 0xd6, 0x4b, 0xea, + ], + &[ + 0xdd, 0xad, 0x9a, 0xb1, 0x5d, 0xac, 0x45, 0x49, + 0xba, 0x42, 0xf4, 0x9d, 0x26, 0x24, 0x96, 0xbe, + 0xf6, 0xc0, 0xba, 0xe1, 0xdd, 0x34, 0x2a, 0x88, + 0x08, 0xf8, 0xea, 0x26, 0x7c, 0x6e, 0x21, 0x0c, + ], + ]; + + for (i, expected_hash) in expected_hashes.iter().enumerate() { + let mut blake2s = 
BLAKE2s::new(expected_hash.len()).expect("Error with new()"); + let mut input = vec![0u8; i]; + for idx in 0..input.len() { + input[idx] = idx as u8; + } + blake2s.update(&input).expect("Error with update()"); + let mut hash = [0u8; 32]; + blake2s.finalize(&mut hash).expect("error with finalize()"); + assert_eq!(hash, *expected_hash); + } +} + +#[test] +#[cfg(blake2s_hmac)] +fn test_blake2s_hmac() { + let key1 = [0x41u8, 0x42, 0x43, 0x44]; + let message1 = [0x48u8, 0x65, 0x6c, 0x6c, 0x6f]; + let expected1 = [ + 0x96u8, 0xca, 0x1d, 0xaa, 0x9a, 0x33, 0x97, 0x3d, + 0xc5, 0x95, 0x3e, 0xce, 0x49, 0x93, 0x75, 0xc1, + 0x2a, 0x7c, 0x8f, 0x5b, 0xf0, 0x28, 0xef, 0xc3, + 0xfb, 0xc5, 0x97, 0xcd, 0xcc, 0x74, 0x44, 0x68, + ]; + let key2 = [ + 0x30u8, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, + 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x30, 0x31, 0x32, 0x33, + ]; + let message2 = [ + 0x61u8, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, 0x63, 0x64, 0x65, 0x66, + 0x64, 0x65, 0x66, 0x67, 0x65, 0x66, 0x67, 0x68, 0x66, 0x67, 0x68, 0x69, + 0x67, 0x68, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, 0x69, 0x6a, 0x6b, 0x6c, + 0x6a, 0x6b, 0x6c, 0x6d, 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6d, 0x6e, 0x6f, 0x70, 0x6e, 0x6f, 0x70, 0x71 + ]; + let expected2 = [ + 0xc4u8, 0x63, 0xdb, 0x28, 0x97, 0x60, 0x6a, 0xa7, + 0x1e, 0xe6, 0xcf, 0x93, 0x85, 0x3c, 0x90, 0x71, + 0xea, 0x76, 0x7f, 0x6a, 0xa7, 0x20, 0x80, 0x35, + 0xe1, 0x68, 0x95, 0xfe, 0x65, 0x65, 0x43, 0x76, + ]; + + let mut out1 = [0u8; 32]; + BLAKE2sHmac::hmac(&message1, &key1, &mut out1).expect("Error with hmac()"); + assert_eq!(out1, expected1); + + let mut out2 = [0u8; 32]; + BLAKE2sHmac::hmac(&message2, &key2, &mut out2).expect("Error with hmac()"); + assert_eq!(out2, expected2); + + let mut hmac_blake2s = BLAKE2sHmac::new(&key1).expect("Error with new()"); + hmac_blake2s.update(&message1[0..4]).expect("Error with update()"); + 
hmac_blake2s.update(&message1[4..]).expect("Error with update()"); + let mut out1 = [0u8; 32]; + hmac_blake2s.finalize(&key1, &mut out1).expect("Error with finalize()"); + assert_eq!(out1, expected1); + + let mut hmac_blake2s = BLAKE2sHmac::new(&key2).expect("Error with new()"); + hmac_blake2s.update(&message2[0..48]).expect("Error with update()"); + hmac_blake2s.update(&message2[48..]).expect("Error with update()"); + let mut out2 = [0u8; 32]; + hmac_blake2s.finalize(&key2, &mut out2).expect("Error with finalize()"); + assert_eq!(out2, expected2); +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/tests/test_chacha20_poly1305.rs b/wrapper/rust/wolfssl-wolfcrypt/tests/test_chacha20_poly1305.rs new file mode 100644 index 000000000..3f6698d6b --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/tests/test_chacha20_poly1305.rs @@ -0,0 +1,275 @@ +#![cfg(chacha20_poly1305)] + +use wolfssl_wolfcrypt::chacha20_poly1305::*; + +#[test] +fn test_chacha20_poly1305_1() { + let key1 = [ + 0x80u8, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, + 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, + 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, + 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f + ]; + + let plaintext1 = [ + 0x4cu8, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, + 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63, + 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, + 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f, + 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20, + 0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75, + 0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73, + 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, + 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, + 0x74, 0x2e + ]; + + let iv1 = [ + 0x07u8, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43, 
+ 0x44, 0x45, 0x46, 0x47 + ]; + + let aad1 = [ + 0x50u8, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3, + 0xc4, 0xc5, 0xc6, 0xc7 + ]; + + let cipher1 = [ + 0xd3u8, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb, + 0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2, + 0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe, + 0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6, + 0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12, + 0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b, + 0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29, + 0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36, + 0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c, + 0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58, + 0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94, + 0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc, + 0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d, + 0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b, + 0x61, 0x16 + ]; + + let auth_tag_1 = [ + 0x1au8, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a, + 0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91 + ]; + + /* Encrypt */ + let mut ccp = ChaCha20Poly1305::new(&key1, &iv1, true).expect("Error with new()"); + ccp.update_aad(&aad1).expect("Error with update_aad()"); + let mut out_cipher1 = [0u8; 114]; + ccp.update_data(&plaintext1, &mut out_cipher1).expect("Error with update_data()"); + let mut out_auth_tag_1 = [0u8; ChaCha20Poly1305::AUTH_TAG_SIZE]; + ccp.finalize(&mut out_auth_tag_1).expect("Error with finalize()"); + assert_eq!(out_cipher1, cipher1); + assert_eq!(out_auth_tag_1, auth_tag_1); + + /* Decrypt */ + let mut ccp = ChaCha20Poly1305::new(&key1, &iv1, false).expect("Error with new()"); + ccp.update_aad(&aad1).expect("Error with update_aad()"); + let mut out_plaintext1 = [0u8; 114]; + ccp.update_data(&cipher1, &mut out_plaintext1).expect("Error with update_data()"); + let mut out_auth_tag_1 = [0u8; ChaCha20Poly1305::AUTH_TAG_SIZE]; + ccp.finalize(&mut out_auth_tag_1).expect("Error with finalize()"); + assert_eq!(out_plaintext1, plaintext1); + assert_eq!(out_auth_tag_1, 
auth_tag_1); +} + +#[test] +fn test_chacha20_poly1305_2() { + let key2 = [ + 0x1cu8, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a, + 0xf3, 0x33, 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0, + 0x47, 0x39, 0x17, 0xc1, 0x40, 0x2b, 0x80, 0x09, + 0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70, 0x75, 0xc0 + ]; + + let plaintext2 = [ + 0x49u8, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74, + 0x2d, 0x44, 0x72, 0x61, 0x66, 0x74, 0x73, 0x20, + 0x61, 0x72, 0x65, 0x20, 0x64, 0x72, 0x61, 0x66, + 0x74, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x20, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x20, + 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x20, + 0x6f, 0x66, 0x20, 0x73, 0x69, 0x78, 0x20, 0x6d, + 0x6f, 0x6e, 0x74, 0x68, 0x73, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x6d, 0x61, 0x79, 0x20, 0x62, 0x65, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x64, 0x2c, 0x20, 0x6f, 0x72, 0x20, 0x6f, + 0x62, 0x73, 0x6f, 0x6c, 0x65, 0x74, 0x65, 0x64, + 0x20, 0x62, 0x79, 0x20, 0x6f, 0x74, 0x68, 0x65, + 0x72, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x20, 0x61, 0x74, 0x20, 0x61, + 0x6e, 0x79, 0x20, 0x74, 0x69, 0x6d, 0x65, 0x2e, + 0x20, 0x49, 0x74, 0x20, 0x69, 0x73, 0x20, 0x69, + 0x6e, 0x61, 0x70, 0x70, 0x72, 0x6f, 0x70, 0x72, + 0x69, 0x61, 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, + 0x75, 0x73, 0x65, 0x20, 0x49, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x65, 0x74, 0x2d, 0x44, 0x72, 0x61, + 0x66, 0x74, 0x73, 0x20, 0x61, 0x73, 0x20, 0x72, + 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x20, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, + 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x6f, 0x20, + 0x63, 0x69, 0x74, 0x65, 0x20, 0x74, 0x68, 0x65, + 0x6d, 0x20, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x20, + 0x74, 0x68, 0x61, 0x6e, 0x20, 0x61, 0x73, 0x20, + 0x2f, 0xe2, 0x80, 0x9c, 0x77, 0x6f, 0x72, 0x6b, + 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, 0x67, + 0x72, 0x65, 0x73, 0x73, 0x2e, 0x2f, 0xe2, 0x80, + 0x9d + ]; + + let iv2 = [ + 0x00u8, 0x00, 
0x00, 0x00, 0x01, 0x02, 0x03, 0x04, + 0x05, 0x06, 0x07, 0x08 + ]; + + let aad2 = [ + 0xf3u8, 0x33, 0x88, 0x86, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x4e, 0x91 + ]; + + let cipher2 = [ + 0x64u8, 0xa0, 0x86, 0x15, 0x75, 0x86, 0x1a, 0xf4, + 0x60, 0xf0, 0x62, 0xc7, 0x9b, 0xe6, 0x43, 0xbd, + 0x5e, 0x80, 0x5c, 0xfd, 0x34, 0x5c, 0xf3, 0x89, + 0xf1, 0x08, 0x67, 0x0a, 0xc7, 0x6c, 0x8c, 0xb2, + 0x4c, 0x6c, 0xfc, 0x18, 0x75, 0x5d, 0x43, 0xee, + 0xa0, 0x9e, 0xe9, 0x4e, 0x38, 0x2d, 0x26, 0xb0, + 0xbd, 0xb7, 0xb7, 0x3c, 0x32, 0x1b, 0x01, 0x00, + 0xd4, 0xf0, 0x3b, 0x7f, 0x35, 0x58, 0x94, 0xcf, + 0x33, 0x2f, 0x83, 0x0e, 0x71, 0x0b, 0x97, 0xce, + 0x98, 0xc8, 0xa8, 0x4a, 0xbd, 0x0b, 0x94, 0x81, + 0x14, 0xad, 0x17, 0x6e, 0x00, 0x8d, 0x33, 0xbd, + 0x60, 0xf9, 0x82, 0xb1, 0xff, 0x37, 0xc8, 0x55, + 0x97, 0x97, 0xa0, 0x6e, 0xf4, 0xf0, 0xef, 0x61, + 0xc1, 0x86, 0x32, 0x4e, 0x2b, 0x35, 0x06, 0x38, + 0x36, 0x06, 0x90, 0x7b, 0x6a, 0x7c, 0x02, 0xb0, + 0xf9, 0xf6, 0x15, 0x7b, 0x53, 0xc8, 0x67, 0xe4, + 0xb9, 0x16, 0x6c, 0x76, 0x7b, 0x80, 0x4d, 0x46, + 0xa5, 0x9b, 0x52, 0x16, 0xcd, 0xe7, 0xa4, 0xe9, + 0x90, 0x40, 0xc5, 0xa4, 0x04, 0x33, 0x22, 0x5e, + 0xe2, 0x82, 0xa1, 0xb0, 0xa0, 0x6c, 0x52, 0x3e, + 0xaf, 0x45, 0x34, 0xd7, 0xf8, 0x3f, 0xa1, 0x15, + 0x5b, 0x00, 0x47, 0x71, 0x8c, 0xbc, 0x54, 0x6a, + 0x0d, 0x07, 0x2b, 0x04, 0xb3, 0x56, 0x4e, 0xea, + 0x1b, 0x42, 0x22, 0x73, 0xf5, 0x48, 0x27, 0x1a, + 0x0b, 0xb2, 0x31, 0x60, 0x53, 0xfa, 0x76, 0x99, + 0x19, 0x55, 0xeb, 0xd6, 0x31, 0x59, 0x43, 0x4e, + 0xce, 0xbb, 0x4e, 0x46, 0x6d, 0xae, 0x5a, 0x10, + 0x73, 0xa6, 0x72, 0x76, 0x27, 0x09, 0x7a, 0x10, + 0x49, 0xe6, 0x17, 0xd9, 0x1d, 0x36, 0x10, 0x94, + 0xfa, 0x68, 0xf0, 0xff, 0x77, 0x98, 0x71, 0x30, + 0x30, 0x5b, 0xea, 0xba, 0x2e, 0xda, 0x04, 0xdf, + 0x99, 0x7b, 0x71, 0x4d, 0x6c, 0x6f, 0x2c, 0x29, + 0xa6, 0xad, 0x5c, 0xb4, 0x02, 0x2b, 0x02, 0x70, + 0x9b + ]; + + let auth_tag_2 = [ + 0xeeu8, 0xad, 0x9d, 0x67, 0x89, 0x0c, 0xbb, 0x22, + 0x39, 0x23, 0x36, 0xfe, 0xa1, 0x85, 0x1f, 0x38 + ]; + + /* Encrypt */ + 
let mut ccp = ChaCha20Poly1305::new(&key2, &iv2, true).expect("Error with new()"); + ccp.update_aad(&aad2).expect("Error with update_aad()"); + let mut out_cipher2 = [0u8; 265]; + ccp.update_data(&plaintext2[0..128], &mut out_cipher2[0..128]).expect("Error with update_data()"); + ccp.update_data(&plaintext2[128..265], &mut out_cipher2[128..265]).expect("Error with update_data()"); + let mut out_auth_tag_2 = [0u8; ChaCha20Poly1305::AUTH_TAG_SIZE]; + ccp.finalize(&mut out_auth_tag_2).expect("Error with finalize()"); + assert_eq!(out_cipher2, cipher2); + assert_eq!(out_auth_tag_2, auth_tag_2); + + /* Decrypt */ + let mut ccp = ChaCha20Poly1305::new(&key2, &iv2, false).expect("Error with new()"); + ccp.update_aad(&aad2).expect("Error with update_aad()"); + let mut out_plaintext2 = [0u8; 265]; + ccp.update_data(&cipher2[0..128], &mut out_plaintext2[0..128]).expect("Error with update_data()"); + ccp.update_data(&cipher2[128..265], &mut out_plaintext2[128..265]).expect("Error with update_data()"); + let mut out_auth_tag_2 = [0u8; ChaCha20Poly1305::AUTH_TAG_SIZE]; + ccp.finalize(&mut out_auth_tag_2).expect("Error with finalize()"); + assert_eq!(out_plaintext2, plaintext2); + assert_eq!(out_auth_tag_2, auth_tag_2); +} + +#[test] +#[cfg(xchacha20_poly1305)] +fn test_xchacha20_poly1305() { + const PLAINTEXT: &[u8] = &[ + 0x4cu8, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c, /* Ladies and Gentl */ + 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73, /* emen of the clas */ + 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, + 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63, /* s of '99: If I c */ + 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, + 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f, /* ould offer you o */ + 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20, + 0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20, /* nly one tip for */ + 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75, + 
0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73, /* the future, suns */ + 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, + 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, /* creen would be i */ + 0x74, 0x2e ]; /* t. */ + + let aad = [ + 0x50u8, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3, + 0xc4, 0xc5, 0xc6, 0xc7 + ]; /* PQRS........ */ + + let key = [ + 0x80u8, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, + 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, + 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, + 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f + ]; + + let iv = [ + 0x40u8, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, + 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, /* @ABCDEFGHIJKLMNO */ + 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57 /* PQRSTUVW */ + ]; + + let expected_ciphertext = [ + 0xbdu8, 0x6d, 0x17, 0x9d, 0x3e, 0x83, 0xd4, 0x3b, + 0x95, 0x76, 0x57, 0x94, 0x93, 0xc0, 0xe9, 0x39, + 0x57, 0x2a, 0x17, 0x00, 0x25, 0x2b, 0xfa, 0xcc, + 0xbe, 0xd2, 0x90, 0x2c, 0x21, 0x39, 0x6c, 0xbb, + 0x73, 0x1c, 0x7f, 0x1b, 0x0b, 0x4a, 0xa6, 0x44, + 0x0b, 0xf3, 0xa8, 0x2f, 0x4e, 0xda, 0x7e, 0x39, + 0xae, 0x64, 0xc6, 0x70, 0x8c, 0x54, 0xc2, 0x16, + 0xcb, 0x96, 0xb7, 0x2e, 0x12, 0x13, 0xb4, 0x52, + 0x2f, 0x8c, 0x9b, 0xa4, 0x0d, 0xb5, 0xd9, 0x45, + 0xb1, 0x1b, 0x69, 0xb9, 0x82, 0xc1, 0xbb, 0x9e, + 0x3f, 0x3f, 0xac, 0x2b, 0xc3, 0x69, 0x48, 0x8f, + 0x76, 0xb2, 0x38, 0x35, 0x65, 0xd3, 0xff, 0xf9, + 0x21, 0xf9, 0x66, 0x4c, 0x97, 0x63, 0x7d, 0xa9, + 0x76, 0x88, 0x12, 0xf6, 0x15, 0xc6, 0x8b, 0x13, + 0xb5, 0x2e + ]; + + let expected_tag = [ + 0xc0u8, 0x87, 0x59, 0x24, 0xc1, 0xc7, 0x98, 0x79, + 0x47, 0xde, 0xaf, 0xd8, 0x78, 0x0a, 0xcf, 0x49 + ]; + + let mut ciphertext_buffer = [0u8; PLAINTEXT.len() + XChaCha20Poly1305::AUTH_TAG_SIZE]; + XChaCha20Poly1305::encrypt(&key, &iv, &aad, PLAINTEXT, &mut ciphertext_buffer).expect("Error with encrypt()"); + assert_eq!(ciphertext_buffer[0..expected_ciphertext.len()], expected_ciphertext); + assert_eq!(ciphertext_buffer[expected_ciphertext.len()..], 
expected_tag); + let mut plaintext_buffer = [0u8; PLAINTEXT.len()]; + XChaCha20Poly1305::decrypt(&key, &iv, &aad, &ciphertext_buffer, &mut plaintext_buffer).expect("Error with decrypt()"); + assert_eq!(plaintext_buffer, PLAINTEXT); +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/tests/test_curve25519.rs b/wrapper/rust/wolfssl-wolfcrypt/tests/test_curve25519.rs new file mode 100644 index 000000000..ce1e3cc39 --- /dev/null +++ b/wrapper/rust/wolfssl-wolfcrypt/tests/test_curve25519.rs @@ -0,0 +1,146 @@ +#![cfg(all(curve25519, random))] + +use wolfssl_wolfcrypt::curve25519::*; +use wolfssl_wolfcrypt::random::RNG; + +#[test] +fn test_check_pub() { + let mut rng = RNG::new().expect("Error with new()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::generate_priv(&mut rng, &mut private_buffer).expect("Error with generate_priv()"); + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::make_pub(&private_buffer, &mut public_buffer).expect("Error with make_pub()"); + Curve25519Key::check_public(&public_buffer, false).expect("Error with check_public()"); +} + +#[test] +fn test_generate_priv() { + let mut rng = RNG::new().expect("Error with new()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::generate_priv(&mut rng, &mut private_buffer).expect("Error with generate_priv()"); +} + +#[test] +fn test_import_export_private() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_private_raw(&mut private_buffer).expect("Error with export_private_raw()"); + Curve25519Key::import_private(&private_buffer).expect("Error with import_private()"); +} + +#[test] +fn test_import_export_private_ex() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with 
generate()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_private_raw_ex(&mut private_buffer, false).expect("Error with export_private_raw_ex()"); + Curve25519Key::import_private_ex(&private_buffer, false).expect("Error with import_private_ex()"); +} + +#[test] +fn test_import_export_raw() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_key_raw(&mut private_buffer, &mut public_buffer).expect("Error with export_key_raw()"); + Curve25519Key::import_private_raw(&private_buffer, &public_buffer).expect("Error with import_private_raw()"); +} + +#[test] +fn test_import_export_raw_ex() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_key_raw_ex(&mut private_buffer, &mut public_buffer, false).expect("Error with export_key_raw_ex()"); + Curve25519Key::import_private_raw_ex(&private_buffer, &public_buffer, false).expect("Error with import_private_raw_ex()"); +} + +#[test] +fn test_import_export_public() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_public(&mut public_buffer).expect("Error with export_public()"); + Curve25519Key::import_public(&public_buffer).expect("Error with import_public()"); +} + +#[test] +fn test_import_export_public_ex() { + let mut rng = RNG::new().expect("Error with new()"); + let mut curve25519key = Curve25519Key::generate(&mut rng).expect("Error with 
generate()"); + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + curve25519key.export_public_ex(&mut public_buffer, false).expect("Error with export_public_ex()"); + Curve25519Key::import_public_ex(&public_buffer, false).expect("Error with import_public_ex()"); +} + +#[test] +fn test_make_pub() { + let mut rng = RNG::new().expect("Error with new()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::generate_priv(&mut rng, &mut private_buffer).expect("Error with generate_priv()"); + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::make_pub(&private_buffer, &mut public_buffer).expect("Error with make_pub()"); +} + +#[test] +#[cfg(curve25519_blinding)] +fn test_make_pub_blind() { + let mut rng = RNG::new().expect("Error with new()"); + let mut private_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::generate_priv(&mut rng, &mut private_buffer).expect("Error with generate_priv()"); + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::make_pub_blind(&private_buffer, &mut public_buffer, &mut rng).expect("Error with make_pub_blind()"); +} + +#[test] +fn test_shared_secret() { + let mut rng = RNG::new().expect("Error with new()"); + let mut key1 = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut key2 = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + + #[cfg(curve25519_blinding)] + key1.set_rng(&mut rng).expect("Error with set_rng()"); + #[cfg(curve25519_blinding)] + key2.set_rng(&mut rng).expect("Error with set_rng()"); + + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + key1.export_public(&mut public_buffer).expect("Error with export_public()"); + let mut key1public = Curve25519Key::import_public(&public_buffer).expect("Error with import_public()"); + key2.export_public(&mut public_buffer).expect("Error with export_public()"); + let mut key2public = Curve25519Key::import_public(&public_buffer).expect("Error with import_public()"); 
+ + let mut ss1 = [0u8; Curve25519Key::KEYSIZE]; + let mut ss2 = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::shared_secret(&mut key1, &mut key2public, &mut ss1).expect("Error with shared_secret()"); + Curve25519Key::shared_secret(&mut key2, &mut key1public, &mut ss2).expect("Error with shared_secret()"); + + assert_eq!(ss1, ss2); +} + +#[test] +fn test_shared_secret_ex() { + let mut rng = RNG::new().expect("Error with new()"); + let mut key1 = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + let mut key2 = Curve25519Key::generate(&mut rng).expect("Error with generate()"); + + #[cfg(curve25519_blinding)] + key1.set_rng(&mut rng).expect("Error with set_rng()"); + #[cfg(curve25519_blinding)] + key2.set_rng(&mut rng).expect("Error with set_rng()"); + + let mut public_buffer = [0u8; Curve25519Key::KEYSIZE]; + key1.export_public(&mut public_buffer).expect("Error with export_public()"); + let mut key1public = Curve25519Key::import_public(&public_buffer).expect("Error with import_public()"); + key2.export_public(&mut public_buffer).expect("Error with export_public()"); + let mut key2public = Curve25519Key::import_public(&public_buffer).expect("Error with import_public()"); + + let mut ss1 = [0u8; Curve25519Key::KEYSIZE]; + let mut ss2 = [0u8; Curve25519Key::KEYSIZE]; + Curve25519Key::shared_secret_ex(&mut key1, &mut key2public, &mut ss1, false).expect("Error with shared_secret_ex()"); + Curve25519Key::shared_secret_ex(&mut key2, &mut key1public, &mut ss2, false).expect("Error with shared_secret_ex()"); + + assert_eq!(ss1, ss2); +} diff --git a/wrapper/rust/wolfssl-wolfcrypt/tests/test_wolfcrypt.rs b/wrapper/rust/wolfssl-wolfcrypt/tests/test_wolfcrypt.rs index 768246fd3..8189e4af8 100644 --- a/wrapper/rust/wolfssl-wolfcrypt/tests/test_wolfcrypt.rs +++ b/wrapper/rust/wolfssl-wolfcrypt/tests/test_wolfcrypt.rs @@ -1,11 +1,7 @@ use wolfssl_wolfcrypt::*; #[test] -fn test_wolfcrypt_init() { +fn test_wolfcrypt_init_and_cleanup() { wolfcrypt_init().expect("Error
with wolfcrypt_init()"); -} - -#[test] -fn test_wolfcrypt_cleanup() { wolfcrypt_cleanup().expect("Error with wolfcrypt_cleanup()"); }