12#include "hw/top/aes_regs.h"
/**
 * Spins until the given AES STATUS register bit `flag_` reads as `value_`.
 *
 * Fix: the original expansion read `aes->base_addr`, capturing a caller
 * variable named `aes` instead of using the macro parameter `aes_` — it only
 * worked because every call site happened to name its pointer `aes`. All
 * parameters are now parenthesized and `aes_` is used, making the macro
 * hygienic.
 */
#define AES_WAIT_FOR_STATUS(aes_, flag_, value_)                           \
  while (mmio_region_get_bit32((aes_)->base_addr, AES_STATUS_REG_OFFSET,   \
                               (flag_)) != (value_)) {                     \
  }
31static bool aes_idle(
const dif_aes_t *aes) {
32 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
36static bool aes_stalled(
const dif_aes_t *aes) {
37 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
38 AES_STATUS_STALL_BIT);
41static bool aes_output_lost(
const dif_aes_t *aes) {
42 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
43 AES_STATUS_OUTPUT_LOST_BIT);
46static bool aes_output_valid(
const dif_aes_t *aes) {
47 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
48 AES_STATUS_OUTPUT_VALID_BIT);
51static bool aes_input_ready(
const dif_aes_t *aes) {
52 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
53 AES_STATUS_INPUT_READY_BIT);
56static bool aes_alert_fatal(
const dif_aes_t *aes) {
57 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
58 AES_STATUS_ALERT_FATAL_FAULT_BIT);
61static bool aes_alert_recoverable(
const dif_aes_t *aes) {
62 return mmio_region_get_bit32(aes->
base_addr, AES_STATUS_REG_OFFSET,
63 AES_STATUS_ALERT_RECOV_CTRL_UPDATE_ERR_BIT);
66static void aes_shadowed_write(
mmio_region_t base, ptrdiff_t offset,
68 mmio_region_write32(base, offset, value);
69 mmio_region_write32(base, offset, value);
72static void aes_clear_internal_state(
const dif_aes_t *aes) {
74 AES_WAIT_FOR_STATUS(aes, AES_STATUS_IDLE_BIT,
true);
79 bitfield_bit32_write(0, AES_CTRL_SHADOWED_MANUAL_OPERATION_BIT,
true);
81 aes_shadowed_write(aes->
base_addr, AES_CTRL_SHADOWED_REG_OFFSET, ctrl_reg);
83 uint32_t trigger_reg =
84 bitfield_bit32_write(0, AES_TRIGGER_KEY_IV_DATA_IN_CLEAR_BIT,
true);
87 bitfield_bit32_write(trigger_reg, AES_TRIGGER_DATA_OUT_CLEAR_BIT,
true);
89 mmio_region_write32(aes->
base_addr, AES_TRIGGER_REG_OFFSET, trigger_reg);
92 AES_WAIT_FOR_STATUS(aes, AES_STATUS_IDLE_BIT,
true);
104 uint32_t reg = bitfield_field32_write(0, AES_CTRL_SHADOWED_OPERATION_FIELD,
105 transaction->operation);
107 reg = bitfield_field32_write(reg, AES_CTRL_SHADOWED_MODE_FIELD,
110 reg = bitfield_field32_write(reg, AES_CTRL_SHADOWED_KEY_LEN_FIELD,
111 transaction->key_len);
113 reg = bitfield_field32_write(reg, AES_CTRL_SHADOWED_PRNG_RESEED_RATE_FIELD,
114 transaction->mask_reseeding);
117 reg = bitfield_bit32_write(reg, AES_CTRL_SHADOWED_MANUAL_OPERATION_BIT, flag);
120 reg = bitfield_bit32_write(reg, AES_CTRL_SHADOWED_SIDELOAD_BIT, flag);
122 aes_shadowed_write(aes->
base_addr, AES_CTRL_SHADOWED_REG_OFFSET, reg);
138 mmio_region_read32(aes->
base_addr, AES_CTRL_AUX_REGWEN_REG_OFFSET);
141 mmio_region_read32(aes->
base_addr, AES_CTRL_AUX_SHADOWED_REG_OFFSET);
142 if (bitfield_bit32_read(
143 reg_val, AES_CTRL_AUX_SHADOWED_KEY_TOUCH_FORCES_RESEED_BIT) !=
145 bitfield_bit32_read(reg_val, AES_CTRL_AUX_SHADOWED_FORCE_MASKS_BIT) !=
153 bitfield_bit32_write(0, AES_CTRL_AUX_SHADOWED_KEY_TOUCH_FORCES_RESEED_BIT,
155 reg_val = bitfield_bit32_write(reg_val, AES_CTRL_AUX_SHADOWED_FORCE_MASKS_BIT,
157 aes_shadowed_write(aes->
base_addr, AES_CTRL_AUX_SHADOWED_REG_OFFSET, reg_val);
160 mmio_region_write32(aes->
base_addr, AES_CTRL_AUX_REGWEN_REG_OFFSET, reg_val);
173static void aes_set_multireg(
const dif_aes_t *aes,
const uint32_t *data,
174 size_t regs_num, ptrdiff_t reg0_offset) {
175 for (
int i = 0; i < regs_num; ++i) {
176 ptrdiff_t offset = reg0_offset + (ptrdiff_t)i * (ptrdiff_t)
sizeof(uint32_t);
178 mmio_region_write32(aes->
base_addr, offset, data[i]);
182static void aes_read_multireg(
const dif_aes_t *aes, uint32_t *data,
183 size_t regs_num, ptrdiff_t reg0_offset) {
184 for (
int i = 0; i < regs_num; ++i) {
185 ptrdiff_t offset = reg0_offset + (ptrdiff_t)i * (ptrdiff_t)
sizeof(uint32_t);
187 data[i] = mmio_region_read32(aes->
base_addr, offset);
196 aes_clear_internal_state(aes);
199 uint32_t reg = bitfield_field32_write(0, AES_CTRL_SHADOWED_OPERATION_FIELD,
200 AES_CTRL_SHADOWED_OPERATION_MASK);
202 reg = bitfield_field32_write(reg, AES_CTRL_SHADOWED_MODE_FIELD,
203 AES_CTRL_SHADOWED_MODE_VALUE_AES_NONE);
205 reg = bitfield_field32_write(reg, AES_CTRL_SHADOWED_KEY_LEN_FIELD,
206 AES_CTRL_SHADOWED_KEY_LEN_MASK);
208 aes_shadowed_write(aes->
base_addr, AES_CTRL_SHADOWED_REG_OFFSET, reg);
217 if (aes == NULL || transaction == NULL ||
224 if (!aes_idle(aes)) {
233 result = configure_aux(aes, transaction);
241 aes, AES_CTRL_GCM_SHADOWED_PHASE_VALUE_GCM_INIT, 16));
245 aes_set_multireg(aes, &key->share0[0], AES_KEY_SHARE0_MULTIREG_COUNT,
246 AES_KEY_SHARE0_0_REG_OFFSET);
248 aes_set_multireg(aes, &key->share1[0], AES_KEY_SHARE1_MULTIREG_COUNT,
249 AES_KEY_SHARE1_0_REG_OFFSET);
256 AES_WAIT_FOR_STATUS(aes, AES_STATUS_IDLE_BIT,
true);
257 aes_set_multireg(aes, &iv->iv[0], AES_IV_MULTIREG_COUNT,
258 AES_IV_0_REG_OFFSET);
269 if (!aes_idle(aes)) {
273 aes_clear_internal_state(aes);
284 if (!aes_input_ready(aes)) {
288 aes_set_multireg(aes, &data.data[0], AES_DATA_IN_MULTIREG_COUNT,
289 AES_DATA_IN_0_REG_OFFSET);
295 if (aes == NULL || data == NULL) {
299 if (!aes_output_valid(aes)) {
303 aes_read_multireg(aes, data->data, AES_DATA_OUT_MULTIREG_COUNT,
304 AES_DATA_OUT_0_REG_OFFSET);
310 size_t num_valid_bytes) {
315 if (!aes_idle(aes)) {
319 uint32_t reg = bitfield_field32_write(
320 0, AES_CTRL_GCM_SHADOWED_NUM_VALID_BYTES_FIELD, num_valid_bytes);
322 reg = bitfield_field32_write(reg, AES_CTRL_GCM_SHADOWED_PHASE_FIELD, phase);
323 aes_shadowed_write(aes->
base_addr, AES_CTRL_GCM_SHADOWED_REG_OFFSET, reg);
335 if (!aes_input_ready(aes)) {
341 len_aad_ptx.data[0] = __builtin_bswap32((uint32_t)(len_aad >> 32));
342 len_aad_ptx.data[1] = __builtin_bswap32((uint32_t)(len_aad & 0xFFFFFFFF));
343 len_aad_ptx.data[2] = __builtin_bswap32((uint32_t)(len_ptx >> 32));
344 len_aad_ptx.data[3] = __builtin_bswap32((uint32_t)(len_ptx & 0xFFFFFFFF));
346 aes_set_multireg(aes, &len_aad_ptx.data[0], AES_DATA_IN_MULTIREG_COUNT,
347 AES_DATA_IN_0_REG_OFFSET);
355 size_t block_count) {
356 if (aes == NULL || plain_text == NULL || cipher_text == NULL ||
363 if (block_count < 2) {
365 return dif_aes_read_output(aes, cipher_text);
369 if (!aes_input_ready(aes)) {
374 aes_set_multireg(aes, plain_text[0].data, AES_DATA_IN_MULTIREG_COUNT,
375 AES_DATA_IN_0_REG_OFFSET);
380 AES_WAIT_FOR_STATUS(aes, AES_STATUS_INPUT_READY_BIT,
true);
382 for (
size_t i = 0; i < block_count; ++i) {
387 if (i + 1 < block_count) {
388 aes_set_multireg(aes, plain_text[i + 1].data, AES_DATA_IN_MULTIREG_COUNT,
389 AES_DATA_IN_0_REG_OFFSET);
395 AES_WAIT_FOR_STATUS(aes, AES_STATUS_OUTPUT_VALID_BIT,
true);
399 aes_read_multireg(aes, cipher_text[i].data, AES_DATA_OUT_MULTIREG_COUNT,
400 AES_DATA_OUT_0_REG_OFFSET);
411 uint32_t reg = bitfield_bit32_write(0, trigger,
true);
412 mmio_region_write32(aes->
base_addr, AES_TRIGGER_REG_OFFSET, reg);
419 if (aes == NULL || set == NULL) {
425 *set = aes_idle(aes);
428 *set = aes_stalled(aes);
431 *set = aes_output_lost(aes);
434 *set = aes_output_valid(aes);
437 *set = aes_input_ready(aes);
440 *set = aes_alert_fatal(aes);
443 *set = aes_alert_recoverable(aes);
453 if (aes == NULL || iv == NULL) {
457 if (!aes_idle(aes)) {
461 for (
int i = 0; i < AES_IV_MULTIREG_COUNT; ++i) {
463 AES_IV_0_REG_OFFSET + (ptrdiff_t)i * (ptrdiff_t)
sizeof(uint32_t);
465 iv->iv[i] = mmio_region_read32(aes->
base_addr, offset);