[Mirror-page header, cleaned] Source: OSCHINA mirror of lupyuen/LoRaMac-node-nuttx — file hri_aes_l21.h (33 KB), commit 8f4693d by Miguel Luis (7 years ago).
/**
* \file
*
* \brief SAM AES
*
* Copyright (C) 2016 Atmel Corporation. All rights reserved.
*
* \asf_license_start
*
* \page License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The name of Atmel may not be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* 4. This software may only be redistributed and used in connection with an
* Atmel microcontroller product.
*
* THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
* EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* \asf_license_stop
*/
#ifdef _SAML21_AES_COMPONENT_
#ifndef _HRI_AES_L21_H_INCLUDED_
#define _HRI_AES_L21_H_INCLUDED_
#ifdef __cplusplus
extern "C" {
#endif
#include <stdbool.h>
#include <hal_atomic.h>
#if defined(ENABLE_AES_CRITICAL_SECTIONS)
#define AES_CRITICAL_SECTION_ENTER() CRITICAL_SECTION_ENTER()
#define AES_CRITICAL_SECTION_LEAVE() CRITICAL_SECTION_LEAVE()
#else
#define AES_CRITICAL_SECTION_ENTER()
#define AES_CRITICAL_SECTION_LEAVE()
#endif
typedef uint32_t hri_aes_ciplen_reg_t;
typedef uint32_t hri_aes_ctrla_reg_t;
typedef uint32_t hri_aes_ghash_reg_t;
typedef uint32_t hri_aes_hashkey_reg_t;
typedef uint32_t hri_aes_indata_reg_t;
typedef uint32_t hri_aes_intvectv_reg_t;
typedef uint32_t hri_aes_keyword_reg_t;
typedef uint32_t hri_aes_randseed_reg_t;
typedef uint8_t hri_aes_ctrlb_reg_t;
typedef uint8_t hri_aes_databufptr_reg_t;
typedef uint8_t hri_aes_dbgctrl_reg_t;
typedef uint8_t hri_aes_intenset_reg_t;
typedef uint8_t hri_aes_intflag_reg_t;
/*
 * Interrupt-enable (INTEN) accessors.
 *
 * The AES peripheral exposes its interrupt-enable state through a pair of
 * registers: INTENSET (write 1 to enable; reading it returns the currently
 * enabled mask) and INTENCLR (write 1 to disable).  Every helper below is a
 * single register access, so none of them takes the AES critical section.
 */
/* Enable the encryption-complete (ENCCMP) interrupt. */
static inline void hri_aes_set_INTEN_ENCCMP_bit(const void *const hw)
{
((Aes *)hw)->INTENSET.reg = AES_INTENSET_ENCCMP;
}
/* Return true when the ENCCMP interrupt is currently enabled. */
static inline bool hri_aes_get_INTEN_ENCCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTENSET.reg & AES_INTENSET_ENCCMP) >> AES_INTENSET_ENCCMP_Pos;
}
/* Enable (value != 0) or disable (value == 0) the ENCCMP interrupt by
 * writing the matching one-shot register (INTENSET or INTENCLR). */
static inline void hri_aes_write_INTEN_ENCCMP_bit(const void *const hw, bool value)
{
if (value == 0x0) {
((Aes *)hw)->INTENCLR.reg = AES_INTENSET_ENCCMP;
} else {
((Aes *)hw)->INTENSET.reg = AES_INTENSET_ENCCMP;
}
}
/* Disable the ENCCMP interrupt. */
static inline void hri_aes_clear_INTEN_ENCCMP_bit(const void *const hw)
{
((Aes *)hw)->INTENCLR.reg = AES_INTENSET_ENCCMP;
}
/* Same set/get/write/clear pattern for the GF-multiplication-complete
 * (GFMCMP) interrupt. */
static inline void hri_aes_set_INTEN_GFMCMP_bit(const void *const hw)
{
((Aes *)hw)->INTENSET.reg = AES_INTENSET_GFMCMP;
}
static inline bool hri_aes_get_INTEN_GFMCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTENSET.reg & AES_INTENSET_GFMCMP) >> AES_INTENSET_GFMCMP_Pos;
}
static inline void hri_aes_write_INTEN_GFMCMP_bit(const void *const hw, bool value)
{
if (value == 0x0) {
((Aes *)hw)->INTENCLR.reg = AES_INTENSET_GFMCMP;
} else {
((Aes *)hw)->INTENSET.reg = AES_INTENSET_GFMCMP;
}
}
static inline void hri_aes_clear_INTEN_GFMCMP_bit(const void *const hw)
{
((Aes *)hw)->INTENCLR.reg = AES_INTENSET_GFMCMP;
}
/* Whole-register variants operating on an arbitrary interrupt mask. */
static inline void hri_aes_set_INTEN_reg(const void *const hw, hri_aes_intenset_reg_t mask)
{
((Aes *)hw)->INTENSET.reg = mask;
}
/* Return the enabled-interrupt mask restricted to 'mask'. */
static inline hri_aes_intenset_reg_t hri_aes_get_INTEN_reg(const void *const hw, hri_aes_intenset_reg_t mask)
{
uint8_t tmp;
tmp = ((Aes *)hw)->INTENSET.reg;
tmp &= mask;
return tmp;
}
static inline hri_aes_intenset_reg_t hri_aes_read_INTEN_reg(const void *const hw)
{
return ((Aes *)hw)->INTENSET.reg;
}
/* Overwrite the complete enable state: bits set in 'data' are enabled
 * through INTENSET, every remaining bit is disabled through INTENCLR. */
static inline void hri_aes_write_INTEN_reg(const void *const hw, hri_aes_intenset_reg_t data)
{
((Aes *)hw)->INTENSET.reg = data;
((Aes *)hw)->INTENCLR.reg = ~data;
}
/* Disable every interrupt whose bit is set in 'mask'. */
static inline void hri_aes_clear_INTEN_reg(const void *const hw, hri_aes_intenset_reg_t mask)
{
((Aes *)hw)->INTENCLR.reg = mask;
}
/*
 * Interrupt-flag (INTFLAG) accessors.
 *
 * Flags are read from INTFLAG; a flag is cleared by writing its bit back
 * to INTFLAG (hardware write-one-to-clear semantics).  The
 * hri_aes_*_interrupt_* helpers further below are byte-for-byte aliases of
 * the INTFLAG helpers, provided under the naming used by the interrupt
 * abstraction layer.
 */
static inline bool hri_aes_get_INTFLAG_ENCCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTFLAG.reg & AES_INTFLAG_ENCCMP) >> AES_INTFLAG_ENCCMP_Pos;
}
static inline void hri_aes_clear_INTFLAG_ENCCMP_bit(const void *const hw)
{
((Aes *)hw)->INTFLAG.reg = AES_INTFLAG_ENCCMP;
}
static inline bool hri_aes_get_INTFLAG_GFMCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTFLAG.reg & AES_INTFLAG_GFMCMP) >> AES_INTFLAG_GFMCMP_Pos;
}
static inline void hri_aes_clear_INTFLAG_GFMCMP_bit(const void *const hw)
{
((Aes *)hw)->INTFLAG.reg = AES_INTFLAG_GFMCMP;
}
/* Aliases of the INTFLAG helpers above (identical bodies). */
static inline bool hri_aes_get_interrupt_ENCCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTFLAG.reg & AES_INTFLAG_ENCCMP) >> AES_INTFLAG_ENCCMP_Pos;
}
static inline void hri_aes_clear_interrupt_ENCCMP_bit(const void *const hw)
{
((Aes *)hw)->INTFLAG.reg = AES_INTFLAG_ENCCMP;
}
static inline bool hri_aes_get_interrupt_GFMCMP_bit(const void *const hw)
{
return (((Aes *)hw)->INTFLAG.reg & AES_INTFLAG_GFMCMP) >> AES_INTFLAG_GFMCMP_Pos;
}
static inline void hri_aes_clear_interrupt_GFMCMP_bit(const void *const hw)
{
((Aes *)hw)->INTFLAG.reg = AES_INTFLAG_GFMCMP;
}
/* Return the pending flags restricted to 'mask'. */
static inline hri_aes_intflag_reg_t hri_aes_get_INTFLAG_reg(const void *const hw, hri_aes_intflag_reg_t mask)
{
uint8_t tmp;
tmp = ((Aes *)hw)->INTFLAG.reg;
tmp &= mask;
return tmp;
}
static inline hri_aes_intflag_reg_t hri_aes_read_INTFLAG_reg(const void *const hw)
{
return ((Aes *)hw)->INTFLAG.reg;
}
/* Clear every flag whose bit is set in 'mask'. */
static inline void hri_aes_clear_INTFLAG_reg(const void *const hw, hri_aes_intflag_reg_t mask)
{
((Aes *)hw)->INTFLAG.reg = mask;
}
/*
 * Write-only register helpers.
 *
 * DBGCTRL (debug control), KEYWORD[] (key words, indexed) and INTVECTV[]
 * (initialization-vector words, indexed) only have write accessors here.
 * Each write is wrapped in the optional AES critical section (a no-op
 * unless ENABLE_AES_CRITICAL_SECTIONS is defined).
 * NOTE(review): 'index' is not range-checked; callers must stay within
 * the array bounds declared in the device header.
 */
static inline void hri_aes_write_DBGCTRL_reg(const void *const hw, hri_aes_dbgctrl_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DBGCTRL.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_write_KEYWORD_reg(const void *const hw, uint8_t index, hri_aes_keyword_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->KEYWORD[index].reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_write_INTVECTV_reg(const void *const hw, uint8_t index, hri_aes_intvectv_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->INTVECTV[index].reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
/*
 * CTRLA single-bit accessors.
 *
 * Each control bit gets up to five helpers following the standard hri
 * pattern: set (OR the bit in), get (read and test), write (read-modify-
 * write to the requested value), clear (AND the bit out) and toggle (XOR).
 * All register modifications are guarded by AES_CRITICAL_SECTION_ENTER/
 * LEAVE, which expands to nothing unless ENABLE_AES_CRITICAL_SECTIONS is
 * defined.
 */
/* SWRST: software-reset bit (only set and get are provided). */
static inline void hri_aes_set_CTRLA_SWRST_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_SWRST;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_SWRST_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_SWRST) >> AES_CTRLA_SWRST_Pos;
return (bool)tmp;
}
/* ENABLE: module enable bit. */
static inline void hri_aes_set_CTRLA_ENABLE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_ENABLE;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_ENABLE_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_ENABLE) >> AES_CTRLA_ENABLE_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_ENABLE_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_ENABLE;
tmp |= value << AES_CTRLA_ENABLE_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_ENABLE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_ENABLE;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_ENABLE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_ENABLE;
AES_CRITICAL_SECTION_LEAVE();
}
/* CIPHER: cipher-direction bit (encryption vs. decryption — see the
 * datasheet for the exact polarity). */
static inline void hri_aes_set_CTRLA_CIPHER_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_CIPHER;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_CIPHER_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_CIPHER) >> AES_CTRLA_CIPHER_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_CIPHER_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_CIPHER;
tmp |= value << AES_CTRLA_CIPHER_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_CIPHER_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_CIPHER;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_CIPHER_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_CIPHER;
AES_CRITICAL_SECTION_LEAVE();
}
/* STARTMODE: start-mode select bit. */
static inline void hri_aes_set_CTRLA_STARTMODE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_STARTMODE;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_STARTMODE_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_STARTMODE) >> AES_CTRLA_STARTMODE_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_STARTMODE_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_STARTMODE;
tmp |= value << AES_CTRLA_STARTMODE_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_STARTMODE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_STARTMODE;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_STARTMODE_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_STARTMODE;
AES_CRITICAL_SECTION_LEAVE();
}
/* LOD: last-output-data bit. */
static inline void hri_aes_set_CTRLA_LOD_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_LOD;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_LOD_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_LOD) >> AES_CTRLA_LOD_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_LOD_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_LOD;
tmp |= value << AES_CTRLA_LOD_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_LOD_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_LOD;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_LOD_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_LOD;
AES_CRITICAL_SECTION_LEAVE();
}
/* KEYGEN: key-generation bit. */
static inline void hri_aes_set_CTRLA_KEYGEN_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_KEYGEN;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_KEYGEN_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_KEYGEN) >> AES_CTRLA_KEYGEN_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_KEYGEN_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_KEYGEN;
tmp |= value << AES_CTRLA_KEYGEN_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_KEYGEN_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_KEYGEN;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_KEYGEN_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_KEYGEN;
AES_CRITICAL_SECTION_LEAVE();
}
/* XORKEY: XOR-key operation bit. */
static inline void hri_aes_set_CTRLA_XORKEY_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_XORKEY;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLA_XORKEY_bit(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_XORKEY) >> AES_CTRLA_XORKEY_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLA_XORKEY_bit(const void *const hw, bool value)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_XORKEY;
tmp |= value << AES_CTRLA_XORKEY_Pos;
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_XORKEY_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_XORKEY;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_XORKEY_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_XORKEY;
AES_CRITICAL_SECTION_LEAVE();
}
/*
 * CTRLA bit-field accessors (AESMODE, CFBS, KEYSIZE, CTYPE).
 *
 * set/get/clear/toggle take a field-relative mask that is shifted into
 * position by the AES_CTRLA_x(value) macro; write replaces the whole
 * field (mask out, then OR in) under the critical section; read returns
 * the current field value shifted down to bit 0.
 */
/* AESMODE: operating-mode field. */
static inline void hri_aes_set_CTRLA_AESMODE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_AESMODE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_get_CTRLA_AESMODE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_AESMODE(mask)) >> AES_CTRLA_AESMODE_Pos;
return tmp;
}
static inline void hri_aes_write_CTRLA_AESMODE_bf(const void *const hw, hri_aes_ctrla_reg_t data)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_AESMODE_Msk;
tmp |= AES_CTRLA_AESMODE(data);
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_AESMODE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_AESMODE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_AESMODE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_AESMODE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_read_CTRLA_AESMODE_bf(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_AESMODE_Msk) >> AES_CTRLA_AESMODE_Pos;
return tmp;
}
/* CFBS: cipher-feedback block-size field. */
static inline void hri_aes_set_CTRLA_CFBS_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_CFBS(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_get_CTRLA_CFBS_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_CFBS(mask)) >> AES_CTRLA_CFBS_Pos;
return tmp;
}
static inline void hri_aes_write_CTRLA_CFBS_bf(const void *const hw, hri_aes_ctrla_reg_t data)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_CFBS_Msk;
tmp |= AES_CTRLA_CFBS(data);
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_CFBS_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_CFBS(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_CFBS_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_CFBS(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_read_CTRLA_CFBS_bf(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_CFBS_Msk) >> AES_CTRLA_CFBS_Pos;
return tmp;
}
/* KEYSIZE: key-size field. */
static inline void hri_aes_set_CTRLA_KEYSIZE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_KEYSIZE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_get_CTRLA_KEYSIZE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_KEYSIZE(mask)) >> AES_CTRLA_KEYSIZE_Pos;
return tmp;
}
static inline void hri_aes_write_CTRLA_KEYSIZE_bf(const void *const hw, hri_aes_ctrla_reg_t data)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_KEYSIZE_Msk;
tmp |= AES_CTRLA_KEYSIZE(data);
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_KEYSIZE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_KEYSIZE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_KEYSIZE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_KEYSIZE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_read_CTRLA_KEYSIZE_bf(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_KEYSIZE_Msk) >> AES_CTRLA_KEYSIZE_Pos;
return tmp;
}
/* CTYPE: countermeasure-type field. */
static inline void hri_aes_set_CTRLA_CTYPE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= AES_CTRLA_CTYPE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_get_CTRLA_CTYPE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_CTYPE(mask)) >> AES_CTRLA_CTYPE_Pos;
return tmp;
}
static inline void hri_aes_write_CTRLA_CTYPE_bf(const void *const hw, hri_aes_ctrla_reg_t data)
{
uint32_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= ~AES_CTRLA_CTYPE_Msk;
tmp |= AES_CTRLA_CTYPE(data);
((Aes *)hw)->CTRLA.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_CTYPE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~AES_CTRLA_CTYPE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_CTYPE_bf(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= AES_CTRLA_CTYPE(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_read_CTRLA_CTYPE_bf(const void *const hw)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp = (tmp & AES_CTRLA_CTYPE_Msk) >> AES_CTRLA_CTYPE_Pos;
return tmp;
}
/*
 * CTRLA whole-register accessors: set ORs 'mask' in, get returns the
 * register value ANDed with 'mask', write overwrites the register,
 * clear ANDs the complement of 'mask', toggle XORs, read returns the
 * raw value.  Modifications are guarded by the AES critical section.
 */
static inline void hri_aes_set_CTRLA_reg(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_get_CTRLA_reg(const void *const hw, hri_aes_ctrla_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CTRLA.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_CTRLA_reg(const void *const hw, hri_aes_ctrla_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLA_reg(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLA_reg(const void *const hw, hri_aes_ctrla_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLA.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrla_reg_t hri_aes_read_CTRLA_reg(const void *const hw)
{
return ((Aes *)hw)->CTRLA.reg;
}
/*
 * CTRLB accessors (8-bit register).
 *
 * Same set/get/write/clear/toggle pattern as CTRLA, for the START,
 * NEWMSG, EOM and GFMUL control bits, followed by whole-register
 * variants.  All modifications are guarded by the AES critical section.
 */
/* START: start encryption/decryption bit. */
static inline void hri_aes_set_CTRLB_START_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg |= AES_CTRLB_START;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLB_START_bit(const void *const hw)
{
uint8_t tmp;
tmp = ((Aes *)hw)->CTRLB.reg;
tmp = (tmp & AES_CTRLB_START) >> AES_CTRLB_START_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLB_START_bit(const void *const hw, bool value)
{
uint8_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLB.reg;
tmp &= ~AES_CTRLB_START;
tmp |= value << AES_CTRLB_START_Pos;
((Aes *)hw)->CTRLB.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLB_START_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg &= ~AES_CTRLB_START;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLB_START_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg ^= AES_CTRLB_START;
AES_CRITICAL_SECTION_LEAVE();
}
/* NEWMSG: new-message bit. */
static inline void hri_aes_set_CTRLB_NEWMSG_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg |= AES_CTRLB_NEWMSG;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLB_NEWMSG_bit(const void *const hw)
{
uint8_t tmp;
tmp = ((Aes *)hw)->CTRLB.reg;
tmp = (tmp & AES_CTRLB_NEWMSG) >> AES_CTRLB_NEWMSG_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLB_NEWMSG_bit(const void *const hw, bool value)
{
uint8_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLB.reg;
tmp &= ~AES_CTRLB_NEWMSG;
tmp |= value << AES_CTRLB_NEWMSG_Pos;
((Aes *)hw)->CTRLB.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLB_NEWMSG_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg &= ~AES_CTRLB_NEWMSG;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLB_NEWMSG_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg ^= AES_CTRLB_NEWMSG;
AES_CRITICAL_SECTION_LEAVE();
}
/* EOM: end-of-message bit. */
static inline void hri_aes_set_CTRLB_EOM_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg |= AES_CTRLB_EOM;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLB_EOM_bit(const void *const hw)
{
uint8_t tmp;
tmp = ((Aes *)hw)->CTRLB.reg;
tmp = (tmp & AES_CTRLB_EOM) >> AES_CTRLB_EOM_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLB_EOM_bit(const void *const hw, bool value)
{
uint8_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLB.reg;
tmp &= ~AES_CTRLB_EOM;
tmp |= value << AES_CTRLB_EOM_Pos;
((Aes *)hw)->CTRLB.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLB_EOM_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg &= ~AES_CTRLB_EOM;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLB_EOM_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg ^= AES_CTRLB_EOM;
AES_CRITICAL_SECTION_LEAVE();
}
/* GFMUL: GF-multiplication bit. */
static inline void hri_aes_set_CTRLB_GFMUL_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg |= AES_CTRLB_GFMUL;
AES_CRITICAL_SECTION_LEAVE();
}
static inline bool hri_aes_get_CTRLB_GFMUL_bit(const void *const hw)
{
uint8_t tmp;
tmp = ((Aes *)hw)->CTRLB.reg;
tmp = (tmp & AES_CTRLB_GFMUL) >> AES_CTRLB_GFMUL_Pos;
return (bool)tmp;
}
static inline void hri_aes_write_CTRLB_GFMUL_bit(const void *const hw, bool value)
{
uint8_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->CTRLB.reg;
tmp &= ~AES_CTRLB_GFMUL;
tmp |= value << AES_CTRLB_GFMUL_Pos;
((Aes *)hw)->CTRLB.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLB_GFMUL_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg &= ~AES_CTRLB_GFMUL;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLB_GFMUL_bit(const void *const hw)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg ^= AES_CTRLB_GFMUL;
AES_CRITICAL_SECTION_LEAVE();
}
/* Whole-register CTRLB variants. */
static inline void hri_aes_set_CTRLB_reg(const void *const hw, hri_aes_ctrlb_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrlb_reg_t hri_aes_get_CTRLB_reg(const void *const hw, hri_aes_ctrlb_reg_t mask)
{
uint8_t tmp;
tmp = ((Aes *)hw)->CTRLB.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_CTRLB_reg(const void *const hw, hri_aes_ctrlb_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CTRLB_reg(const void *const hw, hri_aes_ctrlb_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CTRLB_reg(const void *const hw, hri_aes_ctrlb_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CTRLB.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ctrlb_reg_t hri_aes_read_CTRLB_reg(const void *const hw)
{
return ((Aes *)hw)->CTRLB.reg;
}
/*
 * DATABUFPTR accessors (8-bit register holding the INDATAPTR field,
 * the input-data buffer pointer/index).  Bit-field helpers first, then
 * whole-register variants; modifications are guarded by the AES
 * critical section.
 */
static inline void hri_aes_set_DATABUFPTR_INDATAPTR_bf(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg |= AES_DATABUFPTR_INDATAPTR(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_databufptr_reg_t hri_aes_get_DATABUFPTR_INDATAPTR_bf(const void *const hw,
hri_aes_databufptr_reg_t mask)
{
uint8_t tmp;
tmp = ((Aes *)hw)->DATABUFPTR.reg;
tmp = (tmp & AES_DATABUFPTR_INDATAPTR(mask)) >> AES_DATABUFPTR_INDATAPTR_Pos;
return tmp;
}
/* Replace the whole INDATAPTR field with 'data'. */
static inline void hri_aes_write_DATABUFPTR_INDATAPTR_bf(const void *const hw, hri_aes_databufptr_reg_t data)
{
uint8_t tmp;
AES_CRITICAL_SECTION_ENTER();
tmp = ((Aes *)hw)->DATABUFPTR.reg;
tmp &= ~AES_DATABUFPTR_INDATAPTR_Msk;
tmp |= AES_DATABUFPTR_INDATAPTR(data);
((Aes *)hw)->DATABUFPTR.reg = tmp;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_DATABUFPTR_INDATAPTR_bf(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg &= ~AES_DATABUFPTR_INDATAPTR(mask);
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_DATABUFPTR_INDATAPTR_bf(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg ^= AES_DATABUFPTR_INDATAPTR(mask);
AES_CRITICAL_SECTION_LEAVE();
}
/* Return the current INDATAPTR field value, shifted down to bit 0. */
static inline hri_aes_databufptr_reg_t hri_aes_read_DATABUFPTR_INDATAPTR_bf(const void *const hw)
{
uint8_t tmp;
tmp = ((Aes *)hw)->DATABUFPTR.reg;
tmp = (tmp & AES_DATABUFPTR_INDATAPTR_Msk) >> AES_DATABUFPTR_INDATAPTR_Pos;
return tmp;
}
/* Whole-register DATABUFPTR variants. */
static inline void hri_aes_set_DATABUFPTR_reg(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_databufptr_reg_t hri_aes_get_DATABUFPTR_reg(const void *const hw, hri_aes_databufptr_reg_t mask)
{
uint8_t tmp;
tmp = ((Aes *)hw)->DATABUFPTR.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_DATABUFPTR_reg(const void *const hw, hri_aes_databufptr_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_DATABUFPTR_reg(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_DATABUFPTR_reg(const void *const hw, hri_aes_databufptr_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->DATABUFPTR.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_databufptr_reg_t hri_aes_read_DATABUFPTR_reg(const void *const hw)
{
return ((Aes *)hw)->DATABUFPTR.reg;
}
/*
 * INDATA accessors (32-bit data register).
 *
 * NOTE(review): INDATA is a data port — the read-modify-write set/clear/
 * toggle helpers below are generated boilerplate; typical drivers use
 * only write (feed input words) and read (fetch output words).
 */
static inline void hri_aes_set_INDATA_reg(const void *const hw, hri_aes_indata_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->INDATA.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_indata_reg_t hri_aes_get_INDATA_reg(const void *const hw, hri_aes_indata_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->INDATA.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_INDATA_reg(const void *const hw, hri_aes_indata_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->INDATA.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_INDATA_reg(const void *const hw, hri_aes_indata_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->INDATA.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_INDATA_reg(const void *const hw, hri_aes_indata_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->INDATA.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_indata_reg_t hri_aes_read_INDATA_reg(const void *const hw)
{
return ((Aes *)hw)->INDATA.reg;
}
static inline void hri_aes_set_HASHKEY_reg(const void *const hw, uint8_t index, hri_aes_hashkey_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->HASHKEY[index].reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_hashkey_reg_t hri_aes_get_HASHKEY_reg(const void *const hw, uint8_t index,
hri_aes_hashkey_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->HASHKEY[index].reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_HASHKEY_reg(const void *const hw, uint8_t index, hri_aes_hashkey_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->HASHKEY[index].reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_HASHKEY_reg(const void *const hw, uint8_t index, hri_aes_hashkey_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->HASHKEY[index].reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_HASHKEY_reg(const void *const hw, uint8_t index, hri_aes_hashkey_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->HASHKEY[index].reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_hashkey_reg_t hri_aes_read_HASHKEY_reg(const void *const hw, uint8_t index)
{
return ((Aes *)hw)->HASHKEY[index].reg;
}
static inline void hri_aes_set_GHASH_reg(const void *const hw, uint8_t index, hri_aes_ghash_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->GHASH[index].reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ghash_reg_t hri_aes_get_GHASH_reg(const void *const hw, uint8_t index, hri_aes_ghash_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->GHASH[index].reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_GHASH_reg(const void *const hw, uint8_t index, hri_aes_ghash_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->GHASH[index].reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_GHASH_reg(const void *const hw, uint8_t index, hri_aes_ghash_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->GHASH[index].reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_GHASH_reg(const void *const hw, uint8_t index, hri_aes_ghash_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->GHASH[index].reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ghash_reg_t hri_aes_read_GHASH_reg(const void *const hw, uint8_t index)
{
return ((Aes *)hw)->GHASH[index].reg;
}
static inline void hri_aes_set_CIPLEN_reg(const void *const hw, hri_aes_ciplen_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CIPLEN.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ciplen_reg_t hri_aes_get_CIPLEN_reg(const void *const hw, hri_aes_ciplen_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->CIPLEN.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_CIPLEN_reg(const void *const hw, hri_aes_ciplen_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CIPLEN.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_CIPLEN_reg(const void *const hw, hri_aes_ciplen_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CIPLEN.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_CIPLEN_reg(const void *const hw, hri_aes_ciplen_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->CIPLEN.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_ciplen_reg_t hri_aes_read_CIPLEN_reg(const void *const hw)
{
return ((Aes *)hw)->CIPLEN.reg;
}
static inline void hri_aes_set_RANDSEED_reg(const void *const hw, hri_aes_randseed_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->RANDSEED.reg |= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_randseed_reg_t hri_aes_get_RANDSEED_reg(const void *const hw, hri_aes_randseed_reg_t mask)
{
uint32_t tmp;
tmp = ((Aes *)hw)->RANDSEED.reg;
tmp &= mask;
return tmp;
}
static inline void hri_aes_write_RANDSEED_reg(const void *const hw, hri_aes_randseed_reg_t data)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->RANDSEED.reg = data;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_clear_RANDSEED_reg(const void *const hw, hri_aes_randseed_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->RANDSEED.reg &= ~mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline void hri_aes_toggle_RANDSEED_reg(const void *const hw, hri_aes_randseed_reg_t mask)
{
AES_CRITICAL_SECTION_ENTER();
((Aes *)hw)->RANDSEED.reg ^= mask;
AES_CRITICAL_SECTION_LEAVE();
}
static inline hri_aes_randseed_reg_t hri_aes_read_RANDSEED_reg(const void *const hw)
{
return ((Aes *)hw)->RANDSEED.reg;
}
#ifdef __cplusplus
}
#endif
#endif /* _HRI_AES_L21_H_INCLUDED */
#endif /* _SAML21_AES_COMPONENT_ */

Опубликовать ( 0 )

Вы можете оставить комментарий после Вход в систему

1
https://gitlife.ru/oschina-mirror/lupyuen-LoRaMac-node-nuttx.git
git@gitlife.ru:oschina-mirror/lupyuen-LoRaMac-node-nuttx.git
oschina-mirror
lupyuen-LoRaMac-node-nuttx
lupyuen-LoRaMac-node-nuttx
v4.5.0