  1. /* sha256.c
  2. *
  3. * Copyright (C) 2006-2023 wolfSSL Inc.
  4. *
  5. * This file is part of wolfSSL.
  6. *
  7. * wolfSSL is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * wolfSSL is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  20. */
  21. /* For more info on the algorithm, see https://tools.ietf.org/html/rfc6234
  22. *
  23. * For more information on NIST FIPS PUB 180-4, see
  24. * https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
  25. */
  26. /*
  27. DESCRIPTION
  28. This library provides the interface to SHA-256 secure hash algorithms.
  29. SHA-256 performs processing on message blocks to produce a final hash digest
  30. output. It can be used to hash a message, M, having a length of L bits,
  31. where 0 <= L < 2^64.
  32. Note that in some cases, hardware acceleration may be enabled, depending
  33. on the specific device platform.
  34. */
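/*
** Illustrative usage sketch (not part of this file): a minimal one-shot hash
** using the wolfCrypt SHA-256 API implemented below. The helper name and
** buffers are placeholders and error handling is abbreviated.
**
**   #include <wolfssl/wolfcrypt/sha256.h>
**
**   static int hash_message(const byte* msg, word32 msgSz,
**                           byte out[WC_SHA256_DIGEST_SIZE])
**   {
**       wc_Sha256 sha;
**       int ret = wc_InitSha256(&sha);               // set initial digest state
**       if (ret == 0)
**           ret = wc_Sha256Update(&sha, msg, msgSz); // absorb message bytes
**       if (ret == 0)
**           ret = wc_Sha256Final(&sha, out);         // pad, finalize, write 32-byte digest
**       wc_Sha256Free(&sha);                         // release any HW/async resources
**       return ret;
**   }
*/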
  35. #ifdef HAVE_CONFIG_H
  36. #include <config.h>
  37. #endif
  38. #include <wolfssl/wolfcrypt/settings.h>
  39. #include <wolfssl/wolfcrypt/types.h>
  40. /*
  41. * SHA256 Build Options:
  42. * USE_SLOW_SHA256: Reduces code size by not partially unrolling
  43. (~2KB smaller and ~25% slower) (default OFF)
  44. * WOLFSSL_SHA256_BY_SPEC: Uses the Ch/Maj based on SHA256 specification
  45. (default ON)
  46. * WOLFSSL_SHA256_ALT_CH_MAJ: Alternate Ch/Maj that is easier for compilers to
  47. optimize and recognize as SHA256 (default OFF)
  48. * SHA256_MANY_REGISTERS: A SHA256 version that keeps all data in registers
  49. and partially unrolled (default OFF)
  50. */
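/*
** For example (an illustrative configuration sketch, assuming a
** user_settings.h based build), the alternate Ch/Maj form could be selected
** with:
**
**   #define WOLFSSL_SHA256_ALT_CH_MAJ
**
** or via the compiler command line (-DWOLFSSL_SHA256_ALT_CH_MAJ). Leaving
** both macros undefined keeps the WOLFSSL_SHA256_BY_SPEC default chosen just
** below.
*/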
  51. /* Default SHA256 to use Ch/Maj based on specification */
  52. #if !defined(WOLFSSL_SHA256_BY_SPEC) && !defined(WOLFSSL_SHA256_ALT_CH_MAJ)
  53. #define WOLFSSL_SHA256_BY_SPEC
  54. #endif
  55. #if !defined(NO_SHA256) && (!defined(WOLFSSL_ARMASM) && \
  56. !defined(WOLFSSL_ARMASM_NO_NEON))
  57. #if defined(HAVE_FIPS) && defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  58. /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
  59. #define FIPS_NO_WRAPPERS
  60. #ifdef USE_WINDOWS_API
  61. #pragma code_seg(".fipsA$d")
  62. #pragma const_seg(".fipsB$d")
  63. #endif
  64. #endif
  65. #include <wolfssl/wolfcrypt/sha256.h>
  66. #include <wolfssl/wolfcrypt/error-crypt.h>
  67. #include <wolfssl/wolfcrypt/cpuid.h>
  68. #include <wolfssl/wolfcrypt/hash.h>
  69. #ifdef WOLF_CRYPTO_CB
  70. #include <wolfssl/wolfcrypt/cryptocb.h>
  71. #endif
  72. #ifdef WOLFSSL_IMXRT1170_CAAM
  73. #include <wolfssl/wolfcrypt/port/caam/wolfcaam_fsl_nxp.h>
  74. #endif
  75. /* determine if we are using Espressif SHA hardware acceleration */
  76. #undef WOLFSSL_USE_ESP32_CRYPT_HASH_HW
  77. #if defined(WOLFSSL_ESP32_CRYPT) && \
  78. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH)
  79. /* define a single keyword for simplicity & readability
  80. *
  81. * by default the HW acceleration is on for ESP32-WROOM32
  82. * but individual components can be turned off.
  83. */
  84. #define WOLFSSL_USE_ESP32_CRYPT_HASH_HW
  85. #else
  86. #undef WOLFSSL_USE_ESP32_CRYPT_HASH_HW
  87. #endif
  88. #ifdef WOLFSSL_ESPIDF
  89. /* Define the TAG value used in ESP_LOGx(TAG, "...") output messages here.
  90. **
  91. ** Beware of possible conflict in test.c (that one now named TEST_TAG)
  92. */
  93. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW)
  94. static const char* TAG = "wc_sha256";
  95. #endif
  96. #endif
  97. #if defined(WOLFSSL_TI_HASH)
  98. /* #include <wolfcrypt/src/port/ti/ti-hash.c> included by wc_port.c */
  99. #elif defined(WOLFSSL_CRYPTOCELL)
  100. /* wc_port.c includes wolfcrypt/src/port/arm/cryptoCellHash.c */
  101. #elif defined(WOLFSSL_PSOC6_CRYPTO)
  102. #else
  103. #include <wolfssl/wolfcrypt/logging.h>
  104. #ifdef NO_INLINE
  105. #include <wolfssl/wolfcrypt/misc.h>
  106. #else
  107. #define WOLFSSL_MISC_INCLUDED
  108. #include <wolfcrypt/src/misc.c>
  109. #endif
  110. #ifdef WOLFSSL_DEVCRYPTO_HASH
  111. #include <wolfssl/wolfcrypt/port/devcrypto/wc_devcrypto.h>
  112. #endif
  113. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_HASH)
  114. #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
  115. #endif
  116. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP)
  117. #if defined(__GNUC__) && ((__GNUC__ < 4) || \
  118. (__GNUC__ == 4 && __GNUC_MINOR__ <= 8))
  119. #undef NO_AVX2_SUPPORT
  120. #define NO_AVX2_SUPPORT
  121. #endif
  122. #if defined(__clang__) && ((__clang_major__ < 3) || \
  123. (__clang_major__ == 3 && __clang_minor__ <= 5))
  124. #define NO_AVX2_SUPPORT
  125. #elif defined(__clang__) && defined(NO_AVX2_SUPPORT)
  126. #undef NO_AVX2_SUPPORT
  127. #endif
  128. #define HAVE_INTEL_AVX1
  129. #ifndef NO_AVX2_SUPPORT
  130. #define HAVE_INTEL_AVX2
  131. #endif
  132. #else
  133. #undef HAVE_INTEL_AVX1
  134. #undef HAVE_INTEL_AVX2
  135. #endif /* WOLFSSL_X86_64_BUILD && USE_INTEL_SPEEDUP */
  136. #if defined(HAVE_INTEL_AVX2)
  137. #define HAVE_INTEL_RORX
  138. #endif
  139. #if !defined(WOLFSSL_PIC32MZ_HASH) && !defined(STM32_HASH_SHA2) && \
  140. (!defined(WOLFSSL_IMX6_CAAM) || defined(NO_IMX6_CAAM_HASH) || \
  141. defined(WOLFSSL_QNX_CAAM)) && \
  142. !defined(WOLFSSL_AFALG_HASH) && !defined(WOLFSSL_DEVCRYPTO_HASH) && \
  143. (!defined(WOLFSSL_ESP32_CRYPT) || defined(NO_WOLFSSL_ESP32_CRYPT_HASH)) && \
  144. ((!defined(WOLFSSL_RENESAS_TSIP_TLS) && \
  145. !defined(WOLFSSL_RENESAS_TSIP_CRYPTONLY)) || \
  146. defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)) && \
  147. !defined(WOLFSSL_PSOC6_CRYPTO) && !defined(WOLFSSL_IMXRT_DCP) && !defined(WOLFSSL_SILABS_SE_ACCEL) && \
  148. !defined(WOLFSSL_KCAPI_HASH) && !defined(WOLFSSL_SE050_HASH) && \
  149. ((!defined(WOLFSSL_RENESAS_SCEPROTECT) && \
  150. !defined(WOLFSSL_RENESAS_RSIP)) \
  151. || defined(NO_WOLFSSL_RENESAS_FSPSM_HASH)) && \
  152. (!defined(WOLFSSL_HAVE_PSA) || defined(WOLFSSL_PSA_NO_HASH)) && \
  153. !defined(WOLFSSL_RENESAS_RX64_HASH)
  154. static int InitSha256(wc_Sha256* sha256)
  155. {
  156. int ret = 0;
  157. if (sha256 == NULL)
  158. return BAD_FUNC_ARG;
  159. XMEMSET(sha256->digest, 0, sizeof(sha256->digest));
  160. sha256->digest[0] = 0x6A09E667L;
  161. sha256->digest[1] = 0xBB67AE85L;
  162. sha256->digest[2] = 0x3C6EF372L;
  163. sha256->digest[3] = 0xA54FF53AL;
  164. sha256->digest[4] = 0x510E527FL;
  165. sha256->digest[5] = 0x9B05688CL;
  166. sha256->digest[6] = 0x1F83D9ABL;
  167. sha256->digest[7] = 0x5BE0CD19L;
  168. sha256->buffLen = 0;
  169. sha256->loLen = 0;
  170. sha256->hiLen = 0;
  171. #ifdef WOLFSSL_HASH_FLAGS
  172. sha256->flags = 0;
  173. #endif
  174. #ifdef WOLFSSL_HASH_KEEP
  175. sha256->msg = NULL;
  176. sha256->len = 0;
  177. sha256->used = 0;
  178. #endif
  179. #ifdef WOLF_CRYPTO_CB
  180. sha256->devId = wc_CryptoCb_DefaultDevID();
  181. #endif
  182. #ifdef WOLFSSL_MAXQ10XX_CRYPTO
  183. XMEMSET(&sha256->maxq_ctx, 0, sizeof(sha256->maxq_ctx));
  184. #endif
  185. #ifdef HAVE_ARIA
  186. sha256->hSession = NULL;
  187. #endif
  188. return ret;
  189. }
  190. #endif
  191. /* Hardware Acceleration */
  192. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  193. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  194. /* in case intel instructions aren't available, plus we need the K[] global */
  195. #define NEED_SOFT_SHA256
  196. /*****
  197. Intel AVX1/AVX2 Macro Control Structure
  198. #define HAVE_INTEL_AVX1
  199. #define HAVE_INTEL_AVX2
  200. #define HAVE_INTEL_RORX
  201. int InitSha256(wc_Sha256* sha256) {
  202. Save/Recover XMM, YMM
  203. ...
  204. }
  205. #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
  206. Transform_Sha256(); Function prototype
  207. #else
  208. Transform_Sha256() { }
  209. int Sha256Final() {
  210. Save/Recover XMM, YMM
  211. ...
  212. }
  213. #endif
  214. #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
  215. #if defined(HAVE_INTEL_RORX)
  216. #define RND with rorx instruction
  217. #else
  218. #define RND
  219. #endif
  220. #endif
  221. #if defined(HAVE_INTEL_AVX1)
  222. #define XMM Instructions/inline asm
  223. int Transform_Sha256() {
  224. Stitched Message Sched/Round
  225. }
  226. #elif defined(HAVE_INTEL_AVX2)
  227. #define YMM Instructions/inline asm
  228. int Transform_Sha256() {
  229. More granular Stitched Message Sched/Round
  230. }
  231. #endif
  232. */
  233. /* Each platform needs to query info type 1 from cpuid to see if the needed
  234. * AVX extensions are supported. Also, set up a macro for proper linkage w/o ABI conflicts
  235. */
  236. /* #if defined(HAVE_INTEL_AVX1/2) at the tail of sha256 */
  237. static int Transform_Sha256(wc_Sha256* sha256, const byte* data);
  238. #ifdef __cplusplus
  239. extern "C" {
  240. #endif
  241. #if defined(HAVE_INTEL_AVX1)
  242. extern int Transform_Sha256_AVX1(wc_Sha256 *sha256, const byte* data);
  243. extern int Transform_Sha256_AVX1_Len(wc_Sha256* sha256,
  244. const byte* data, word32 len);
  245. #endif
  246. #if defined(HAVE_INTEL_AVX2)
  247. extern int Transform_Sha256_AVX2(wc_Sha256 *sha256, const byte* data);
  248. extern int Transform_Sha256_AVX2_Len(wc_Sha256* sha256,
  249. const byte* data, word32 len);
  250. #ifdef HAVE_INTEL_RORX
  251. extern int Transform_Sha256_AVX1_RORX(wc_Sha256 *sha256, const byte* data);
  252. extern int Transform_Sha256_AVX1_RORX_Len(wc_Sha256* sha256,
  253. const byte* data, word32 len);
  254. extern int Transform_Sha256_AVX2_RORX(wc_Sha256 *sha256, const byte* data);
  255. extern int Transform_Sha256_AVX2_RORX_Len(wc_Sha256* sha256,
  256. const byte* data, word32 len);
  257. #endif /* HAVE_INTEL_RORX */
  258. #endif /* HAVE_INTEL_AVX2 */
  259. #ifdef __cplusplus
  260. } /* extern "C" */
  261. #endif
  262. static int (*Transform_Sha256_p)(wc_Sha256* sha256, const byte* data);
  263. /* = _Transform_Sha256 */
  264. static int (*Transform_Sha256_Len_p)(wc_Sha256* sha256, const byte* data,
  265. word32 len);
  266. /* = NULL */
  267. static int transform_check = 0;
  268. static word32 intel_flags;
  269. static int Transform_Sha256_is_vectorized = 0;
  270. static WC_INLINE int inline_XTRANSFORM(wc_Sha256* S, const byte* D) {
  271. int ret;
  272. ret = (*Transform_Sha256_p)(S, D);
  273. return ret;
  274. }
  275. #define XTRANSFORM(...) inline_XTRANSFORM(__VA_ARGS__)
  276. static WC_INLINE int inline_XTRANSFORM_LEN(wc_Sha256* S, const byte* D, word32 L) {
  277. int ret;
  278. ret = (*Transform_Sha256_Len_p)(S, D, L);
  279. return ret;
  280. }
  281. #define XTRANSFORM_LEN(...) inline_XTRANSFORM_LEN(__VA_ARGS__)
  282. static void Sha256_SetTransform(void)
  283. {
  284. if (transform_check)
  285. return;
  286. intel_flags = cpuid_get_flags();
  287. #ifdef HAVE_INTEL_AVX2
  288. if (1 && IS_INTEL_AVX2(intel_flags)) {
  289. #ifdef HAVE_INTEL_RORX
  290. if (IS_INTEL_BMI2(intel_flags)) {
  291. Transform_Sha256_p = Transform_Sha256_AVX2_RORX;
  292. Transform_Sha256_Len_p = Transform_Sha256_AVX2_RORX_Len;
  293. Transform_Sha256_is_vectorized = 1;
  294. }
  295. else
  296. #endif
  297. if (1)
  298. {
  299. Transform_Sha256_p = Transform_Sha256_AVX2;
  300. Transform_Sha256_Len_p = Transform_Sha256_AVX2_Len;
  301. Transform_Sha256_is_vectorized = 1;
  302. }
  303. #ifdef HAVE_INTEL_RORX
  304. else {
  305. Transform_Sha256_p = Transform_Sha256_AVX1_RORX;
  306. Transform_Sha256_Len_p = Transform_Sha256_AVX1_RORX_Len;
  307. Transform_Sha256_is_vectorized = 1;
  308. }
  309. #endif
  310. }
  311. else
  312. #endif
  313. #ifdef HAVE_INTEL_AVX1
  314. if (IS_INTEL_AVX1(intel_flags)) {
  315. Transform_Sha256_p = Transform_Sha256_AVX1;
  316. Transform_Sha256_Len_p = Transform_Sha256_AVX1_Len;
  317. Transform_Sha256_is_vectorized = 1;
  318. }
  319. else
  320. #endif
  321. {
  322. Transform_Sha256_p = Transform_Sha256;
  323. Transform_Sha256_Len_p = NULL;
  324. Transform_Sha256_is_vectorized = 0;
  325. }
  326. transform_check = 1;
  327. }
  328. #if !defined(WOLFSSL_KCAPI_HASH)
  329. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  330. {
  331. int ret = 0;
  332. if (sha256 == NULL)
  333. return BAD_FUNC_ARG;
  334. sha256->heap = heap;
  335. #ifdef WOLF_CRYPTO_CB
  336. sha256->devId = devId;
  337. sha256->devCtx = NULL;
  338. #endif
  339. #ifdef WOLFSSL_SMALL_STACK_CACHE
  340. sha256->W = NULL;
  341. #endif
  342. ret = InitSha256(sha256);
  343. if (ret != 0)
  344. return ret;
  345. /* choose best Transform function under this runtime environment */
  346. Sha256_SetTransform();
  347. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
  348. ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
  349. WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
  350. #else
  351. (void)devId;
  352. #endif /* WOLFSSL_ASYNC_CRYPT */
  353. return ret;
  354. }
  355. #endif /* !WOLFSSL_KCAPI_HASH */
  356. #elif defined(FREESCALE_LTC_SHA)
  357. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  358. {
  359. (void)heap;
  360. (void)devId;
  361. LTC_HASH_Init(LTC_BASE, &sha256->ctx, kLTC_Sha256, NULL, 0);
  362. return 0;
  363. }
  364. #elif defined(FREESCALE_MMCAU_SHA)
  365. #ifdef FREESCALE_MMCAU_CLASSIC_SHA
  366. #include "cau_api.h"
  367. #else
  368. #include "fsl_mmcau.h"
  369. #endif
  370. #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
  371. #define XTRANSFORM_LEN(S, D, L) Transform_Sha256_Len((S),(D),(L))
  372. #ifndef WC_HASH_DATA_ALIGNMENT
  373. /* these hardware APIs require 4-byte (word32) alignment */
  374. #define WC_HASH_DATA_ALIGNMENT 4
  375. #endif
  376. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  377. {
  378. int ret = 0;
  379. (void)heap;
  380. (void)devId;
  381. ret = wolfSSL_CryptHwMutexLock();
  382. if (ret != 0) {
  383. return ret;
  384. }
  385. #ifdef FREESCALE_MMCAU_CLASSIC_SHA
  386. cau_sha256_initialize_output(sha256->digest);
  387. #else
  388. MMCAU_SHA256_InitializeOutput((uint32_t*)sha256->digest);
  389. #endif
  390. wolfSSL_CryptHwMutexUnLock();
  391. sha256->buffLen = 0;
  392. sha256->loLen = 0;
  393. sha256->hiLen = 0;
  394. #ifdef WOLFSSL_SMALL_STACK_CACHE
  395. sha256->W = NULL;
  396. #endif
  397. return ret;
  398. }
  399. static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
  400. {
  401. int ret = wolfSSL_CryptHwMutexLock();
  402. if (ret == 0) {
  403. #ifdef FREESCALE_MMCAU_CLASSIC_SHA
  404. cau_sha256_hash_n((byte*)data, 1, sha256->digest);
  405. #else
  406. MMCAU_SHA256_HashN((byte*)data, 1, (uint32_t*)sha256->digest);
  407. #endif
  408. wolfSSL_CryptHwMutexUnLock();
  409. }
  410. return ret;
  411. }
  412. static int Transform_Sha256_Len(wc_Sha256* sha256, const byte* data,
  413. word32 len)
  414. {
  415. int ret = wolfSSL_CryptHwMutexLock();
  416. if (ret == 0) {
  417. #if defined(WC_HASH_DATA_ALIGNMENT) && WC_HASH_DATA_ALIGNMENT > 0
  418. if ((wc_ptr_t)data % WC_HASH_DATA_ALIGNMENT) {
  419. /* data pointer is NOT aligned,
  420. * so copy and perform one block at a time */
  421. byte* local = (byte*)sha256->buffer;
  422. while (len >= WC_SHA256_BLOCK_SIZE) {
  423. XMEMCPY(local, data, WC_SHA256_BLOCK_SIZE);
  424. #ifdef FREESCALE_MMCAU_CLASSIC_SHA
  425. cau_sha256_hash_n(local, 1, sha256->digest);
  426. #else
  427. MMCAU_SHA256_HashN(local, 1, (uint32_t*)sha256->digest);
  428. #endif
  429. data += WC_SHA256_BLOCK_SIZE;
  430. len -= WC_SHA256_BLOCK_SIZE;
  431. }
  432. }
  433. else
  434. #endif
  435. {
  436. #ifdef FREESCALE_MMCAU_CLASSIC_SHA
  437. cau_sha256_hash_n((byte*)data, len/WC_SHA256_BLOCK_SIZE,
  438. sha256->digest);
  439. #else
  440. MMCAU_SHA256_HashN((byte*)data, len/WC_SHA256_BLOCK_SIZE,
  441. (uint32_t*)sha256->digest);
  442. #endif
  443. }
  444. wolfSSL_CryptHwMutexUnLock();
  445. }
  446. return ret;
  447. }
  448. #elif defined(WOLFSSL_PIC32MZ_HASH)
  449. #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
  450. #elif defined(STM32_HASH_SHA2)
  451. /* Supports CubeMX HAL or Standard Peripheral Library */
  452. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  453. {
  454. if (sha256 == NULL)
  455. return BAD_FUNC_ARG;
  456. (void)devId;
  457. (void)heap;
  458. XMEMSET(sha256, 0, sizeof(wc_Sha256));
  459. wc_Stm32_Hash_Init(&sha256->stmCtx);
  460. return 0;
  461. }
  462. int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
  463. {
  464. int ret = 0;
  465. if (sha256 == NULL || (data == NULL && len > 0)) {
  466. return BAD_FUNC_ARG;
  467. }
  468. ret = wolfSSL_CryptHwMutexLock();
  469. if (ret == 0) {
  470. ret = wc_Stm32_Hash_Update(&sha256->stmCtx,
  471. HASH_AlgoSelection_SHA256, data, len, WC_SHA256_BLOCK_SIZE);
  472. wolfSSL_CryptHwMutexUnLock();
  473. }
  474. return ret;
  475. }
  476. int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
  477. {
  478. int ret = 0;
  479. if (sha256 == NULL || hash == NULL) {
  480. return BAD_FUNC_ARG;
  481. }
  482. ret = wolfSSL_CryptHwMutexLock();
  483. if (ret == 0) {
  484. ret = wc_Stm32_Hash_Final(&sha256->stmCtx,
  485. HASH_AlgoSelection_SHA256, hash, WC_SHA256_DIGEST_SIZE);
  486. wolfSSL_CryptHwMutexUnLock();
  487. }
  488. (void)wc_InitSha256(sha256); /* reset state */
  489. return ret;
  490. }
  491. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH) && \
  492. !defined(WOLFSSL_QNX_CAAM)
  493. /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
  494. #elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_HASH)
  495. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  496. {
  497. if (sha256 == NULL) {
  498. return BAD_FUNC_ARG;
  499. }
  500. (void)devId;
  501. return se050_hash_init(&sha256->se050Ctx, heap);
  502. }
  503. int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
  504. {
  505. return se050_hash_update(&sha256->se050Ctx, data, len);
  506. }
  507. int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
  508. {
  509. int ret = 0;
  510. ret = se050_hash_final(&sha256->se050Ctx, hash, WC_SHA256_DIGEST_SIZE,
  511. kAlgorithm_SSS_SHA256);
  512. return ret;
  513. }
  514. int wc_Sha256FinalRaw(wc_Sha256* sha256, byte* hash)
  515. {
  516. int ret = 0;
  517. ret = se050_hash_final(&sha256->se050Ctx, hash, WC_SHA256_DIGEST_SIZE,
  518. kAlgorithm_SSS_SHA256);
  519. return ret;
  520. }
  521. #elif defined(WOLFSSL_AFALG_HASH)
  522. /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
  523. #elif defined(WOLFSSL_DEVCRYPTO_HASH)
  524. /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
  525. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_HASH)
  526. #include "hal_data.h"
  527. #ifndef WOLFSSL_SCE_SHA256_HANDLE
  528. #define WOLFSSL_SCE_SHA256_HANDLE g_sce_hash_0
  529. #endif
  530. #define WC_SHA256_DIGEST_WORD_SIZE 16
  531. #define XTRANSFORM(S, D) wc_Sha256SCE_XTRANSFORM((S), (D))
  532. static int wc_Sha256SCE_XTRANSFORM(wc_Sha256* sha256, const byte* data)
  533. {
  534. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  535. CRYPTO_WORD_ENDIAN_LITTLE)
  536. {
  537. ByteReverseWords((word32*)data, (word32*)data,
  538. WC_SHA256_BLOCK_SIZE);
  539. ByteReverseWords(sha256->digest, sha256->digest,
  540. WC_SHA256_DIGEST_SIZE);
  541. }
  542. if (WOLFSSL_SCE_SHA256_HANDLE.p_api->hashUpdate(
  543. WOLFSSL_SCE_SHA256_HANDLE.p_ctrl, (word32*)data,
  544. WC_SHA256_DIGEST_WORD_SIZE, sha256->digest) != SSP_SUCCESS){
  545. WOLFSSL_MSG("Unexpected hardware return value");
  546. return WC_HW_E;
  547. }
  548. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  549. CRYPTO_WORD_ENDIAN_LITTLE)
  550. {
  551. ByteReverseWords((word32*)data, (word32*)data,
  552. WC_SHA256_BLOCK_SIZE);
  553. ByteReverseWords(sha256->digest, sha256->digest,
  554. WC_SHA256_DIGEST_SIZE);
  555. }
  556. return 0;
  557. }
  558. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  559. {
  560. int ret = 0;
  561. if (sha256 == NULL)
  562. return BAD_FUNC_ARG;
  563. sha256->heap = heap;
  564. ret = InitSha256(sha256);
  565. if (ret != 0)
  566. return ret;
  567. (void)devId;
  568. return ret;
  569. }
  570. #elif defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW)
  571. /* HW may fail since there's only one, so we still need SW */
  572. #define NEED_SOFT_SHA256
  573. /*
  574. ** An Espressif-specific InitSha256()
  575. **
  576. ** soft SHA needs initialization digest, but HW does not.
  577. */
  578. static int InitSha256(wc_Sha256* sha256)
  579. {
  580. int ret = 0; /* zero = success */
  581. if (sha256 == NULL) {
  582. return BAD_FUNC_ARG;
  583. }
  584. /* We may or may not need initial digest for HW.
  585. * Always needed for SW-only. */
  586. sha256->digest[0] = 0x6A09E667L;
  587. sha256->digest[1] = 0xBB67AE85L;
  588. sha256->digest[2] = 0x3C6EF372L;
  589. sha256->digest[3] = 0xA54FF53AL;
  590. sha256->digest[4] = 0x510E527FL;
  591. sha256->digest[5] = 0x9B05688CL;
  592. sha256->digest[6] = 0x1F83D9ABL;
  593. sha256->digest[7] = 0x5BE0CD19L;
  594. sha256->buffLen = 0;
  595. sha256->loLen = 0;
  596. sha256->hiLen = 0;
  597. #ifndef NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256
  598. ret = esp_sha_init(&(sha256->ctx), WC_HASH_TYPE_SHA256);
  599. #endif
  600. return ret;
  601. }
  602. /*
  603. ** An Espressif-specific wolfCrypt InitSha256 external wrapper.
  604. **
  605. ** we'll assume this is ALWAYS for a new, uninitialized sha256
  606. */
  607. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  608. {
  609. (void)devId;
  610. if (sha256 == NULL) {
  611. return BAD_FUNC_ARG;
  612. }
  613. #ifdef WOLFSSL_USE_ESP32_CRYPT_HASH_HW
  614. #ifndef NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256
  615. /* We know this is a fresh, uninitialized item, so set to INIT */
  616. if (sha256->ctx.mode != ESP32_SHA_INIT) {
  617. ESP_LOGV(TAG, "Set ctx mode from prior value: "
  618. "%d", sha256->ctx.mode);
  619. }
  620. sha256->ctx.mode = ESP32_SHA_INIT;
  621. #endif
  622. #endif
  623. return InitSha256(sha256);
  624. }
  625. #elif (defined(WOLFSSL_RENESAS_TSIP_TLS) || \
  626. defined(WOLFSSL_RENESAS_TSIP_CRYPTONLY)) && \
  627. !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
  628. /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
  629. #elif (defined(WOLFSSL_RENESAS_SCEPROTECT) || defined(WOLFSSL_RENESAS_RSIP)) \
  630. && !defined(NO_WOLFSSL_RENESAS_FSPSM_HASH)
  631. /* implemented in wolfcrypt/src/port/Renesas/renesas_fspsm_sha.c */
  632. #elif defined(WOLFSSL_PSOC6_CRYPTO)
  633. /* implemented in wolfcrypt/src/port/cypress/psoc6_crypto.c */
  634. #elif defined(WOLFSSL_IMXRT_DCP)
  635. #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
  636. /* implemented in wolfcrypt/src/port/nxp/dcp_port.c */
  637. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  638. /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
  639. #elif defined(WOLFSSL_KCAPI_HASH)
  640. /* implemented in wolfcrypt/src/port/kcapi/kcapi_hash.c */
  641. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_HASH)
  642. /* implemented in wolfcrypt/src/port/psa/psa_hash.c */
  643. #elif defined(WOLFSSL_RENESAS_RX64_HASH)
  644. /* implemented in wolfcrypt/src/port/Renesas/renesas_rx64_hw_sha.c */
  645. #else
  646. #define NEED_SOFT_SHA256
  647. int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
  648. {
  649. int ret = 0;
  650. if (sha256 == NULL)
  651. return BAD_FUNC_ARG;
  652. ret = InitSha256(sha256);
  653. if (ret != 0)
  654. return ret;
  655. sha256->heap = heap;
  656. #ifdef WOLF_CRYPTO_CB
  657. sha256->devId = devId;
  658. sha256->devCtx = NULL;
  659. #endif
  660. #ifdef WOLFSSL_SMALL_STACK_CACHE
  661. sha256->W = NULL;
  662. #endif
  663. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
  664. ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
  665. WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
  666. #else
  667. (void)devId;
  668. #endif /* WOLFSSL_ASYNC_CRYPT */
  669. #ifdef WOLFSSL_IMXRT1170_CAAM
  670. ret = wc_CAAM_HashInit(&sha256->hndl, &sha256->ctx, WC_HASH_TYPE_SHA256);
  671. #endif
  672. return ret;
  673. }
  674. #endif /* End Hardware Acceleration */
  675. #ifdef NEED_SOFT_SHA256
  676. static const FLASH_QUALIFIER ALIGN32 word32 K[64] = {
  677. 0x428A2F98L, 0x71374491L, 0xB5C0FBCFL, 0xE9B5DBA5L, 0x3956C25BL,
  678. 0x59F111F1L, 0x923F82A4L, 0xAB1C5ED5L, 0xD807AA98L, 0x12835B01L,
  679. 0x243185BEL, 0x550C7DC3L, 0x72BE5D74L, 0x80DEB1FEL, 0x9BDC06A7L,
  680. 0xC19BF174L, 0xE49B69C1L, 0xEFBE4786L, 0x0FC19DC6L, 0x240CA1CCL,
  681. 0x2DE92C6FL, 0x4A7484AAL, 0x5CB0A9DCL, 0x76F988DAL, 0x983E5152L,
  682. 0xA831C66DL, 0xB00327C8L, 0xBF597FC7L, 0xC6E00BF3L, 0xD5A79147L,
  683. 0x06CA6351L, 0x14292967L, 0x27B70A85L, 0x2E1B2138L, 0x4D2C6DFCL,
  684. 0x53380D13L, 0x650A7354L, 0x766A0ABBL, 0x81C2C92EL, 0x92722C85L,
  685. 0xA2BFE8A1L, 0xA81A664BL, 0xC24B8B70L, 0xC76C51A3L, 0xD192E819L,
  686. 0xD6990624L, 0xF40E3585L, 0x106AA070L, 0x19A4C116L, 0x1E376C08L,
  687. 0x2748774CL, 0x34B0BCB5L, 0x391C0CB3L, 0x4ED8AA4AL, 0x5B9CCA4FL,
  688. 0x682E6FF3L, 0x748F82EEL, 0x78A5636FL, 0x84C87814L, 0x8CC70208L,
  689. 0x90BEFFFAL, 0xA4506CEBL, 0xBEF9A3F7L, 0xC67178F2L
  690. };
  691. /* Both versions of Ch and Maj are logically the same, but with the second set
  692. the compilers can recognize them better for optimization */
  693. #ifdef WOLFSSL_SHA256_BY_SPEC
  694. /* SHA256 math based on specification */
  695. #define Ch(x,y,z) ((z) ^ ((x) & ((y) ^ (z))))
  696. #define Maj(x,y,z) ((((x) | (y)) & (z)) | ((x) & (y)))
  697. #else
  698. /* SHA256 math reworked for easier compiler optimization */
  699. #define Ch(x,y,z) ((((y) ^ (z)) & (x)) ^ (z))
  700. #define Maj(x,y,z) ((((x) ^ (y)) & ((y) ^ (z))) ^ (y))
  701. #endif
  702. #define R(x, n) (((x) & 0xFFFFFFFFU) >> (n))
  703. #define S(x, n) rotrFixed(x, n)
  704. #define Sigma0(x) (S(x, 2) ^ S(x, 13) ^ S(x, 22))
  705. #define Sigma1(x) (S(x, 6) ^ S(x, 11) ^ S(x, 25))
  706. #define Gamma0(x) (S(x, 7) ^ S(x, 18) ^ R(x, 3))
  707. #define Gamma1(x) (S(x, 17) ^ S(x, 19) ^ R(x, 10))
  708. #define a(i) S[(0-(i)) & 7]
  709. #define b(i) S[(1-(i)) & 7]
  710. #define c(i) S[(2-(i)) & 7]
  711. #define d(i) S[(3-(i)) & 7]
  712. #define e(i) S[(4-(i)) & 7]
  713. #define f(i) S[(5-(i)) & 7]
  714. #define g(i) S[(6-(i)) & 7]
  715. #define h(i) S[(7-(i)) & 7]
  716. #ifndef XTRANSFORM
  717. #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
  718. #endif
  719. #ifndef SHA256_MANY_REGISTERS
  720. #define RND(j) \
  721. t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+(j)] + W[i+(j)]; \
  722. t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
  723. d(j) += t0; \
  724. h(j) = t0 + t1
  725. static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
  726. {
  727. word32 S[8], t0, t1;
  728. int i;
  729. #ifdef WOLFSSL_SMALL_STACK_CACHE
  730. word32* W = sha256->W;
  731. if (W == NULL) {
  732. W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
  733. DYNAMIC_TYPE_DIGEST);
  734. if (W == NULL)
  735. return MEMORY_E;
  736. sha256->W = W;
  737. }
  738. #elif defined(WOLFSSL_SMALL_STACK)
  739. word32* W;
  740. W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
  741. DYNAMIC_TYPE_TMP_BUFFER);
  742. if (W == NULL)
  743. return MEMORY_E;
  744. #else
  745. word32 W[WC_SHA256_BLOCK_SIZE];
  746. #endif
  747. /* Copy context->state[] to working vars */
  748. for (i = 0; i < 8; i++)
  749. S[i] = sha256->digest[i];
  750. for (i = 0; i < 16; i++)
  751. W[i] = *((const word32*)&data[i*(int)sizeof(word32)]);
  752. for (i = 16; i < WC_SHA256_BLOCK_SIZE; i++)
  753. W[i] = Gamma1(W[i-2]) + W[i-7] + Gamma0(W[i-15]) + W[i-16];
  754. #ifdef USE_SLOW_SHA256
  755. /* not unrolled - ~2k smaller and ~25% slower */
  756. for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
  757. int j;
  758. for (j = 0; j < 8; j++) { /* braces needed here for macros {} */
  759. RND(j);
  760. }
  761. }
  762. #else
  763. /* partially loop unrolled */
  764. for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
  765. RND(0); RND(1); RND(2); RND(3);
  766. RND(4); RND(5); RND(6); RND(7);
  767. }
  768. #endif /* USE_SLOW_SHA256 */
  769. /* Add the working vars back into digest state[] */
  770. for (i = 0; i < 8; i++) {
  771. sha256->digest[i] += S[i];
  772. }
  773. #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_SMALL_STACK_CACHE)
  774. ForceZero(W, sizeof(word32) * WC_SHA256_BLOCK_SIZE);
  775. XFREE(W, NULL, DYNAMIC_TYPE_TMP_BUFFER);
  776. #endif
  777. return 0;
  778. }
  779. #else
  780. /* SHA256 version that keeps all data in registers */
  781. #define SCHED1(j) (W[j] = *((word32*)&data[j*sizeof(word32)]))
  782. #define SCHED(j) ( \
  783. W[ j & 15] += \
  784. Gamma1(W[(j-2) & 15])+ \
  785. W[(j-7) & 15] + \
  786. Gamma0(W[(j-15) & 15]) \
  787. )
  788. #define RND1(j) \
  789. t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED1(j); \
  790. t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
  791. d(j) += t0; \
  792. h(j) = t0 + t1
  793. #define RNDN(j) \
  794. t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED(j); \
  795. t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
  796. d(j) += t0; \
  797. h(j) = t0 + t1
  798. static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
  799. {
  800. word32 S[8], t0, t1;
  801. int i;
  802. word32 W[WC_SHA256_BLOCK_SIZE/sizeof(word32)];
  803. /* Copy digest to working vars */
  804. S[0] = sha256->digest[0];
  805. S[1] = sha256->digest[1];
  806. S[2] = sha256->digest[2];
  807. S[3] = sha256->digest[3];
  808. S[4] = sha256->digest[4];
  809. S[5] = sha256->digest[5];
  810. S[6] = sha256->digest[6];
  811. S[7] = sha256->digest[7];
  812. i = 0;
  813. RND1( 0); RND1( 1); RND1( 2); RND1( 3);
  814. RND1( 4); RND1( 5); RND1( 6); RND1( 7);
  815. RND1( 8); RND1( 9); RND1(10); RND1(11);
  816. RND1(12); RND1(13); RND1(14); RND1(15);
  817. /* 64 operations, partially loop unrolled */
  818. for (i = 16; i < 64; i += 16) {
  819. RNDN( 0); RNDN( 1); RNDN( 2); RNDN( 3);
  820. RNDN( 4); RNDN( 5); RNDN( 6); RNDN( 7);
  821. RNDN( 8); RNDN( 9); RNDN(10); RNDN(11);
  822. RNDN(12); RNDN(13); RNDN(14); RNDN(15);
  823. }
  824. /* Add the working vars back into digest */
  825. sha256->digest[0] += S[0];
  826. sha256->digest[1] += S[1];
  827. sha256->digest[2] += S[2];
  828. sha256->digest[3] += S[3];
  829. sha256->digest[4] += S[4];
  830. sha256->digest[5] += S[5];
  831. sha256->digest[6] += S[6];
  832. sha256->digest[7] += S[7];
  833. return 0;
  834. }
  835. #endif /* SHA256_MANY_REGISTERS */
  836. #endif
  837. /* End wc_ software implementation */
  838. #ifdef XTRANSFORM
  839. static WC_INLINE void AddLength(wc_Sha256* sha256, word32 len)
  840. {
  841. word32 tmp = sha256->loLen;
  842. if ((sha256->loLen += len) < tmp) {
  843. sha256->hiLen++; /* carry low to high */
  844. }
  845. }
  846. /* do block size increments/updates */
  847. static WC_INLINE int Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
  848. {
  849. int ret = 0;
  850. word32 blocksLen;
  851. byte* local;
  852. if (sha256 == NULL || (data == NULL && len > 0)) {
  853. return BAD_FUNC_ARG;
  854. }
  855. if (data == NULL && len == 0) {
  856. /* valid, but do nothing */
  857. return 0;
  858. }
  859. /* check that internal buffLen is valid */
  860. if (sha256->buffLen >= WC_SHA256_BLOCK_SIZE) {
  861. return BUFFER_E;
  862. }
  863. /* add length for final */
  864. AddLength(sha256, len);
  865. local = (byte*)sha256->buffer;
  866. /* process any remainder from previous operation */
  867. if (sha256->buffLen > 0) {
  868. blocksLen = min(len, WC_SHA256_BLOCK_SIZE - sha256->buffLen);
  869. XMEMCPY(&local[sha256->buffLen], data, blocksLen);
  870. sha256->buffLen += blocksLen;
  871. data += blocksLen;
  872. len -= blocksLen;
  873. if (sha256->buffLen == WC_SHA256_BLOCK_SIZE) {
  874. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  875. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  876. if (sha256->ctx.mode == ESP32_SHA_INIT) {
  877. ESP_LOGV(TAG, "Sha256Update try hardware");
  878. esp_sha_try_hw_lock(&sha256->ctx);
  879. }
  880. #endif
  881. #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
  882. #if defined(WOLFSSL_X86_64_BUILD) && \
  883. defined(USE_INTEL_SPEEDUP) && \
  884. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  885. if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
  886. #endif
  887. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  888. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  889. defined(WOLFSSL_ESP32_CRYPT) && \
  890. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  891. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  892. if (esp_sha_need_byte_reversal(&sha256->ctx))
  893. #endif
  894. {
  895. ByteReverseWords(sha256->buffer, sha256->buffer,
  896. WC_SHA256_BLOCK_SIZE);
  897. }
  898. #endif
  899. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  900. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  901. if (sha256->ctx.mode == ESP32_SHA_SW) {
  902. #if defined(WOLFSSL_DEBUG_MUTEX)
  903. {
  904. ESP_LOGI(TAG, "Sha256Update process software");
  905. }
  906. #endif
  907. #ifdef WOLFSSL_HW_METRICS
  908. {
  909. /* Track # of SW transforms performed while HW is active */
  910. esp_sw_sha256_count_add();
  911. }
  912. #endif /* WOLFSSL_HW_METRICS */
  913. ret = XTRANSFORM(sha256, (const byte*)local);
  914. }
  915. else {
  916. #if defined(WOLFSSL_DEBUG_MUTEX)
  917. {
  918. ESP_LOGI(TAG, "Sha256Update process hardware");
  919. }
  920. #endif
  921. esp_sha256_process(sha256, (const byte*)local);
  922. }
  923. #else
  924. /* Always SW */
  925. ret = XTRANSFORM(sha256, (const byte*)local);
  926. #endif
  927. if (ret == 0)
  928. sha256->buffLen = 0;
  929. else
  930. len = 0; /* error */
  931. }
  932. }
  933. /* process blocks */
  934. #ifdef XTRANSFORM_LEN
  935. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  936. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  937. if (Transform_Sha256_Len_p != NULL)
  938. #endif
  939. {
  940. /* get number of blocks */
  941. /* 64-1 = 0x3F (~ Inverted = 0xFFFFFFC0) */
  942. /* len (masked by 0xFFFFFFC0) returns block aligned length */
  943. blocksLen = len & ~((word32)WC_SHA256_BLOCK_SIZE-1);
  944. if (blocksLen > 0) {
  945. /* Byte reversal and alignment handled in function if required */
  946. XTRANSFORM_LEN(sha256, data, blocksLen);
  947. data += blocksLen;
  948. len -= blocksLen;
  949. }
  950. }
  951. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  952. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  953. else
  954. #endif
  955. #endif /* XTRANSFORM_LEN */
  956. #if !defined(XTRANSFORM_LEN) || \
  957. (defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  958. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
  959. {
  960. while (len >= WC_SHA256_BLOCK_SIZE) {
  961. word32* local32 = sha256->buffer;
  962. /* optimization to avoid memcpy if data pointer is properly aligned */
  963. /* Intel transform function requires use of sha256->buffer */
  964. /* Little Endian requires byte swap, so can't use data directly */
  965. #if defined(WC_HASH_DATA_ALIGNMENT) && !defined(LITTLE_ENDIAN_ORDER) && \
  966. !(defined(WOLFSSL_X86_64_BUILD) && \
  967. defined(USE_INTEL_SPEEDUP) && \
  968. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
  969. if (((wc_ptr_t)data % WC_HASH_DATA_ALIGNMENT) == 0) {
  970. local32 = (word32*)data;
  971. }
  972. else
  973. #endif
  974. {
  975. XMEMCPY(local32, data, WC_SHA256_BLOCK_SIZE);
  976. }
  977. data += WC_SHA256_BLOCK_SIZE;
  978. len -= WC_SHA256_BLOCK_SIZE;
  979. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  980. !defined( NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  981. if (sha256->ctx.mode == ESP32_SHA_INIT){
  982. ESP_LOGV(TAG, "Sha256Update try hardware loop");
  983. esp_sha_try_hw_lock(&sha256->ctx);
  984. }
  985. #endif
  986. #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
  987. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  988. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  989. defined(WOLFSSL_ESP32_CRYPT) && \
  990. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  991. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  992. if (esp_sha_need_byte_reversal(&sha256->ctx))
  993. #endif
  994. #if defined(WOLFSSL_X86_64_BUILD) && \
  995. defined(USE_INTEL_SPEEDUP) && \
  996. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  997. if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
  998. #endif
  999. {
  1000. ByteReverseWords(local32, local32, WC_SHA256_BLOCK_SIZE);
  1001. }
  1002. #endif
  1003. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  1004. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1005. if (sha256->ctx.mode == ESP32_SHA_SW) {
  1006. ESP_LOGV(TAG, "Sha256Update process software loop");
  1007. ret = XTRANSFORM(sha256, (const byte*)local32);
  1008. }
  1009. else {
  1010. ESP_LOGV(TAG, "Sha256Update process hardware");
  1011. esp_sha256_process(sha256, (const byte*)local32);
  1012. }
  1013. #else
  1014. ret = XTRANSFORM(sha256, (const byte*)local32);
  1015. #endif
  1016. if (ret != 0)
  1017. break;
  1018. }
  1019. }
  1020. #endif
  1021. /* save remainder */
  1022. if (ret == 0 && len > 0) {
  1023. XMEMCPY(local, data, len);
  1024. sha256->buffLen = len;
  1025. }
  1026. return ret;
  1027. }
  1028. #if defined(WOLFSSL_KCAPI_HASH)
  1029. /* implemented in wolfcrypt/src/port/kcapi/kcapi_hash.c */
  1030. #else
  1031. int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
  1032. {
  1033. if (sha256 == NULL || (data == NULL && len > 0)) {
  1034. return BAD_FUNC_ARG;
  1035. }
  1036. if (data == NULL && len == 0) {
  1037. /* valid, but do nothing */
  1038. return 0;
  1039. }
  1040. #ifdef WOLF_CRYPTO_CB
  1041. #ifndef WOLF_CRYPTO_CB_FIND
  1042. if (sha256->devId != INVALID_DEVID)
  1043. #endif
  1044. {
  1045. int ret = wc_CryptoCb_Sha256Hash(sha256, data, len, NULL);
  1046. if (ret != CRYPTOCB_UNAVAILABLE)
  1047. return ret;
  1048. /* fall-through when unavailable */
  1049. }
  1050. #endif
  1051. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
  1052. if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
  1053. #if defined(HAVE_INTEL_QA)
  1054. return IntelQaSymSha256(&sha256->asyncDev, NULL, data, len);
  1055. #endif
  1056. }
  1057. #endif /* WOLFSSL_ASYNC_CRYPT */
  1058. return Sha256Update(sha256, data, len);
  1059. }
  1060. #endif
  1061. static WC_INLINE int Sha256Final(wc_Sha256* sha256)
  1062. {
  1063. int ret;
  1064. byte* local;
  1065. if (sha256 == NULL) {
  1066. return BAD_FUNC_ARG;
  1067. }
  1068. /* we'll add a 0x80 byte at the end,
  1069. ** so make sure we have appropriate buffer length. */
  1070. if (sha256->buffLen > WC_SHA256_BLOCK_SIZE - 1) {
  1071. /* exit with error code if there's a bad buffer size in buffLen */
  1072. return BAD_STATE_E;
  1073. } /* buffLen check */
  1074. local = (byte*)sha256->buffer;
  1075. local[sha256->buffLen++] = 0x80; /* add 1 */
  1076. /* pad with zeros */
  1077. if (sha256->buffLen > WC_SHA256_PAD_SIZE) {
  1078. XMEMSET(&local[sha256->buffLen], 0,
  1079. WC_SHA256_BLOCK_SIZE - sha256->buffLen);
  1080. sha256->buffLen += WC_SHA256_BLOCK_SIZE - sha256->buffLen;
  1081. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  1082. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1083. if (sha256->ctx.mode == ESP32_SHA_INIT) {
  1084. esp_sha_try_hw_lock(&sha256->ctx);
  1085. }
  1086. #endif
  1087. #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
  1088. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  1089. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  1090. defined(WOLFSSL_ESP32_CRYPT) && \
  1091. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  1092. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1093. if (esp_sha_need_byte_reversal(&sha256->ctx))
  1094. #endif
  1095. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  1096. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  1097. if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
  1098. #endif
  1099. {
  1100. ByteReverseWords(sha256->buffer, sha256->buffer,
  1101. WC_SHA256_BLOCK_SIZE);
  1102. }
  1103. #endif
  1104. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  1105. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1106. if (sha256->ctx.mode == ESP32_SHA_INIT) {
  1107. esp_sha_try_hw_lock(&sha256->ctx);
  1108. }
  1109. if (sha256->ctx.mode == ESP32_SHA_SW) {
  1110. ret = XTRANSFORM(sha256, (const byte*)local);
  1111. }
  1112. else {
  1113. ret = esp_sha256_process(sha256, (const byte*)local);
  1114. }
  1115. #else
  1116. ret = XTRANSFORM(sha256, (const byte*)local);
  1117. #endif
  1118. if (ret != 0)
  1119. return ret;
  1120. sha256->buffLen = 0;
  1121. }
  1122. XMEMSET(&local[sha256->buffLen], 0,
  1123. WC_SHA256_PAD_SIZE - sha256->buffLen);
  1124. /* put 64 bit length in separate 32 bit parts */
  1125. sha256->hiLen = (sha256->loLen >> (8 * sizeof(sha256->loLen) - 3)) +
  1126. (sha256->hiLen << 3);
  1127. sha256->loLen = sha256->loLen << 3;
  1128. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  1129. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1130. if (sha256->ctx.mode == ESP32_SHA_INIT) {
  1131. esp_sha_try_hw_lock(&sha256->ctx);
  1132. }
  1133. #endif
  1134. /* store lengths */
  1135. #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
  1136. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  1137. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  1138. defined(WOLFSSL_ESP32_CRYPT) && \
  1139. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  1140. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1141. if (esp_sha_need_byte_reversal(&sha256->ctx))
  1142. #endif
  1143. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  1144. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  1145. if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
  1146. #endif
  1147. {
  1148. ByteReverseWords(sha256->buffer, sha256->buffer,
  1149. WC_SHA256_BLOCK_SIZE);
  1150. }
  1151. #endif
  1152. /* ! 64-bit length ordering dependent on digest endian type ! */
  1153. XMEMCPY(&local[WC_SHA256_PAD_SIZE], &sha256->hiLen, sizeof(word32));
  1154. XMEMCPY(&local[WC_SHA256_PAD_SIZE + sizeof(word32)], &sha256->loLen,
  1155. sizeof(word32));
  1156. /* Only the ESP32-C3/C6 with HW enabled may need pad size byte order reversal,
  1157. * depending on HW or SW mode */
  1158. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  1159. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  1160. defined(WOLFSSL_ESP32_CRYPT) && \
  1161. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  1162. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1163. if (sha256->ctx.mode == ESP32_SHA_HW) {
  1164. #if defined(WOLFSSL_SUPER_VERBOSE_DEBUG)
  1165. ESP_LOGV(TAG, "Start: Reverse PAD SIZE Endianness.");
  1166. #endif
  1167. ByteReverseWords(
  1168. &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)], /* out */
  1169. &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)], /* in */
  1170. 2 * sizeof(word32) /* byte count to reverse */
  1171. );
  1172. #if defined(WOLFSSL_SUPER_VERBOSE_DEBUG)
  1173. ESP_LOGV(TAG, "End: Reverse PAD SIZE Endianness.");
  1174. #endif
  1175. } /* end if (sha256->ctx.mode == ESP32_SHA_HW) */
  1176. #endif
  1177. #if defined(FREESCALE_MMCAU_SHA) || \
  1178. (defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  1179. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
  1180. /* Kinetis requires only these bytes reversed */
  1181. #if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
  1182. (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
  1183. if (IS_INTEL_AVX1(intel_flags) || IS_INTEL_AVX2(intel_flags))
  1184. #endif
  1185. {
  1186. ByteReverseWords(
  1187. &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
  1188. &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
  1189. 2 * sizeof(word32));
  1190. }
  1191. #endif
  1192. #if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
  1193. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1194. if (sha256->ctx.mode == ESP32_SHA_INIT) {
  1195. esp_sha_try_hw_lock(&sha256->ctx);
  1196. }
  1197. /* depending on architecture and ctx.mode value
  1198. * we may or may not need default digest */
  1199. if (sha256->ctx.mode == ESP32_SHA_SW) {
  1200. ret = XTRANSFORM(sha256, (const byte*)local);
  1201. }
  1202. else {
  1203. ret = esp_sha256_digest_process(sha256, 1);
  1204. }
  1205. #else
  1206. ret = XTRANSFORM(sha256, (const byte*)local);
  1207. #endif
  1208. return ret;
  1209. }
  1210. #if !defined(WOLFSSL_KCAPI_HASH)
  1211. int wc_Sha256FinalRaw(wc_Sha256* sha256, byte* hash)
  1212. {
  1213. #ifdef LITTLE_ENDIAN_ORDER
  1214. word32 digest[WC_SHA256_DIGEST_SIZE / sizeof(word32)];
  1215. #endif
  1216. if (sha256 == NULL || hash == NULL) {
  1217. return BAD_FUNC_ARG;
  1218. }
  1219. #ifdef LITTLE_ENDIAN_ORDER
  1220. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
  1221. defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  1222. defined(WOLFSSL_ESP32_CRYPT) && \
  1223. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  1224. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1225. if (esp_sha_need_byte_reversal(&sha256->ctx))
  1226. #endif
  1227. {
  1228. ByteReverseWords((word32*)digest,
  1229. (word32*)sha256->digest,
  1230. WC_SHA256_DIGEST_SIZE);
  1231. }
  1232. XMEMCPY(hash, digest, WC_SHA256_DIGEST_SIZE);
  1233. #else
  1234. XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
  1235. #endif
  1236. return 0;
  1237. }
  1238. int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
  1239. {
  1240. int ret;
  1241. if (sha256 == NULL || hash == NULL) {
  1242. return BAD_FUNC_ARG;
  1243. }
  1244. #ifdef WOLF_CRYPTO_CB
  1245. #ifndef WOLF_CRYPTO_CB_FIND
  1246. if (sha256->devId != INVALID_DEVID)
  1247. #endif
  1248. {
  1249. ret = wc_CryptoCb_Sha256Hash(sha256, NULL, 0, hash);
  1250. if (ret != CRYPTOCB_UNAVAILABLE)
  1251. return ret;
  1252. /* fall-through when unavailable */
  1253. }
  1254. #endif
  1255. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
  1256. if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
  1257. #if defined(HAVE_INTEL_QA)
  1258. return IntelQaSymSha256(&sha256->asyncDev, hash, NULL,
  1259. WC_SHA256_DIGEST_SIZE);
  1260. #endif
  1261. }
  1262. #endif /* WOLFSSL_ASYNC_CRYPT */
  1263. ret = Sha256Final(sha256);
  1264. if (ret != 0) {
  1265. return ret;
  1266. }
  1267. #if defined(LITTLE_ENDIAN_ORDER)
  1268. #if (defined(CONFIG_IDF_TARGET_ESP32C3) || defined(CONFIG_IDF_TARGET_ESP32C6)) && \
  1269. defined(WOLFSSL_ESP32_CRYPT) && \
  1270. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
  1271. !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
  1272. if (esp_sha_need_byte_reversal(&sha256->ctx))
  1273. #endif
  1274. {
  1275. ByteReverseWords(sha256->digest, sha256->digest,
  1276. WC_SHA256_DIGEST_SIZE);
  1277. }
  1278. #endif
  1279. XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
  1280. return InitSha256(sha256); /* reset state */
  1281. }
  1282. #if defined(OPENSSL_EXTRA) || defined(HAVE_CURL)
  1283. /* Apply the SHA256 transformation to one block of data */
  1284. /* @param sha a pointer to a wc_Sha256 structure */
  1285. /* @param data the block of data the SHA256 transformation is applied to */
  1286. /* @return 0 on success, otherwise non-zero on failure */
  1287. int wc_Sha256Transform(wc_Sha256* sha, const unsigned char* data)
  1288. {
  1289. if (sha == NULL || data == NULL) {
  1290. return BAD_FUNC_ARG;
  1291. }
  1292. return (Transform_Sha256(sha, data));
  1293. }
  1294. #endif
  1295. #endif /* OPENSSL_EXTRA */
  1296. #endif /* !WOLFSSL_KCAPI_HASH */
#ifdef WOLFSSL_SHA224

#ifdef STM32_HASH_SHA2

/* Supports CubeMX HAL or Standard Peripheral Library */

int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
{
    if (sha224 == NULL)
        return BAD_FUNC_ARG;
    (void)devId;
    (void)heap;

    XMEMSET(sha224, 0, sizeof(wc_Sha224));
    wc_Stm32_Hash_Init(&sha224->stmCtx);
    return 0;
}

int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
{
    int ret = 0;

    if (sha224 == NULL || (data == NULL && len > 0)) {
        return BAD_FUNC_ARG;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
        ret = wc_Stm32_Hash_Update(&sha224->stmCtx,
            HASH_AlgoSelection_SHA224, data, len, WC_SHA224_BLOCK_SIZE);
        wolfSSL_CryptHwMutexUnLock();
    }
    return ret;
}

int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
{
    int ret = 0;

    if (sha224 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
        ret = wc_Stm32_Hash_Final(&sha224->stmCtx,
            HASH_AlgoSelection_SHA224, hash, WC_SHA224_DIGEST_SIZE);
        wolfSSL_CryptHwMutexUnLock();
    }

    (void)wc_InitSha224(sha224); /* reset state */

    return ret;
}
#elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_HASH)

int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
{
    if (sha224 == NULL) {
        return BAD_FUNC_ARG;
    }
    (void)devId;

    return se050_hash_init(&sha224->se050Ctx, heap);
}

int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
{
    return se050_hash_update(&sha224->se050Ctx, data, len);
}

int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
{
    int ret = 0;
    ret = se050_hash_final(&sha224->se050Ctx, hash, WC_SHA224_DIGEST_SIZE,
                           kAlgorithm_SSS_SHA224);
    (void)wc_InitSha224(sha224);
    return ret;
}
#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH) && \
    !defined(WOLFSSL_QNX_CAAM)
    /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
#elif defined(WOLFSSL_AFALG_HASH)
    #error SHA224 currently not supported with AF_ALG enabled
#elif defined(WOLFSSL_DEVCRYPTO_HASH)
    /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
#elif defined(WOLFSSL_SILABS_SE_ACCEL)
    /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
#elif defined(WOLFSSL_KCAPI_HASH) && !defined(WOLFSSL_NO_KCAPI_SHA224)
    /* implemented in wolfcrypt/src/port/kcapi/kcapi_hash.c */
#elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_HASH)
    /* implemented in wolfcrypt/src/port/psa/psa_hash.c */
#elif defined(WOLFSSL_RENESAS_RX64_HASH)
    /* implemented in wolfcrypt/src/port/Renesas/renesas_rx64_hw_sha.c */
#elif defined(WOLFSSL_RENESAS_RSIP) && \
    !defined(NO_WOLFSSL_RENESAS_FSPSM_HASH)
    /* implemented in wolfcrypt/src/port/Renesas/renesas_fspsm_sha.c */
#else
#define NEED_SOFT_SHA224

static int InitSha224(wc_Sha224* sha224)
{
    int ret = 0;

    if (sha224 == NULL) {
        return BAD_FUNC_ARG;
    }

    sha224->digest[0] = 0xc1059ed8;
    sha224->digest[1] = 0x367cd507;
    sha224->digest[2] = 0x3070dd17;
    sha224->digest[3] = 0xf70e5939;
    sha224->digest[4] = 0xffc00b31;
    sha224->digest[5] = 0x68581511;
    sha224->digest[6] = 0x64f98fa7;
    sha224->digest[7] = 0xbefa4fa4;

    sha224->buffLen = 0;
    sha224->loLen   = 0;
    sha224->hiLen   = 0;

#if defined(WOLFSSL_X86_64_BUILD) && defined(USE_INTEL_SPEEDUP) && \
    (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
    /* choose the best Transform function for this runtime environment */
    Sha256_SetTransform();
#endif

#ifdef WOLFSSL_HASH_FLAGS
    sha224->flags = 0;
#endif
#ifdef WOLFSSL_HASH_KEEP
    sha224->msg  = NULL;
    sha224->len  = 0;
    sha224->used = 0;
#endif

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    (!defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    /* not to be confused with SHA-512/224 */
    ret = esp_sha_init(&(sha224->ctx), WC_HASH_TYPE_SHA224);
#endif

    return ret;
}
#endif
#ifdef NEED_SOFT_SHA224
int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
{
    int ret = 0;

    if (sha224 == NULL)
        return BAD_FUNC_ARG;

    sha224->heap = heap;
#ifdef WOLFSSL_SMALL_STACK_CACHE
    sha224->W = NULL;
#endif

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW)
    #if defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224)
    /* log the prior ctx mode if it was not already set to software */
    if (sha224->ctx.mode != ESP32_SHA_SW) {
        ESP_LOGV(TAG, "Set sha224 ctx mode init to ESP32_SHA_SW. "
                      "Prior value: %d", sha224->ctx.mode);
    }
    /* no sha224 HW support is available, set to SW */
    sha224->ctx.mode = ESP32_SHA_SW;
    #else
    /* We know this is a fresh, uninitialized item, so set to INIT */
    sha224->ctx.mode = ESP32_SHA_INIT;
    #endif
#endif

    ret = InitSha224(sha224);
    if (ret != 0) {
        return ret;
    }

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
    ret = wolfAsync_DevCtxInit(&sha224->asyncDev,
                        WOLFSSL_ASYNC_MARKER_SHA224, sha224->heap, devId);
#else
    (void)devId;
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_IMXRT1170_CAAM
    ret = wc_CAAM_HashInit(&sha224->hndl, &sha224->ctx, WC_HASH_TYPE_SHA224);
#endif

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    (!defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    if (sha224->ctx.mode != ESP32_SHA_INIT) {
        ESP_LOGV("SHA224", "Set ctx mode from prior value: "
                           "%d", sha224->ctx.mode);
    }
    /* We know this is a fresh, uninitialized item, so set to INIT */
    sha224->ctx.mode = ESP32_SHA_INIT;
#endif

    return ret;
}
int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
{
    int ret;

    if (sha224 == NULL || (data == NULL && len > 0)) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
    if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
    #if defined(HAVE_INTEL_QA)
        return IntelQaSymSha224(&sha224->asyncDev, NULL, data, len);
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    (defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    sha224->ctx.mode = ESP32_SHA_SW; /* no SHA224 HW, so always SW */
#endif

    ret = Sha256Update((wc_Sha256*)sha224, data, len);

    return ret;
}
int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
{
    int ret;

    if (sha224 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
    if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
    #if defined(HAVE_INTEL_QA)
        return IntelQaSymSha224(&sha224->asyncDev, hash, NULL,
                                WC_SHA224_DIGEST_SIZE);
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    (!defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    /* nothing enabled here for C3 success */
#endif

    ret = Sha256Final((wc_Sha256*)sha224);
    if (ret != 0)
        return ret;

#if defined(LITTLE_ENDIAN_ORDER)
#if (defined(CONFIG_IDF_TARGET_ESP32C3) || \
     defined(CONFIG_IDF_TARGET_ESP32C6)) && \
    defined(WOLFSSL_ESP32_CRYPT) && \
    (!defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    if (esp_sha_need_byte_reversal(&sha224->ctx))
#endif
    {
        ByteReverseWords(sha224->digest, sha224->digest,
                         WC_SHA224_DIGEST_SIZE);
    }
#endif

    XMEMCPY(hash, sha224->digest, WC_SHA224_DIGEST_SIZE);

    return InitSha224(sha224); /* reset state */
}
#endif /* end of SHA224 software implementation */

int wc_InitSha224(wc_Sha224* sha224)
{
    int devId = INVALID_DEVID;

#ifdef WOLF_CRYPTO_CB
    devId = wc_CryptoCb_DefaultDevID();
#endif

    return wc_InitSha224_ex(sha224, NULL, devId);
}
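
/* Usage sketch (illustrative only; the example_* helper below is hypothetical
 * and excluded from the build via #if 0): SHA-224 follows the same
 * init/update/final flow as SHA-256 but produces a WC_SHA224_DIGEST_SIZE
 * (28-byte) digest.
 */
#if 0
static int example_sha224_digest(const byte* msg, word32 msgSz,
                                 byte out[WC_SHA224_DIGEST_SIZE])
{
    wc_Sha224 sha224;
    int ret = wc_InitSha224(&sha224);
    if (ret == 0) {
        ret = wc_Sha224Update(&sha224, msg, msgSz);
        if (ret == 0)
            ret = wc_Sha224Final(&sha224, out); /* writes digest, resets state */
        wc_Sha224Free(&sha224);
    }
    return ret;
}
#endif /* 0 (example only) */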
#if !defined(WOLFSSL_HAVE_PSA) || defined(WOLFSSL_PSA_NO_HASH)
/* when PSA is enabled, this is implemented in
 * wolfcrypt/src/port/psa/psa_hash.c */
void wc_Sha224Free(wc_Sha224* sha224)
{
    if (sha224 == NULL)
        return;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    if (sha224->W != NULL) {
        ForceZero(sha224->W, sizeof(word32) * WC_SHA224_BLOCK_SIZE);
        XFREE(sha224->W, NULL, DYNAMIC_TYPE_DIGEST);
        sha224->W = NULL;
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
    wolfAsync_DevCtxFree(&sha224->asyncDev, WOLFSSL_ASYNC_MARKER_SHA224);
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_PIC32MZ_HASH
    wc_Sha256Pic32Free(sha224);
#endif
#if defined(WOLFSSL_KCAPI_HASH)
    KcapiHashFree(&sha224->kcapi);
#endif
#if defined(WOLFSSL_RENESAS_RX64_HASH)
    if (sha224->msg != NULL) {
        ForceZero(sha224->msg, sha224->len);
        XFREE(sha224->msg, sha224->heap, DYNAMIC_TYPE_TMP_BUFFER);
        sha224->msg = NULL;
    }
#endif
    ForceZero(sha224, sizeof(*sha224));
}
#endif /* !defined(WOLFSSL_HAVE_PSA) || defined(WOLFSSL_PSA_NO_HASH) */
#endif /* WOLFSSL_SHA224 */
int wc_InitSha256(wc_Sha256* sha256)
{
    int devId = INVALID_DEVID;

#ifdef WOLF_CRYPTO_CB
    devId = wc_CryptoCb_DefaultDevID();
#endif

    return wc_InitSha256_ex(sha256, NULL, devId);
}
#if !defined(WOLFSSL_HAVE_PSA) || defined(WOLFSSL_PSA_NO_HASH)
/* when PSA is enabled, this is implemented in
 * wolfcrypt/src/port/psa/psa_hash.c */
void wc_Sha256Free(wc_Sha256* sha256)
{
    if (sha256 == NULL)
        return;

#if defined(WOLFSSL_ESP32) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
    esp_sha_release_unfinished_lock(&sha256->ctx);
#endif

#ifdef WOLFSSL_SMALL_STACK_CACHE
    if (sha256->W != NULL) {
        ForceZero(sha256->W, sizeof(word32) * WC_SHA256_BLOCK_SIZE);
        XFREE(sha256->W, NULL, DYNAMIC_TYPE_DIGEST);
        sha256->W = NULL;
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
    wolfAsync_DevCtxFree(&sha256->asyncDev, WOLFSSL_ASYNC_MARKER_SHA256);
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_PIC32MZ_HASH
    wc_Sha256Pic32Free(sha256);
#endif
#if defined(WOLFSSL_AFALG_HASH)
    if (sha256->alFd > 0) {
        close(sha256->alFd);
        sha256->alFd = -1; /* avoid possible double close on socket */
    }
    if (sha256->rdFd > 0) {
        close(sha256->rdFd);
        sha256->rdFd = -1; /* avoid possible double close on socket */
    }
#endif /* WOLFSSL_AFALG_HASH */
#ifdef WOLFSSL_DEVCRYPTO_HASH
    wc_DevCryptoFree(&sha256->ctx);
#endif /* WOLFSSL_DEVCRYPTO */
#if (defined(WOLFSSL_AFALG_HASH) && defined(WOLFSSL_AFALG_HASH_KEEP)) || \
    (defined(WOLFSSL_DEVCRYPTO_HASH) && defined(WOLFSSL_DEVCRYPTO_HASH_KEEP)) || \
    ((defined(WOLFSSL_RENESAS_TSIP_TLS) || \
      defined(WOLFSSL_RENESAS_TSIP_CRYPTONLY)) && \
     !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)) || \
    (defined(WOLFSSL_RENESAS_SCEPROTECT) && \
     !defined(NO_WOLFSSL_RENESAS_FSPSM_HASH)) || \
    defined(WOLFSSL_RENESAS_RX64_HASH) || \
    defined(WOLFSSL_HASH_KEEP)
    if (sha256->msg != NULL) {
        ForceZero(sha256->msg, sha256->len);
        XFREE(sha256->msg, sha256->heap, DYNAMIC_TYPE_TMP_BUFFER);
        sha256->msg = NULL;
    }
#endif
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_HASH)
    se050_hash_free(&sha256->se050Ctx);
#endif
#if defined(WOLFSSL_KCAPI_HASH)
    KcapiHashFree(&sha256->kcapi);
#endif
#ifdef WOLFSSL_IMXRT_DCP
    DCPSha256Free(sha256);
#endif
#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    wc_MAXQ10XX_Sha256Free(sha256);
#endif
#ifdef HAVE_ARIA
    if (sha256->hSession != NULL) {
        MC_CloseSession(sha256->hSession);
        sha256->hSession = NULL;
    }
#endif

/* Espressif embedded hardware acceleration specific: */
#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_HASH) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
    if (sha256->ctx.lockDepth > 0) {
        /* Probably due to an unclean shutdown, error, or other problem.
         *
         * If you find yourself here, the calling code needs to be cleaned up
         * to release the hardware properly. This init only handles the
         * unexpected case; by the time free is called, the hardware should
         * already have been released (lockDepth == 0).
         */
        (void)InitSha256(sha256); /* unlock mutex, set mode to ESP32_SHA_INIT */
        ESP_LOGV(TAG, "Alert: hardware unlock needed in wc_Sha256Free.");
    }
    else {
        ESP_LOGV(TAG, "Hardware unlock not needed in wc_Sha256Free.");
    }
#endif
    ForceZero(sha256, sizeof(*sha256));
} /* wc_Sha256Free */
#endif /* !defined(WOLFSSL_HAVE_PSA) || defined(WOLFSSL_PSA_NO_HASH) */
#ifdef WOLFSSL_HASH_KEEP
/* Some hardware has issues with incremental updates, so this function instead
 * stores the data to be hashed in an accumulating buffer. Once all data has
 * been collected, the Final operation hashes it in a single pass.
 * Returns 0 on success.
 */
int wc_Sha256_Grow(wc_Sha256* sha256, const byte* in, int inSz)
{
    return _wc_Hash_Grow(&(sha256->msg), &(sha256->used), &(sha256->len), in,
                         inSz, sha256->heap);
}
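
/* Usage sketch (illustrative only; the example_* helper below is hypothetical
 * and excluded from the build via #if 0): with WOLFSSL_HASH_KEEP the message
 * pieces are buffered and the port's Final routine later hashes the
 * accumulated data in one operation.
 */
#if 0
static int example_sha256_grow(wc_Sha256* sha256,
                               const byte* part1, int part1Sz,
                               const byte* part2, int part2Sz)
{
    int ret = wc_Sha256_Grow(sha256, part1, part1Sz); /* buffer first piece */
    if (ret == 0)
        ret = wc_Sha256_Grow(sha256, part2, part2Sz); /* append second piece */
    return ret; /* digest is produced later by the port's Final routine */
}
#endif /* 0 (example only) */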
#ifdef WOLFSSL_SHA224
int wc_Sha224_Grow(wc_Sha224* sha224, const byte* in, int inSz)
{
    return _wc_Hash_Grow(&(sha224->msg), &(sha224->used), &(sha224->len), in,
                         inSz, sha224->heap);
}
#endif /* WOLFSSL_SHA224 */
#endif /* WOLFSSL_HASH_KEEP */

#endif /* !WOLFSSL_TI_HASH */
#ifndef WOLFSSL_TI_HASH
#if !defined(WOLFSSL_RENESAS_RX64_HASH) && \
    (!defined(WOLFSSL_RENESAS_RSIP) || \
      defined(NO_WOLFSSL_RENESAS_FSPSM_HASH))
#ifdef WOLFSSL_SHA224

#if defined(WOLFSSL_KCAPI_HASH) && !defined(WOLFSSL_NO_KCAPI_SHA224)
    /* implemented in wolfcrypt/src/port/kcapi/kcapi_hash.c */
#elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_HASH)
    /* implemented in wolfcrypt/src/port/psa/psa_hash.c */
#else
int wc_Sha224GetHash(wc_Sha224* sha224, byte* hash)
{
    int ret;
#ifdef WOLFSSL_SMALL_STACK
    wc_Sha224* tmpSha224;
#else
    wc_Sha224  tmpSha224[1];
#endif

    if (sha224 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    tmpSha224 = (wc_Sha224*)XMALLOC(sizeof(wc_Sha224), NULL,
                                    DYNAMIC_TYPE_TMP_BUFFER);
    if (tmpSha224 == NULL) {
        return MEMORY_E;
    }
#endif

    ret = wc_Sha224Copy(sha224, tmpSha224);
    if (ret == 0) {
        ret = wc_Sha224Final(tmpSha224, hash);
        wc_Sha224Free(tmpSha224);
    }

#ifdef WOLFSSL_SMALL_STACK
    XFREE(tmpSha224, NULL, DYNAMIC_TYPE_TMP_BUFFER);
#endif
    return ret;
}
int wc_Sha224Copy(wc_Sha224* src, wc_Sha224* dst)
{
    int ret = 0; /* assume success unless proven otherwise */

    if (src == NULL || dst == NULL) {
        return BAD_FUNC_ARG;
    }

    XMEMCPY(dst, src, sizeof(wc_Sha224));

#ifdef WOLFSSL_SMALL_STACK_CACHE
    dst->W = NULL;
#endif

#if defined(WOLFSSL_SILABS_SE_ACCEL) && defined(WOLFSSL_SILABS_SE_ACCEL_3)
    dst->silabsCtx.hash_ctx.cmd_ctx = &dst->silabsCtx.cmd_ctx;
    dst->silabsCtx.hash_ctx.hash_type_ctx = &dst->silabsCtx.hash_type_ctx;
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
    ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
#endif

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    (!defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256) || \
     !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA224))
    /* regardless of any other settings, there's no SHA-224 HW on ESP32 */
    #ifndef CONFIG_IDF_TARGET_ESP32
    ret = esp_sha224_ctx_copy(src, dst);
    #endif
#endif

#ifdef WOLFSSL_HASH_FLAGS
    dst->flags |= WC_HASH_FLAG_ISCOPY;
#endif

#if defined(WOLFSSL_HASH_KEEP)
    if (src->msg != NULL) {
        dst->msg = (byte*)XMALLOC(src->len, dst->heap, DYNAMIC_TYPE_TMP_BUFFER);
        if (dst->msg == NULL)
            return MEMORY_E;
        XMEMCPY(dst->msg, src->msg, src->len);
    }
#endif

    return ret;
}
#endif /* WOLFSSL_KCAPI_HASH && !WOLFSSL_NO_KCAPI_SHA224 */

#ifdef WOLFSSL_HASH_FLAGS
int wc_Sha224SetFlags(wc_Sha224* sha224, word32 flags)
{
    if (sha224) {
        sha224->flags = flags;
    }
    return 0;
}
int wc_Sha224GetFlags(wc_Sha224* sha224, word32* flags)
{
    if (sha224 && flags) {
        *flags = sha224->flags;
    }
    return 0;
}
#endif

#endif /* WOLFSSL_SHA224 */
#endif /* WOLFSSL_RENESAS_RX64_HASH */
#ifdef WOLFSSL_AFALG_HASH
    /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
#elif defined(WOLFSSL_DEVCRYPTO_HASH)
    /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
#elif (defined(WOLFSSL_RENESAS_TSIP_TLS) || \
       defined(WOLFSSL_RENESAS_TSIP_CRYPTONLY)) && \
    !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
    /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
#elif (defined(WOLFSSL_RENESAS_SCEPROTECT) || defined(WOLFSSL_RENESAS_RSIP)) \
    && !defined(NO_WOLFSSL_RENESAS_FSPSM_HASH)
    /* implemented in wolfcrypt/src/port/Renesas/renesas_fspsm_sha.c */
#elif defined(WOLFSSL_PSOC6_CRYPTO)
    /* implemented in wolfcrypt/src/port/cypress/psoc6_crypto.c */
#elif defined(WOLFSSL_IMXRT_DCP)
    /* implemented in wolfcrypt/src/port/nxp/dcp_port.c */
#elif defined(WOLFSSL_KCAPI_HASH)
    /* implemented in wolfcrypt/src/port/kcapi/kcapi_hash.c */
#elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_HASH)
    /* implemented in wolfcrypt/src/port/psa/psa_hash.c */
#elif defined(WOLFSSL_RENESAS_RX64_HASH)
    /* implemented in wolfcrypt/src/port/Renesas/renesas_rx64_hw_sha.c */
#else
int wc_Sha256GetHash(wc_Sha256* sha256, byte* hash)
{
    int ret;
#ifdef WOLFSSL_SMALL_STACK
    wc_Sha256* tmpSha256;
#else
    wc_Sha256  tmpSha256[1];
#endif

    if (sha256 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    tmpSha256 = (wc_Sha256*)XMALLOC(sizeof(wc_Sha256), NULL,
                                    DYNAMIC_TYPE_TMP_BUFFER);
    if (tmpSha256 == NULL) {
        return MEMORY_E;
    }
#endif

    ret = wc_Sha256Copy(sha256, tmpSha256);
    if (ret == 0) {
        ret = wc_Sha256Final(tmpSha256, hash);
        wc_Sha256Free(tmpSha256); /* TODO move outside brackets? */
    }

#ifdef WOLFSSL_SMALL_STACK
    XFREE(tmpSha256, NULL, DYNAMIC_TYPE_TMP_BUFFER);
#endif
    return ret;
}
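
/* Usage sketch (illustrative only; the example_* helper below is hypothetical
 * and excluded from the build via #if 0): wc_Sha256GetHash finalizes a
 * temporary copy of the state, so the digest of the data seen so far can be
 * read without disturbing an in-progress hash.
 */
#if 0
static int example_sha256_running_digest(wc_Sha256* sha256,
                                    const byte* more, word32 moreSz,
                                    byte digestSoFar[WC_SHA256_DIGEST_SIZE])
{
    int ret = wc_Sha256GetHash(sha256, digestSoFar); /* snapshot digest */
    if (ret == 0)
        ret = wc_Sha256Update(sha256, more, moreSz);  /* hashing continues */
    return ret;
}
#endif /* 0 (example only) */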
int wc_Sha256Copy(wc_Sha256* src, wc_Sha256* dst)
{
    int ret = 0;

    if (src == NULL || dst == NULL) {
        return BAD_FUNC_ARG;
    }

    XMEMCPY(dst, src, sizeof(wc_Sha256));

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    wc_MAXQ10XX_Sha256Copy(src);
#endif

#ifdef WOLFSSL_SMALL_STACK_CACHE
    dst->W = NULL;
#endif

#if defined(WOLFSSL_SILABS_SE_ACCEL) && defined(WOLFSSL_SILABS_SE_ACCEL_3)
    dst->silabsCtx.hash_ctx.cmd_ctx = &dst->silabsCtx.cmd_ctx;
    dst->silabsCtx.hash_ctx.hash_type_ctx = &dst->silabsCtx.hash_type_ctx;
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
    ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
#endif

#ifdef WOLFSSL_PIC32MZ_HASH
    ret = wc_Pic32HashCopy(&src->cache, &dst->cache);
#endif

#if defined(WOLFSSL_USE_ESP32_CRYPT_HASH_HW) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_HASH_SHA256)
    esp_sha256_ctx_copy(src, dst);
#endif

#ifdef HAVE_ARIA
    dst->hSession = NULL;
    if ((src->hSession != NULL) &&
        (MC_CopySession(src->hSession, &(dst->hSession)) != MC_OK)) {
        return MEMORY_E;
    }
#endif

#ifdef WOLFSSL_HASH_FLAGS
    dst->flags |= WC_HASH_FLAG_ISCOPY;
#endif

#if defined(WOLFSSL_HASH_KEEP)
    if (src->msg != NULL) {
        dst->msg = (byte*)XMALLOC(src->len, dst->heap, DYNAMIC_TYPE_TMP_BUFFER);
        if (dst->msg == NULL)
            return MEMORY_E;
        XMEMCPY(dst->msg, src->msg, src->len);
    }
#endif

    return ret;
}
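
/* Usage sketch (illustrative only; the example_* helper below is hypothetical
 * and excluded from the build via #if 0): wc_Sha256Copy duplicates a running
 * state, which allows two messages sharing a common prefix to be finalized
 * independently without re-hashing the prefix.
 */
#if 0
static int example_sha256_fork(const byte* prefix, word32 prefixSz,
                               const byte* tailA, word32 tailASz,
                               const byte* tailB, word32 tailBSz,
                               byte outA[WC_SHA256_DIGEST_SIZE],
                               byte outB[WC_SHA256_DIGEST_SIZE])
{
    wc_Sha256 base, branch;
    int ret = wc_InitSha256(&base);
    if (ret == 0)
        ret = wc_Sha256Update(&base, prefix, prefixSz);
    if (ret == 0)
        ret = wc_Sha256Copy(&base, &branch);      /* duplicate running state */
    if (ret == 0) {
        ret = wc_Sha256Update(&base, tailA, tailASz);
        if (ret == 0)
            ret = wc_Sha256Final(&base, outA);    /* prefix || tailA */
        if (ret == 0)
            ret = wc_Sha256Update(&branch, tailB, tailBSz);
        if (ret == 0)
            ret = wc_Sha256Final(&branch, outB);  /* prefix || tailB */
        wc_Sha256Free(&branch);
    }
    wc_Sha256Free(&base);
    return ret;
}
#endif /* 0 (example only) */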
#endif

#ifdef WOLFSSL_HASH_FLAGS
int wc_Sha256SetFlags(wc_Sha256* sha256, word32 flags)
{
    if (sha256) {
        sha256->flags = flags;
    }
    return 0;
}
int wc_Sha256GetFlags(wc_Sha256* sha256, word32* flags)
{
    if (sha256 && flags) {
        *flags = sha256->flags;
    }
    return 0;
}
#endif
#endif /* !WOLFSSL_TI_HASH */

#endif /* NO_SHA256 */