+// SPDX-License-Identifier: GPL-2.0+
/*
* Copyright (c) 2012 The Chromium OS Authors.
*
* (C) Copyright 2000
- *
- * SPDX-License-Identifier: GPL-2.0+
*/
#ifndef USE_HOSTCC
#include <common.h>
#include <command.h>
+#include <env.h>
+#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <hw_sha.h>
+#include <asm/cache.h>
+#include <asm/global_data.h>
#include <asm/io.h>
#include <linux/errno.h>
+#include <u-boot/crc.h>
#else
#include "mkimage.h"
#include <time.h>
-#include <image.h>
#endif /* !USE_HOSTCC*/
#include <hash.h>
+#include <image.h>
#include <u-boot/crc.h>
#include <u-boot/sha1.h>
#include <u-boot/sha256.h>
+#include <u-boot/sha512.h>
#include <u-boot/md5.h>
-#ifdef CONFIG_SHA1
+#if !defined(USE_HOSTCC) && defined(CONFIG_NEEDS_MANUAL_RELOC)
+DECLARE_GLOBAL_DATA_PTR;
+#endif
+
+static void reloc_update(void);
+
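+/*
+ * Software implementations of the progressive hash interface: each
+ * hash_init_*() allocates and seeds a context, hash_update_*() absorbs a
+ * chunk of data and hash_finish_*() writes the digest into the caller's
+ * buffer and frees the context. They are only built when the algorithm is
+ * enabled and no progressive hardware engine (CONFIG_SHA_PROG_HW_ACCEL) is
+ * available.
+ */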
+#if defined(CONFIG_SHA1) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
static int hash_init_sha1(struct hash_algo *algo, void **ctxp)
{
	sha1_context *ctx = malloc(sizeof(sha1_context));
	sha1_starts(ctx);
	*ctxp = ctx;
	return 0;
}
#endif
-#ifdef CONFIG_SHA256
+#if defined(CONFIG_SHA256) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
static int hash_init_sha256(struct hash_algo *algo, void **ctxp)
{
	sha256_context *ctx = malloc(sizeof(sha256_context));
	sha256_starts(ctx);
	*ctxp = ctx;
	return 0;
}
#endif
+#if defined(CONFIG_SHA384) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
+static int hash_init_sha384(struct hash_algo *algo, void **ctxp)
+{
+ sha512_context *ctx = malloc(sizeof(sha512_context));
+ sha384_starts(ctx);
+ *ctxp = ctx;
+ return 0;
+}
+
+static int hash_update_sha384(struct hash_algo *algo, void *ctx,
+ const void *buf, unsigned int size, int is_last)
+{
+ sha384_update((sha512_context *)ctx, buf, size);
+ return 0;
+}
+
+static int hash_finish_sha384(struct hash_algo *algo, void *ctx,
+			      void *dest_buf, int size)
+{
+ if (size < algo->digest_size)
+ return -1;
+
+ sha384_finish((sha512_context *)ctx, dest_buf);
+ free(ctx);
+ return 0;
+}
+#endif
+
+#if defined(CONFIG_SHA512) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
+static int hash_init_sha512(struct hash_algo *algo, void **ctxp)
+{
+ sha512_context *ctx = malloc(sizeof(sha512_context));
+ sha512_starts(ctx);
+ *ctxp = ctx;
+ return 0;
+}
+
+static int hash_update_sha512(struct hash_algo *algo, void *ctx,
+ const void *buf, unsigned int size, int is_last)
+{
+ sha512_update((sha512_context *)ctx, buf, size);
+ return 0;
+}
+
+static int hash_finish_sha512(struct hash_algo *algo, void *ctx,
+			      void *dest_buf, int size)
+{
+ if (size < algo->digest_size)
+ return -1;
+
+ sha512_finish((sha512_context *)ctx, dest_buf);
+ free(ctx);
+ return 0;
+}
+#endif
+
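+/*
+ * For the CRC algorithms the running checksum itself is small enough to act
+ * as the progressive context: init allocates and zeroes it, update folds in
+ * more data and finish copies the final value to the destination and frees
+ * the context.
+ */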
+static int hash_init_crc16_ccitt(struct hash_algo *algo, void **ctxp)
+{
+ uint16_t *ctx = malloc(sizeof(uint16_t));
+ *ctx = 0;
+ *ctxp = ctx;
+ return 0;
+}
+
+static int hash_update_crc16_ccitt(struct hash_algo *algo, void *ctx,
+ const void *buf, unsigned int size,
+ int is_last)
+{
+ *((uint16_t *)ctx) = crc16_ccitt(*((uint16_t *)ctx), buf, size);
+ return 0;
+}
+
+static int hash_finish_crc16_ccitt(struct hash_algo *algo, void *ctx,
+ void *dest_buf, int size)
+{
+ if (size < algo->digest_size)
+ return -1;
+
+ *((uint16_t *)dest_buf) = *((uint16_t *)ctx);
+ free(ctx);
+ return 0;
+}
+
static int hash_init_crc32(struct hash_algo *algo, void **ctxp)
{
	uint32_t *ctx = malloc(sizeof(uint32_t));
	*ctx = 0;
	*ctxp = ctx;
	return 0;
}
/*
- * These are the hash algorithms we support. Chips which support accelerated
- * crypto could perhaps add named version of these algorithms here. Note that
- * algorithm names must be in lower case.
+ * These are the hash algorithms we support. If hardware acceleration is
+ * enabled we will use it, otherwise a software version of the algorithm.
+ * Note that algorithm names must be in lower case.
*/
static struct hash_algo hash_algo[] = {
- /*
- * CONFIG_SHA_HW_ACCEL is defined if hardware acceleration is
- * available.
- */
-#ifdef CONFIG_SHA_HW_ACCEL
+#ifdef CONFIG_SHA1
{
- "sha1",
- SHA1_SUM_LEN,
- hw_sha1,
- CHUNKSZ_SHA1,
+ .name = "sha1",
+ .digest_size = SHA1_SUM_LEN,
+ .chunk_size = CHUNKSZ_SHA1,
+#ifdef CONFIG_SHA_HW_ACCEL
+ .hash_func_ws = hw_sha1,
+#else
+ .hash_func_ws = sha1_csum_wd,
+#endif
#ifdef CONFIG_SHA_PROG_HW_ACCEL
- hw_sha_init,
- hw_sha_update,
- hw_sha_finish,
+ .hash_init = hw_sha_init,
+ .hash_update = hw_sha_update,
+ .hash_finish = hw_sha_finish,
+#else
+ .hash_init = hash_init_sha1,
+ .hash_update = hash_update_sha1,
+ .hash_finish = hash_finish_sha1,
+#endif
+ },
+#endif
+#ifdef CONFIG_SHA256
+ {
+ .name = "sha256",
+ .digest_size = SHA256_SUM_LEN,
+ .chunk_size = CHUNKSZ_SHA256,
+#ifdef CONFIG_SHA_HW_ACCEL
+ .hash_func_ws = hw_sha256,
+#else
+ .hash_func_ws = sha256_csum_wd,
#endif
- }, {
- "sha256",
- SHA256_SUM_LEN,
- hw_sha256,
- CHUNKSZ_SHA256,
#ifdef CONFIG_SHA_PROG_HW_ACCEL
- hw_sha_init,
- hw_sha_update,
- hw_sha_finish,
+ .hash_init = hw_sha_init,
+ .hash_update = hw_sha_update,
+ .hash_finish = hw_sha_finish,
+#else
+ .hash_init = hash_init_sha256,
+ .hash_update = hash_update_sha256,
+ .hash_finish = hash_finish_sha256,
#endif
},
#endif
-#ifdef CONFIG_SHA1
+#ifdef CONFIG_SHA384
{
- "sha1",
- SHA1_SUM_LEN,
- sha1_csum_wd,
- CHUNKSZ_SHA1,
- hash_init_sha1,
- hash_update_sha1,
- hash_finish_sha1,
+ .name = "sha384",
+ .digest_size = SHA384_SUM_LEN,
+ .chunk_size = CHUNKSZ_SHA384,
+#ifdef CONFIG_SHA512_HW_ACCEL
+ .hash_func_ws = hw_sha384,
+#else
+ .hash_func_ws = sha384_csum_wd,
+#endif
+#if defined(CONFIG_SHA512_HW_ACCEL) && defined(CONFIG_SHA_PROG_HW_ACCEL)
+ .hash_init = hw_sha_init,
+ .hash_update = hw_sha_update,
+ .hash_finish = hw_sha_finish,
+#else
+ .hash_init = hash_init_sha384,
+ .hash_update = hash_update_sha384,
+ .hash_finish = hash_finish_sha384,
+#endif
},
#endif
-#ifdef CONFIG_SHA256
+#ifdef CONFIG_SHA512
{
- "sha256",
- SHA256_SUM_LEN,
- sha256_csum_wd,
- CHUNKSZ_SHA256,
- hash_init_sha256,
- hash_update_sha256,
- hash_finish_sha256,
+ .name = "sha512",
+ .digest_size = SHA512_SUM_LEN,
+ .chunk_size = CHUNKSZ_SHA512,
+#ifdef CONFIG_SHA512_HW_ACCEL
+ .hash_func_ws = hw_sha512,
+#else
+ .hash_func_ws = sha512_csum_wd,
+#endif
+#if defined(CONFIG_SHA512_HW_ACCEL) && defined(CONFIG_SHA_PROG_HW_ACCEL)
+ .hash_init = hw_sha_init,
+ .hash_update = hw_sha_update,
+ .hash_finish = hw_sha_finish,
+#else
+ .hash_init = hash_init_sha512,
+ .hash_update = hash_update_sha512,
+ .hash_finish = hash_finish_sha512,
+#endif
},
#endif
{
- "crc32",
- 4,
- crc32_wd_buf,
- CHUNKSZ_CRC32,
- hash_init_crc32,
- hash_update_crc32,
- hash_finish_crc32,
+ .name = "crc16-ccitt",
+ .digest_size = 2,
+ .chunk_size = CHUNKSZ,
+ .hash_func_ws = crc16_ccitt_wd_buf,
+ .hash_init = hash_init_crc16_ccitt,
+ .hash_update = hash_update_crc16_ccitt,
+ .hash_finish = hash_finish_crc16_ccitt,
+ },
+ {
+ .name = "crc32",
+ .digest_size = 4,
+ .chunk_size = CHUNKSZ_CRC32,
+ .hash_func_ws = crc32_wd_buf,
+ .hash_init = hash_init_crc32,
+ .hash_update = hash_update_crc32,
+ .hash_finish = hash_finish_crc32,
},
};
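
/*
 * Illustrative use of this table via the progressive interface (the lookup
 * helper hash_progressive_lookup_algo() is defined below); buf/len stand for
 * the caller's data:
 *
 *	struct hash_algo *algo;
 *	void *ctx;
 *	u8 digest[HASH_MAX_DIGEST_SIZE];
 *
 *	if (!hash_progressive_lookup_algo("sha256", &algo)) {
 *		algo->hash_init(algo, &ctx);
 *		algo->hash_update(algo, ctx, buf, len, 1);
 *		algo->hash_finish(algo, ctx, digest, sizeof(digest));
 *	}
 */
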
/* Try to minimize code size for boards that don't want much hashing */
#if defined(CONFIG_SHA256) || defined(CONFIG_CMD_SHA1SUM) || \
- defined(CONFIG_CRC32_VERIFY) || defined(CONFIG_CMD_HASH)
+ defined(CONFIG_CRC32_VERIFY) || defined(CONFIG_CMD_HASH) || \
+ defined(CONFIG_SHA384) || defined(CONFIG_SHA512)
#define multi_hash() 1
#else
#define multi_hash() 0
#endif
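+
+/*
+ * On boards that relocate U-Boot manually (CONFIG_NEEDS_MANUAL_RELOC) the
+ * name strings and function pointers in hash_algo[] still hold their
+ * pre-relocation addresses, so adjust them by gd->reloc_off once before the
+ * table is first used.
+ */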
+static void reloc_update(void)
+{
+#if !defined(USE_HOSTCC) && defined(CONFIG_NEEDS_MANUAL_RELOC)
+ int i;
+ static bool done;
+
+ if (!done) {
+ done = true;
+ for (i = 0; i < ARRAY_SIZE(hash_algo); i++) {
+ hash_algo[i].name += gd->reloc_off;
+ hash_algo[i].hash_func_ws += gd->reloc_off;
+ hash_algo[i].hash_init += gd->reloc_off;
+ hash_algo[i].hash_update += gd->reloc_off;
+ hash_algo[i].hash_finish += gd->reloc_off;
+ }
+ }
+#endif
+}
+
int hash_lookup_algo(const char *algo_name, struct hash_algo **algop)
{
int i;
+ reloc_update();
+
for (i = 0; i < ARRAY_SIZE(hash_algo); i++) {
if (!strcmp(algo_name, hash_algo[i].name)) {
			*algop = &hash_algo[i];
			return 0;
		}
	}

	return -EPROTONOSUPPORT;
}

int hash_progressive_lookup_algo(const char *algo_name,
				 struct hash_algo **algop)
{
int i;
+ reloc_update();
+
for (i = 0; i < ARRAY_SIZE(hash_algo); i++) {
if (!strcmp(algo_name, hash_algo[i].name)) {
if (hash_algo[i].hash_init) {
str_ptr += 2;
}
*str_ptr = '\0';
- setenv(dest, str_output);
+ env_set(dest, str_output);
} else {
ulong addr;
void *buf;
if (strlen(verify_str) == digits)
vsum_str = verify_str;
else {
- vsum_str = getenv(verify_str);
+ vsum_str = env_get(verify_str);
if (vsum_str == NULL || strlen(vsum_str) != digits) {
printf("Expected %d hex digits in env var\n",
digits);
printf("%02x", output[i]);
}
-int hash_command(const char *algo_name, int flags, cmd_tbl_t *cmdtp, int flag,
- int argc, char * const argv[])
+int hash_command(const char *algo_name, int flags, struct cmd_tbl *cmdtp,
+ int flag, int argc, char *const argv[])
{
ulong addr, len;
if (multi_hash()) {
struct hash_algo *algo;
- uint8_t output[HASH_MAX_DIGEST_SIZE];
+ u8 *output;
uint8_t vsum[HASH_MAX_DIGEST_SIZE];
void *buf;
return 1;
}
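+	/*
+	 * Hardware hashing engines may DMA the digest straight into this
+	 * buffer, so allocate it cache-line aligned.
+	 */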
+ output = memalign(ARCH_DMA_MINALIGN,
+ sizeof(uint32_t) * HASH_MAX_DIGEST_SIZE);
+
buf = map_sysmem(addr, len);
algo->hash_func_ws(buf, len, output, algo->chunk_size);
unmap_sysmem(buf);
store_result(algo, output, *argv,
flags & HASH_FLAG_ENV);
}
+ unmap_sysmem(output);
+
}
/* Horrible code size hack for boards that just want crc32 */