Diffstat (limited to 'src/crypto')
-rw-r--r--   src/crypto/chacha.h      11
-rw-r--r--   src/crypto/hash-ops.h     3
-rw-r--r--   src/crypto/hash.h         6
-rw-r--r--   src/crypto/slow-hash.c   30
4 files changed, 34 insertions, 16 deletions
diff --git a/src/crypto/chacha.h b/src/crypto/chacha.h
index 22da53bd0..7a120931a 100644
--- a/src/crypto/chacha.h
+++ b/src/crypto/chacha.h
@@ -69,10 +69,17 @@ namespace crypto {
chacha20(data, length, key.data(), reinterpret_cast<const uint8_t*>(&iv), cipher);
}
- inline void generate_chacha_key(const void *data, size_t size, chacha_key& key, int cn_variant = 0, bool prehashed=false) {
+ inline void generate_chacha_key(const void *data, size_t size, chacha_key& key) {
static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
- crypto::cn_slow_hash_pre(data, size, pwd_hash.data(), cn_variant, prehashed);
+ crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 0/*prehashed*/);
+ memcpy(&key, pwd_hash.data(), sizeof(key));
+ }
+
+ inline void generate_chacha_key_prehashed(const void *data, size_t size, chacha_key& key) {
+ static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
+ tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
+ crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 1/*prehashed*/);
memcpy(&key, pwd_hash.data(), sizeof(key));
}
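
The chacha.h change above replaces the old cn_variant/prehashed default arguments with two explicit entry points. A minimal caller-side sketch of the resulting contract follows; the buffer names are illustrative, and the only assumption beyond the diff is that the pre-hashed input is the full 200-byte Keccak1600 state that cn_slow_hash would otherwise compute itself:

    // Hedged sketch -- `password` and `keccak_state` are hypothetical caller data.
    #include <string>
    #include "crypto/chacha.h"

    void derive_keys_example(const std::string &password, const char (&keccak_state)[200])
    {
        crypto::chacha_key key_from_password, key_from_state;

        // Raw secret: cn_slow_hash (variant 0) is applied to the input internally.
        crypto::generate_chacha_key(password.data(), password.size(), key_from_password);

        // Pre-hashed input: the 200-byte Keccak1600 state is copied straight into
        // cn_slow_hash's internal state, skipping the initial Keccak pass.
        crypto::generate_chacha_key_prehashed(keccak_state, sizeof(keccak_state), key_from_state);
    }
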
diff --git a/src/crypto/hash-ops.h b/src/crypto/hash-ops.h
index 934d464de..d77d55cf3 100644
--- a/src/crypto/hash-ops.h
+++ b/src/crypto/hash-ops.h
@@ -79,8 +79,7 @@ enum {
};
void cn_fast_hash(const void *data, size_t length, char *hash);
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant);
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool pre);
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed);
void hash_extra_blake(const void *data, size_t length, char *hash);
void hash_extra_groestl(const void *data, size_t length, char *hash);
diff --git a/src/crypto/hash.h b/src/crypto/hash.h
index bf4f4c096..995e2294e 100644
--- a/src/crypto/hash.h
+++ b/src/crypto/hash.h
@@ -72,7 +72,11 @@ namespace crypto {
}
inline void cn_slow_hash(const void *data, std::size_t length, hash &hash, int variant = 0) {
- cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant);
+ cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 0/*prehashed*/);
+ }
+
+ inline void cn_slow_hash_prehashed(const void *data, std::size_t length, hash &hash, int variant = 0) {
+ cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 1/*prehashed*/);
}
inline void tree_hash(const hash *hashes, std::size_t count, hash &root_hash) {
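
The hash.h hunk keeps the existing default-variant wrapper and adds cn_slow_hash_prehashed, which differs only in the trailing flag it forwards to the C function. A hedged usage sketch (`blob` and `state` are illustrative; `state` is assumed to already hold the 200-byte Keccak1600 state):

    // Hedged sketch of the two C++ wrappers; all caller-side names are hypothetical.
    #include <cstddef>
    #include "crypto/hash.h"

    void pow_example(const void *blob, std::size_t blob_len, const unsigned char (&state)[200])
    {
        crypto::hash h;

        // Normal path: Keccak1600 is run on `blob` inside cn_slow_hash (prehashed == 0).
        crypto::cn_slow_hash(blob, blob_len, h);             // variant defaults to 0

        // Pre-hashed path: the initial Keccak step is skipped (prehashed == 1).
        crypto::cn_slow_hash_prehashed(state, sizeof(state), h);
    }
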
diff --git a/src/crypto/slow-hash.c b/src/crypto/slow-hash.c
index 8c7dad8e0..d7dcbd274 100644
--- a/src/crypto/slow-hash.c
+++ b/src/crypto/slow-hash.c
@@ -564,11 +564,7 @@ void slow_hash_free_state(void)
* @param length the length in bytes of the data
* @param hash a pointer to a buffer in which the final 256 bit hash will be stored
*/
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
- cn_slow_hash_pre(data,length,hash,variant,false);
-}
-
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool prehashed)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
{
RDATA_ALIGN16 uint8_t expandedKey[240]; /* These buffers are aligned to use later with SSE functions */
@@ -909,7 +905,7 @@ STATIC INLINE void aes_pseudo_round_xor(const uint8_t *in, uint8_t *out, const u
}
}
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
{
RDATA_ALIGN16 uint8_t expandedKey[240];
RDATA_ALIGN16 uint8_t hp_state[MEMORY];
@@ -932,7 +928,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
/* CryptoNight Step 1: Use Keccak1600 to initialize the 'state' (and 'text') buffers from the data. */
- hash_process(&state.hs, data, length);
+ if (prehashed) {
+ memcpy(&state.hs, data, length);
+ } else {
+ hash_process(&state.hs, data, length);
+ }
memcpy(text, state.init, INIT_SIZE_BYTE);
VARIANT1_INIT64();
@@ -1105,7 +1105,7 @@ STATIC INLINE void xor_blocks(uint8_t* a, const uint8_t* b)
U64(a)[1] ^= U64(b)[1];
}
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
{
uint8_t text[INIT_SIZE_BYTE];
uint8_t a[AES_BLOCK_SIZE];
@@ -1131,7 +1131,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
long_state = (uint8_t *)malloc(MEMORY);
#endif
- hash_process(&state.hs, data, length);
+ if (prehashed) {
+ memcpy(&state.hs, data, length);
+ } else {
+ hash_process(&state.hs, data, length);
+ }
memcpy(text, state.init, INIT_SIZE_BYTE);
VARIANT1_INIT64();
@@ -1289,7 +1293,7 @@ union cn_slow_hash_state {
};
#pragma pack(pop)
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed) {
uint8_t long_state[MEMORY];
union cn_slow_hash_state state;
uint8_t text[INIT_SIZE_BYTE];
@@ -1301,7 +1305,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
uint8_t aes_key[AES_KEY_SIZE];
oaes_ctx *aes_ctx;
- hash_process(&state.hs, data, length);
+ if (prehashed) {
+ memcpy(&state.hs, data, length);
+ } else {
+ hash_process(&state.hs, data, length);
+ }
memcpy(text, state.init, INIT_SIZE_BYTE);
memcpy(aes_key, state.hs.b, AES_KEY_SIZE);
aes_ctx = (oaes_ctx *) oaes_alloc();
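
For context, the prehashed branch added to each cn_slow_hash implementation only changes how state.hs is initialised: with prehashed == 0 the function hashes the input itself via hash_process, with prehashed != 0 it memcpy()s the input straight into the state, so the caller must pass the full 200-byte Keccak1600 state. A hedged caller-side sketch follows; the keccak1600 helper mirrors what hash_process does internally, and its exact name and header are an assumption here, as is everything else not shown in the diff. Given that assumption, the two calls below should produce the same hash:

    // Hedged sketch of the prehashed contract -- not part of the diff.
    #include <cstddef>
    #include <cstdint>
    extern "C" {
    #include "hash-ops.h"
    #include "keccak.h"   // assumed location of keccak1600()
    }

    static void pow_both_ways(const void *blob, std::size_t blob_len, char pow_out[HASH_SIZE])
    {
        std::uint8_t state[200];  // full Keccak1600 state expected by the prehashed path

        // prehashed == 0: cn_slow_hash runs the Keccak pass on `blob` itself.
        cn_slow_hash(blob, blob_len, pow_out, 0, 0);

        // prehashed == 1: the caller hashes first; cn_slow_hash memcpy()s `state`
        // into state.hs and skips the Keccak pass.
        keccak1600(static_cast<const std::uint8_t*>(blob), blob_len, state);  // assumed helper
        cn_slow_hash(state, sizeof(state), pow_out, 0, 1);
    }
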