commit a33b5fdaa6df6faaf1a3fabbd6a4937e8249ee69
tree ea5aa07fcb7db9688fe927e28a4b37fabd65c854
parent 6579aa60908701d308df77065093fc34bffc030c
author jsing <> 2025-08-12 10:00:40 +0000
committer jsing <> 2025-08-12 10:00:40 +0000
Add const to bignum_*() function calls.
Now that s2n-bignum has marked various inputs as const, we can do the same.
In most cases we were casting away const, which we no longer need to do.
-rw-r--r--  src/lib/libcrypto/bn/arch/amd64/bn_arch.c | 32
1 file changed, 16 insertions(+), 16 deletions(-)
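As a minimal sketch of the pattern in this change (word_add() and WORD are hypothetical stand-ins for an s2n-bignum entry point and BN_ULONG, not the actual bn_arch.c code): once the callee declares its inputs const, the wrapper's casts only convert the word type and no longer strip the const qualifier.

#include <stdint.h>

typedef uint64_t WORD;	/* hypothetical stand-in for BN_ULONG */

/* Hypothetical callee whose read-only inputs are now marked const. */
uint64_t word_add(int n, uint64_t *r, const uint64_t *a, const uint64_t *b);

uint64_t
wrap_add(WORD *rd, const WORD *ad, const WORD *bd, int n)
{
	/*
	 * Before: (uint64_t *)ad cast away the const qualifier.
	 * Now: the cast only changes the word type and keeps const.
	 */
	return word_add(n, (uint64_t *)rd, (const uint64_t *)ad,
	    (const uint64_t *)bd);
}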
diff --git a/src/lib/libcrypto/bn/arch/amd64/bn_arch.c b/src/lib/libcrypto/bn/arch/amd64/bn_arch.c
index 8eb3670def..38a4350d17 100644
--- a/src/lib/libcrypto/bn/arch/amd64/bn_arch.c
+++ b/src/lib/libcrypto/bn/arch/amd64/bn_arch.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: bn_arch.c,v 1.8 2025/08/05 15:01:13 jsing Exp $ */
+/* $OpenBSD: bn_arch.c,v 1.9 2025/08/12 10:00:40 jsing Exp $ */
 /*
  * Copyright (c) 2023 Joel Sing <jsing@openbsd.org>
  *
@@ -26,8 +26,8 @@ BN_ULONG
 bn_add(BN_ULONG *r, int r_len, const BN_ULONG *a, int a_len, const BN_ULONG *b,
     int b_len)
 {
-	return bignum_add(r_len, (uint64_t *)r, a_len, (uint64_t *)a,
-	    b_len, (uint64_t *)b);
+	return bignum_add(r_len, (uint64_t *)r, a_len, (const uint64_t *)a,
+	    b_len, (const uint64_t *)b);
 }
 #endif
 
@@ -36,8 +36,8 @@ bn_add(BN_ULONG *r, int r_len, const BN_ULONG *a, int a_len, const BN_ULONG *b,
 BN_ULONG
 bn_add_words(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd, int n)
 {
-	return bignum_add(n, (uint64_t *)rd, n, (uint64_t *)ad, n,
-	    (uint64_t *)bd);
+	return bignum_add(n, (uint64_t *)rd, n, (const uint64_t *)ad, n,
+	    (const uint64_t *)bd);
 }
 #endif
 
@@ -46,8 +46,8 @@ BN_ULONG
 bn_sub(BN_ULONG *r, int r_len, const BN_ULONG *a, int a_len, const BN_ULONG *b,
     int b_len)
 {
-	return bignum_sub(r_len, (uint64_t *)r, a_len, (uint64_t *)a,
-	    b_len, (uint64_t *)b);
+	return bignum_sub(r_len, (uint64_t *)r, a_len, (const uint64_t *)a,
+	    b_len, (const uint64_t *)b);
 }
 #endif
 
@@ -55,8 +55,8 @@ bn_sub(BN_ULONG *r, int r_len, const BN_ULONG *a, int a_len, const BN_ULONG *b,
 BN_ULONG
 bn_sub_words(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd, int n)
 {
-	return bignum_sub(n, (uint64_t *)rd, n, (uint64_t *)ad, n,
-	    (uint64_t *)bd);
+	return bignum_sub(n, (uint64_t *)rd, n, (const uint64_t *)ad, n,
+	    (const uint64_t *)bd);
 }
 #endif
 
@@ -64,7 +64,7 @@ bn_sub_words(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd, int n)
 BN_ULONG
 bn_mul_add_words(BN_ULONG *rd, const BN_ULONG *ad, int num, BN_ULONG w)
 {
-	return bignum_cmadd(num, (uint64_t *)rd, w, num, (uint64_t *)ad);
+	return bignum_cmadd(num, (uint64_t *)rd, w, num, (const uint64_t *)ad);
 }
 #endif
 
@@ -72,7 +72,7 @@ bn_mul_add_words(BN_ULONG *rd, const BN_ULONG *ad, int num, BN_ULONG w)
 BN_ULONG
 bn_mul_words(BN_ULONG *rd, const BN_ULONG *ad, int num, BN_ULONG w)
 {
-	return bignum_cmul(num, (uint64_t *)rd, w, num, (uint64_t *)ad);
+	return bignum_cmul(num, (uint64_t *)rd, w, num, (const uint64_t *)ad);
 }
 #endif
 
@@ -81,7 +81,7 @@ void
 bn_mul_comba4(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd)
 {
 	/* XXX - consider using non-alt on CPUs that have the ADX extension. */
-	bignum_mul_4_8_alt((uint64_t *)rd, (uint64_t *)ad, (uint64_t *)bd);
+	bignum_mul_4_8_alt((uint64_t *)rd, (const uint64_t *)ad, (const uint64_t *)bd);
 }
 #endif
 
@@ -90,7 +90,7 @@ void
 bn_mul_comba8(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd)
 {
 	/* XXX - consider using non-alt on CPUs that have the ADX extension. */
-	bignum_mul_8_16_alt((uint64_t *)rd, (uint64_t *)ad, (uint64_t *)bd);
+	bignum_mul_8_16_alt((uint64_t *)rd, (const uint64_t *)ad, (const uint64_t *)bd);
 }
 #endif
 
@@ -98,7 +98,7 @@ bn_mul_comba8(BN_ULONG *rd, const BN_ULONG *ad, const BN_ULONG *bd)
 int
 bn_sqr(BIGNUM *r, const BIGNUM *a, int r_len, BN_CTX *ctx)
 {
-	bignum_sqr(r_len, (uint64_t *)r->d, a->top, (uint64_t *)a->d);
+	bignum_sqr(r_len, (uint64_t *)r->d, a->top, (const uint64_t *)a->d);
 
 	return 1;
 }
@@ -109,7 +109,7 @@ void
 bn_sqr_comba4(BN_ULONG *rd, const BN_ULONG *ad)
 {
 	/* XXX - consider using non-alt on CPUs that have the ADX extension. */
-	bignum_sqr_4_8_alt((uint64_t *)rd, (uint64_t *)ad);
+	bignum_sqr_4_8_alt((uint64_t *)rd, (const uint64_t *)ad);
 }
 #endif
 
@@ -118,7 +118,7 @@ void
 bn_sqr_comba8(BN_ULONG *rd, const BN_ULONG *ad)
 {
 	/* XXX - consider using non-alt on CPUs that have the ADX extension. */
-	bignum_sqr_8_16_alt((uint64_t *)rd, (uint64_t *)ad);
+	bignum_sqr_8_16_alt((uint64_t *)rd, (const uint64_t *)ad);
 }
 #endif
 