Diffstat (limited to 'patches/asm/0001-Revert-Add-endbr64-where-needed-by-inspection.-Passe.patch')
-rw-r--r--    patches/asm/0001-Revert-Add-endbr64-where-needed-by-inspection.-Passe.patch    1055
1 file changed, 1055 insertions, 0 deletions
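
For context, the commit being reverted had added endbr64 as the first instruction of each assembly entry point. endbr64 is the landing-pad marker for Intel CET indirect branch tracking (IBT): when IBT is enforced, an indirect call or jump may only land on such an instruction, and on CPUs without CET it decodes as a multi-byte NOP. A minimal sketch of the pattern this patch removes is shown below; the function name is hypothetical and does not come from the patch itself:

    .text
    .globl  example_fn
    .type   example_fn,@function
    .align  16
example_fn:
    endbr64                 # IBT landing pad; must be the first instruction
                            # at any indirect-branch target when IBT is enforced
    xor     %eax,%eax       # trivial body: return 0
    ret
    .size   example_fn,.-example_fn

The revert drops these markers again from the libcrypto perlasm and .S sources listed in the diffstat above.
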
diff --git a/patches/asm/0001-Revert-Add-endbr64-where-needed-by-inspection.-Passe.patch b/patches/asm/0001-Revert-Add-endbr64-where-needed-by-inspection.-Passe.patch
new file mode 100644
index 0000000..8ec94cd
--- /dev/null
+++ b/patches/asm/0001-Revert-Add-endbr64-where-needed-by-inspection.-Passe.patch
@@ -0,0 +1,1055 @@
1From 5f7fafff280e6e17d7fafea20fda99762d74581f Mon Sep 17 00:00:00 2001
2From: Brent Cook <busterb@gmail.com>
3Date: Mon, 30 Oct 2023 21:59:32 -0500
4Subject: [PATCH 1/4] Revert "Add endbr64 where needed by inspection. Passes
5 regresson tests."
6
7This reverts commit e2118101444d3cf3cab87297b363cecd5357ae89.
8---
9 src/lib/libcrypto/aes/asm/aes-x86_64.pl | 13 -------------
10 src/lib/libcrypto/aes/asm/aesni-sha1-x86_64.pl | 4 ----
11 src/lib/libcrypto/aes/asm/aesni-x86_64.pl | 15 ---------------
12 src/lib/libcrypto/aes/asm/bsaes-x86_64.pl | 14 --------------
13 src/lib/libcrypto/aes/asm/vpaes-x86_64.pl | 14 --------------
14 src/lib/libcrypto/bn/arch/amd64/bignum_add.S | 1 -
15 src/lib/libcrypto/bn/arch/amd64/bignum_cmadd.S | 1 -
16 src/lib/libcrypto/bn/arch/amd64/bignum_cmul.S | 1 -
17 src/lib/libcrypto/bn/arch/amd64/bignum_mul.S | 1 -
18 .../libcrypto/bn/arch/amd64/bignum_mul_4_8_alt.S | 1 -
19 .../libcrypto/bn/arch/amd64/bignum_mul_8_16_alt.S | 1 -
20 src/lib/libcrypto/bn/arch/amd64/bignum_sqr.S | 1 -
21 .../libcrypto/bn/arch/amd64/bignum_sqr_4_8_alt.S | 1 -
22 .../libcrypto/bn/arch/amd64/bignum_sqr_8_16_alt.S | 1 -
23 src/lib/libcrypto/bn/arch/amd64/bignum_sub.S | 1 -
24 src/lib/libcrypto/bn/arch/amd64/word_clz.S | 1 -
25 src/lib/libcrypto/bn/asm/modexp512-x86_64.pl | 5 -----
26 src/lib/libcrypto/bn/asm/x86_64-mont.pl | 3 ---
27 src/lib/libcrypto/bn/asm/x86_64-mont5.pl | 5 -----
28 src/lib/libcrypto/camellia/asm/cmll-x86_64.pl | 8 --------
29 src/lib/libcrypto/md5/asm/md5-x86_64.pl | 1 -
30 src/lib/libcrypto/modes/asm/ghash-x86_64.pl | 4 ----
31 src/lib/libcrypto/rc4/asm/rc4-md5-x86_64.pl | 6 ++----
32 src/lib/libcrypto/rc4/asm/rc4-x86_64.pl | 7 ++-----
33 src/lib/libcrypto/sha/asm/sha1-x86_64.pl | 4 ----
34 src/lib/libcrypto/sha/asm/sha512-x86_64.pl | 1 -
35 src/lib/libcrypto/whrlpool/asm/wp-x86_64.pl | 1 -
36 src/lib/libcrypto/x86_64cpuid.pl | 2 --
37 28 files changed, 4 insertions(+), 114 deletions(-)
38
39diff --git a/src/lib/libcrypto/aes/asm/aes-x86_64.pl b/src/lib/libcrypto/aes/asm/aes-x86_64.pl
40index 78ba20ca5..b7399b552 100755
41--- a/src/lib/libcrypto/aes/asm/aes-x86_64.pl
42+++ b/src/lib/libcrypto/aes/asm/aes-x86_64.pl
43@@ -318,7 +318,6 @@ $code.=<<___;
44 .type _x86_64_AES_encrypt,\@abi-omnipotent
45 .align 16
46 _x86_64_AES_encrypt:
47- endbr64
48 xor 0($key),$s0 # xor with key
49 xor 4($key),$s1
50 xor 8($key),$s2
51@@ -549,7 +548,6 @@ $code.=<<___;
52 .type _x86_64_AES_encrypt_compact,\@abi-omnipotent
53 .align 16
54 _x86_64_AES_encrypt_compact:
55- endbr64
56 lea 128($sbox),$inp # size optimization
57 mov 0-128($inp),$acc1 # prefetch Te4
58 mov 32-128($inp),$acc2
59@@ -595,7 +593,6 @@ $code.=<<___;
60 .hidden asm_AES_encrypt
61 asm_AES_encrypt:
62 AES_encrypt:
63- endbr64
64 push %rbx
65 push %rbp
66 push %r12
67@@ -887,7 +884,6 @@ $code.=<<___;
68 .type _x86_64_AES_decrypt,\@abi-omnipotent
69 .align 16
70 _x86_64_AES_decrypt:
71- endbr64
72 xor 0($key),$s0 # xor with key
73 xor 4($key),$s1
74 xor 8($key),$s2
75@@ -1142,7 +1138,6 @@ $code.=<<___;
76 .type _x86_64_AES_decrypt_compact,\@abi-omnipotent
77 .align 16
78 _x86_64_AES_decrypt_compact:
79- endbr64
80 lea 128($sbox),$inp # size optimization
81 mov 0-128($inp),$acc1 # prefetch Td4
82 mov 32-128($inp),$acc2
83@@ -1197,7 +1192,6 @@ $code.=<<___;
84 .hidden asm_AES_decrypt
85 asm_AES_decrypt:
86 AES_decrypt:
87- endbr64
88 push %rbx
89 push %rbp
90 push %r12
91@@ -1297,7 +1291,6 @@ $code.=<<___;
92 .type AES_set_encrypt_key,\@function,3
93 .align 16
94 AES_set_encrypt_key:
95- endbr64
96 push %rbx
97 push %rbp
98 push %r12 # redundant, but allows to share
99@@ -1323,7 +1316,6 @@ AES_set_encrypt_key:
100 .type _x86_64_AES_set_encrypt_key,\@abi-omnipotent
101 .align 16
102 _x86_64_AES_set_encrypt_key:
103- endbr64
104 mov %esi,%ecx # %ecx=bits
105 mov %rdi,%rsi # %rsi=userKey
106 mov %rdx,%rdi # %rdi=key
107@@ -1569,7 +1561,6 @@ $code.=<<___;
108 .type AES_set_decrypt_key,\@function,3
109 .align 16
110 AES_set_decrypt_key:
111- endbr64
112 push %rbx
113 push %rbp
114 push %r12
115@@ -1669,7 +1660,6 @@ $code.=<<___;
116 .hidden asm_AES_cbc_encrypt
117 asm_AES_cbc_encrypt:
118 AES_cbc_encrypt:
119- endbr64
120 cmp \$0,%rdx # check length
121 je .Lcbc_epilogue
122 pushfq
123@@ -2561,7 +2551,6 @@ $code.=<<___;
124 .type block_se_handler,\@abi-omnipotent
125 .align 16
126 block_se_handler:
127- endbr64
128 push %rsi
129 push %rdi
130 push %rbx
131@@ -2620,7 +2609,6 @@ block_se_handler:
132 .type key_se_handler,\@abi-omnipotent
133 .align 16
134 key_se_handler:
135- endbr64
136 push %rsi
137 push %rdi
138 push %rbx
139@@ -2678,7 +2666,6 @@ key_se_handler:
140 .type cbc_se_handler,\@abi-omnipotent
141 .align 16
142 cbc_se_handler:
143- endbr64
144 push %rsi
145 push %rdi
146 push %rbx
147diff --git a/src/lib/libcrypto/aes/asm/aesni-sha1-x86_64.pl b/src/lib/libcrypto/aes/asm/aesni-sha1-x86_64.pl
148index 879d16793..bafa906a0 100644
149--- a/src/lib/libcrypto/aes/asm/aesni-sha1-x86_64.pl
150+++ b/src/lib/libcrypto/aes/asm/aesni-sha1-x86_64.pl
151@@ -89,7 +89,6 @@ $code.=<<___;
152 .type aesni_cbc_sha1_enc,\@abi-omnipotent
153 .align 16
154 aesni_cbc_sha1_enc:
155- endbr64
156 # caller should check for SSSE3 and AES-NI bits
157 mov OPENSSL_ia32cap_P+0(%rip),%r10d
158 mov OPENSSL_ia32cap_P+4(%rip),%r11d
159@@ -133,7 +132,6 @@ $code.=<<___;
160 .type aesni_cbc_sha1_enc_ssse3,\@function,6
161 .align 16
162 aesni_cbc_sha1_enc_ssse3:
163- endbr64
164 mov `($win64?56:8)`(%rsp),$inp # load 7th argument
165 #shr \$6,$len # debugging artefact
166 #jz .Lepilogue_ssse3 # debugging artefact
167@@ -652,7 +650,6 @@ $code.=<<___;
168 .type aesni_cbc_sha1_enc_avx,\@function,6
169 .align 16
170 aesni_cbc_sha1_enc_avx:
171- endbr64
172 mov `($win64?56:8)`(%rsp),$inp # load 7th argument
173 #shr \$6,$len # debugging artefact
174 #jz .Lepilogue_avx # debugging artefact
175@@ -1103,7 +1100,6 @@ $code.=<<___;
176 .type ssse3_handler,\@abi-omnipotent
177 .align 16
178 ssse3_handler:
179- endbr64
180 push %rsi
181 push %rdi
182 push %rbx
183diff --git a/src/lib/libcrypto/aes/asm/aesni-x86_64.pl b/src/lib/libcrypto/aes/asm/aesni-x86_64.pl
184index 07d40a84a..e662fbc7c 100644
185--- a/src/lib/libcrypto/aes/asm/aesni-x86_64.pl
186+++ b/src/lib/libcrypto/aes/asm/aesni-x86_64.pl
187@@ -242,7 +242,6 @@ $code.=<<___;
188 .type ${PREFIX}_encrypt,\@abi-omnipotent
189 .align 16
190 ${PREFIX}_encrypt:
191- endbr64
192 movups ($inp),$inout0 # load input
193 mov 240($key),$rounds # key->rounds
194 ___
195@@ -256,7 +255,6 @@ $code.=<<___;
196 .type ${PREFIX}_decrypt,\@abi-omnipotent
197 .align 16
198 ${PREFIX}_decrypt:
199- endbr64
200 movups ($inp),$inout0 # load input
201 mov 240($key),$rounds # key->rounds
202 ___
203@@ -286,7 +284,6 @@ $code.=<<___;
204 .type _aesni_${dir}rypt3,\@abi-omnipotent
205 .align 16
206 _aesni_${dir}rypt3:
207- endbr64
208 $movkey ($key),$rndkey0
209 shr \$1,$rounds
210 $movkey 16($key),$rndkey1
211@@ -331,7 +328,6 @@ $code.=<<___;
212 .type _aesni_${dir}rypt4,\@abi-omnipotent
213 .align 16
214 _aesni_${dir}rypt4:
215- endbr64
216 $movkey ($key),$rndkey0
217 shr \$1,$rounds
218 $movkey 16($key),$rndkey1
219@@ -377,7 +373,6 @@ $code.=<<___;
220 .type _aesni_${dir}rypt6,\@abi-omnipotent
221 .align 16
222 _aesni_${dir}rypt6:
223- endbr64
224 $movkey ($key),$rndkey0
225 shr \$1,$rounds
226 $movkey 16($key),$rndkey1
227@@ -442,7 +437,6 @@ $code.=<<___;
228 .type _aesni_${dir}rypt8,\@abi-omnipotent
229 .align 16
230 _aesni_${dir}rypt8:
231- endbr64
232 $movkey ($key),$rndkey0
233 shr \$1,$rounds
234 $movkey 16($key),$rndkey1
235@@ -531,7 +525,6 @@ $code.=<<___;
236 .type aesni_ecb_encrypt,\@function,5
237 .align 16
238 aesni_ecb_encrypt:
239- endbr64
240 and \$-16,$len
241 jz .Lecb_ret
242
243@@ -837,7 +830,6 @@ $code.=<<___;
244 .type aesni_ccm64_encrypt_blocks,\@function,6
245 .align 16
246 aesni_ccm64_encrypt_blocks:
247- endbr64
248 ___
249 $code.=<<___ if ($win64);
250 lea -0x58(%rsp),%rsp
251@@ -2487,7 +2479,6 @@ $code.=<<___;
252 .type ${PREFIX}_set_decrypt_key,\@abi-omnipotent
253 .align 16
254 ${PREFIX}_set_decrypt_key:
255- endbr64
256 sub \$8,%rsp
257 call __aesni_set_encrypt_key
258 shl \$4,$bits # rounds-1 after _aesni_set_encrypt_key
259@@ -2538,7 +2529,6 @@ $code.=<<___;
260 .type ${PREFIX}_set_encrypt_key,\@abi-omnipotent
261 .align 16
262 ${PREFIX}_set_encrypt_key:
263- endbr64
264 __aesni_set_encrypt_key:
265 sub \$8,%rsp
266 mov \$-1,%rax
267@@ -2760,7 +2750,6 @@ $code.=<<___ if ($PREFIX eq "aesni");
268 .type ecb_se_handler,\@abi-omnipotent
269 .align 16
270 ecb_se_handler:
271- endbr64
272 push %rsi
273 push %rdi
274 push %rbx
275@@ -2780,7 +2769,6 @@ ecb_se_handler:
276 .type ccm64_se_handler,\@abi-omnipotent
277 .align 16
278 ccm64_se_handler:
279- endbr64
280 push %rsi
281 push %rdi
282 push %rbx
283@@ -2822,7 +2810,6 @@ ccm64_se_handler:
284 .type ctr32_se_handler,\@abi-omnipotent
285 .align 16
286 ctr32_se_handler:
287- endbr64
288 push %rsi
289 push %rdi
290 push %rbx
291@@ -2858,7 +2845,6 @@ ctr32_se_handler:
292 .type xts_se_handler,\@abi-omnipotent
293 .align 16
294 xts_se_handler:
295- endbr64
296 push %rsi
297 push %rdi
298 push %rbx
299@@ -2900,7 +2886,6 @@ $code.=<<___;
300 .type cbc_se_handler,\@abi-omnipotent
301 .align 16
302 cbc_se_handler:
303- endbr64
304 push %rsi
305 push %rdi
306 push %rbx
307diff --git a/src/lib/libcrypto/aes/asm/bsaes-x86_64.pl b/src/lib/libcrypto/aes/asm/bsaes-x86_64.pl
308index 7098ba27f..20e9e1f71 100644
309--- a/src/lib/libcrypto/aes/asm/bsaes-x86_64.pl
310+++ b/src/lib/libcrypto/aes/asm/bsaes-x86_64.pl
311@@ -813,7 +813,6 @@ $code.=<<___;
312 .type _bsaes_encrypt8,\@abi-omnipotent
313 .align 64
314 _bsaes_encrypt8:
315- endbr64
316 lea .LBS0(%rip), $const # constants table
317
318 movdqa ($key), @XMM[9] # round 0 key
319@@ -878,7 +877,6 @@ $code.=<<___;
320 .type _bsaes_decrypt8,\@abi-omnipotent
321 .align 64
322 _bsaes_decrypt8:
323- endbr64
324 lea .LBS0(%rip), $const # constants table
325
326 movdqa ($key), @XMM[9] # round 0 key
327@@ -970,7 +968,6 @@ $code.=<<___;
328 .type _bsaes_key_convert,\@abi-omnipotent
329 .align 16
330 _bsaes_key_convert:
331- endbr64
332 lea .Lmasks(%rip), $const
333 movdqu ($inp), %xmm7 # load round 0 key
334 lea 0x10($inp), $inp
335@@ -1060,7 +1057,6 @@ $code.=<<___;
336 .type bsaes_enc_key_convert,\@function,2
337 .align 16
338 bsaes_enc_key_convert:
339- endbr64
340 mov 240($inp),%r10d # pass rounds
341 mov $inp,%rcx # pass key
342 mov $out,%rax # pass key schedule
343@@ -1075,7 +1071,6 @@ bsaes_enc_key_convert:
344 .align 16
345 bsaes_encrypt_128:
346 .Lenc128_loop:
347- endbr64
348 movdqu 0x00($inp), @XMM[0] # load input
349 movdqu 0x10($inp), @XMM[1]
350 movdqu 0x20($inp), @XMM[2]
351@@ -1108,7 +1103,6 @@ bsaes_encrypt_128:
352 .type bsaes_dec_key_convert,\@function,2
353 .align 16
354 bsaes_dec_key_convert:
355- endbr64
356 mov 240($inp),%r10d # pass rounds
357 mov $inp,%rcx # pass key
358 mov $out,%rax # pass key schedule
359@@ -1123,7 +1117,6 @@ bsaes_dec_key_convert:
360 .type bsaes_decrypt_128,\@function,4
361 .align 16
362 bsaes_decrypt_128:
363- endbr64
364 .Ldec128_loop:
365 movdqu 0x00($inp), @XMM[0] # load input
366 movdqu 0x10($inp), @XMM[1]
367@@ -1169,7 +1162,6 @@ $code.=<<___;
368 .type bsaes_ecb_encrypt_blocks,\@abi-omnipotent
369 .align 16
370 bsaes_ecb_encrypt_blocks:
371- endbr64
372 mov %rsp, %rax
373 .Lecb_enc_prologue:
374 push %rbp
375@@ -1371,7 +1363,6 @@ $code.=<<___;
376 .type bsaes_ecb_decrypt_blocks,\@abi-omnipotent
377 .align 16
378 bsaes_ecb_decrypt_blocks:
379- endbr64
380 mov %rsp, %rax
381 .Lecb_dec_prologue:
382 push %rbp
383@@ -1577,7 +1568,6 @@ $code.=<<___;
384 .type bsaes_cbc_encrypt,\@abi-omnipotent
385 .align 16
386 bsaes_cbc_encrypt:
387- endbr64
388 ___
389 $code.=<<___ if ($win64);
390 mov 48(%rsp),$arg6 # pull direction flag
391@@ -1865,7 +1855,6 @@ $code.=<<___;
392 .type bsaes_ctr32_encrypt_blocks,\@abi-omnipotent
393 .align 16
394 bsaes_ctr32_encrypt_blocks:
395- endbr64
396 mov %rsp, %rax
397 .Lctr_enc_prologue:
398 push %rbp
399@@ -2107,7 +2096,6 @@ $code.=<<___;
400 .type bsaes_xts_encrypt,\@abi-omnipotent
401 .align 16
402 bsaes_xts_encrypt:
403- endbr64
404 mov %rsp, %rax
405 .Lxts_enc_prologue:
406 push %rbp
407@@ -2489,7 +2477,6 @@ $code.=<<___;
408 .type bsaes_xts_decrypt,\@abi-omnipotent
409 .align 16
410 bsaes_xts_decrypt:
411- endbr64
412 mov %rsp, %rax
413 .Lxts_dec_prologue:
414 push %rbp
415@@ -2966,7 +2953,6 @@ $code.=<<___;
416 .type se_handler,\@abi-omnipotent
417 .align 16
418 se_handler:
419- endbr64
420 push %rsi
421 push %rdi
422 push %rbx
423diff --git a/src/lib/libcrypto/aes/asm/vpaes-x86_64.pl b/src/lib/libcrypto/aes/asm/vpaes-x86_64.pl
424index 8ff8d8602..3ffb1a303 100644
425--- a/src/lib/libcrypto/aes/asm/vpaes-x86_64.pl
426+++ b/src/lib/libcrypto/aes/asm/vpaes-x86_64.pl
427@@ -82,7 +82,6 @@ $code.=<<___;
428 .type _vpaes_encrypt_core,\@abi-omnipotent
429 .align 16
430 _vpaes_encrypt_core:
431- endbr64
432 mov %rdx, %r9
433 mov \$16, %r11
434 mov 240(%rdx),%eax
435@@ -173,7 +172,6 @@ _vpaes_encrypt_core:
436 .type _vpaes_decrypt_core,\@abi-omnipotent
437 .align 16
438 _vpaes_decrypt_core:
439- endbr64
440 mov %rdx, %r9 # load key
441 mov 240(%rdx),%eax
442 movdqa %xmm9, %xmm1
443@@ -281,7 +279,6 @@ _vpaes_decrypt_core:
444 .type _vpaes_schedule_core,\@abi-omnipotent
445 .align 16
446 _vpaes_schedule_core:
447- endbr64
448 # rdi = key
449 # rsi = size in bits
450 # rdx = buffer
451@@ -467,7 +464,6 @@ _vpaes_schedule_core:
452 .type _vpaes_schedule_192_smear,\@abi-omnipotent
453 .align 16
454 _vpaes_schedule_192_smear:
455- endbr64
456 pshufd \$0x80, %xmm6, %xmm0 # d c 0 0 -> c 0 0 0
457 pxor %xmm0, %xmm6 # -> c+d c 0 0
458 pshufd \$0xFE, %xmm7, %xmm0 # b a _ _ -> b b b a
459@@ -499,7 +495,6 @@ _vpaes_schedule_192_smear:
460 .type _vpaes_schedule_round,\@abi-omnipotent
461 .align 16
462 _vpaes_schedule_round:
463- endbr64
464 # extract rcon from xmm8
465 pxor %xmm1, %xmm1
466 palignr \$15, %xmm8, %xmm1
467@@ -567,7 +562,6 @@ _vpaes_schedule_low_round:
468 .type _vpaes_schedule_transform,\@abi-omnipotent
469 .align 16
470 _vpaes_schedule_transform:
471- endbr64
472 movdqa %xmm9, %xmm1
473 pandn %xmm0, %xmm1
474 psrld \$4, %xmm1
475@@ -606,7 +600,6 @@ _vpaes_schedule_transform:
476 .type _vpaes_schedule_mangle,\@abi-omnipotent
477 .align 16
478 _vpaes_schedule_mangle:
479- endbr64
480 movdqa %xmm0, %xmm4 # save xmm0 for later
481 movdqa .Lk_mc_forward(%rip),%xmm5
482 test %rcx, %rcx
483@@ -680,7 +673,6 @@ _vpaes_schedule_mangle:
484 .type ${PREFIX}_set_encrypt_key,\@function,3
485 .align 16
486 ${PREFIX}_set_encrypt_key:
487- endbr64
488 ___
489 $code.=<<___ if ($win64);
490 lea -0xb8(%rsp),%rsp
491@@ -729,7 +721,6 @@ $code.=<<___;
492 .type ${PREFIX}_set_decrypt_key,\@function,3
493 .align 16
494 ${PREFIX}_set_decrypt_key:
495- endbr64
496 ___
497 $code.=<<___ if ($win64);
498 lea -0xb8(%rsp),%rsp
499@@ -783,7 +774,6 @@ $code.=<<___;
500 .type ${PREFIX}_encrypt,\@function,3
501 .align 16
502 ${PREFIX}_encrypt:
503- endbr64
504 ___
505 $code.=<<___ if ($win64);
506 lea -0xb8(%rsp),%rsp
507@@ -827,7 +817,6 @@ $code.=<<___;
508 .type ${PREFIX}_decrypt,\@function,3
509 .align 16
510 ${PREFIX}_decrypt:
511- endbr64
512 ___
513 $code.=<<___ if ($win64);
514 lea -0xb8(%rsp),%rsp
515@@ -877,7 +866,6 @@ $code.=<<___;
516 .type ${PREFIX}_cbc_encrypt,\@function,6
517 .align 16
518 ${PREFIX}_cbc_encrypt:
519- endbr64
520 xchg $key,$len
521 ___
522 ($len,$key)=($key,$len);
523@@ -961,7 +949,6 @@ $code.=<<___;
524 .type _vpaes_preheat,\@abi-omnipotent
525 .align 16
526 _vpaes_preheat:
527- endbr64
528 lea .Lk_s0F(%rip), %r10
529 movdqa -0x20(%r10), %xmm10 # .Lk_inv
530 movdqa -0x10(%r10), %xmm11 # .Lk_inv+16
531@@ -1092,7 +1079,6 @@ $code.=<<___;
532 .type se_handler,\@abi-omnipotent
533 .align 16
534 se_handler:
535- endbr64
536 push %rsi
537 push %rdi
538 push %rbx
539diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_add.S b/src/lib/libcrypto/bn/arch/amd64/bignum_add.S
540index 06298ca69..d56fa5e3a 100644
541--- a/src/lib/libcrypto/bn/arch/amd64/bignum_add.S
542+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_add.S
543@@ -49,7 +49,6 @@
544
545
546 S2N_BN_SYMBOL(bignum_add):
547- endbr64
548
549 #if WINDOWS_ABI
550 push rdi
551diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_cmadd.S b/src/lib/libcrypto/bn/arch/amd64/bignum_cmadd.S
552index 5ad712749..1dc1e5870 100644
553--- a/src/lib/libcrypto/bn/arch/amd64/bignum_cmadd.S
554+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_cmadd.S
555@@ -54,7 +54,6 @@
556
557
558 S2N_BN_SYMBOL(bignum_cmadd):
559- endbr64
560
561 #if WINDOWS_ABI
562 push rdi
563diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_cmul.S b/src/lib/libcrypto/bn/arch/amd64/bignum_cmul.S
564index 9199c8f48..c1a23ccea 100644
565--- a/src/lib/libcrypto/bn/arch/amd64/bignum_cmul.S
566+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_cmul.S
567@@ -51,7 +51,6 @@
568
569
570 S2N_BN_SYMBOL(bignum_cmul):
571- endbr64
572
573 #if WINDOWS_ABI
574 push rdi
575diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_mul.S b/src/lib/libcrypto/bn/arch/amd64/bignum_mul.S
576index 2d7ed1909..42ac988a1 100644
577--- a/src/lib/libcrypto/bn/arch/amd64/bignum_mul.S
578+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_mul.S
579@@ -59,7 +59,6 @@
580
581
582 S2N_BN_SYMBOL(bignum_mul):
583- endbr64
584
585 #if WINDOWS_ABI
586 push rdi
587diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_mul_4_8_alt.S b/src/lib/libcrypto/bn/arch/amd64/bignum_mul_4_8_alt.S
588index f02b09b28..3b7848b28 100644
589--- a/src/lib/libcrypto/bn/arch/amd64/bignum_mul_4_8_alt.S
590+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_mul_4_8_alt.S
591@@ -72,7 +72,6 @@
592 adc h, rdx
593
594 S2N_BN_SYMBOL(bignum_mul_4_8_alt):
595- endbr64
596
597 #if WINDOWS_ABI
598 push rdi
599diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_mul_8_16_alt.S b/src/lib/libcrypto/bn/arch/amd64/bignum_mul_8_16_alt.S
600index 97be83e1f..1be37840d 100644
601--- a/src/lib/libcrypto/bn/arch/amd64/bignum_mul_8_16_alt.S
602+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_mul_8_16_alt.S
603@@ -72,7 +72,6 @@
604 adc h, rdx
605
606 S2N_BN_SYMBOL(bignum_mul_8_16_alt):
607- endbr64
608
609 #if WINDOWS_ABI
610 push rdi
611diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr.S b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr.S
612index c4a0cabf3..2e05b9c17 100644
613--- a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr.S
614+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr.S
615@@ -62,7 +62,6 @@
616 #define llshort ebp
617
618 S2N_BN_SYMBOL(bignum_sqr):
619- endbr64
620
621 #if WINDOWS_ABI
622 push rdi
623diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_4_8_alt.S b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_4_8_alt.S
624index b228414dc..a635177c6 100644
625--- a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_4_8_alt.S
626+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_4_8_alt.S
627@@ -71,7 +71,6 @@
628 adc c, 0
629
630 S2N_BN_SYMBOL(bignum_sqr_4_8_alt):
631- endbr64
632
633 #if WINDOWS_ABI
634 push rdi
635diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_8_16_alt.S b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_8_16_alt.S
636index 04efeec7e..f698202d2 100644
637--- a/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_8_16_alt.S
638+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_sqr_8_16_alt.S
639@@ -103,7 +103,6 @@
640 adc c, 0
641
642 S2N_BN_SYMBOL(bignum_sqr_8_16_alt):
643- endbr64
644
645 #if WINDOWS_ABI
646 push rdi
647diff --git a/src/lib/libcrypto/bn/arch/amd64/bignum_sub.S b/src/lib/libcrypto/bn/arch/amd64/bignum_sub.S
648index 11a9bd7ed..f8e1fe35a 100644
649--- a/src/lib/libcrypto/bn/arch/amd64/bignum_sub.S
650+++ b/src/lib/libcrypto/bn/arch/amd64/bignum_sub.S
651@@ -49,7 +49,6 @@
652
653
654 S2N_BN_SYMBOL(bignum_sub):
655- endbr64
656
657 #if WINDOWS_ABI
658 push rdi
659diff --git a/src/lib/libcrypto/bn/arch/amd64/word_clz.S b/src/lib/libcrypto/bn/arch/amd64/word_clz.S
660index 464a9d90f..025e98f9c 100644
661--- a/src/lib/libcrypto/bn/arch/amd64/word_clz.S
662+++ b/src/lib/libcrypto/bn/arch/amd64/word_clz.S
663@@ -30,7 +30,6 @@
664 .text
665
666 S2N_BN_SYMBOL(word_clz):
667- endbr64
668
669 #if WINDOWS_ABI
670 push rdi
671diff --git a/src/lib/libcrypto/bn/asm/modexp512-x86_64.pl b/src/lib/libcrypto/bn/asm/modexp512-x86_64.pl
672index af78fff54..2e71a7f03 100644
673--- a/src/lib/libcrypto/bn/asm/modexp512-x86_64.pl
674+++ b/src/lib/libcrypto/bn/asm/modexp512-x86_64.pl
675@@ -347,7 +347,6 @@ $code.=<<___;
676 .type MULADD_128x512,\@abi-omnipotent
677 .align 16
678 MULADD_128x512:
679- endbr64
680 ___
681 &MULSTEP_512([map("%r$_",(8..15))], "(+8*0)(%rcx)", "%rsi", "%rbp", "%rbx");
682 $code.=<<___;
683@@ -415,7 +414,6 @@ $code.=<<___;
684 .type mont_reduce,\@abi-omnipotent
685 .align 16
686 mont_reduce:
687- endbr64
688 ___
689
690 my $STACK_DEPTH = 8;
691@@ -678,7 +676,6 @@ $code.=<<___;
692 .type mont_mul_a3b,\@abi-omnipotent
693 .align 16
694 mont_mul_a3b:
695- endbr64
696 #
697 # multiply tmp = src1 * src2
698 # For multiply: dst = rcx, src1 = rdi, src2 = rsi
699@@ -1080,7 +1077,6 @@ $code.=<<___;
700 .type sqr_reduce,\@abi-omnipotent
701 .align 16
702 sqr_reduce:
703- endbr64
704 mov (+$pResult_offset+8)(%rsp), %rcx
705 ___
706 &SQR_512("%rsp+$tmp16_offset+8", "%rcx", [map("%r$_",(10..15,8..9))], "%rbx", "%rbp", "%rsi", "%rdi");
707@@ -1110,7 +1106,6 @@ $code.=<<___;
708 .globl mod_exp_512
709 .type mod_exp_512,\@function,4
710 mod_exp_512:
711- endbr64
712 push %rbp
713 push %rbx
714 push %r12
715diff --git a/src/lib/libcrypto/bn/asm/x86_64-mont.pl b/src/lib/libcrypto/bn/asm/x86_64-mont.pl
716index 6f5ab331e..cae7309d5 100755
717--- a/src/lib/libcrypto/bn/asm/x86_64-mont.pl
718+++ b/src/lib/libcrypto/bn/asm/x86_64-mont.pl
719@@ -63,7 +63,6 @@ $code=<<___;
720 .type bn_mul_mont,\@function,6
721 .align 16
722 bn_mul_mont:
723- endbr64
724 test \$3,${num}d
725 jnz .Lmul_enter
726 cmp \$8,${num}d
727@@ -279,7 +278,6 @@ $code.=<<___;
728 .align 16
729 bn_mul4x_mont:
730 .Lmul4x_enter:
731- endbr64
732 push %rbx
733 push %rbp
734 push %r12
735@@ -707,7 +705,6 @@ $code.=<<___;
736 .align 16
737 bn_sqr4x_mont:
738 .Lsqr4x_enter:
739- endbr64
740 push %rbx
741 push %rbp
742 push %r12
743diff --git a/src/lib/libcrypto/bn/asm/x86_64-mont5.pl b/src/lib/libcrypto/bn/asm/x86_64-mont5.pl
744index 3b3325a6c..7b9c6df27 100755
745--- a/src/lib/libcrypto/bn/asm/x86_64-mont5.pl
746+++ b/src/lib/libcrypto/bn/asm/x86_64-mont5.pl
747@@ -57,7 +57,6 @@ $code=<<___;
748 .type bn_mul_mont_gather5,\@function,6
749 .align 64
750 bn_mul_mont_gather5:
751- endbr64
752 test \$3,${num}d
753 jnz .Lmul_enter
754 cmp \$8,${num}d
755@@ -388,7 +387,6 @@ $code.=<<___;
756 .type bn_mul4x_mont_gather5,\@function,6
757 .align 16
758 bn_mul4x_mont_gather5:
759- endbr64
760 .Lmul4x_enter:
761 mov ${num}d,${num}d
762 movd `($win64?56:8)`(%rsp),%xmm5 # load 7th argument
763@@ -927,7 +925,6 @@ $code.=<<___;
764 .type bn_scatter5,\@abi-omnipotent
765 .align 16
766 bn_scatter5:
767- endbr64
768 cmp \$0, $num
769 jz .Lscatter_epilogue
770 lea ($tbl,$idx,8),$tbl
771@@ -946,7 +943,6 @@ bn_scatter5:
772 .type bn_gather5,\@abi-omnipotent
773 .align 16
774 bn_gather5:
775- endbr64
776 .LSEH_begin_bn_gather5: # Win64 thing, but harmless in other cases
777 # I can't trust assembler to use specific encoding:-(
778 .byte 0x4c,0x8d,0x14,0x24 # lea (%rsp),%r10
779@@ -1057,7 +1053,6 @@ $code.=<<___;
780 .type mul_handler,\@abi-omnipotent
781 .align 16
782 mul_handler:
783- endbr64
784 push %rsi
785 push %rdi
786 push %rbx
787diff --git a/src/lib/libcrypto/camellia/asm/cmll-x86_64.pl b/src/lib/libcrypto/camellia/asm/cmll-x86_64.pl
788index 3ceed3e89..586e5d6e9 100644
789--- a/src/lib/libcrypto/camellia/asm/cmll-x86_64.pl
790+++ b/src/lib/libcrypto/camellia/asm/cmll-x86_64.pl
791@@ -116,7 +116,6 @@ $code=<<___;
792 .type Camellia_EncryptBlock,\@abi-omnipotent
793 .align 16
794 Camellia_EncryptBlock:
795- endbr64
796 movl \$128,%eax
797 subl $arg0d,%eax
798 movl \$3,$arg0d
799@@ -129,7 +128,6 @@ Camellia_EncryptBlock:
800 .align 16
801 .Lenc_rounds:
802 Camellia_EncryptBlock_Rounds:
803- endbr64
804 push %rbx
805 push %rbp
806 push %r13
807@@ -178,7 +176,6 @@ Camellia_EncryptBlock_Rounds:
808 .type _x86_64_Camellia_encrypt,\@abi-omnipotent
809 .align 16
810 _x86_64_Camellia_encrypt:
811- endbr64
812 xor 0($key),@S[1]
813 xor 4($key),@S[0] # ^=key[0-3]
814 xor 8($key),@S[3]
815@@ -229,7 +226,6 @@ $code.=<<___;
816 .type Camellia_DecryptBlock,\@abi-omnipotent
817 .align 16
818 Camellia_DecryptBlock:
819- endbr64
820 movl \$128,%eax
821 subl $arg0d,%eax
822 movl \$3,$arg0d
823@@ -242,7 +238,6 @@ Camellia_DecryptBlock:
824 .align 16
825 .Ldec_rounds:
826 Camellia_DecryptBlock_Rounds:
827- endbr64
828 push %rbx
829 push %rbp
830 push %r13
831@@ -291,7 +286,6 @@ Camellia_DecryptBlock_Rounds:
832 .type _x86_64_Camellia_decrypt,\@abi-omnipotent
833 .align 16
834 _x86_64_Camellia_decrypt:
835- endbr64
836 xor 0($key),@S[1]
837 xor 4($key),@S[0] # ^=key[0-3]
838 xor 8($key),@S[3]
839@@ -406,7 +400,6 @@ $code.=<<___;
840 .type Camellia_Ekeygen,\@function,3
841 .align 16
842 Camellia_Ekeygen:
843- endbr64
844 push %rbx
845 push %rbp
846 push %r13
847@@ -637,7 +630,6 @@ $code.=<<___;
848 .type Camellia_cbc_encrypt,\@function,6
849 .align 16
850 Camellia_cbc_encrypt:
851- endbr64
852 cmp \$0,%rdx
853 je .Lcbc_abort
854 push %rbx
855diff --git a/src/lib/libcrypto/md5/asm/md5-x86_64.pl b/src/lib/libcrypto/md5/asm/md5-x86_64.pl
856index 06d69094f..c902a1b53 100755
857--- a/src/lib/libcrypto/md5/asm/md5-x86_64.pl
858+++ b/src/lib/libcrypto/md5/asm/md5-x86_64.pl
859@@ -128,7 +128,6 @@ $code .= <<EOF;
860 .globl md5_block_asm_data_order
861 .type md5_block_asm_data_order,\@function,3
862 md5_block_asm_data_order:
863- endbr64
864 push %rbp
865 push %rbx
866 push %r12
867diff --git a/src/lib/libcrypto/modes/asm/ghash-x86_64.pl b/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
868index 9ce0c3814..71d0822ac 100644
869--- a/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
870+++ b/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
871@@ -412,7 +412,6 @@ $code.=<<___;
872 .type gcm_init_clmul,\@abi-omnipotent
873 .align 16
874 gcm_init_clmul:
875- endbr64
876 movdqu ($Xip),$Hkey
877 pshufd \$0b01001110,$Hkey,$Hkey # dword swap
878
879@@ -450,7 +449,6 @@ $code.=<<___;
880 .type gcm_gmult_clmul,\@abi-omnipotent
881 .align 16
882 gcm_gmult_clmul:
883- endbr64
884 movdqu ($Xip),$Xi
885 movdqa .Lbswap_mask(%rip),$T3
886 movdqu ($Htbl),$Hkey
887@@ -478,7 +476,6 @@ $code.=<<___;
888 .type gcm_ghash_clmul,\@abi-omnipotent
889 .align 16
890 gcm_ghash_clmul:
891- endbr64
892 ___
893 $code.=<<___ if ($win64);
894 .LSEH_begin_gcm_ghash_clmul:
895@@ -689,7 +686,6 @@ $code.=<<___;
896 .type se_handler,\@abi-omnipotent
897 .align 16
898 se_handler:
899- endbr64
900 push %rsi
901 push %rdi
902 push %rbx
903diff --git a/src/lib/libcrypto/rc4/asm/rc4-md5-x86_64.pl b/src/lib/libcrypto/rc4/asm/rc4-md5-x86_64.pl
904index 3190e6a8e..c65a2c751 100644
905--- a/src/lib/libcrypto/rc4/asm/rc4-md5-x86_64.pl
906+++ b/src/lib/libcrypto/rc4/asm/rc4-md5-x86_64.pl
907@@ -38,7 +38,7 @@ my ($rc4,$md5)=(1,1); # what to generate?
908 my $D="#" if (!$md5); # if set to "#", MD5 is stitched into RC4(),
909 # but its result is discarded. Idea here is
910 # to be able to use 'openssl speed rc4' for
911- # benchmarking the stitched subroutine...
912+ # benchmarking the stitched subroutine...
913
914 my $flavour = shift;
915 my $output = shift;
916@@ -109,7 +109,6 @@ $code.=<<___;
917 .globl $func
918 .type $func,\@function,$nargs
919 $func:
920- endbr64
921 cmp \$0,$len
922 je .Labort
923 push %rbx
924@@ -405,7 +404,7 @@ $code.=<<___ if ($rc4 && (!$md5 || $D));
925 and \$63,$len # remaining bytes
926 jnz .Loop1
927 jmp .Ldone
928-
929+
930 .align 16
931 .Loop1:
932 add $TX[0]#b,$YY#b
933@@ -454,7 +453,6 @@ $code.=<<___;
934 .type RC4_set_key,\@function,3
935 .align 16
936 RC4_set_key:
937- endbr64
938 lea 8($dat),$dat
939 lea ($inp,$len),$inp
940 neg $len
941diff --git a/src/lib/libcrypto/rc4/asm/rc4-x86_64.pl b/src/lib/libcrypto/rc4/asm/rc4-x86_64.pl
942index 0472acce8..f678daaac 100755
943--- a/src/lib/libcrypto/rc4/asm/rc4-x86_64.pl
944+++ b/src/lib/libcrypto/rc4/asm/rc4-x86_64.pl
945@@ -41,7 +41,7 @@
946
947 # April 2005
948 #
949-# P4 EM64T core appears to be "allergic" to 64-bit inc/dec. Replacing
950+# P4 EM64T core appears to be "allergic" to 64-bit inc/dec. Replacing
951 # those with add/sub results in 50% performance improvement of folded
952 # loop...
953
954@@ -127,9 +127,7 @@ $code=<<___;
955 .globl RC4
956 .type RC4,\@function,4
957 .align 16
958-RC4:
959- endbr64
960- or $len,$len
961+RC4: or $len,$len
962 jne .Lentry
963 ret
964 .Lentry:
965@@ -435,7 +433,6 @@ $code.=<<___;
966 .type RC4_set_key,\@function,3
967 .align 16
968 RC4_set_key:
969- endbr64
970 lea 8($dat),$dat
971 lea ($inp,$len),$inp
972 neg $len
973diff --git a/src/lib/libcrypto/sha/asm/sha1-x86_64.pl b/src/lib/libcrypto/sha/asm/sha1-x86_64.pl
974index e15ff47f8..43eee73c4 100755
975--- a/src/lib/libcrypto/sha/asm/sha1-x86_64.pl
976+++ b/src/lib/libcrypto/sha/asm/sha1-x86_64.pl
977@@ -222,7 +222,6 @@ $code.=<<___;
978 .type sha1_block_data_order,\@function,3
979 .align 16
980 sha1_block_data_order:
981- endbr64
982 mov OPENSSL_ia32cap_P+0(%rip),%r9d
983 mov OPENSSL_ia32cap_P+4(%rip),%r8d
984 test \$IA32CAP_MASK1_SSSE3,%r8d # check SSSE3 bit
985@@ -310,7 +309,6 @@ $code.=<<___;
986 .align 16
987 sha1_block_data_order_ssse3:
988 _ssse3_shortcut:
989- endbr64
990 push %rbx
991 push %rbp
992 push %r12
993@@ -731,7 +729,6 @@ $code.=<<___;
994 .align 16
995 sha1_block_data_order_avx:
996 _avx_shortcut:
997- endbr64
998 push %rbx
999 push %rbp
1000 push %r12
1001@@ -1102,7 +1099,6 @@ $code.=<<___;
1002 .type se_handler,\@abi-omnipotent
1003 .align 16
1004 se_handler:
1005- endbr64
1006 push %rsi
1007 push %rdi
1008 push %rbx
1009diff --git a/src/lib/libcrypto/sha/asm/sha512-x86_64.pl b/src/lib/libcrypto/sha/asm/sha512-x86_64.pl
1010index 120693fee..0517eab66 100755
1011--- a/src/lib/libcrypto/sha/asm/sha512-x86_64.pl
1012+++ b/src/lib/libcrypto/sha/asm/sha512-x86_64.pl
1013@@ -175,7 +175,6 @@ $code=<<___;
1014 .type $func,\@function,4
1015 .align 16
1016 $func:
1017- endbr64
1018 push %rbx
1019 push %rbp
1020 push %r12
1021diff --git a/src/lib/libcrypto/whrlpool/asm/wp-x86_64.pl b/src/lib/libcrypto/whrlpool/asm/wp-x86_64.pl
1022index 7958f6d28..de5d3acfb 100644
1023--- a/src/lib/libcrypto/whrlpool/asm/wp-x86_64.pl
1024+++ b/src/lib/libcrypto/whrlpool/asm/wp-x86_64.pl
1025@@ -57,7 +57,6 @@ $code=<<___;
1026 .type $func,\@function,3
1027 .align 16
1028 $func:
1029- endbr64
1030 push %rbx
1031 push %rbp
1032 push %r12
1033diff --git a/src/lib/libcrypto/x86_64cpuid.pl b/src/lib/libcrypto/x86_64cpuid.pl
1034index dc56732a2..1b67d1110 100644
1035--- a/src/lib/libcrypto/x86_64cpuid.pl
1036+++ b/src/lib/libcrypto/x86_64cpuid.pl
1037@@ -18,7 +18,6 @@ print<<___;
1038 .extern OPENSSL_cpuid_setup
1039 .hidden OPENSSL_cpuid_setup
1040 .section .init
1041- endbr64
1042 call OPENSSL_cpuid_setup
1043
1044 .extern OPENSSL_ia32cap_P
1045@@ -30,7 +29,6 @@ print<<___;
1046 .type OPENSSL_ia32_cpuid,\@abi-omnipotent
1047 .align 16
1048 OPENSSL_ia32_cpuid:
1049- endbr64
1050 mov %rbx,%r8 # save %rbx
1051
1052 xor %eax,%eax
1053--
10542.42.0
1055