author     Denys Vlasenko <vda.linux@googlemail.com>  2021-04-26 17:25:27 +0200
committer  Denys Vlasenko <vda.linux@googlemail.com>  2021-04-26 17:25:27 +0200
commit     772e18775e0e1db2392dcbea970d5729018437e8
tree       0a7cdbe8b89e00fc54b30f4d5541ca6aa77e5d18
parent     b3b1713a58dab938524e263426004ab0aca112a8
tls: shrink sp_256_proj_point_add_10
function                                             old     new   delta
sp_256_ecc_mulmod_10                                1329    1300     -29
Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
 networking/tls_sp_c32.c | 61 ++++++++++++++++++++++++-------------------------------------
 1 file changed, 24 insertions(+), 37 deletions(-)
diff --git a/networking/tls_sp_c32.c b/networking/tls_sp_c32.c
index c151eea27..70e20aa86 100644
--- a/networking/tls_sp_c32.c
+++ b/networking/tls_sp_c32.c
@@ -681,18 +681,11 @@ static void sp_256_proj_point_dbl_10(sp_point* r, sp_point* p, sp_digit* t)
 static void sp_256_proj_point_add_10(sp_point* r, sp_point* p, sp_point* q,
 		sp_digit* t)
 {
-	sp_point *ap[2];
-	sp_point *rp[2];
-	sp_point tp;
 	sp_digit* t1 = t;
 	sp_digit* t2 = t + 2*10;
 	sp_digit* t3 = t + 4*10;
 	sp_digit* t4 = t + 6*10;
 	sp_digit* t5 = t + 8*10;
-	sp_digit* x;
-	sp_digit* y;
-	sp_digit* z;
-	int i;
 
 	/* Ensure only the first point is the same as the result. */
 	if (q == r) {
@@ -711,33 +704,27 @@ static void sp_256_proj_point_add_10(sp_point* r, sp_point* p, sp_point* q,
 		sp_256_proj_point_dbl_10(r, p, t);
 	}
 	else {
-		rp[0] = r;
-		rp[1] = &tp;
-		memset(&tp, 0, sizeof(tp));
-		x = rp[p->infinity | q->infinity]->x;
-		y = rp[p->infinity | q->infinity]->y;
-		z = rp[p->infinity | q->infinity]->z;
-
-		ap[0] = p;
-		ap[1] = q;
-		for (i=0; i<10; i++)
-			r->x[i] = ap[p->infinity]->x[i];
-		for (i=0; i<10; i++)
-			r->y[i] = ap[p->infinity]->y[i];
-		for (i=0; i<10; i++)
-			r->z[i] = ap[p->infinity]->z[i];
-		r->infinity = ap[p->infinity]->infinity;
+		sp_point tp;
+		sp_point *v;
+
+		v = r;
+		if (p->infinity | q->infinity) {
+			memset(&tp, 0, sizeof(tp));
+			v = &tp;
+		}
+
+		*r = p->infinity ? *q : *p; /* struct copy */
 
 		/* U1 = X1*Z2^2 */
 		sp_256_mont_sqr_10(t1, q->z, p256_mod, p256_mp_mod);
 		sp_256_mont_mul_10(t3, t1, q->z, p256_mod, p256_mp_mod);
-		sp_256_mont_mul_10(t1, t1, x, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(t1, t1, v->x, p256_mod, p256_mp_mod);
 		/* U2 = X2*Z1^2 */
-		sp_256_mont_sqr_10(t2, z, p256_mod, p256_mp_mod);
-		sp_256_mont_mul_10(t4, t2, z, p256_mod, p256_mp_mod);
+		sp_256_mont_sqr_10(t2, v->z, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(t4, t2, v->z, p256_mod, p256_mp_mod);
 		sp_256_mont_mul_10(t2, t2, q->x, p256_mod, p256_mp_mod);
 		/* S1 = Y1*Z2^3 */
-		sp_256_mont_mul_10(t3, t3, y, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(t3, t3, v->y, p256_mod, p256_mp_mod);
 		/* S2 = Y2*Z1^3 */
 		sp_256_mont_mul_10(t4, t4, q->y, p256_mod, p256_mp_mod);
 		/* H = U2 - U1 */
@@ -745,21 +732,21 @@ static void sp_256_proj_point_add_10(sp_point* r, sp_point* p, sp_point* q,
 		/* R = S2 - S1 */
 		sp_256_mont_sub_10(t4, t4, t3, p256_mod);
 		/* Z3 = H*Z1*Z2 */
-		sp_256_mont_mul_10(z, z, q->z, p256_mod, p256_mp_mod);
-		sp_256_mont_mul_10(z, z, t2, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(v->z, v->z, q->z, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(v->z, v->z, t2, p256_mod, p256_mp_mod);
 		/* X3 = R^2 - H^3 - 2*U1*H^2 */
-		sp_256_mont_sqr_10(x, t4, p256_mod, p256_mp_mod);
+		sp_256_mont_sqr_10(v->x, t4, p256_mod, p256_mp_mod);
 		sp_256_mont_sqr_10(t5, t2, p256_mod, p256_mp_mod);
-		sp_256_mont_mul_10(y, t1, t5, p256_mod, p256_mp_mod);
+		sp_256_mont_mul_10(v->y, t1, t5, p256_mod, p256_mp_mod);
 		sp_256_mont_mul_10(t5, t5, t2, p256_mod, p256_mp_mod);
-		sp_256_mont_sub_10(x, x, t5, p256_mod);
-		sp_256_mont_dbl_10(t1, y, p256_mod);
-		sp_256_mont_sub_10(x, x, t1, p256_mod);
+		sp_256_mont_sub_10(v->x, v->x, t5, p256_mod);
+		sp_256_mont_dbl_10(t1, v->y, p256_mod);
+		sp_256_mont_sub_10(v->x, v->x, t1, p256_mod);
 		/* Y3 = R*(U1*H^2 - X3) - S1*H^3 */
-		sp_256_mont_sub_10(y, y, x, p256_mod);
-		sp_256_mont_mul_10(y, y, t4, p256_mod, p256_mp_mod);
+		sp_256_mont_sub_10(v->y, v->y, v->x, p256_mod);
+		sp_256_mont_mul_10(v->y, v->y, t4, p256_mod, p256_mp_mod);
 		sp_256_mont_mul_10(t5, t5, t3, p256_mod, p256_mp_mod);
-		sp_256_mont_sub_10(v->y, v->y, t5, p256_mod);
+		sp_256_mont_sub_10(v->y, v->y, t5, p256_mod);
 	}
 }
 
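
The patch replaces the pointer-array selection (rp[]/ap[] indexed by the infinity flags) and the three per-limb copy loops with a single destination pointer v and one struct assignment. A minimal standalone sketch of that idiom follows; the point type, limb count, and the copy_old/copy_new helpers are illustrative stand-ins, not the busybox sp_point code.

#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for sp_point: 10 limbs per coordinate plus a flag. */
typedef struct {
	unsigned x[10], y[10], z[10];
	int infinity;
} point;

/* Old idiom: pick the write target via pointer arrays indexed by the
 * infinity flags, then copy the chosen source point limb by limb. */
static void copy_old(point *r, point *tp, point *p, point *q)
{
	point *rp[2], *ap[2];
	unsigned *x;
	int i;

	rp[0] = r;
	rp[1] = tp;                  /* dummy target when either input is infinite */
	memset(tp, 0, sizeof(*tp));
	x = rp[p->infinity | q->infinity]->x;   /* later math writes through x/y/z */
	ap[0] = p;
	ap[1] = q;
	for (i = 0; i < 10; i++)
		r->x[i] = ap[p->infinity]->x[i];
	/* ...same loops for y and z, plus an r->infinity copy... */
	(void)x;
}

/* New idiom: one pointer, one plain if, one struct assignment. */
static void copy_new(point *r, point *tp, point *p, point *q)
{
	point *v = r;                /* later math writes through v->x/y/z */

	if (p->infinity | q->infinity) {
		memset(tp, 0, sizeof(*tp));
		v = tp;              /* redirect to the throwaway point */
	}
	*r = p->infinity ? *q : *p;  /* one struct copy replaces the limb loops */
	(void)v;
}

int main(void)
{
	point p = { .x = {1} }, q = { .x = {2}, .infinity = 1 }, r, tp;

	copy_old(&r, &tp, &p, &q);
	printf("old: %u\n", r.x[0]);
	copy_new(&r, &tp, &p, &q);
	printf("new: %u\n", r.x[0]);
	return 0;
}

The struct assignment lets the compiler emit a single block copy instead of three counted loops, and the ap[], rp[], x, y, z, and i locals disappear; presumably after inlining into sp_256_ecc_mulmod_10, that is where the 29-byte shrink reported in the size table comes from.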