Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/mono/boringssl.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorDavid Benjamin <davidben@chromium.org>2015-02-25 20:43:35 +0300
committerAdam Langley <agl@google.com>2015-02-26 00:26:16 +0300
commit09bdb2a2c3d40646ac4b20f5e73421307a80185f (patch)
tree79b50b46fd0d04ffa1307895fe857680bebc54a9 /crypto/bn/asm
parentbcb2d91e1002985a26623fb4a6b4f896db5ae6c2 (diff)
Remove explicit .hiddens from x86_64 perlasm files.
This reverts the non-ARM portions of 97999919bbe85ab213d283e18e597e028f8685d1. x86_64 perlasm already makes .globl imply .hidden. (Confusingly, ARM does not.) Since we don't need it, revert those to minimize divergence with upstream.

Change-Id: I2d205cfb1183e65d4f18a62bde187d206b1a96de
Reviewed-on: https://boringssl-review.googlesource.com/3610
Reviewed-by: Adam Langley <agl@google.com>
Diffstat (limited to 'crypto/bn/asm')
-rw-r--r--crypto/bn/asm/rsaz-avx2.pl14
-rw-r--r--crypto/bn/asm/x86_64-mont.pl1
2 files changed, 0 insertions, 15 deletions
diff --git a/crypto/bn/asm/rsaz-avx2.pl b/crypto/bn/asm/rsaz-avx2.pl
index 9a9223b3..3b6ccf83 100644
--- a/crypto/bn/asm/rsaz-avx2.pl
+++ b/crypto/bn/asm/rsaz-avx2.pl
@@ -159,7 +159,6 @@ $code.=<<___;
.globl rsaz_1024_sqr_avx2
.type rsaz_1024_sqr_avx2,\@function,5
-.hidden rsaz_1024_sqr_avx2
.align 64
rsaz_1024_sqr_avx2: # 702 cycles, 14% faster than rsaz_1024_mul_avx2
lea (%rsp), %rax
@@ -892,7 +891,6 @@ $bp="%r13"; # reassigned argument
$code.=<<___;
.globl rsaz_1024_mul_avx2
.type rsaz_1024_mul_avx2,\@function,5
-.hidden rsaz_1024_mul_avx2
.align 64
rsaz_1024_mul_avx2:
lea (%rsp), %rax
@@ -1486,7 +1484,6 @@ my @T = map("%r$_",(8..11));
$code.=<<___;
.globl rsaz_1024_red2norm_avx2
.type rsaz_1024_red2norm_avx2,\@abi-omnipotent
-.hidden rsaz_1024_red2norm_avx2
.align 32
rsaz_1024_red2norm_avx2:
sub \$-128,$inp # size optimization
@@ -1526,7 +1523,6 @@ $code.=<<___;
.globl rsaz_1024_norm2red_avx2
.type rsaz_1024_norm2red_avx2,\@abi-omnipotent
-.hidden rsaz_1024_norm2red_avx2
.align 32
rsaz_1024_norm2red_avx2:
sub \$-128,$out # size optimization
@@ -1569,7 +1565,6 @@ my ($out,$inp,$power) = $win64 ? ("%rcx","%rdx","%r8d") : ("%rdi","%rsi","%edx")
$code.=<<___;
.globl rsaz_1024_scatter5_avx2
.type rsaz_1024_scatter5_avx2,\@abi-omnipotent
-.hidden rsaz_1024_scatter5_avx2
.align 32
rsaz_1024_scatter5_avx2:
vzeroupper
@@ -1595,7 +1590,6 @@ rsaz_1024_scatter5_avx2:
.globl rsaz_1024_gather5_avx2
.type rsaz_1024_gather5_avx2,\@abi-omnipotent
-.hidden rsaz_1024_gather5_avx2
.align 32
rsaz_1024_gather5_avx2:
___
@@ -1690,7 +1684,6 @@ $code.=<<___;
.extern OPENSSL_ia32cap_P
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
-.hidden rsaz_avx2_eligible
.align 32
rsaz_avx2_eligible:
mov OPENSSL_ia32cap_P+8(%rip),%eax
@@ -1878,7 +1871,6 @@ print <<___; # assembler is too old
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
-.hidden rsaz_avx2_eligible
rsaz_avx2_eligible:
xor %eax,%eax
ret
@@ -1890,12 +1882,6 @@ rsaz_avx2_eligible:
.globl rsaz_1024_red2norm_avx2
.globl rsaz_1024_scatter5_avx2
.globl rsaz_1024_gather5_avx2
-.hidden rsaz_1024_sqr_avx2
-.hidden rsaz_1024_mul_avx2
-.hidden rsaz_1024_norm2red_avx2
-.hidden rsaz_1024_red2norm_avx2
-.hidden rsaz_1024_scatter5_avx2
-.hidden rsaz_1024_gather5_avx2
.type rsaz_1024_sqr_avx2,\@abi-omnipotent
rsaz_1024_sqr_avx2:
rsaz_1024_mul_avx2:
diff --git a/crypto/bn/asm/x86_64-mont.pl b/crypto/bn/asm/x86_64-mont.pl
index 38af80a8..39476ab0 100644
--- a/crypto/bn/asm/x86_64-mont.pl
+++ b/crypto/bn/asm/x86_64-mont.pl
@@ -90,7 +90,6 @@ $code=<<___;
.globl bn_mul_mont
.type bn_mul_mont,\@function,6
-.hidden bn_mul_mont
.align 16
bn_mul_mont:
test \$3,${num}d