Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/mono/boringssl.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    David Benjamin <davidben@chromium.org>  2015-02-25 20:43:35 +0300
committer Adam Langley <agl@google.com>  2015-02-26 00:26:16 +0300
commit    09bdb2a2c3d40646ac4b20f5e73421307a80185f (patch)
tree      79b50b46fd0d04ffa1307895fe857680bebc54a9
parent    bcb2d91e1002985a26623fb4a6b4f896db5ae6c2 (diff)
Remove explicit .hiddens from x86_64 perlasm files.
This reverts the non-ARM portions of 97999919bbe85ab213d283e18e597e028f8685d1.
x86_64 perlasm already makes .globl imply .hidden. (Confusingly, ARM does
not.) Since we don't need it, revert those to minimize divergence with
upstream.

Change-Id: I2d205cfb1183e65d4f18a62bde187d206b1a96de
Reviewed-on: https://boringssl-review.googlesource.com/3610
Reviewed-by: Adam Langley <agl@google.com>
-rw-r--r--  crypto/aes/asm/aes-x86_64.pl        2
-rw-r--r--  crypto/aes/asm/aesni-x86_64.pl     11
-rw-r--r--  crypto/aes/asm/aesv8-armx.pl        5
-rw-r--r--  crypto/aes/asm/bsaes-x86_64.pl     10
-rw-r--r--  crypto/aes/asm/vpaes-x86_64.pl      5
-rw-r--r--  crypto/bn/asm/rsaz-avx2.pl         14
-rw-r--r--  crypto/bn/asm/x86_64-mont.pl        1
-rw-r--r--  crypto/cpu-x86_64-asm.pl            1
-rw-r--r--  crypto/md5/asm/md5-x86_64.pl        1
-rw-r--r--  crypto/modes/asm/aesni-gcm-x86_64.pl  4
-rw-r--r--  crypto/modes/asm/ghash-x86_64.pl    8
-rw-r--r--  crypto/rc4/asm/rc4-md5-x86_64.pl    1
-rw-r--r--  crypto/rc4/asm/rc4-x86_64.pl        2
-rw-r--r--  crypto/sha/asm/sha1-armv8.pl        1
-rw-r--r--  crypto/sha/asm/sha512-armv8.pl      1
-rw-r--r--  crypto/sha/asm/sha512-x86_64.pl     1
16 files changed, 0 insertions, 68 deletions
diff --git a/crypto/aes/asm/aes-x86_64.pl b/crypto/aes/asm/aes-x86_64.pl
index f1afbfba..4b6e1b44 100644
--- a/crypto/aes/asm/aes-x86_64.pl
+++ b/crypto/aes/asm/aes-x86_64.pl
@@ -1283,7 +1283,6 @@ $code.=<<___;
.align 16
.globl asm_AES_set_encrypt_key
.type asm_AES_set_encrypt_key,\@function,3
-.hidden asm_AES_set_encrypt_key
asm_AES_set_encrypt_key:
push %rbx
push %rbp
@@ -1549,7 +1548,6 @@ $code.=<<___;
.align 16
.globl asm_AES_set_decrypt_key
.type asm_AES_set_decrypt_key,\@function,3
-.hidden asm_AES_set_decrypt_key
asm_AES_set_decrypt_key:
push %rbx
push %rbp
diff --git a/crypto/aes/asm/aesni-x86_64.pl b/crypto/aes/asm/aesni-x86_64.pl
index 918e1258..5f617463 100644
--- a/crypto/aes/asm/aesni-x86_64.pl
+++ b/crypto/aes/asm/aesni-x86_64.pl
@@ -256,7 +256,6 @@ ___
$code.=<<___;
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@abi-omnipotent
-.hidden ${PREFIX}_encrypt
.align 16
${PREFIX}_encrypt:
movups ($inp),$inout0 # load input
@@ -270,7 +269,6 @@ $code.=<<___;
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@abi-omnipotent
-.hidden ${PREFIX}_decrypt
.align 16
${PREFIX}_decrypt:
movups ($inp),$inout0 # load input
@@ -584,7 +582,6 @@ if ($PREFIX eq "aesni") {
$code.=<<___;
.globl aesni_ecb_encrypt
.type aesni_ecb_encrypt,\@function,5
-.hidden aesni_ecb_encrypt
.align 16
aesni_ecb_encrypt:
___
@@ -909,7 +906,6 @@ my $bswap_mask="%xmm7";
$code.=<<___;
.globl aesni_ccm64_encrypt_blocks
.type aesni_ccm64_encrypt_blocks,\@function,6
-.hidden aesni_ccm64_encrypt_blocks
.align 16
aesni_ccm64_encrypt_blocks:
___
@@ -990,7 +986,6 @@ ___
$code.=<<___;
.globl aesni_ccm64_decrypt_blocks
.type aesni_ccm64_decrypt_blocks,\@function,6
-.hidden aesni_ccm64_decrypt_blocks
.align 16
aesni_ccm64_decrypt_blocks:
___
@@ -1105,7 +1100,6 @@ my $frame_size = 0x80 + ($win64?160:0);
$code.=<<___;
.globl aesni_ctr32_encrypt_blocks
.type aesni_ctr32_encrypt_blocks,\@function,5
-.hidden aesni_ctr32_encrypt_blocks
.align 16
aesni_ctr32_encrypt_blocks:
lea (%rsp),%rax
@@ -1617,7 +1611,6 @@ my $frame_size = 0x70 + ($win64?160:0);
$code.=<<___;
.globl aesni_xts_encrypt
.type aesni_xts_encrypt,\@function,6
-.hidden aesni_xts_encrypt
.align 16
aesni_xts_encrypt:
lea (%rsp),%rax
@@ -2052,7 +2045,6 @@ ___
$code.=<<___;
.globl aesni_xts_decrypt
.type aesni_xts_decrypt,\@function,6
-.hidden aesni_xts_decrypt
.align 16
aesni_xts_decrypt:
lea (%rsp),%rax
@@ -2524,7 +2516,6 @@ my $inp_=$key_;
$code.=<<___;
.globl ${PREFIX}_cbc_encrypt
.type ${PREFIX}_cbc_encrypt,\@function,6
-.hidden ${PREFIX}_cbc_encrypt
.align 16
${PREFIX}_cbc_encrypt:
test $len,$len # check length
@@ -2982,7 +2973,6 @@ ___
$code.=<<___;
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@abi-omnipotent
-.hidden ${PREFIX}_set_decrypt_key
.align 16
${PREFIX}_set_decrypt_key:
.byte 0x48,0x83,0xEC,0x08 # sub rsp,8
@@ -3033,7 +3023,6 @@ ___
$code.=<<___;
.globl ${PREFIX}_set_encrypt_key
.type ${PREFIX}_set_encrypt_key,\@abi-omnipotent
-.hidden ${PREFIX}_set_encrypt_key
.align 16
${PREFIX}_set_encrypt_key:
__aesni_set_encrypt_key:
diff --git a/crypto/aes/asm/aesv8-armx.pl b/crypto/aes/asm/aesv8-armx.pl
index 1e3d6626..1e93f868 100644
--- a/crypto/aes/asm/aesv8-armx.pl
+++ b/crypto/aes/asm/aesv8-armx.pl
@@ -63,7 +63,6 @@ rcon:
.globl ${prefix}_set_encrypt_key
.type ${prefix}_set_encrypt_key,%function
-.hidden ${prefix}_set_encrypt_key
.align 5
${prefix}_set_encrypt_key:
.Lenc_key:
@@ -235,7 +234,6 @@ $code.=<<___;
.globl ${prefix}_set_decrypt_key
.type ${prefix}_set_decrypt_key,%function
-.hidden ${prefix}_set_decrypt_key
.align 5
${prefix}_set_decrypt_key:
___
@@ -300,7 +298,6 @@ my ($rndkey0,$rndkey1,$inout)=map("q$_",(0..3));
$code.=<<___;
.globl ${prefix}_${dir}crypt
.type ${prefix}_${dir}crypt,%function
-.hidden ${prefix}_${dir}crypt
.align 5
${prefix}_${dir}crypt:
ldr $rounds,[$key,#240]
@@ -345,7 +342,6 @@ my ($dat,$tmp,$rndzero_n_last)=($dat0,$tmp0,$tmp1);
$code.=<<___;
.globl ${prefix}_cbc_encrypt
.type ${prefix}_cbc_encrypt,%function
-.hidden ${prefix}_cbc_encrypt
.align 5
${prefix}_cbc_encrypt:
___
@@ -653,7 +649,6 @@ my ($dat,$tmp)=($dat0,$tmp0);
$code.=<<___;
.globl ${prefix}_ctr32_encrypt_blocks
.type ${prefix}_ctr32_encrypt_blocks,%function
-.hidden ${prefix}_ctr32_encrypt_blocks
.align 5
${prefix}_ctr32_encrypt_blocks:
___
diff --git a/crypto/aes/asm/bsaes-x86_64.pl b/crypto/aes/asm/bsaes-x86_64.pl
index 8c6e66a4..3f7d33c4 100644
--- a/crypto/aes/asm/bsaes-x86_64.pl
+++ b/crypto/aes/asm/bsaes-x86_64.pl
@@ -1049,7 +1049,6 @@ if (0 && !$win64) { # following four functions are unsupported interface
$code.=<<___;
.globl bsaes_enc_key_convert
.type bsaes_enc_key_convert,\@function,2
-.hidden bsaes_enc_key_convert
.align 16
bsaes_enc_key_convert:
mov 240($inp),%r10d # pass rounds
@@ -1063,7 +1062,6 @@ bsaes_enc_key_convert:
.globl bsaes_encrypt_128
.type bsaes_encrypt_128,\@function,4
-.hidden bsaes_encrypt_128
.align 16
bsaes_encrypt_128:
.Lenc128_loop:
@@ -1097,7 +1095,6 @@ bsaes_encrypt_128:
.globl bsaes_dec_key_convert
.type bsaes_dec_key_convert,\@function,2
-.hidden bsaes_dec_key_convert
.align 16
bsaes_dec_key_convert:
mov 240($inp),%r10d # pass rounds
@@ -1112,7 +1109,6 @@ bsaes_dec_key_convert:
.globl bsaes_decrypt_128
.type bsaes_decrypt_128,\@function,4
-.hidden bsaes_decrypt_128
.align 16
bsaes_decrypt_128:
.Ldec128_loop:
@@ -1158,7 +1154,6 @@ if ($ecb) {
$code.=<<___;
.globl bsaes_ecb_encrypt_blocks
.type bsaes_ecb_encrypt_blocks,\@abi-omnipotent
-.hidden bsaes_ecb_encrypt_blocks
.align 16
bsaes_ecb_encrypt_blocks:
mov %rsp, %rax
@@ -1360,7 +1355,6 @@ $code.=<<___;
.globl bsaes_ecb_decrypt_blocks
.type bsaes_ecb_decrypt_blocks,\@abi-omnipotent
-.hidden bsaes_ecb_decrypt_blocks
.align 16
bsaes_ecb_decrypt_blocks:
mov %rsp, %rax
@@ -1566,7 +1560,6 @@ $code.=<<___;
.extern asm_AES_cbc_encrypt
.globl bsaes_cbc_encrypt
.type bsaes_cbc_encrypt,\@abi-omnipotent
-.hidden bsaes_cbc_encrypt
.align 16
bsaes_cbc_encrypt:
___
@@ -1854,7 +1847,6 @@ $code.=<<___;
.globl bsaes_ctr32_encrypt_blocks
.type bsaes_ctr32_encrypt_blocks,\@abi-omnipotent
-.hidden bsaes_ctr32_encrypt_blocks
.align 16
bsaes_ctr32_encrypt_blocks:
mov %rsp, %rax
@@ -2096,7 +2088,6 @@ $arg6=~s/d$//;
$code.=<<___;
.globl bsaes_xts_encrypt
.type bsaes_xts_encrypt,\@abi-omnipotent
-.hidden bsaes_xts_encrypt
.align 16
bsaes_xts_encrypt:
mov %rsp, %rax
@@ -2478,7 +2469,6 @@ $code.=<<___;
.globl bsaes_xts_decrypt
.type bsaes_xts_decrypt,\@abi-omnipotent
-.hidden bsaes_xts_decrypt
.align 16
bsaes_xts_decrypt:
mov %rsp, %rax
diff --git a/crypto/aes/asm/vpaes-x86_64.pl b/crypto/aes/asm/vpaes-x86_64.pl
index a647b920..f2ef318f 100644
--- a/crypto/aes/asm/vpaes-x86_64.pl
+++ b/crypto/aes/asm/vpaes-x86_64.pl
@@ -671,7 +671,6 @@ _vpaes_schedule_mangle:
#
.globl ${PREFIX}_set_encrypt_key
.type ${PREFIX}_set_encrypt_key,\@function,3
-.hidden ${PREFIX}_set_encrypt_key
.align 16
${PREFIX}_set_encrypt_key:
___
@@ -720,7 +719,6 @@ $code.=<<___;
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@function,3
-.hidden ${PREFIX}_set_decrypt_key
.align 16
${PREFIX}_set_decrypt_key:
___
@@ -774,7 +772,6 @@ $code.=<<___;
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@function,3
-.hidden ${PREFIX}_encrypt
.align 16
${PREFIX}_encrypt:
___
@@ -818,7 +815,6 @@ $code.=<<___;
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@function,3
-.hidden ${PREFIX}_decrypt
.align 16
${PREFIX}_decrypt:
___
@@ -868,7 +864,6 @@ my ($inp,$out,$len,$key,$ivp,$enc)=("%rdi","%rsi","%rdx","%rcx","%r8","%r9");
$code.=<<___;
.globl ${PREFIX}_cbc_encrypt
.type ${PREFIX}_cbc_encrypt,\@function,6
-.hidden ${PREFIX}_cbc_encrypt
.align 16
${PREFIX}_cbc_encrypt:
xchg $key,$len
diff --git a/crypto/bn/asm/rsaz-avx2.pl b/crypto/bn/asm/rsaz-avx2.pl
index 9a9223b3..3b6ccf83 100644
--- a/crypto/bn/asm/rsaz-avx2.pl
+++ b/crypto/bn/asm/rsaz-avx2.pl
@@ -159,7 +159,6 @@ $code.=<<___;
.globl rsaz_1024_sqr_avx2
.type rsaz_1024_sqr_avx2,\@function,5
-.hidden rsaz_1024_sqr_avx2
.align 64
rsaz_1024_sqr_avx2: # 702 cycles, 14% faster than rsaz_1024_mul_avx2
lea (%rsp), %rax
@@ -892,7 +891,6 @@ $bp="%r13"; # reassigned argument
$code.=<<___;
.globl rsaz_1024_mul_avx2
.type rsaz_1024_mul_avx2,\@function,5
-.hidden rsaz_1024_mul_avx2
.align 64
rsaz_1024_mul_avx2:
lea (%rsp), %rax
@@ -1486,7 +1484,6 @@ my @T = map("%r$_",(8..11));
$code.=<<___;
.globl rsaz_1024_red2norm_avx2
.type rsaz_1024_red2norm_avx2,\@abi-omnipotent
-.hidden rsaz_1024_red2norm_avx2
.align 32
rsaz_1024_red2norm_avx2:
sub \$-128,$inp # size optimization
@@ -1526,7 +1523,6 @@ $code.=<<___;
.globl rsaz_1024_norm2red_avx2
.type rsaz_1024_norm2red_avx2,\@abi-omnipotent
-.hidden rsaz_1024_norm2red_avx2
.align 32
rsaz_1024_norm2red_avx2:
sub \$-128,$out # size optimization
@@ -1569,7 +1565,6 @@ my ($out,$inp,$power) = $win64 ? ("%rcx","%rdx","%r8d") : ("%rdi","%rsi","%edx")
$code.=<<___;
.globl rsaz_1024_scatter5_avx2
.type rsaz_1024_scatter5_avx2,\@abi-omnipotent
-.hidden rsaz_1024_scatter5_avx2
.align 32
rsaz_1024_scatter5_avx2:
vzeroupper
@@ -1595,7 +1590,6 @@ rsaz_1024_scatter5_avx2:
.globl rsaz_1024_gather5_avx2
.type rsaz_1024_gather5_avx2,\@abi-omnipotent
-.hidden rsaz_1024_gather5_avx2
.align 32
rsaz_1024_gather5_avx2:
___
@@ -1690,7 +1684,6 @@ $code.=<<___;
.extern OPENSSL_ia32cap_P
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
-.hidden rsaz_avx2_eligible
.align 32
rsaz_avx2_eligible:
mov OPENSSL_ia32cap_P+8(%rip),%eax
@@ -1878,7 +1871,6 @@ print <<___; # assembler is too old
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
-.hidden rsaz_avx2_eligible
rsaz_avx2_eligible:
xor %eax,%eax
ret
@@ -1890,12 +1882,6 @@ rsaz_avx2_eligible:
.globl rsaz_1024_red2norm_avx2
.globl rsaz_1024_scatter5_avx2
.globl rsaz_1024_gather5_avx2
-.hidden rsaz_1024_sqr_avx2
-.hidden rsaz_1024_mul_avx2
-.hidden rsaz_1024_norm2red_avx2
-.hidden rsaz_1024_red2norm_avx2
-.hidden rsaz_1024_scatter5_avx2
-.hidden rsaz_1024_gather5_avx2
.type rsaz_1024_sqr_avx2,\@abi-omnipotent
rsaz_1024_sqr_avx2:
rsaz_1024_mul_avx2:
diff --git a/crypto/bn/asm/x86_64-mont.pl b/crypto/bn/asm/x86_64-mont.pl
index 38af80a8..39476ab0 100644
--- a/crypto/bn/asm/x86_64-mont.pl
+++ b/crypto/bn/asm/x86_64-mont.pl
@@ -90,7 +90,6 @@ $code=<<___;
.globl bn_mul_mont
.type bn_mul_mont,\@function,6
-.hidden bn_mul_mont
.align 16
bn_mul_mont:
test \$3,${num}d
diff --git a/crypto/cpu-x86_64-asm.pl b/crypto/cpu-x86_64-asm.pl
index af1c7a5a..89d7a6c9 100644
--- a/crypto/cpu-x86_64-asm.pl
+++ b/crypto/cpu-x86_64-asm.pl
@@ -22,7 +22,6 @@ print<<___;
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,\@function,1
-.hidden OPENSSL_ia32_cpuid
.align 16
OPENSSL_ia32_cpuid:
# On Windows, $arg1 is rcx, but that will be clobbered. So make Windows
diff --git a/crypto/md5/asm/md5-x86_64.pl b/crypto/md5/asm/md5-x86_64.pl
index 45f23c09..77a6e01d 100644
--- a/crypto/md5/asm/md5-x86_64.pl
+++ b/crypto/md5/asm/md5-x86_64.pl
@@ -129,7 +129,6 @@ $code .= <<EOF;
.globl md5_block_asm_data_order
.type md5_block_asm_data_order,\@function,3
-.hidden md5_block_asm_data_order
md5_block_asm_data_order:
push %rbp
push %rbx
diff --git a/crypto/modes/asm/aesni-gcm-x86_64.pl b/crypto/modes/asm/aesni-gcm-x86_64.pl
index f4ff1f50..7e4e04ea 100644
--- a/crypto/modes/asm/aesni-gcm-x86_64.pl
+++ b/crypto/modes/asm/aesni-gcm-x86_64.pl
@@ -397,7 +397,6 @@ ___
$code.=<<___;
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,\@function,6
-.hidden aesni_gcm_decrypt
.align 32
aesni_gcm_decrypt:
xor $ret,$ret
@@ -608,7 +607,6 @@ _aesni_ctr32_6x:
.globl aesni_gcm_encrypt
.type aesni_gcm_encrypt,\@function,6
-.hidden aesni_gcm_encrypt
.align 32
aesni_gcm_encrypt:
xor $ret,$ret
@@ -1038,7 +1036,6 @@ $code=<<___; # assembler is too old
.globl aesni_gcm_encrypt
.type aesni_gcm_encrypt,\@abi-omnipotent
-.hidden aesni_gcm_encrypt
aesni_gcm_encrypt:
xor %eax,%eax
ret
@@ -1046,7 +1043,6 @@ aesni_gcm_encrypt:
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,\@abi-omnipotent
-.hidden aesni_gcm_decrypt
aesni_gcm_decrypt:
xor %eax,%eax
ret
diff --git a/crypto/modes/asm/ghash-x86_64.pl b/crypto/modes/asm/ghash-x86_64.pl
index aacce2de..6e656ca1 100644
--- a/crypto/modes/asm/ghash-x86_64.pl
+++ b/crypto/modes/asm/ghash-x86_64.pl
@@ -225,7 +225,6 @@ $code=<<___;
.globl gcm_gmult_4bit
.type gcm_gmult_4bit,\@function,2
-.hidden gcm_gmult_4bit
.align 16
gcm_gmult_4bit:
push %rbx
@@ -256,7 +255,6 @@ $rem_8bit=$rem_4bit;
$code.=<<___;
.globl gcm_ghash_4bit
.type gcm_ghash_4bit,\@function,4
-.hidden gcm_ghash_4bit
.align 16
gcm_ghash_4bit:
push %rbx
@@ -484,7 +482,6 @@ ___
$code.=<<___;
.globl gcm_init_clmul
.type gcm_init_clmul,\@abi-omnipotent
-.hidden gcm_init_clmul
.align 16
gcm_init_clmul:
.L_init_clmul:
@@ -565,7 +562,6 @@ ___
$code.=<<___;
.globl gcm_gmult_clmul
.type gcm_gmult_clmul,\@abi-omnipotent
-.hidden gcm_gmult_clmul
.align 16
gcm_gmult_clmul:
.L_gmult_clmul:
@@ -615,7 +611,6 @@ ___
$code.=<<___;
.globl gcm_ghash_clmul
.type gcm_ghash_clmul,\@abi-omnipotent
-.hidden gcm_ghash_clmul
.align 32
gcm_ghash_clmul:
.L_ghash_clmul:
@@ -972,7 +967,6 @@ ___
$code.=<<___;
.globl gcm_init_avx
.type gcm_init_avx,\@abi-omnipotent
-.hidden gcm_init_avx
.align 32
gcm_init_avx:
___
@@ -1115,7 +1109,6 @@ ___
$code.=<<___;
.globl gcm_gmult_avx
.type gcm_gmult_avx,\@abi-omnipotent
-.hidden gcm_gmult_avx
.align 32
gcm_gmult_avx:
jmp .L_gmult_clmul
@@ -1125,7 +1118,6 @@ ___
$code.=<<___;
.globl gcm_ghash_avx
.type gcm_ghash_avx,\@abi-omnipotent
-.hidden gcm_ghash_avx
.align 32
gcm_ghash_avx:
___
diff --git a/crypto/rc4/asm/rc4-md5-x86_64.pl b/crypto/rc4/asm/rc4-md5-x86_64.pl
index 8ebf4051..272fa91e 100644
--- a/crypto/rc4/asm/rc4-md5-x86_64.pl
+++ b/crypto/rc4/asm/rc4-md5-x86_64.pl
@@ -110,7 +110,6 @@ $code.=<<___;
.globl $func
.type $func,\@function,$nargs
-.hidden $func
$func:
cmp \$0,$len
je .Labort
diff --git a/crypto/rc4/asm/rc4-x86_64.pl b/crypto/rc4/asm/rc4-x86_64.pl
index 14e4da1b..db462425 100644
--- a/crypto/rc4/asm/rc4-x86_64.pl
+++ b/crypto/rc4/asm/rc4-x86_64.pl
@@ -127,7 +127,6 @@ $code=<<___;
.globl asm_RC4
.type asm_RC4,\@function,4
-.hidden asm_RC4
.align 16
asm_RC4:
or $len,$len
@@ -434,7 +433,6 @@ $ido="%r9";
$code.=<<___;
.globl asm_RC4_set_key
.type asm_RC4_set_key,\@function,3
-.hidden asm_RC4_set_key
.align 16
asm_RC4_set_key:
lea 8($dat),$dat
diff --git a/crypto/sha/asm/sha1-armv8.pl b/crypto/sha/asm/sha1-armv8.pl
index a8efe4ff..deb1238d 100644
--- a/crypto/sha/asm/sha1-armv8.pl
+++ b/crypto/sha/asm/sha1-armv8.pl
@@ -156,7 +156,6 @@ $code.=<<___;
.globl sha1_block_data_order
.type sha1_block_data_order,%function
-.hidden sha1_block_data_order
.align 6
sha1_block_data_order:
ldr x16,.LOPENSSL_armcap_P
diff --git a/crypto/sha/asm/sha512-armv8.pl b/crypto/sha/asm/sha512-armv8.pl
index 570b0843..5a9c8129 100644
--- a/crypto/sha/asm/sha512-armv8.pl
+++ b/crypto/sha/asm/sha512-armv8.pl
@@ -154,7 +154,6 @@ $code.=<<___;
.globl $func
.type $func,%function
-.hidden $func
.align 6
$func:
___
diff --git a/crypto/sha/asm/sha512-x86_64.pl b/crypto/sha/asm/sha512-x86_64.pl
index 93f0c9c2..6660a88b 100644
--- a/crypto/sha/asm/sha512-x86_64.pl
+++ b/crypto/sha/asm/sha512-x86_64.pl
@@ -258,7 +258,6 @@ $code=<<___;
.extern OPENSSL_ia32cap_P
.globl $func
.type $func,\@function,3
-.hidden $func
.align 16
$func:
___