
github.com/mono/boringssl.git
path: root/crypto
author    Adam Langley <agl@google.com>  2015-02-20 04:48:29 +0300
committer Adam Langley <agl@google.com>  2015-02-21 00:24:01 +0300
commit    97999919bbe85ab213d283e18e597e028f8685d1 (patch)
tree      d1410b26a832dae606ea255ad5dce6b0b449cce7 /crypto
parent    bcc4e2304123169af6acd1b842509430e79fd390 (diff)
Hide all asm symbols.
We are leaking asm symbols in Android builds because the asm code isn't affected by -fvisibility=hidden. This change hides all asm symbols.

This assumes that no asm symbols are public API, and that should be true. Some points to note:

In crypto/rc4/asm/rc4-md5-x86_64.pl there are |RC4_set_key| and |RC4_options| functions which aren't getting marked as hidden. That's because those functions aren't actually ever generated. (I'm just trying to minimise drift with upstream here.)

In crypto/rc4/asm/rc4-x86_64.pl there's |RC4_options|, which is "public" API except that we've never had it in the header files. So I've just deleted it. Since we have an internal caller, we'll probably have to put it back in the future, but it can just be done in rc4.c to save problems.

BUG=448386

Change-Id: I3846617a0e3d73ec9e5ec3638a53364adbbc6260
Reviewed-on: https://boringssl-review.googlesource.com/3520
Reviewed-by: David Benjamin <davidben@chromium.org>
Reviewed-by: Adam Langley <agl@google.com>
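For readers unfamiliar with ELF symbol visibility, a minimal C-side sketch of the mechanism follows. The function name is made up for illustration; the attribute is the standard GCC/Clang per-symbol equivalent of -fvisibility=hidden:

/* -fvisibility=hidden makes the C compiler emit every symbol with hidden
 * ELF visibility, keeping it out of the shared object's exported (dynamic)
 * symbol table. The same effect, per symbol: */
int aes_helper(void) __attribute__((visibility("hidden")));

/* Perl-generated assembly never passes through the C front end, so the flag
 * cannot reach it. Each exported asm symbol therefore needs an explicit
 * directive next to its .globl, which is what this change inserts
 * throughout:
 *
 *     .globl  asm_AES_set_encrypt_key
 *     .hidden asm_AES_set_encrypt_key
 */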
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/aes/asm/aes-x86_64.pl          |  2
-rw-r--r--  crypto/aes/asm/aesni-x86_64.pl        | 11
-rw-r--r--  crypto/aes/asm/aesv8-armx.pl          |  5
-rw-r--r--  crypto/aes/asm/bsaes-x86_64.pl        | 10
-rw-r--r--  crypto/aes/asm/vpaes-x86_64.pl        |  5
-rw-r--r--  crypto/bn/asm/rsaz-avx2.pl            | 14
-rw-r--r--  crypto/bn/asm/x86_64-mont.pl          |  1
-rw-r--r--  crypto/cpu-x86_64-asm.pl              |  1
-rw-r--r--  crypto/md5/asm/md5-x86_64.pl          |  1
-rw-r--r--  crypto/modes/asm/aesni-gcm-x86_64.pl  |  4
-rw-r--r--  crypto/modes/asm/ghash-x86_64.pl      |  8
-rw-r--r--  crypto/rc4/asm/rc4-md5-x86_64.pl      |  1
-rw-r--r--  crypto/rc4/asm/rc4-x86_64.pl          | 28
-rw-r--r--  crypto/sha/asm/sha1-armv8.pl          |  1
-rw-r--r--  crypto/sha/asm/sha512-armv8.pl        |  1
-rw-r--r--  crypto/sha/asm/sha512-x86_64.pl       |  1
16 files changed, 68 insertions, 26 deletions
diff --git a/crypto/aes/asm/aes-x86_64.pl b/crypto/aes/asm/aes-x86_64.pl
index 4b6e1b44..f1afbfba 100644
--- a/crypto/aes/asm/aes-x86_64.pl
+++ b/crypto/aes/asm/aes-x86_64.pl
@@ -1283,6 +1283,7 @@ $code.=<<___;
.align 16
.globl asm_AES_set_encrypt_key
.type asm_AES_set_encrypt_key,\@function,3
+.hidden asm_AES_set_encrypt_key
asm_AES_set_encrypt_key:
push %rbx
push %rbp
@@ -1548,6 +1549,7 @@ $code.=<<___;
.align 16
.globl asm_AES_set_decrypt_key
.type asm_AES_set_decrypt_key,\@function,3
+.hidden asm_AES_set_decrypt_key
asm_AES_set_decrypt_key:
push %rbx
push %rbp
diff --git a/crypto/aes/asm/aesni-x86_64.pl b/crypto/aes/asm/aesni-x86_64.pl
index 5f617463..918e1258 100644
--- a/crypto/aes/asm/aesni-x86_64.pl
+++ b/crypto/aes/asm/aesni-x86_64.pl
@@ -256,6 +256,7 @@ ___
$code.=<<___;
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@abi-omnipotent
+.hidden ${PREFIX}_encrypt
.align 16
${PREFIX}_encrypt:
movups ($inp),$inout0 # load input
@@ -269,6 +270,7 @@ $code.=<<___;
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@abi-omnipotent
+.hidden ${PREFIX}_decrypt
.align 16
${PREFIX}_decrypt:
movups ($inp),$inout0 # load input
@@ -582,6 +584,7 @@ if ($PREFIX eq "aesni") {
$code.=<<___;
.globl aesni_ecb_encrypt
.type aesni_ecb_encrypt,\@function,5
+.hidden aesni_ecb_encrypt
.align 16
aesni_ecb_encrypt:
___
@@ -906,6 +909,7 @@ my $bswap_mask="%xmm7";
$code.=<<___;
.globl aesni_ccm64_encrypt_blocks
.type aesni_ccm64_encrypt_blocks,\@function,6
+.hidden aesni_ccm64_encrypt_blocks
.align 16
aesni_ccm64_encrypt_blocks:
___
@@ -986,6 +990,7 @@ ___
$code.=<<___;
.globl aesni_ccm64_decrypt_blocks
.type aesni_ccm64_decrypt_blocks,\@function,6
+.hidden aesni_ccm64_decrypt_blocks
.align 16
aesni_ccm64_decrypt_blocks:
___
@@ -1100,6 +1105,7 @@ my $frame_size = 0x80 + ($win64?160:0);
$code.=<<___;
.globl aesni_ctr32_encrypt_blocks
.type aesni_ctr32_encrypt_blocks,\@function,5
+.hidden aesni_ctr32_encrypt_blocks
.align 16
aesni_ctr32_encrypt_blocks:
lea (%rsp),%rax
@@ -1611,6 +1617,7 @@ my $frame_size = 0x70 + ($win64?160:0);
$code.=<<___;
.globl aesni_xts_encrypt
.type aesni_xts_encrypt,\@function,6
+.hidden aesni_xts_encrypt
.align 16
aesni_xts_encrypt:
lea (%rsp),%rax
@@ -2045,6 +2052,7 @@ ___
$code.=<<___;
.globl aesni_xts_decrypt
.type aesni_xts_decrypt,\@function,6
+.hidden aesni_xts_decrypt
.align 16
aesni_xts_decrypt:
lea (%rsp),%rax
@@ -2516,6 +2524,7 @@ my $inp_=$key_;
$code.=<<___;
.globl ${PREFIX}_cbc_encrypt
.type ${PREFIX}_cbc_encrypt,\@function,6
+.hidden ${PREFIX}_cbc_encrypt
.align 16
${PREFIX}_cbc_encrypt:
test $len,$len # check length
@@ -2973,6 +2982,7 @@ ___
$code.=<<___;
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@abi-omnipotent
+.hidden ${PREFIX}_set_decrypt_key
.align 16
${PREFIX}_set_decrypt_key:
.byte 0x48,0x83,0xEC,0x08 # sub rsp,8
@@ -3023,6 +3033,7 @@ ___
$code.=<<___;
.globl ${PREFIX}_set_encrypt_key
.type ${PREFIX}_set_encrypt_key,\@abi-omnipotent
+.hidden ${PREFIX}_set_encrypt_key
.align 16
${PREFIX}_set_encrypt_key:
__aesni_set_encrypt_key:
diff --git a/crypto/aes/asm/aesv8-armx.pl b/crypto/aes/asm/aesv8-armx.pl
index 1e93f868..1e3d6626 100644
--- a/crypto/aes/asm/aesv8-armx.pl
+++ b/crypto/aes/asm/aesv8-armx.pl
@@ -63,6 +63,7 @@ rcon:
.globl ${prefix}_set_encrypt_key
.type ${prefix}_set_encrypt_key,%function
+.hidden ${prefix}_set_encrypt_key
.align 5
${prefix}_set_encrypt_key:
.Lenc_key:
@@ -234,6 +235,7 @@ $code.=<<___;
.globl ${prefix}_set_decrypt_key
.type ${prefix}_set_decrypt_key,%function
+.hidden ${prefix}_set_decrypt_key
.align 5
${prefix}_set_decrypt_key:
___
@@ -298,6 +300,7 @@ my ($rndkey0,$rndkey1,$inout)=map("q$_",(0..3));
$code.=<<___;
.globl ${prefix}_${dir}crypt
.type ${prefix}_${dir}crypt,%function
+.hidden ${prefix}_${dir}crypt
.align 5
${prefix}_${dir}crypt:
ldr $rounds,[$key,#240]
@@ -342,6 +345,7 @@ my ($dat,$tmp,$rndzero_n_last)=($dat0,$tmp0,$tmp1);
$code.=<<___;
.globl ${prefix}_cbc_encrypt
.type ${prefix}_cbc_encrypt,%function
+.hidden ${prefix}_cbc_encrypt
.align 5
${prefix}_cbc_encrypt:
___
@@ -649,6 +653,7 @@ my ($dat,$tmp)=($dat0,$tmp0);
$code.=<<___;
.globl ${prefix}_ctr32_encrypt_blocks
.type ${prefix}_ctr32_encrypt_blocks,%function
+.hidden ${prefix}_ctr32_encrypt_blocks
.align 5
${prefix}_ctr32_encrypt_blocks:
___
diff --git a/crypto/aes/asm/bsaes-x86_64.pl b/crypto/aes/asm/bsaes-x86_64.pl
index 3f7d33c4..8c6e66a4 100644
--- a/crypto/aes/asm/bsaes-x86_64.pl
+++ b/crypto/aes/asm/bsaes-x86_64.pl
@@ -1049,6 +1049,7 @@ if (0 && !$win64) { # following four functions are unsupported interface
$code.=<<___;
.globl bsaes_enc_key_convert
.type bsaes_enc_key_convert,\@function,2
+.hidden bsaes_enc_key_convert
.align 16
bsaes_enc_key_convert:
mov 240($inp),%r10d # pass rounds
@@ -1062,6 +1063,7 @@ bsaes_enc_key_convert:
.globl bsaes_encrypt_128
.type bsaes_encrypt_128,\@function,4
+.hidden bsaes_encrypt_128
.align 16
bsaes_encrypt_128:
.Lenc128_loop:
@@ -1095,6 +1097,7 @@ bsaes_encrypt_128:
.globl bsaes_dec_key_convert
.type bsaes_dec_key_convert,\@function,2
+.hidden bsaes_dec_key_convert
.align 16
bsaes_dec_key_convert:
mov 240($inp),%r10d # pass rounds
@@ -1109,6 +1112,7 @@ bsaes_dec_key_convert:
.globl bsaes_decrypt_128
.type bsaes_decrypt_128,\@function,4
+.hidden bsaes_decrypt_128
.align 16
bsaes_decrypt_128:
.Ldec128_loop:
@@ -1154,6 +1158,7 @@ if ($ecb) {
$code.=<<___;
.globl bsaes_ecb_encrypt_blocks
.type bsaes_ecb_encrypt_blocks,\@abi-omnipotent
+.hidden bsaes_ecb_encrypt_blocks
.align 16
bsaes_ecb_encrypt_blocks:
mov %rsp, %rax
@@ -1355,6 +1360,7 @@ $code.=<<___;
.globl bsaes_ecb_decrypt_blocks
.type bsaes_ecb_decrypt_blocks,\@abi-omnipotent
+.hidden bsaes_ecb_decrypt_blocks
.align 16
bsaes_ecb_decrypt_blocks:
mov %rsp, %rax
@@ -1560,6 +1566,7 @@ $code.=<<___;
.extern asm_AES_cbc_encrypt
.globl bsaes_cbc_encrypt
.type bsaes_cbc_encrypt,\@abi-omnipotent
+.hidden bsaes_cbc_encrypt
.align 16
bsaes_cbc_encrypt:
___
@@ -1847,6 +1854,7 @@ $code.=<<___;
.globl bsaes_ctr32_encrypt_blocks
.type bsaes_ctr32_encrypt_blocks,\@abi-omnipotent
+.hidden bsaes_ctr32_encrypt_blocks
.align 16
bsaes_ctr32_encrypt_blocks:
mov %rsp, %rax
@@ -2088,6 +2096,7 @@ $arg6=~s/d$//;
$code.=<<___;
.globl bsaes_xts_encrypt
.type bsaes_xts_encrypt,\@abi-omnipotent
+.hidden bsaes_xts_encrypt
.align 16
bsaes_xts_encrypt:
mov %rsp, %rax
@@ -2469,6 +2478,7 @@ $code.=<<___;
.globl bsaes_xts_decrypt
.type bsaes_xts_decrypt,\@abi-omnipotent
+.hidden bsaes_xts_decrypt
.align 16
bsaes_xts_decrypt:
mov %rsp, %rax
diff --git a/crypto/aes/asm/vpaes-x86_64.pl b/crypto/aes/asm/vpaes-x86_64.pl
index f2ef318f..a647b920 100644
--- a/crypto/aes/asm/vpaes-x86_64.pl
+++ b/crypto/aes/asm/vpaes-x86_64.pl
@@ -671,6 +671,7 @@ _vpaes_schedule_mangle:
#
.globl ${PREFIX}_set_encrypt_key
.type ${PREFIX}_set_encrypt_key,\@function,3
+.hidden ${PREFIX}_set_encrypt_key
.align 16
${PREFIX}_set_encrypt_key:
___
@@ -719,6 +720,7 @@ $code.=<<___;
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@function,3
+.hidden ${PREFIX}_set_decrypt_key
.align 16
${PREFIX}_set_decrypt_key:
___
@@ -772,6 +774,7 @@ $code.=<<___;
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@function,3
+.hidden ${PREFIX}_encrypt
.align 16
${PREFIX}_encrypt:
___
@@ -815,6 +818,7 @@ $code.=<<___;
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@function,3
+.hidden ${PREFIX}_decrypt
.align 16
${PREFIX}_decrypt:
___
@@ -864,6 +868,7 @@ my ($inp,$out,$len,$key,$ivp,$enc)=("%rdi","%rsi","%rdx","%rcx","%r8","%r9");
$code.=<<___;
.globl ${PREFIX}_cbc_encrypt
.type ${PREFIX}_cbc_encrypt,\@function,6
+.hidden ${PREFIX}_cbc_encrypt
.align 16
${PREFIX}_cbc_encrypt:
xchg $key,$len
diff --git a/crypto/bn/asm/rsaz-avx2.pl b/crypto/bn/asm/rsaz-avx2.pl
index 3b6ccf83..9a9223b3 100644
--- a/crypto/bn/asm/rsaz-avx2.pl
+++ b/crypto/bn/asm/rsaz-avx2.pl
@@ -159,6 +159,7 @@ $code.=<<___;
.globl rsaz_1024_sqr_avx2
.type rsaz_1024_sqr_avx2,\@function,5
+.hidden rsaz_1024_sqr_avx2
.align 64
rsaz_1024_sqr_avx2: # 702 cycles, 14% faster than rsaz_1024_mul_avx2
lea (%rsp), %rax
@@ -891,6 +892,7 @@ $bp="%r13"; # reassigned argument
$code.=<<___;
.globl rsaz_1024_mul_avx2
.type rsaz_1024_mul_avx2,\@function,5
+.hidden rsaz_1024_mul_avx2
.align 64
rsaz_1024_mul_avx2:
lea (%rsp), %rax
@@ -1484,6 +1486,7 @@ my @T = map("%r$_",(8..11));
$code.=<<___;
.globl rsaz_1024_red2norm_avx2
.type rsaz_1024_red2norm_avx2,\@abi-omnipotent
+.hidden rsaz_1024_red2norm_avx2
.align 32
rsaz_1024_red2norm_avx2:
sub \$-128,$inp # size optimization
@@ -1523,6 +1526,7 @@ $code.=<<___;
.globl rsaz_1024_norm2red_avx2
.type rsaz_1024_norm2red_avx2,\@abi-omnipotent
+.hidden rsaz_1024_norm2red_avx2
.align 32
rsaz_1024_norm2red_avx2:
sub \$-128,$out # size optimization
@@ -1565,6 +1569,7 @@ my ($out,$inp,$power) = $win64 ? ("%rcx","%rdx","%r8d") : ("%rdi","%rsi","%edx")
$code.=<<___;
.globl rsaz_1024_scatter5_avx2
.type rsaz_1024_scatter5_avx2,\@abi-omnipotent
+.hidden rsaz_1024_scatter5_avx2
.align 32
rsaz_1024_scatter5_avx2:
vzeroupper
@@ -1590,6 +1595,7 @@ rsaz_1024_scatter5_avx2:
.globl rsaz_1024_gather5_avx2
.type rsaz_1024_gather5_avx2,\@abi-omnipotent
+.hidden rsaz_1024_gather5_avx2
.align 32
rsaz_1024_gather5_avx2:
___
@@ -1684,6 +1690,7 @@ $code.=<<___;
.extern OPENSSL_ia32cap_P
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
+.hidden rsaz_avx2_eligible
.align 32
rsaz_avx2_eligible:
mov OPENSSL_ia32cap_P+8(%rip),%eax
@@ -1871,6 +1878,7 @@ print <<___; # assembler is too old
.globl rsaz_avx2_eligible
.type rsaz_avx2_eligible,\@abi-omnipotent
+.hidden rsaz_avx2_eligible
rsaz_avx2_eligible:
xor %eax,%eax
ret
@@ -1882,6 +1890,12 @@ rsaz_avx2_eligible:
.globl rsaz_1024_red2norm_avx2
.globl rsaz_1024_scatter5_avx2
.globl rsaz_1024_gather5_avx2
+.hidden rsaz_1024_sqr_avx2
+.hidden rsaz_1024_mul_avx2
+.hidden rsaz_1024_norm2red_avx2
+.hidden rsaz_1024_red2norm_avx2
+.hidden rsaz_1024_scatter5_avx2
+.hidden rsaz_1024_gather5_avx2
.type rsaz_1024_sqr_avx2,\@abi-omnipotent
rsaz_1024_sqr_avx2:
rsaz_1024_mul_avx2:
diff --git a/crypto/bn/asm/x86_64-mont.pl b/crypto/bn/asm/x86_64-mont.pl
index 39476ab0..38af80a8 100644
--- a/crypto/bn/asm/x86_64-mont.pl
+++ b/crypto/bn/asm/x86_64-mont.pl
@@ -90,6 +90,7 @@ $code=<<___;
.globl bn_mul_mont
.type bn_mul_mont,\@function,6
+.hidden bn_mul_mont
.align 16
bn_mul_mont:
test \$3,${num}d
diff --git a/crypto/cpu-x86_64-asm.pl b/crypto/cpu-x86_64-asm.pl
index 59cfd184..9ba5c84f 100644
--- a/crypto/cpu-x86_64-asm.pl
+++ b/crypto/cpu-x86_64-asm.pl
@@ -22,6 +22,7 @@ print<<___;
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,\@function,1
+.hidden OPENSSL_ia32_cpuid
.align 16
OPENSSL_ia32_cpuid:
# On Windows, $arg1 is rcx, but that will be clobbered. So make Windows
diff --git a/crypto/md5/asm/md5-x86_64.pl b/crypto/md5/asm/md5-x86_64.pl
index 77a6e01d..45f23c09 100644
--- a/crypto/md5/asm/md5-x86_64.pl
+++ b/crypto/md5/asm/md5-x86_64.pl
@@ -129,6 +129,7 @@ $code .= <<EOF;
.globl md5_block_asm_data_order
.type md5_block_asm_data_order,\@function,3
+.hidden md5_block_asm_data_order
md5_block_asm_data_order:
push %rbp
push %rbx
diff --git a/crypto/modes/asm/aesni-gcm-x86_64.pl b/crypto/modes/asm/aesni-gcm-x86_64.pl
index 7e4e04ea..f4ff1f50 100644
--- a/crypto/modes/asm/aesni-gcm-x86_64.pl
+++ b/crypto/modes/asm/aesni-gcm-x86_64.pl
@@ -397,6 +397,7 @@ ___
$code.=<<___;
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,\@function,6
+.hidden aesni_gcm_decrypt
.align 32
aesni_gcm_decrypt:
xor $ret,$ret
@@ -607,6 +608,7 @@ _aesni_ctr32_6x:
.globl aesni_gcm_encrypt
.type aesni_gcm_encrypt,\@function,6
+.hidden aesni_gcm_encrypt
.align 32
aesni_gcm_encrypt:
xor $ret,$ret
@@ -1036,6 +1038,7 @@ $code=<<___; # assembler is too old
.globl aesni_gcm_encrypt
.type aesni_gcm_encrypt,\@abi-omnipotent
+.hidden aesni_gcm_encrypt
aesni_gcm_encrypt:
xor %eax,%eax
ret
@@ -1043,6 +1046,7 @@ aesni_gcm_encrypt:
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,\@abi-omnipotent
+.hidden aesni_gcm_decrypt
aesni_gcm_decrypt:
xor %eax,%eax
ret
diff --git a/crypto/modes/asm/ghash-x86_64.pl b/crypto/modes/asm/ghash-x86_64.pl
index 6e656ca1..aacce2de 100644
--- a/crypto/modes/asm/ghash-x86_64.pl
+++ b/crypto/modes/asm/ghash-x86_64.pl
@@ -225,6 +225,7 @@ $code=<<___;
.globl gcm_gmult_4bit
.type gcm_gmult_4bit,\@function,2
+.hidden gcm_gmult_4bit
.align 16
gcm_gmult_4bit:
push %rbx
@@ -255,6 +256,7 @@ $rem_8bit=$rem_4bit;
$code.=<<___;
.globl gcm_ghash_4bit
.type gcm_ghash_4bit,\@function,4
+.hidden gcm_ghash_4bit
.align 16
gcm_ghash_4bit:
push %rbx
@@ -482,6 +484,7 @@ ___
$code.=<<___;
.globl gcm_init_clmul
.type gcm_init_clmul,\@abi-omnipotent
+.hidden gcm_init_clmul
.align 16
gcm_init_clmul:
.L_init_clmul:
@@ -562,6 +565,7 @@ ___
$code.=<<___;
.globl gcm_gmult_clmul
.type gcm_gmult_clmul,\@abi-omnipotent
+.hidden gcm_gmult_clmul
.align 16
gcm_gmult_clmul:
.L_gmult_clmul:
@@ -611,6 +615,7 @@ ___
$code.=<<___;
.globl gcm_ghash_clmul
.type gcm_ghash_clmul,\@abi-omnipotent
+.hidden gcm_ghash_clmul
.align 32
gcm_ghash_clmul:
.L_ghash_clmul:
@@ -967,6 +972,7 @@ ___
$code.=<<___;
.globl gcm_init_avx
.type gcm_init_avx,\@abi-omnipotent
+.hidden gcm_init_avx
.align 32
gcm_init_avx:
___
@@ -1109,6 +1115,7 @@ ___
$code.=<<___;
.globl gcm_gmult_avx
.type gcm_gmult_avx,\@abi-omnipotent
+.hidden gcm_gmult_avx
.align 32
gcm_gmult_avx:
jmp .L_gmult_clmul
@@ -1118,6 +1125,7 @@ ___
$code.=<<___;
.globl gcm_ghash_avx
.type gcm_ghash_avx,\@abi-omnipotent
+.hidden gcm_ghash_avx
.align 32
gcm_ghash_avx:
___
diff --git a/crypto/rc4/asm/rc4-md5-x86_64.pl b/crypto/rc4/asm/rc4-md5-x86_64.pl
index 272fa91e..8ebf4051 100644
--- a/crypto/rc4/asm/rc4-md5-x86_64.pl
+++ b/crypto/rc4/asm/rc4-md5-x86_64.pl
@@ -110,6 +110,7 @@ $code.=<<___;
.globl $func
.type $func,\@function,$nargs
+.hidden $func
$func:
cmp \$0,$len
je .Labort
diff --git a/crypto/rc4/asm/rc4-x86_64.pl b/crypto/rc4/asm/rc4-x86_64.pl
index 2c52ac08..14e4da1b 100644
--- a/crypto/rc4/asm/rc4-x86_64.pl
+++ b/crypto/rc4/asm/rc4-x86_64.pl
@@ -127,6 +127,7 @@ $code=<<___;
.globl asm_RC4
.type asm_RC4,\@function,4
+.hidden asm_RC4
.align 16
asm_RC4:
or $len,$len
@@ -433,6 +434,7 @@ $ido="%r9";
$code.=<<___;
.globl asm_RC4_set_key
.type asm_RC4_set_key,\@function,3
+.hidden asm_RC4_set_key
.align 16
asm_RC4_set_key:
lea 8($dat),$dat
@@ -502,32 +504,6 @@ asm_RC4_set_key:
mov %eax,-4($dat)
ret
.size asm_RC4_set_key,.-asm_RC4_set_key
-
-.globl RC4_options
-.type RC4_options,\@abi-omnipotent
-.align 16
-RC4_options:
- lea .Lopts(%rip),%rax
- mov OPENSSL_ia32cap_P(%rip),%rdx
- mov (%rdx),%edx
- bt \$20,%edx
- jc .L8xchar
- bt \$30,%edx
- jnc .Ldone
- add \$25,%rax
- ret
-.L8xchar:
- add \$12,%rax
-.Ldone:
- ret
-.align 64
-.Lopts:
-.asciz "rc4(8x,int)"
-.asciz "rc4(8x,char)"
-.asciz "rc4(16x,int)"
-.asciz "RC4 for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
-.align 64
-.size RC4_options,.-RC4_options
___
# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
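As the commit message notes, RC4_options may eventually be reinstated in rc4.c. Below is a minimal C sketch of such a replacement, reconstructed from the bit tests and string offsets in the assembly deleted above; the <openssl/cpu.h> include and array-indexed OPENSSL_ia32cap_P access are assumptions, not code from this commit:

#include <stdint.h>
#include <openssl/cpu.h>  /* assumed home of OPENSSL_ia32cap_P */

const char *RC4_options(void) {
  uint32_t cap = OPENSSL_ia32cap_P[0];
  if (cap & (1u << 20)) {
    return "rc4(8x,char)";  /* bt $20 taken: base string + 12 */
  }
  if (cap & (1u << 30)) {
    return "rc4(16x,int)";  /* bt $30 taken: base string + 25 */
  }
  return "rc4(8x,int)";     /* default: the .Lopts base string */
}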
diff --git a/crypto/sha/asm/sha1-armv8.pl b/crypto/sha/asm/sha1-armv8.pl
index deb1238d..a8efe4ff 100644
--- a/crypto/sha/asm/sha1-armv8.pl
+++ b/crypto/sha/asm/sha1-armv8.pl
@@ -156,6 +156,7 @@ $code.=<<___;
.globl sha1_block_data_order
.type sha1_block_data_order,%function
+.hidden sha1_block_data_order
.align 6
sha1_block_data_order:
ldr x16,.LOPENSSL_armcap_P
diff --git a/crypto/sha/asm/sha512-armv8.pl b/crypto/sha/asm/sha512-armv8.pl
index 5a9c8129..570b0843 100644
--- a/crypto/sha/asm/sha512-armv8.pl
+++ b/crypto/sha/asm/sha512-armv8.pl
@@ -154,6 +154,7 @@ $code.=<<___;
.globl $func
.type $func,%function
+.hidden $func
.align 6
$func:
___
diff --git a/crypto/sha/asm/sha512-x86_64.pl b/crypto/sha/asm/sha512-x86_64.pl
index 6660a88b..93f0c9c2 100644
--- a/crypto/sha/asm/sha512-x86_64.pl
+++ b/crypto/sha/asm/sha512-x86_64.pl
@@ -258,6 +258,7 @@ $code=<<___;
.extern OPENSSL_ia32cap_P
.globl $func
.type $func,\@function,3
+.hidden $func
.align 16
$func:
___