From 97999919bbe85ab213d283e18e597e028f8685d1 Mon Sep 17 00:00:00 2001
From: Adam Langley <agl@google.com>
Date: Thu, 19 Feb 2015 17:48:29 -0800
Subject: Hide all asm symbols.

We are leaking asm symbols in Android builds because the asm code
isn't affected by -fvisibility=hidden. This change hides all asm
symbols.

This assumes that no asm symbols are public API, and that should be
true. Some points to note:

In crypto/rc4/asm/rc4-md5-x86_64.pl there are |RC4_set_key| and
|RC4_options| functions which aren't getting marked as hidden. That's
because those functions are never actually generated. (I'm just trying
to minimise drift with upstream here.)

In crypto/rc4/asm/rc4-x86_64.pl there's |RC4_options|, which is
"public" API except that we've never had it in the header files. So
I've just deleted it. Since we have an internal caller, we'll probably
have to put it back in the future, but it can just be done in rc4.c to
save problems.

BUG=448386

Change-Id: I3846617a0e3d73ec9e5ec3638a53364adbbc6260
Reviewed-on: https://boringssl-review.googlesource.com/3520
Reviewed-by: David Benjamin <davidben@chromium.org>
Reviewed-by: Adam Langley <agl@google.com>
---
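The change itself is mechanical: next to each .globl that a perlasm
file emits, one .hidden directive is added. As a minimal sketch of
that pattern, standalone rather than taken from the tree (the script
name and symbol below are invented for illustration), a perlasm-style
script emitting a hidden function looks like:

#!/usr/bin/env perl
# toy-hidden.pl: emit an asm function that other objects in the
# library can still call, but that is marked STV_HIDDEN so it never
# appears in the shared object's dynamic symbol table.
my $code = <<___;
.text
.globl	toy_asm_nop
.type	toy_asm_nop,\@function
.hidden	toy_asm_nop
.align	16
toy_asm_nop:
	xor	%eax,%eax
	ret
.size	toy_asm_nop,.-toy_asm_nop
___
print $code;

Since .hidden sets ELF visibility on the symbol itself, it gives
hand-written asm the same treatment that -fvisibility=hidden already
gives compiled C. Whether a symbol really stayed out of the dynamic
table can be checked on the built library, e.g. with
"nm -D libcrypto.so".
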
 crypto/aes/asm/aes-x86_64.pl         |  2 ++
 crypto/aes/asm/aesni-x86_64.pl       | 11 +++++++++++
 crypto/aes/asm/aesv8-armx.pl         |  5 +++++
 crypto/aes/asm/bsaes-x86_64.pl       | 10 ++++++++++
 crypto/aes/asm/vpaes-x86_64.pl       |  5 +++++
 crypto/bn/asm/rsaz-avx2.pl           | 14 ++++++++++++++
 crypto/bn/asm/x86_64-mont.pl         |  1 +
 crypto/cpu-x86_64-asm.pl             |  1 +
 crypto/md5/asm/md5-x86_64.pl         |  1 +
 crypto/modes/asm/aesni-gcm-x86_64.pl |  4 ++++
 crypto/modes/asm/ghash-x86_64.pl     |  8 ++++++++
 crypto/rc4/asm/rc4-md5-x86_64.pl     |  1 +
 crypto/rc4/asm/rc4-x86_64.pl         | 28 ++--------------------------
 crypto/sha/asm/sha1-armv8.pl         |  1 +
 crypto/sha/asm/sha512-armv8.pl       |  1 +
 crypto/sha/asm/sha512-x86_64.pl      |  1 +
 16 files changed, 68 insertions(+), 26 deletions(-)

(limited to 'crypto')

diff --git a/crypto/aes/asm/aes-x86_64.pl b/crypto/aes/asm/aes-x86_64.pl
index 4b6e1b44..f1afbfba 100644
--- a/crypto/aes/asm/aes-x86_64.pl
+++ b/crypto/aes/asm/aes-x86_64.pl
@@ -1283,6 +1283,7 @@ $code.=<<___;
 .align	16
 .globl asm_AES_set_encrypt_key
 .type asm_AES_set_encrypt_key,\@function,3
+.hidden asm_AES_set_encrypt_key
 asm_AES_set_encrypt_key:
 	push	%rbx
 	push	%rbp
@@ -1548,6 +1549,7 @@ $code.=<<___;
 .align	16
 .globl asm_AES_set_decrypt_key
 .type asm_AES_set_decrypt_key,\@function,3
+.hidden asm_AES_set_decrypt_key
 asm_AES_set_decrypt_key:
 	push	%rbx
 	push	%rbp
diff --git a/crypto/aes/asm/aesni-x86_64.pl b/crypto/aes/asm/aesni-x86_64.pl
index 5f617463..918e1258 100644
--- a/crypto/aes/asm/aesni-x86_64.pl
+++ b/crypto/aes/asm/aesni-x86_64.pl
@@ -256,6 +256,7 @@ ___
 $code.=<<___;
 .globl	${PREFIX}_encrypt
 .type	${PREFIX}_encrypt,\@abi-omnipotent
+.hidden	${PREFIX}_encrypt
 .align	16
 ${PREFIX}_encrypt:
 	movups	($inp),$inout0		# load input
@@ -269,6 +270,7 @@ $code.=<<___;
 
 .globl	${PREFIX}_decrypt
 .type	${PREFIX}_decrypt,\@abi-omnipotent
+.hidden	${PREFIX}_decrypt
 .align	16
 ${PREFIX}_decrypt:
 	movups	($inp),$inout0		# load input
@@ -582,6 +584,7 @@ if ($PREFIX eq "aesni") {
 $code.=<<___;
 .globl	aesni_ecb_encrypt
 .type	aesni_ecb_encrypt,\@function,5
+.hidden	aesni_ecb_encrypt
 .align	16
 aesni_ecb_encrypt:
 ___
@@ -906,6 +909,7 @@ my $bswap_mask="%xmm7";
 $code.=<<___;
 .globl	aesni_ccm64_encrypt_blocks
 .type	aesni_ccm64_encrypt_blocks,\@function,6
+.hidden	aesni_ccm64_encrypt_blocks
 .align	16
 aesni_ccm64_encrypt_blocks:
 ___
@@ -986,6 +990,7 @@ ___
 $code.=<<___;
 .globl	aesni_ccm64_decrypt_blocks
 .type	aesni_ccm64_decrypt_blocks,\@function,6
+.hidden	aesni_ccm64_decrypt_blocks
 .align	16
 aesni_ccm64_decrypt_blocks:
 ___
@@ -1100,6 +1105,7 @@ my $frame_size = 0x80 + ($win64?160:0);
 $code.=<<___;
 .globl	aesni_ctr32_encrypt_blocks
 .type	aesni_ctr32_encrypt_blocks,\@function,5
+.hidden	aesni_ctr32_encrypt_blocks
 .align	16
 aesni_ctr32_encrypt_blocks:
 	lea	(%rsp),%rax
@@ -1611,6 +1617,7 @@ my $frame_size = 0x70 + ($win64?160:0);
 $code.=<<___;
 .globl	aesni_xts_encrypt
 .type	aesni_xts_encrypt,\@function,6
+.hidden	aesni_xts_encrypt
 .align	16
 aesni_xts_encrypt:
 	lea	(%rsp),%rax
@@ -2045,6 +2052,7 @@ ___
 $code.=<<___;
 .globl	aesni_xts_decrypt
 .type	aesni_xts_decrypt,\@function,6
+.hidden	aesni_xts_decrypt
 .align	16
 aesni_xts_decrypt:
 	lea	(%rsp),%rax
@@ -2516,6 +2524,7 @@ my $inp_=$key_;
 $code.=<<___;
 .globl	${PREFIX}_cbc_encrypt
 .type	${PREFIX}_cbc_encrypt,\@function,6
+.hidden	${PREFIX}_cbc_encrypt
 .align	16
 ${PREFIX}_cbc_encrypt:
 	test	$len,$len		# check length
@@ -2973,6 +2982,7 @@ ___
 $code.=<<___;
 .globl	${PREFIX}_set_decrypt_key
 .type	${PREFIX}_set_decrypt_key,\@abi-omnipotent
+.hidden	${PREFIX}_set_decrypt_key
 .align	16
 ${PREFIX}_set_decrypt_key:
 	.byte	0x48,0x83,0xEC,0x08	# sub rsp,8
@@ -3023,6 +3033,7 @@ ___
 $code.=<<___;
 .globl	${PREFIX}_set_encrypt_key
 .type	${PREFIX}_set_encrypt_key,\@abi-omnipotent
+.hidden	${PREFIX}_set_encrypt_key
 .align	16
 ${PREFIX}_set_encrypt_key:
 __aesni_set_encrypt_key:
diff --git a/crypto/aes/asm/aesv8-armx.pl b/crypto/aes/asm/aesv8-armx.pl
index 1e93f868..1e3d6626 100644
--- a/crypto/aes/asm/aesv8-armx.pl
+++ b/crypto/aes/asm/aesv8-armx.pl
@@ -63,6 +63,7 @@ rcon:
 
 .globl	${prefix}_set_encrypt_key
 .type	${prefix}_set_encrypt_key,%function
+.hidden	${prefix}_set_encrypt_key
 .align	5
 ${prefix}_set_encrypt_key:
 .Lenc_key:
@@ -234,6 +235,7 @@ $code.=<<___;
 
 .globl	${prefix}_set_decrypt_key
 .type	${prefix}_set_decrypt_key,%function
+.hidden	${prefix}_set_decrypt_key
 .align	5
 ${prefix}_set_decrypt_key:
 ___
@@ -298,6 +300,7 @@ my ($rndkey0,$rndkey1,$inout)=map("q$_",(0..3));
 $code.=<<___;
 .globl	${prefix}_${dir}crypt
 .type	${prefix}_${dir}crypt,%function
+.hidden	${prefix}_${dir}crypt
 .align	5
 ${prefix}_${dir}crypt:
 	ldr	$rounds,[$key,#240]
@@ -342,6 +345,7 @@ my ($dat,$tmp,$rndzero_n_last)=($dat0,$tmp0,$tmp1);
 $code.=<<___;
 .globl	${prefix}_cbc_encrypt
 .type	${prefix}_cbc_encrypt,%function
+.hidden	${prefix}_cbc_encrypt
 .align	5
 ${prefix}_cbc_encrypt:
 ___
@@ -649,6 +653,7 @@ my ($dat,$tmp)=($dat0,$tmp0);
 $code.=<<___;
 .globl	${prefix}_ctr32_encrypt_blocks
 .type	${prefix}_ctr32_encrypt_blocks,%function
+.hidden	${prefix}_ctr32_encrypt_blocks
 .align	5
 ${prefix}_ctr32_encrypt_blocks:
 ___
diff --git a/crypto/aes/asm/bsaes-x86_64.pl b/crypto/aes/asm/bsaes-x86_64.pl
index 3f7d33c4..8c6e66a4 100644
--- a/crypto/aes/asm/bsaes-x86_64.pl
+++ b/crypto/aes/asm/bsaes-x86_64.pl
@@ -1049,6 +1049,7 @@ if (0 && !$win64) {	# following four functions are unsupported interface
 $code.=<<___;
 .globl	bsaes_enc_key_convert
 .type	bsaes_enc_key_convert,\@function,2
+.hidden	bsaes_enc_key_convert
 .align	16
 bsaes_enc_key_convert:
 	mov	240($inp),%r10d		# pass rounds
@@ -1062,6 +1063,7 @@ bsaes_enc_key_convert:
 
 .globl	bsaes_encrypt_128
 .type	bsaes_encrypt_128,\@function,4
+.hidden	bsaes_encrypt_128
 .align	16
 bsaes_encrypt_128:
 .Lenc128_loop:
@@ -1095,6 +1097,7 @@ bsaes_encrypt_128:
 
 .globl	bsaes_dec_key_convert
 .type	bsaes_dec_key_convert,\@function,2
+.hidden	bsaes_dec_key_convert
 .align	16
 bsaes_dec_key_convert:
 	mov	240($inp),%r10d		# pass rounds
@@ -1109,6 +1112,7 @@ bsaes_dec_key_convert:
 
 .globl	bsaes_decrypt_128
 .type	bsaes_decrypt_128,\@function,4
+.hidden	bsaes_decrypt_128
 .align	16
 bsaes_decrypt_128:
 .Ldec128_loop:
@@ -1154,6 +1158,7 @@ if ($ecb) {
 $code.=<<___;
 .globl	bsaes_ecb_encrypt_blocks
 .type	bsaes_ecb_encrypt_blocks,\@abi-omnipotent
+.hidden	bsaes_ecb_encrypt_blocks
 .align	16
 bsaes_ecb_encrypt_blocks:
 	mov	%rsp, %rax
@@ -1355,6 +1360,7 @@ $code.=<<___;
 
 .globl	bsaes_ecb_decrypt_blocks
 .type	bsaes_ecb_decrypt_blocks,\@abi-omnipotent
+.hidden	bsaes_ecb_decrypt_blocks
 .align	16
 bsaes_ecb_decrypt_blocks:
 	mov	%rsp, %rax
@@ -1560,6 +1566,7 @@ $code.=<<___;
 .extern	asm_AES_cbc_encrypt
 .globl	bsaes_cbc_encrypt
 .type	bsaes_cbc_encrypt,\@abi-omnipotent
+.hidden	bsaes_cbc_encrypt
 .align	16
 bsaes_cbc_encrypt:
 ___
@@ -1847,6 +1854,7 @@ $code.=<<___;
 
 .globl	bsaes_ctr32_encrypt_blocks
 .type	bsaes_ctr32_encrypt_blocks,\@abi-omnipotent
+.hidden	bsaes_ctr32_encrypt_blocks
 .align	16
 bsaes_ctr32_encrypt_blocks:
 	mov	%rsp, %rax
@@ -2088,6 +2096,7 @@ $arg6=~s/d$//;
 $code.=<<___;
 .globl	bsaes_xts_encrypt
 .type	bsaes_xts_encrypt,\@abi-omnipotent
+.hidden	bsaes_xts_encrypt
 .align	16
 bsaes_xts_encrypt:
 	mov	%rsp, %rax
@@ -2469,6 +2478,7 @@ $code.=<<___;
 
 .globl	bsaes_xts_decrypt
 .type	bsaes_xts_decrypt,\@abi-omnipotent
+.hidden	bsaes_xts_decrypt
 .align	16
 bsaes_xts_decrypt:
 	mov	%rsp, %rax
diff --git a/crypto/aes/asm/vpaes-x86_64.pl b/crypto/aes/asm/vpaes-x86_64.pl
index f2ef318f..a647b920 100644
--- a/crypto/aes/asm/vpaes-x86_64.pl
+++ b/crypto/aes/asm/vpaes-x86_64.pl
@@ -671,6 +671,7 @@ _vpaes_schedule_mangle:
 #
 .globl	${PREFIX}_set_encrypt_key
 .type	${PREFIX}_set_encrypt_key,\@function,3
+.hidden	${PREFIX}_set_encrypt_key
 .align	16
 ${PREFIX}_set_encrypt_key:
 ___
@@ -719,6 +720,7 @@ $code.=<<___;
 
 .globl	${PREFIX}_set_decrypt_key
 .type	${PREFIX}_set_decrypt_key,\@function,3
+.hidden	${PREFIX}_set_decrypt_key
 .align	16
 ${PREFIX}_set_decrypt_key:
 ___
@@ -772,6 +774,7 @@ $code.=<<___;
 
 .globl	${PREFIX}_encrypt
 .type	${PREFIX}_encrypt,\@function,3
+.hidden	${PREFIX}_encrypt
 .align	16
 ${PREFIX}_encrypt:
 ___
@@ -815,6 +818,7 @@ $code.=<<___;
 
 .globl	${PREFIX}_decrypt
 .type	${PREFIX}_decrypt,\@function,3
+.hidden	${PREFIX}_decrypt
 .align	16
 ${PREFIX}_decrypt:
 ___
@@ -864,6 +868,7 @@ my ($inp,$out,$len,$key,$ivp,$enc)=("%rdi","%rsi","%rdx","%rcx","%r8","%r9");
 $code.=<<___;
 .globl	${PREFIX}_cbc_encrypt
 .type	${PREFIX}_cbc_encrypt,\@function,6
+.hidden	${PREFIX}_cbc_encrypt
 .align	16
 ${PREFIX}_cbc_encrypt:
 	xchg	$key,$len
diff --git a/crypto/bn/asm/rsaz-avx2.pl b/crypto/bn/asm/rsaz-avx2.pl
index 3b6ccf83..9a9223b3 100644
--- a/crypto/bn/asm/rsaz-avx2.pl
+++ b/crypto/bn/asm/rsaz-avx2.pl
@@ -159,6 +159,7 @@ $code.=<<___;
 
 .globl	rsaz_1024_sqr_avx2
 .type	rsaz_1024_sqr_avx2,\@function,5
+.hidden	rsaz_1024_sqr_avx2
 .align	64
 rsaz_1024_sqr_avx2:		# 702 cycles, 14% faster than rsaz_1024_mul_avx2
 	lea	(%rsp), %rax
@@ -891,6 +892,7 @@ $bp="%r13";	# reassigned argument
 $code.=<<___;
 .globl	rsaz_1024_mul_avx2
 .type	rsaz_1024_mul_avx2,\@function,5
+.hidden	rsaz_1024_mul_avx2
 .align	64
 rsaz_1024_mul_avx2:
 	lea	(%rsp), %rax
@@ -1484,6 +1486,7 @@ my @T = map("%r$_",(8..11));
 $code.=<<___;
 .globl	rsaz_1024_red2norm_avx2
 .type	rsaz_1024_red2norm_avx2,\@abi-omnipotent
+.hidden	rsaz_1024_red2norm_avx2
 .align	32
 rsaz_1024_red2norm_avx2:
 	sub	\$-128,$inp	# size optimization
@@ -1523,6 +1526,7 @@ $code.=<<___;
 
 .globl	rsaz_1024_norm2red_avx2
 .type	rsaz_1024_norm2red_avx2,\@abi-omnipotent
+.hidden	rsaz_1024_norm2red_avx2
 .align	32
 rsaz_1024_norm2red_avx2:
 	sub	\$-128,$out	# size optimization
@@ -1565,6 +1569,7 @@ my ($out,$inp,$power) = $win64 ? ("%rcx","%rdx","%r8d") : ("%rdi","%rsi","%edx")
 $code.=<<___;
 .globl	rsaz_1024_scatter5_avx2
 .type	rsaz_1024_scatter5_avx2,\@abi-omnipotent
+.hidden	rsaz_1024_scatter5_avx2
 .align	32
 rsaz_1024_scatter5_avx2:
 	vzeroupper
@@ -1590,6 +1595,7 @@ rsaz_1024_scatter5_avx2:
 
 .globl	rsaz_1024_gather5_avx2
 .type	rsaz_1024_gather5_avx2,\@abi-omnipotent
+.hidden	rsaz_1024_gather5_avx2
 .align	32
 rsaz_1024_gather5_avx2:
 ___
@@ -1684,6 +1690,7 @@ $code.=<<___;
 .extern	OPENSSL_ia32cap_P
 .globl	rsaz_avx2_eligible
 .type	rsaz_avx2_eligible,\@abi-omnipotent
+.hidden	rsaz_avx2_eligible
 .align	32
 rsaz_avx2_eligible:
 	mov	OPENSSL_ia32cap_P+8(%rip),%eax
@@ -1871,6 +1878,7 @@ print <<___;	# assembler is too old
 
 .globl	rsaz_avx2_eligible
 .type	rsaz_avx2_eligible,\@abi-omnipotent
+.hidden	rsaz_avx2_eligible
 rsaz_avx2_eligible:
 	xor	%eax,%eax
 	ret
@@ -1882,6 +1890,12 @@ rsaz_avx2_eligible:
 .globl	rsaz_1024_sqr_avx2
 .globl	rsaz_1024_mul_avx2
 .globl	rsaz_1024_norm2red_avx2
 .globl	rsaz_1024_red2norm_avx2
 .globl	rsaz_1024_scatter5_avx2
 .globl	rsaz_1024_gather5_avx2
+.hidden	rsaz_1024_sqr_avx2
+.hidden	rsaz_1024_mul_avx2
+.hidden	rsaz_1024_norm2red_avx2
+.hidden	rsaz_1024_red2norm_avx2
+.hidden	rsaz_1024_scatter5_avx2
+.hidden	rsaz_1024_gather5_avx2
 .type	rsaz_1024_sqr_avx2,\@abi-omnipotent
 rsaz_1024_sqr_avx2:
 rsaz_1024_mul_avx2:
diff --git a/crypto/bn/asm/x86_64-mont.pl b/crypto/bn/asm/x86_64-mont.pl
index 39476ab0..38af80a8 100644
--- a/crypto/bn/asm/x86_64-mont.pl
+++ b/crypto/bn/asm/x86_64-mont.pl
@@ -90,6 +90,7 @@ $code=<<___;
 
 .globl	bn_mul_mont
 .type	bn_mul_mont,\@function,6
+.hidden	bn_mul_mont
 .align	16
 bn_mul_mont:
 	test	\$3,${num}d
diff --git a/crypto/cpu-x86_64-asm.pl b/crypto/cpu-x86_64-asm.pl
index 59cfd184..9ba5c84f 100644
--- a/crypto/cpu-x86_64-asm.pl
+++ b/crypto/cpu-x86_64-asm.pl
@@ -22,6 +22,7 @@ print<<___;
 
 .globl	OPENSSL_ia32_cpuid
 .type	OPENSSL_ia32_cpuid,\@function,1
+.hidden	OPENSSL_ia32_cpuid
 .align	16
 OPENSSL_ia32_cpuid:
 	# On Windows, $arg1 is rcx, but that will be clobbered. So make Windows
diff --git a/crypto/md5/asm/md5-x86_64.pl b/crypto/md5/asm/md5-x86_64.pl
index 77a6e01d..45f23c09 100644
--- a/crypto/md5/asm/md5-x86_64.pl
+++ b/crypto/md5/asm/md5-x86_64.pl
@@ -129,6 +129,7 @@ $code .= <<EOF;
[hunk bodies lost in extraction: per the diffstat above, this file,
crypto/modes/asm/aesni-gcm-x86_64.pl, crypto/modes/asm/ghash-x86_64.pl
and crypto/rc4/asm/rc4-md5-x86_64.pl gain .hidden directives, and
crypto/rc4/asm/rc4-x86_64.pl additionally deletes |RC4_options|; only
the tail of that deletion survives:]
-.asciz	"RC4 for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
-.align	64
-.size	RC4_options,.-RC4_options
 ___
 
 # EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
diff --git a/crypto/sha/asm/sha1-armv8.pl b/crypto/sha/asm/sha1-armv8.pl
index deb1238d..a8efe4ff 100644
--- a/crypto/sha/asm/sha1-armv8.pl
+++ b/crypto/sha/asm/sha1-armv8.pl
@@ -156,6 +156,7 @@ $code.=<<___;
 
 .globl	sha1_block_data_order
 .type	sha1_block_data_order,%function
+.hidden	sha1_block_data_order
 .align	6
 sha1_block_data_order:
 	ldr	x16,.LOPENSSL_armcap_P
diff --git a/crypto/sha/asm/sha512-armv8.pl b/crypto/sha/asm/sha512-armv8.pl
index 5a9c8129..570b0843 100644
--- a/crypto/sha/asm/sha512-armv8.pl
+++ b/crypto/sha/asm/sha512-armv8.pl
@@ -154,6 +154,7 @@ $code.=<<___;
 
 .globl	$func
 .type	$func,%function
+.hidden	$func
 .align	6
 $func:
 ___
diff --git a/crypto/sha/asm/sha512-x86_64.pl b/crypto/sha/asm/sha512-x86_64.pl
index 6660a88b..93f0c9c2 100644
--- a/crypto/sha/asm/sha512-x86_64.pl
+++ b/crypto/sha/asm/sha512-x86_64.pl
@@ -258,6 +258,7 @@ $code=<<___;
 .extern	OPENSSL_ia32cap_P
 .globl	$func
 .type	$func,\@function,3
+.hidden	$func
 .align	16
 $func:
 ___
-- 
cgit v1.2.3