From ca7b1affc72ea13d603d710a9c0356fc3859bf98 Mon Sep 17 00:00:00 2001 From: Peter Avalos Date: Tue, 27 Mar 2012 17:04:42 -0700 Subject: [PATCH] Update files for OpenSSL-1.0.1 update. This commit changes the order of sources in Makefiles to better conform to what's in the OpenSSL distribution. This should make it easier to update the build system for future imports. --- Makefile_upgrade.inc | 3 + crypto/openssl/crypto/bn/asm/x86_64-mont5.pl | 0 crypto/openssl/ssl/srtp.h | 1 - secure/lib/libcrypto/Makefile | 382 +- secure/lib/libcrypto/Makefile.inc | 15 +- secure/lib/libcrypto/asm/Makefile | 54 +- secure/lib/libcrypto/asm/aes-586.s | 22 +- secure/lib/libcrypto/asm/aes-x86_64.s | 25 +- secure/lib/libcrypto/asm/aesni-sha1-x86_64.s | 1396 +++++++ secure/lib/libcrypto/asm/aesni-x86.s | 2143 ++++++++++ secure/lib/libcrypto/asm/aesni-x86_64.s | 2535 ++++++++++++ secure/lib/libcrypto/asm/bsaes-x86_64.s | 2576 ++++++++++++ secure/lib/libcrypto/asm/cmll-x86.s | 10 +- secure/lib/libcrypto/asm/ghash-x86.s | 1269 ++++++ secure/lib/libcrypto/asm/ghash-x86_64.s | 1026 +++++ secure/lib/libcrypto/asm/modexp512-x86_64.s | 1773 +++++++++ secure/lib/libcrypto/asm/rc4-586.s | 218 +- secure/lib/libcrypto/asm/rc4-md5-x86_64.s | 1259 ++++++ secure/lib/libcrypto/asm/rc4-x86_64.s | 728 ++-- secure/lib/libcrypto/asm/sha1-586.s | 2528 ++++++++---- secure/lib/libcrypto/asm/sha1-x86_64.s | 3526 +++++++++++------ secure/lib/libcrypto/asm/sha256-586.s | 65 +- secure/lib/libcrypto/asm/sha256-x86_64.s | 2150 +++++----- secure/lib/libcrypto/asm/sha512-x86_64.s | 2146 +++++----- secure/lib/libcrypto/asm/vpaes-x86.s | 659 +++ secure/lib/libcrypto/asm/vpaes-x86_64.s | 826 ++++ secure/lib/libcrypto/asm/x86-gf2m.s | 343 ++ secure/lib/libcrypto/asm/x86_64-gf2m.s | 291 ++ secure/lib/libcrypto/asm/x86_64-mont.s | 1311 +++++- secure/lib/libcrypto/asm/x86_64-mont5.s | 784 ++++ secure/lib/libcrypto/asm/x86_64cpuid.s | 69 +- secure/lib/libcrypto/asm/x86cpuid.s | 142 +- secure/lib/libcrypto/man/ASN1_OBJECT_new.3 | 2 +- secure/lib/libcrypto/man/ASN1_STRING_length.3 | 2 +- secure/lib/libcrypto/man/ASN1_STRING_new.3 | 2 +- .../lib/libcrypto/man/ASN1_STRING_print_ex.3 | 2 +- .../lib/libcrypto/man/ASN1_generate_nconf.3 | 2 +- secure/lib/libcrypto/man/BIO_ctrl.3 | 2 +- secure/lib/libcrypto/man/BIO_f_base64.3 | 2 +- secure/lib/libcrypto/man/BIO_f_buffer.3 | 2 +- secure/lib/libcrypto/man/BIO_f_cipher.3 | 2 +- secure/lib/libcrypto/man/BIO_f_md.3 | 2 +- secure/lib/libcrypto/man/BIO_f_null.3 | 2 +- secure/lib/libcrypto/man/BIO_f_ssl.3 | 2 +- secure/lib/libcrypto/man/BIO_find_type.3 | 2 +- secure/lib/libcrypto/man/BIO_new.3 | 2 +- secure/lib/libcrypto/man/BIO_new_CMS.3 | 2 +- secure/lib/libcrypto/man/BIO_push.3 | 2 +- secure/lib/libcrypto/man/BIO_read.3 | 2 +- secure/lib/libcrypto/man/BIO_s_accept.3 | 2 +- secure/lib/libcrypto/man/BIO_s_bio.3 | 2 +- secure/lib/libcrypto/man/BIO_s_connect.3 | 2 +- secure/lib/libcrypto/man/BIO_s_fd.3 | 2 +- secure/lib/libcrypto/man/BIO_s_file.3 | 2 +- secure/lib/libcrypto/man/BIO_s_mem.3 | 2 +- secure/lib/libcrypto/man/BIO_s_null.3 | 2 +- secure/lib/libcrypto/man/BIO_s_socket.3 | 2 +- secure/lib/libcrypto/man/BIO_set_callback.3 | 2 +- secure/lib/libcrypto/man/BIO_should_retry.3 | 2 +- secure/lib/libcrypto/man/BN_BLINDING_new.3 | 2 +- secure/lib/libcrypto/man/BN_CTX_new.3 | 2 +- secure/lib/libcrypto/man/BN_CTX_start.3 | 2 +- secure/lib/libcrypto/man/BN_add.3 | 2 +- secure/lib/libcrypto/man/BN_add_word.3 | 2 +- secure/lib/libcrypto/man/BN_bn2bin.3 | 2 +- secure/lib/libcrypto/man/BN_cmp.3 | 2 +- 
secure/lib/libcrypto/man/BN_copy.3 | 2 +- secure/lib/libcrypto/man/BN_generate_prime.3 | 2 +- secure/lib/libcrypto/man/BN_mod_inverse.3 | 2 +- .../lib/libcrypto/man/BN_mod_mul_montgomery.3 | 2 +- .../lib/libcrypto/man/BN_mod_mul_reciprocal.3 | 2 +- secure/lib/libcrypto/man/BN_new.3 | 2 +- secure/lib/libcrypto/man/BN_num_bytes.3 | 2 +- secure/lib/libcrypto/man/BN_rand.3 | 2 +- secure/lib/libcrypto/man/BN_set_bit.3 | 2 +- secure/lib/libcrypto/man/BN_swap.3 | 2 +- secure/lib/libcrypto/man/BN_zero.3 | 2 +- secure/lib/libcrypto/man/CMS_add0_cert.3 | 2 +- .../libcrypto/man/CMS_add1_recipient_cert.3 | 2 +- secure/lib/libcrypto/man/CMS_compress.3 | 2 +- secure/lib/libcrypto/man/CMS_decrypt.3 | 2 +- secure/lib/libcrypto/man/CMS_encrypt.3 | 2 +- secure/lib/libcrypto/man/CMS_final.3 | 2 +- .../libcrypto/man/CMS_get0_RecipientInfos.3 | 2 +- .../lib/libcrypto/man/CMS_get0_SignerInfos.3 | 2 +- secure/lib/libcrypto/man/CMS_get0_type.3 | 2 +- .../libcrypto/man/CMS_get1_ReceiptRequest.3 | 2 +- secure/lib/libcrypto/man/CMS_sign.3 | 2 +- .../lib/libcrypto/man/CMS_sign_add1_signer.3 | 2 +- secure/lib/libcrypto/man/CMS_sign_receipt.3 | 2 +- secure/lib/libcrypto/man/CMS_uncompress.3 | 2 +- secure/lib/libcrypto/man/CMS_verify.3 | 2 +- secure/lib/libcrypto/man/CMS_verify_receipt.3 | 2 +- secure/lib/libcrypto/man/CONF_modules_free.3 | 2 +- .../libcrypto/man/CONF_modules_load_file.3 | 2 +- secure/lib/libcrypto/man/CRYPTO_set_ex_data.3 | 2 +- secure/lib/libcrypto/man/DH_generate_key.3 | 2 +- .../libcrypto/man/DH_generate_parameters.3 | 2 +- .../lib/libcrypto/man/DH_get_ex_new_index.3 | 2 +- secure/lib/libcrypto/man/DH_new.3 | 2 +- secure/lib/libcrypto/man/DH_set_method.3 | 2 +- secure/lib/libcrypto/man/DH_size.3 | 2 +- secure/lib/libcrypto/man/DSA_SIG_new.3 | 2 +- secure/lib/libcrypto/man/DSA_do_sign.3 | 2 +- secure/lib/libcrypto/man/DSA_dup_DH.3 | 2 +- secure/lib/libcrypto/man/DSA_generate_key.3 | 2 +- .../libcrypto/man/DSA_generate_parameters.3 | 2 +- .../lib/libcrypto/man/DSA_get_ex_new_index.3 | 2 +- secure/lib/libcrypto/man/DSA_new.3 | 2 +- secure/lib/libcrypto/man/DSA_set_method.3 | 2 +- secure/lib/libcrypto/man/DSA_sign.3 | 2 +- secure/lib/libcrypto/man/DSA_size.3 | 2 +- secure/lib/libcrypto/man/ERR_GET_LIB.3 | 2 +- secure/lib/libcrypto/man/ERR_clear_error.3 | 2 +- secure/lib/libcrypto/man/ERR_error_string.3 | 2 +- secure/lib/libcrypto/man/ERR_get_error.3 | 2 +- .../libcrypto/man/ERR_load_crypto_strings.3 | 2 +- secure/lib/libcrypto/man/ERR_load_strings.3 | 2 +- secure/lib/libcrypto/man/ERR_print_errors.3 | 2 +- secure/lib/libcrypto/man/ERR_put_error.3 | 2 +- secure/lib/libcrypto/man/ERR_remove_state.3 | 2 +- secure/lib/libcrypto/man/ERR_set_mark.3 | 2 +- secure/lib/libcrypto/man/EVP_BytesToKey.3 | 2 +- secure/lib/libcrypto/man/EVP_DigestInit.3 | 2 +- secure/lib/libcrypto/man/EVP_DigestSignInit.3 | 2 +- .../lib/libcrypto/man/EVP_DigestVerifyInit.3 | 2 +- secure/lib/libcrypto/man/EVP_EncryptInit.3 | 2 +- secure/lib/libcrypto/man/EVP_OpenInit.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_CTX_ctrl.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_CTX_new.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_cmp.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_decrypt.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_derive.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_encrypt.3 | 2 +- .../man/EVP_PKEY_get_default_digest.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_keygen.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_new.3 | 2 +- .../libcrypto/man/EVP_PKEY_print_private.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_set1_RSA.3 | 2 +- 
secure/lib/libcrypto/man/EVP_PKEY_sign.3 | 2 +- secure/lib/libcrypto/man/EVP_PKEY_verify.3 | 2 +- .../libcrypto/man/EVP_PKEY_verifyrecover.3 | 2 +- secure/lib/libcrypto/man/EVP_SealInit.3 | 2 +- secure/lib/libcrypto/man/EVP_SignInit.3 | 2 +- secure/lib/libcrypto/man/EVP_VerifyInit.3 | 2 +- secure/lib/libcrypto/man/OBJ_nid2obj.3 | 2 +- secure/lib/libcrypto/man/OPENSSL_Applink.3 | 2 +- .../libcrypto/man/OPENSSL_VERSION_NUMBER.3 | 2 +- secure/lib/libcrypto/man/OPENSSL_config.3 | 2 +- secure/lib/libcrypto/man/OPENSSL_ia32cap.3 | 2 +- .../man/OPENSSL_load_builtin_modules.3 | 2 +- .../man/OpenSSL_add_all_algorithms.3 | 2 +- .../libcrypto/man/PEM_write_bio_CMS_stream.3 | 2 +- .../man/PEM_write_bio_PKCS7_stream.3 | 2 +- secure/lib/libcrypto/man/PKCS12_create.3 | 2 +- secure/lib/libcrypto/man/PKCS12_parse.3 | 2 +- secure/lib/libcrypto/man/PKCS7_decrypt.3 | 2 +- secure/lib/libcrypto/man/PKCS7_encrypt.3 | 2 +- secure/lib/libcrypto/man/PKCS7_sign.3 | 2 +- .../lib/libcrypto/man/PKCS7_sign_add_signer.3 | 2 +- secure/lib/libcrypto/man/PKCS7_verify.3 | 2 +- secure/lib/libcrypto/man/RAND_add.3 | 2 +- secure/lib/libcrypto/man/RAND_bytes.3 | 2 +- secure/lib/libcrypto/man/RAND_cleanup.3 | 2 +- secure/lib/libcrypto/man/RAND_egd.3 | 2 +- secure/lib/libcrypto/man/RAND_load_file.3 | 2 +- .../lib/libcrypto/man/RAND_set_rand_method.3 | 2 +- secure/lib/libcrypto/man/RSA_blinding_on.3 | 2 +- secure/lib/libcrypto/man/RSA_check_key.3 | 2 +- secure/lib/libcrypto/man/RSA_generate_key.3 | 2 +- .../lib/libcrypto/man/RSA_get_ex_new_index.3 | 2 +- secure/lib/libcrypto/man/RSA_new.3 | 2 +- .../man/RSA_padding_add_PKCS1_type_1.3 | 2 +- secure/lib/libcrypto/man/RSA_print.3 | 2 +- .../lib/libcrypto/man/RSA_private_encrypt.3 | 2 +- secure/lib/libcrypto/man/RSA_public_encrypt.3 | 2 +- secure/lib/libcrypto/man/RSA_set_method.3 | 2 +- secure/lib/libcrypto/man/RSA_sign.3 | 2 +- .../man/RSA_sign_ASN1_OCTET_STRING.3 | 2 +- secure/lib/libcrypto/man/RSA_size.3 | 2 +- secure/lib/libcrypto/man/SMIME_read_CMS.3 | 2 +- secure/lib/libcrypto/man/SMIME_read_PKCS7.3 | 2 +- secure/lib/libcrypto/man/SMIME_write_CMS.3 | 2 +- secure/lib/libcrypto/man/SMIME_write_PKCS7.3 | 2 +- .../man/X509_NAME_ENTRY_get_object.3 | 2 +- .../man/X509_NAME_add_entry_by_txt.3 | 2 +- .../man/X509_NAME_get_index_by_NID.3 | 2 +- secure/lib/libcrypto/man/X509_NAME_print_ex.3 | 2 +- .../libcrypto/man/X509_STORE_CTX_get_error.3 | 2 +- .../man/X509_STORE_CTX_get_ex_new_index.3 | 2 +- secure/lib/libcrypto/man/X509_STORE_CTX_new.3 | 2 +- .../man/X509_STORE_CTX_set_verify_cb.3 | 2 +- .../man/X509_STORE_set_verify_cb_func.3 | 2 +- .../man/X509_VERIFY_PARAM_set_flags.3 | 2 +- secure/lib/libcrypto/man/X509_new.3 | 2 +- secure/lib/libcrypto/man/X509_verify_cert.3 | 2 +- secure/lib/libcrypto/man/bio.3 | 2 +- secure/lib/libcrypto/man/blowfish.3 | 2 +- secure/lib/libcrypto/man/bn.3 | 2 +- secure/lib/libcrypto/man/bn_internal.3 | 2 +- secure/lib/libcrypto/man/buffer.3 | 2 +- secure/lib/libcrypto/man/crypto.3 | 2 +- secure/lib/libcrypto/man/d2i_ASN1_OBJECT.3 | 2 +- secure/lib/libcrypto/man/d2i_DHparams.3 | 2 +- secure/lib/libcrypto/man/d2i_DSAPublicKey.3 | 2 +- .../lib/libcrypto/man/d2i_PKCS8PrivateKey.3 | 2 +- secure/lib/libcrypto/man/d2i_RSAPublicKey.3 | 2 +- secure/lib/libcrypto/man/d2i_X509.3 | 2 +- secure/lib/libcrypto/man/d2i_X509_ALGOR.3 | 2 +- secure/lib/libcrypto/man/d2i_X509_CRL.3 | 2 +- secure/lib/libcrypto/man/d2i_X509_NAME.3 | 2 +- secure/lib/libcrypto/man/d2i_X509_REQ.3 | 2 +- secure/lib/libcrypto/man/d2i_X509_SIG.3 | 2 +- secure/lib/libcrypto/man/des.3 | 2 +- 
secure/lib/libcrypto/man/des_modes.7 | 2 +- secure/lib/libcrypto/man/dh.3 | 2 +- secure/lib/libcrypto/man/dsa.3 | 2 +- secure/lib/libcrypto/man/ecdsa.3 | 2 +- secure/lib/libcrypto/man/engine.3 | 2 +- secure/lib/libcrypto/man/err.3 | 2 +- secure/lib/libcrypto/man/evp.3 | 2 +- secure/lib/libcrypto/man/hmac.3 | 2 +- secure/lib/libcrypto/man/i2d_CMS_bio_stream.3 | 2 +- .../lib/libcrypto/man/i2d_PKCS7_bio_stream.3 | 2 +- secure/lib/libcrypto/man/lh_stats.3 | 2 +- secure/lib/libcrypto/man/lhash.3 | 2 +- secure/lib/libcrypto/man/md5.3 | 2 +- secure/lib/libcrypto/man/mdc2.3 | 2 +- secure/lib/libcrypto/man/pem.3 | 2 +- secure/lib/libcrypto/man/rand.3 | 2 +- secure/lib/libcrypto/man/rc4.3 | 2 +- secure/lib/libcrypto/man/ripemd.3 | 2 +- secure/lib/libcrypto/man/rsa.3 | 2 +- secure/lib/libcrypto/man/sha.3 | 2 +- secure/lib/libcrypto/man/threads.3 | 2 +- secure/lib/libcrypto/man/ui.3 | 2 +- secure/lib/libcrypto/man/ui_compat.3 | 2 +- secure/lib/libcrypto/man/x509.3 | 2 +- secure/lib/libcrypto/opensslconf-i386.h | 12 + secure/lib/libcrypto/opensslconf-x86_64.h | 12 + secure/lib/libssl/Makefile | 21 +- secure/lib/libssl/man/SSL_CIPHER_get_name.3 | 2 +- .../man/SSL_COMP_add_compression_method.3 | 2 +- .../libssl/man/SSL_CTX_add_extra_chain_cert.3 | 2 +- secure/lib/libssl/man/SSL_CTX_add_session.3 | 2 +- secure/lib/libssl/man/SSL_CTX_ctrl.3 | 2 +- .../lib/libssl/man/SSL_CTX_flush_sessions.3 | 2 +- secure/lib/libssl/man/SSL_CTX_free.3 | 2 +- .../lib/libssl/man/SSL_CTX_get_ex_new_index.3 | 2 +- .../lib/libssl/man/SSL_CTX_get_verify_mode.3 | 2 +- .../man/SSL_CTX_load_verify_locations.3 | 2 +- secure/lib/libssl/man/SSL_CTX_new.3 | 2 +- secure/lib/libssl/man/SSL_CTX_sess_number.3 | 2 +- .../libssl/man/SSL_CTX_sess_set_cache_size.3 | 2 +- .../lib/libssl/man/SSL_CTX_sess_set_get_cb.3 | 2 +- secure/lib/libssl/man/SSL_CTX_sessions.3 | 2 +- .../lib/libssl/man/SSL_CTX_set_cert_store.3 | 2 +- .../man/SSL_CTX_set_cert_verify_callback.3 | 2 +- .../lib/libssl/man/SSL_CTX_set_cipher_list.3 | 2 +- .../libssl/man/SSL_CTX_set_client_CA_list.3 | 2 +- .../libssl/man/SSL_CTX_set_client_cert_cb.3 | 2 +- .../man/SSL_CTX_set_default_passwd_cb.3 | 2 +- .../man/SSL_CTX_set_generate_session_id.3 | 2 +- .../libssl/man/SSL_CTX_set_info_callback.3 | 2 +- .../libssl/man/SSL_CTX_set_max_cert_list.3 | 2 +- secure/lib/libssl/man/SSL_CTX_set_mode.3 | 2 +- .../lib/libssl/man/SSL_CTX_set_msg_callback.3 | 2 +- secure/lib/libssl/man/SSL_CTX_set_options.3 | 2 +- .../man/SSL_CTX_set_psk_client_callback.3 | 2 +- .../libssl/man/SSL_CTX_set_quiet_shutdown.3 | 2 +- .../man/SSL_CTX_set_session_cache_mode.3 | 2 +- .../man/SSL_CTX_set_session_id_context.3 | 2 +- .../lib/libssl/man/SSL_CTX_set_ssl_version.3 | 2 +- secure/lib/libssl/man/SSL_CTX_set_timeout.3 | 2 +- .../libssl/man/SSL_CTX_set_tmp_dh_callback.3 | 2 +- .../libssl/man/SSL_CTX_set_tmp_rsa_callback.3 | 2 +- secure/lib/libssl/man/SSL_CTX_set_verify.3 | 2 +- .../lib/libssl/man/SSL_CTX_use_certificate.3 | 2 +- .../man/SSL_CTX_use_psk_identity_hint.3 | 2 +- secure/lib/libssl/man/SSL_SESSION_free.3 | 2 +- .../libssl/man/SSL_SESSION_get_ex_new_index.3 | 2 +- secure/lib/libssl/man/SSL_SESSION_get_time.3 | 2 +- secure/lib/libssl/man/SSL_accept.3 | 2 +- secure/lib/libssl/man/SSL_alert_type_string.3 | 7 +- secure/lib/libssl/man/SSL_clear.3 | 2 +- secure/lib/libssl/man/SSL_connect.3 | 2 +- secure/lib/libssl/man/SSL_do_handshake.3 | 2 +- secure/lib/libssl/man/SSL_free.3 | 2 +- secure/lib/libssl/man/SSL_get_SSL_CTX.3 | 2 +- secure/lib/libssl/man/SSL_get_ciphers.3 | 2 +- 
.../lib/libssl/man/SSL_get_client_CA_list.3 | 2 +- .../lib/libssl/man/SSL_get_current_cipher.3 | 2 +- .../lib/libssl/man/SSL_get_default_timeout.3 | 2 +- secure/lib/libssl/man/SSL_get_error.3 | 2 +- .../man/SSL_get_ex_data_X509_STORE_CTX_idx.3 | 2 +- secure/lib/libssl/man/SSL_get_ex_new_index.3 | 2 +- secure/lib/libssl/man/SSL_get_fd.3 | 2 +- .../lib/libssl/man/SSL_get_peer_cert_chain.3 | 2 +- .../lib/libssl/man/SSL_get_peer_certificate.3 | 2 +- secure/lib/libssl/man/SSL_get_psk_identity.3 | 2 +- secure/lib/libssl/man/SSL_get_rbio.3 | 2 +- secure/lib/libssl/man/SSL_get_session.3 | 2 +- secure/lib/libssl/man/SSL_get_verify_result.3 | 2 +- secure/lib/libssl/man/SSL_get_version.3 | 2 +- secure/lib/libssl/man/SSL_library_init.3 | 2 +- .../lib/libssl/man/SSL_load_client_CA_file.3 | 2 +- secure/lib/libssl/man/SSL_new.3 | 2 +- secure/lib/libssl/man/SSL_pending.3 | 2 +- secure/lib/libssl/man/SSL_read.3 | 2 +- secure/lib/libssl/man/SSL_rstate_string.3 | 2 +- secure/lib/libssl/man/SSL_session_reused.3 | 2 +- secure/lib/libssl/man/SSL_set_bio.3 | 2 +- secure/lib/libssl/man/SSL_set_connect_state.3 | 2 +- secure/lib/libssl/man/SSL_set_fd.3 | 2 +- secure/lib/libssl/man/SSL_set_session.3 | 2 +- secure/lib/libssl/man/SSL_set_shutdown.3 | 2 +- secure/lib/libssl/man/SSL_set_verify_result.3 | 2 +- secure/lib/libssl/man/SSL_shutdown.3 | 2 +- secure/lib/libssl/man/SSL_state_string.3 | 2 +- secure/lib/libssl/man/SSL_want.3 | 2 +- secure/lib/libssl/man/SSL_write.3 | 2 +- secure/lib/libssl/man/d2i_SSL_SESSION.3 | 2 +- secure/lib/libssl/man/ssl.3 | 2 +- secure/usr.bin/openssl/Makefile | 16 +- secure/usr.bin/openssl/man/CA.pl.1 | 2 +- secure/usr.bin/openssl/man/asn1parse.1 | 2 +- secure/usr.bin/openssl/man/ca.1 | 2 +- secure/usr.bin/openssl/man/ciphers.1 | 2 +- secure/usr.bin/openssl/man/cms.1 | 2 +- secure/usr.bin/openssl/man/config.5 | 2 +- secure/usr.bin/openssl/man/crl.1 | 2 +- secure/usr.bin/openssl/man/crl2pkcs7.1 | 2 +- secure/usr.bin/openssl/man/dgst.1 | 2 +- secure/usr.bin/openssl/man/dhparam.1 | 2 +- secure/usr.bin/openssl/man/dsa.1 | 2 +- secure/usr.bin/openssl/man/dsaparam.1 | 2 +- secure/usr.bin/openssl/man/ec.1 | 2 +- secure/usr.bin/openssl/man/ecparam.1 | 2 +- secure/usr.bin/openssl/man/enc.1 | 2 +- secure/usr.bin/openssl/man/errstr.1 | 2 +- secure/usr.bin/openssl/man/gendsa.1 | 2 +- secure/usr.bin/openssl/man/genpkey.1 | 8 +- secure/usr.bin/openssl/man/genrsa.1 | 2 +- secure/usr.bin/openssl/man/nseq.1 | 2 +- secure/usr.bin/openssl/man/ocsp.1 | 2 +- secure/usr.bin/openssl/man/openssl.1 | 19 +- secure/usr.bin/openssl/man/passwd.1 | 2 +- secure/usr.bin/openssl/man/pkcs12.1 | 2 +- secure/usr.bin/openssl/man/pkcs7.1 | 2 +- secure/usr.bin/openssl/man/pkcs8.1 | 2 +- secure/usr.bin/openssl/man/pkey.1 | 2 +- secure/usr.bin/openssl/man/pkeyparam.1 | 2 +- secure/usr.bin/openssl/man/pkeyutl.1 | 2 +- secure/usr.bin/openssl/man/rand.1 | 2 +- secure/usr.bin/openssl/man/req.1 | 2 +- secure/usr.bin/openssl/man/rsa.1 | 2 +- secure/usr.bin/openssl/man/rsautl.1 | 2 +- secure/usr.bin/openssl/man/s_client.1 | 2 +- secure/usr.bin/openssl/man/s_server.1 | 2 +- secure/usr.bin/openssl/man/s_time.1 | 2 +- secure/usr.bin/openssl/man/sess_id.1 | 2 +- secure/usr.bin/openssl/man/smime.1 | 2 +- secure/usr.bin/openssl/man/speed.1 | 2 +- secure/usr.bin/openssl/man/spkac.1 | 2 +- secure/usr.bin/openssl/man/ts.1 | 2 +- secure/usr.bin/openssl/man/tsget.1 | 2 +- secure/usr.bin/openssl/man/verify.1 | 2 +- secure/usr.bin/openssl/man/version.1 | 2 +- secure/usr.bin/openssl/man/x509.1 | 2 +- secure/usr.bin/openssl/man/x509v3_config.5 | 2 +- 
370 files changed, 25803 insertions(+), 5229 deletions(-) mode change 100755 => 100644 crypto/openssl/crypto/bn/asm/x86_64-mont5.pl create mode 100644 secure/lib/libcrypto/asm/aesni-sha1-x86_64.s create mode 100644 secure/lib/libcrypto/asm/aesni-x86.s create mode 100644 secure/lib/libcrypto/asm/aesni-x86_64.s create mode 100644 secure/lib/libcrypto/asm/bsaes-x86_64.s create mode 100644 secure/lib/libcrypto/asm/ghash-x86.s create mode 100644 secure/lib/libcrypto/asm/ghash-x86_64.s create mode 100644 secure/lib/libcrypto/asm/modexp512-x86_64.s create mode 100644 secure/lib/libcrypto/asm/rc4-md5-x86_64.s create mode 100644 secure/lib/libcrypto/asm/vpaes-x86.s create mode 100644 secure/lib/libcrypto/asm/vpaes-x86_64.s create mode 100644 secure/lib/libcrypto/asm/x86-gf2m.s create mode 100644 secure/lib/libcrypto/asm/x86_64-gf2m.s create mode 100644 secure/lib/libcrypto/asm/x86_64-mont5.s diff --git a/Makefile_upgrade.inc b/Makefile_upgrade.inc index 6668334a57..55ea141ac4 100644 --- a/Makefile_upgrade.inc +++ b/Makefile_upgrade.inc @@ -1984,6 +1984,9 @@ TO_REMOVE+=/usr/lib/profile/libtinfo.aa TO_REMOVE+=/usr/bin/kzip TO_REMOVE+=/usr/share/man/cat8/kzip.8.gz TO_REMOVE+=/usr/share/man/man8/kzip.8.gz +TO_REMOVE+=/usr/include/openssl/e_os.h +TO_REMOVE+=/usr/include/openssl/eng_int.h +TO_REMOVE+=/usr/include/openssl/ui_locl.h .if ${MACHINE_ARCH} == "x86_64" TO_REMOVE+=/usr/libdata/stallion/2681.sys diff --git a/crypto/openssl/crypto/bn/asm/x86_64-mont5.pl b/crypto/openssl/crypto/bn/asm/x86_64-mont5.pl old mode 100755 new mode 100644 diff --git a/crypto/openssl/ssl/srtp.h b/crypto/openssl/ssl/srtp.h index c0cf33ef28..ae364d0ff9 100644 --- a/crypto/openssl/ssl/srtp.h +++ b/crypto/openssl/ssl/srtp.h @@ -132,7 +132,6 @@ extern "C" { int SSL_CTX_set_tlsext_use_srtp(SSL_CTX *ctx, const char *profiles); int SSL_set_tlsext_use_srtp(SSL *ctx, const char *profiles); -SRTP_PROTECTION_PROFILE *SSL_get_selected_srtp_profile(SSL *s); STACK_OF(SRTP_PROTECTION_PROFILE) *SSL_get_srtp_profiles(SSL *ssl); SRTP_PROTECTION_PROFILE *SSL_get_selected_srtp_profile(SSL *s); diff --git a/secure/lib/libcrypto/Makefile b/secure/lib/libcrypto/Makefile index 0b6980f01e..bc8d47af4c 100644 --- a/secure/lib/libcrypto/Makefile +++ b/secure/lib/libcrypto/Makefile @@ -20,44 +20,63 @@ LDFLAGS+=-Wl,-Bsymbolic .include "Makefile.inc" # base sources -SRCS= cpt_err.c cryptlib.c cversion.c ebcdic.c ex_data.c \ - mem.c mem_dbg.c o_dir.c o_time.c uid.c +SRCS= cryptlib.c mem.c mem_dbg.c cversion.c ex_data.c cpt_err.c ebcdic.c \ + uid.c o_time.c o_dir.c o_fips.c o_init.c fips_ers.c .if ${MACHINE_ARCH} == "i386" SRCS+= x86cpuid.s .elif ${MACHINE_ARCH} == "x86_64" SRCS+= x86_64cpuid.s .endif -INCS= ../e_os.h ../e_os2.h crypto.h ebcdic.h opensslv.h ossl_typ.h symhacks.h +INCS= ../e_os2.h +INCS+= crypto.h opensslv.h opensslconf.h ebcdic.h symhacks.h ossl_typ.h +SRCS+= buildinf.h +INCSDIR= ${INCLUDEDIR}/openssl + +CLEANFILES+= buildinf.h opensslconf.h + +buildinf.h: + ( echo "#ifndef MK1MF_BUILD"; \ + echo " /* auto-generated by crypto/Makefile.ssl for crypto/cversion.c */"; \ + echo " #define CFLAGS \"$(CC)\""; \ + echo " #define PLATFORM \"`uname -s`-`uname -m`\""; \ + echo " #define DATE \"`LC_ALL=C date`\""; \ + echo "#endif" ) > ${.TARGET} + +opensslconf.h: opensslconf-${MACHINE_ARCH}.h +.if defined(WANT_IDEA) + sed '/^# define OPENSSL_NO_IDEA$$/d;/^# define NO_IDEA$$/d' ${.ALLSRC} > ${.TARGET} +.else + cp ${.ALLSRC} ${.TARGET} +.endif + # aes -SRCS+= aes_cfb.c aes_ctr.c aes_ecb.c aes_ige.c \ - aes_misc.c aes_ofb.c aes_wrap.c +SRCS+= aes_misc.c 
aes_ecb.c aes_cfb.c aes_ofb.c aes_ctr.c aes_ige.c aes_wrap.c .if ${MACHINE_ARCH} == "i386" -SRCS+= aes-586.s +SRCS+= aes-586.s vpaes-x86.s aesni-x86.s .elif ${MACHINE_ARCH} == "x86_64" -SRCS+= aes-x86_64.s +SRCS+= aes-x86_64.s vpaes-x86_64.s bsaes-x86_64.s aesni-x86_64.s aesni-sha1-x86_64.s .endif -INCS+= aes.h aes_locl.h +INCS+= aes.h # asn1 -SRCS+= a_bitstr.c a_bool.c a_bytes.c a_d2i_fp.c a_digest.c \ - a_dup.c a_enum.c a_gentm.c a_i2d_fp.c a_int.c \ - a_mbstr.c a_object.c a_octet.c a_print.c \ - a_set.c a_sign.c a_strex.c a_strnid.c a_time.c a_type.c \ - a_utctm.c a_utf8.c a_verify.c ameth_lib.c asn1_err.c asn1_lib.c \ - asn1_gen.c asn1_par.c asn_mime.c \ - asn_moid.c asn_pack.c bio_asn1.c bio_ndef.c d2i_pr.c d2i_pu.c \ - evp_asn1.c f_enum.c f_int.c f_string.c i2d_pr.c i2d_pu.c \ - n_pkey.c nsseq.c p5_pbe.c p5_pbev2.c p8_pkey.c t_bitst.c \ - t_crl.c t_pkey.c t_req.c t_spki.c t_x509.c t_x509a.c \ - tasn_dec.c tasn_enc.c tasn_fre.c tasn_new.c tasn_prn.c tasn_typ.c \ - tasn_utl.c x_algor.c x_attrib.c x_bignum.c x_crl.c \ - x_exten.c x_info.c x_long.c x_name.c x_nx509.c x_pkey.c x_pubkey.c \ - x_req.c x_sig.c x_spki.c x_val.c x_x509.c x_x509a.c +SRCS+= a_object.c a_bitstr.c a_utctm.c a_gentm.c a_time.c a_int.c a_octet.c \ + a_print.c a_type.c a_set.c a_dup.c a_d2i_fp.c a_i2d_fp.c \ + a_enum.c a_utf8.c a_sign.c a_digest.c a_verify.c a_mbstr.c a_strex.c \ + x_algor.c x_val.c x_pubkey.c x_sig.c x_req.c x_attrib.c x_bignum.c \ + x_long.c x_name.c x_x509.c x_x509a.c x_crl.c x_info.c x_spki.c nsseq.c \ + x_nx509.c d2i_pu.c d2i_pr.c i2d_pu.c i2d_pr.c \ + t_req.c t_x509.c t_x509a.c t_crl.c t_pkey.c t_spki.c t_bitst.c \ + tasn_new.c tasn_fre.c tasn_enc.c tasn_dec.c tasn_utl.c tasn_typ.c \ + tasn_prn.c ameth_lib.c \ + f_int.c f_string.c n_pkey.c \ + f_enum.c x_pkey.c a_bool.c x_exten.c bio_asn1.c bio_ndef.c asn_mime.c \ + asn1_gen.c asn1_par.c asn1_lib.c asn1_err.c a_bytes.c a_strnid.c \ + evp_asn1.c asn_pack.c p5_pbe.c p5_pbev2.c p8_pkey.c asn_moid.c INCS+= asn1.h asn1_mac.h asn1t.h # bf -SRCS+= bf_cfb64.c bf_ecb.c bf_ofb64.c bf_skey.c +SRCS+= bf_skey.c bf_ecb.c bf_cfb64.c bf_ofb64.c .if ${MACHINE_ARCH} == "i386" SRCS+= bf-586.s .elif ${MACHINE_ARCH} == "x86_64" @@ -66,33 +85,33 @@ SRCS+= bf_enc.c INCS+= blowfish.h # bio -SRCS+= b_dump.c b_print.c b_sock.c bf_buff.c bf_lbuf.c bf_nbio.c \ - bf_null.c bio_cb.c bio_err.c bio_lib.c bss_acpt.c bss_bio.c \ - bss_conn.c bss_dgram.c bss_fd.c bss_file.c bss_log.c bss_mem.c \ - bss_null.c bss_sock.c +SRCS+= bio_lib.c bio_cb.c bio_err.c \ + bss_mem.c bss_null.c bss_fd.c \ + bss_file.c bss_sock.c bss_conn.c \ + bf_null.c bf_buff.c b_print.c b_dump.c \ + b_sock.c bss_acpt.c bf_nbio.c bss_log.c bss_bio.c \ + bss_dgram.c INCS+= bio.h # bn -SRCS+= bn_add.c bn_blind.c bn_const.c bn_ctx.c bn_depr.c \ - bn_div.c bn_err.c bn_exp.c \ - bn_exp2.c bn_gcd.c bn_gf2m.c bn_kron.c bn_lib.c bn_mod.c bn_mont.c \ - bn_mpi.c bn_mul.c bn_nist.c \ - bn_prime.c bn_print.c bn_rand.c bn_recp.c \ - bn_shift.c bn_sqr.c bn_sqrt.c bn_word.c +SRCS+= bn_add.c bn_div.c bn_exp.c bn_lib.c bn_ctx.c bn_mul.c bn_mod.c \ + bn_print.c bn_rand.c bn_shift.c bn_word.c bn_blind.c \ + bn_kron.c bn_sqrt.c bn_gcd.c bn_prime.c bn_err.c bn_sqr.c \ + bn_recp.c bn_mont.c bn_mpi.c bn_exp2.c bn_gf2m.c bn_nist.c \ + bn_depr.c bn_const.c bn_x931p.c .if ${MACHINE_ARCH} == "i386" -SRCS+= bn-586.s co-586.s x86-mont.s +SRCS+= bn-586.s co-586.s x86-mont.s x86-gf2m.s .elif ${MACHINE_ARCH} == "x86_64" -SRCS+= x86_64-gcc.c x86_64-mont.s +SRCS+= x86_64-gcc.c x86_64-mont.s x86_64-mont5.s x86_64-gf2m.s modexp512-x86_64.s 
.endif INCS+= bn.h # buffer -SRCS+= buf_err.c buffer.c +SRCS+= buffer.c buf_str.c buf_err.c INCS+= buffer.h # camellia -SRCS+= cmll_cfb.c \ - cmll_ctr.c cmll_ecb.c cmll_ofb.c +SRCS+= cmll_ecb.c cmll_ofb.c cmll_cfb.c cmll_ctr.c cmll_utl.c .if ${MACHINE_ARCH} == "i386" SRCS+= cmll-x86.s .elif ${MACHINE_ARCH} == "x86_64" @@ -101,29 +120,36 @@ SRCS+= cmll-x86_64.s cmll_misc.c INCS+= camellia.h # cast -SRCS+= c_cfb64.c c_ecb.c c_enc.c c_ofb64.c c_skey.c +SRCS+= c_skey.c c_ecb.c c_enc.c c_cfb64.c c_ofb64.c INCS+= cast.h +# cmac +SRCS+= cmac.c cm_ameth.c cm_pmeth.c +INCS+= cmac.h + # cms -SRCS+= cms_asn1.c cms_att.c cms_cd.c cms_dd.c cms_enc.c cms_env.c cms_err.c \ - cms_ess.c cms_io.c cms_lib.c cms_sd.c cms_smime.c +SRCS+= cms_lib.c cms_asn1.c cms_att.c cms_io.c cms_smime.c cms_err.c \ + cms_sd.c cms_dd.c cms_cd.c cms_env.c cms_enc.c cms_ess.c \ + cms_pwri.c INCS+= cms.h # comp -SRCS+= c_rle.c c_zlib.c comp_err.c comp_lib.c +SRCS+= comp_lib.c comp_err.c \ + c_rle.c c_zlib.c INCS+= comp.h # conf -SRCS+= conf_api.c conf_def.c conf_err.c conf_lib.c conf_mall.c conf_mod.c conf_sap.c +SRCS+= conf_err.c conf_lib.c conf_api.c conf_def.c conf_mod.c \ + conf_mall.c conf_sap.c INCS+= conf.h conf_api.h # des -SRCS+= cbc3_enc.c cbc_cksm.c cbc_enc.c cfb64ede.c cfb64enc.c cfb_enc.c \ - des_old.c des_old2.c \ - ecb3_enc.c ecb_enc.c ede_cbcm_enc.c \ - enc_read.c enc_writ.c fcrypt.c ofb64ede.c ofb64enc.c \ - ofb_enc.c pcbc_enc.c qud_cksm.c rand_key.c read2pwd.c \ - rpc_enc.c set_key.c str2key.c xcbc_enc.c +SRCS+= set_key.c ecb_enc.c cbc_enc.c \ + ecb3_enc.c cfb64enc.c cfb64ede.c cfb_enc.c ofb64ede.c \ + enc_read.c enc_writ.c ofb64enc.c \ + ofb_enc.c str2key.c pcbc_enc.c qud_cksm.c rand_key.c \ + fcrypt.c xcbc_enc.c rpc_enc.c cbc_cksm.c \ + ede_cbcm_enc.c des_old.c des_old2.c read2pwd.c .if ${MACHINE_ARCH} == "i386" SRCS+= des-586.s crypt586.s .elif ${MACHINE_ARCH} == "x86_64" @@ -132,14 +158,13 @@ SRCS+= des_enc.c fcrypt_b.c INCS+= des.h des_old.h # dh -SRCS+= dh_ameth.c dh_asn1.c dh_check.c dh_depr.c \ - dh_err.c dh_gen.c dh_key.c dh_lib.c dh_pmeth.c dh_prn.c +SRCS+= dh_asn1.c dh_gen.c dh_key.c dh_lib.c dh_check.c dh_err.c dh_depr.c \ + dh_ameth.c dh_pmeth.c dh_prn.c INCS+= dh.h # dsa -SRCS+= dsa_ameth.c dsa_asn1.c dsa_depr.c dsa_err.c \ - dsa_gen.c dsa_key.c dsa_lib.c \ - dsa_ossl.c dsa_pmeth.c dsa_prn.c dsa_sign.c dsa_vrf.c +SRCS+= dsa_gen.c dsa_key.c dsa_lib.c dsa_asn1.c dsa_vrf.c dsa_sign.c \ + dsa_err.c dsa_ossl.c dsa_depr.c dsa_ameth.c dsa_pmeth.c dsa_prn.c INCS+= dsa.h # dso @@ -147,64 +172,63 @@ SRCS+= dso_dl.c dso_dlfcn.c dso_err.c dso_lib.c dso_null.c dso_openssl.c INCS+= dso.h # ec -SRCS+= ec_ameth.c ec_asn1.c ec_check.c ec_curve.c \ - ec_cvt.c ec_err.c ec_key.c ec_lib.c \ - ec_mult.c ec_pmeth.c ec_print.c ec2_smpl.c ec2_mult.c \ - eck_prn.c ecp_mont.c ecp_nist.c \ - ecp_smpl.c +SRCS+= ec_lib.c ecp_smpl.c ecp_mont.c ecp_nist.c ec_cvt.c ec_mult.c \ + ec_err.c ec_curve.c ec_check.c ec_print.c ec_asn1.c ec_key.c \ + ec2_smpl.c ec2_mult.c ec_ameth.c ec_pmeth.c eck_prn.c \ + ecp_nistp224.c ecp_nistp256.c ecp_nistp521.c ecp_nistputil.c \ + ecp_oct.c ec2_oct.c ec_oct.c INCS+= ec.h # ecdh -SRCS+= ech_err.c ech_key.c ech_lib.c ech_ossl.c +SRCS+= ech_lib.c ech_ossl.c ech_key.c ech_err.c INCS+= ecdh.h # ecdsa -SRCS+= ecs_asn1.c ecs_err.c ecs_lib.c ecs_ossl.c ecs_sign.c ecs_vrf.c +SRCS+= ecs_lib.c ecs_asn1.c ecs_ossl.c ecs_sign.c ecs_vrf.c ecs_err.c INCS+= ecdsa.h # engine CFLAGS+=-DHAVE_CRYPTODEV -SRCS+= eng_all.c eng_cnf.c eng_cryptodev.c eng_ctrl.c eng_dyn.c eng_err.c \ - eng_fat.c eng_init.c eng_lib.c 
eng_list.c eng_openssl.c \ - eng_pkey.c eng_table.c tb_asnmth.c tb_cipher.c tb_dh.c \ - tb_digest.c tb_dsa.c tb_ecdh.c tb_ecdsa.c \ - tb_pkmeth.c tb_rand.c tb_rsa.c tb_store.c -INCS+= eng_int.h engine.h +SRCS+= eng_err.c eng_lib.c eng_list.c eng_init.c eng_ctrl.c \ + eng_table.c eng_pkey.c eng_fat.c eng_all.c \ + tb_rsa.c tb_dsa.c tb_ecdsa.c tb_dh.c tb_ecdh.c tb_rand.c tb_store.c \ + tb_cipher.c tb_digest.c tb_pkmeth.c tb_asnmth.c \ + eng_openssl.c eng_cnf.c eng_dyn.c eng_cryptodev.c \ + eng_rsax.c eng_rdrand.c +INCS+= engine.h # err SRCS+= err.c err_all.c err_prn.c INCS+= err.h # evp -SRCS+= bio_b64.c bio_enc.c bio_md.c bio_ok.c c_all.c c_allc.c c_alld.c \ - digest.c e_aes.c e_bf.c e_camellia.c e_cast.c e_des.c e_des3.c \ - e_idea.c e_null.c e_rc2.c e_rc4.c e_rc5.c e_seed.c e_xcbc_d.c \ - encode.c \ - evp_acnf.c evp_enc.c evp_err.c evp_key.c evp_lib.c evp_pbe.c \ - evp_pkey.c m_dss.c m_dss1.c m_ecdsa.c m_md2.c m_md4.c m_md5.c \ - m_mdc2.c m_null.c m_ripemd.c m_sha.c m_sha1.c \ - m_sigver.c m_wp.c names.c openbsd_hw.c \ - p5_crpt.c p5_crpt2.c p_dec.c p_enc.c p_lib.c p_open.c p_seal.c \ - p_sign.c p_verify.c pmeth_fn.c pmeth_gn.c pmeth_lib.c +SRCS+= encode.c digest.c evp_enc.c evp_key.c evp_acnf.c \ + e_des.c e_bf.c e_idea.c e_des3.c e_camellia.c\ + e_rc4.c e_aes.c names.c e_seed.c \ + e_xcbc_d.c e_rc2.c e_cast.c e_rc5.c \ + m_null.c m_md2.c m_md4.c m_md5.c m_sha.c m_sha1.c m_wp.c \ + m_dss.c m_dss1.c m_mdc2.c m_ripemd.c m_ecdsa.c\ + p_open.c p_seal.c p_sign.c p_verify.c p_lib.c p_enc.c p_dec.c \ + bio_md.c bio_b64.c bio_enc.c evp_err.c e_null.c \ + c_all.c c_allc.c c_alld.c evp_lib.c bio_ok.c \ + evp_pkey.c evp_pbe.c p5_crpt.c p5_crpt2.c \ + e_old.c pmeth_lib.c pmeth_fn.c pmeth_gn.c m_sigver.c evp_fips.c \ + e_aes_cbc_hmac_sha1.c e_rc4_hmac_md5.c INCS+= evp.h # hmac -SRCS+= hm_ameth.c hm_pmeth.c hmac.c +SRCS+= hmac.c hm_ameth.c hm_pmeth.c INCS+= hmac.h # idea .if defined(WANT_IDEA) -SRCS+= i_cbc.c i_cfb64.c i_ecb.c i_ofb64.c i_skey.c +SRCS+= i_cbc.c i_cfb64.c i_ofb64.c i_ecb.c i_skey.c INCS+= idea.h _ideapath= ${LCRYPTO_SRC}/crypto/idea .endif -# krb5 -#SRCS+= krb5_asn.c -#INCS+= krb5_asn.h - # lhash -SRCS+= lh_stats.c lhash.c +SRCS+= lhash.c lh_stats.c INCS+= lhash.h # md2 @@ -225,36 +249,42 @@ SRCS+= md5-x86_64.s INCS+= md5.h # mdc2 -SRCS+= mdc2_one.c mdc2dgst.c +SRCS+= mdc2dgst.c mdc2_one.c INCS+= mdc2.h # modes -SRCS+= cbc128.c cfb128.c ctr128.c cts128.c ofb128.c +SRCS+= cbc128.c ctr128.c cts128.c cfb128.c ofb128.c gcm128.c \ + ccm128.c xts128.c +.if ${MACHINE_ARCH} == "i386" +SRCS+= ghash-x86.s +.elif ${MACHINE_ARCH} == "x86_64" +SRCS+= ghash-x86_64.s +.endif INCS+= modes.h # objects -SRCS+= o_names.c obj_dat.c obj_err.c obj_lib.c obj_xref.c +SRCS+= o_names.c obj_dat.c obj_lib.c obj_err.c obj_xref.c INCS+= objects.h obj_mac.h # ocsp -SRCS+= ocsp_asn.c ocsp_cl.c ocsp_err.c ocsp_ext.c ocsp_ht.c \ - ocsp_lib.c ocsp_prn.c ocsp_srv.c ocsp_vfy.c +SRCS+= ocsp_asn.c ocsp_ext.c ocsp_ht.c ocsp_lib.c ocsp_cl.c \ + ocsp_srv.c ocsp_prn.c ocsp_vfy.c ocsp_err.c INCS+= ocsp.h # pem -SRCS+= pem_all.c pem_err.c pem_info.c pem_lib.c pem_oth.c pem_pk8.c \ - pem_pkey.c pem_seal.c pem_sign.c pem_x509.c pem_xaux.c pvkfmt.c +SRCS+= pem_sign.c pem_seal.c pem_info.c pem_lib.c pem_all.c pem_err.c \ + pem_x509.c pem_xaux.c pem_oth.c pem_pk8.c pem_pkey.c pvkfmt.c INCS+= pem.h pem2.h # pkcs12 -SRCS+= p12_add.c p12_asn.c p12_attr.c p12_crpt.c p12_crt.c \ - p12_decr.c p12_init.c p12_key.c p12_kiss.c p12_mutl.c \ - p12_npas.c p12_p8d.c p12_p8e.c p12_utl.c pk12err.c +SRCS+= p12_add.c p12_asn.c p12_attr.c p12_crpt.c 
p12_crt.c p12_decr.c \ + p12_init.c p12_key.c p12_kiss.c p12_mutl.c \ + p12_utl.c p12_npas.c pk12err.c p12_p8d.c p12_p8e.c INCS+= pkcs12.h # pkcs7 -SRCS+= bio_pk7.c example.c pk7_asn1.c pk7_attr.c pk7_dgst.c pk7_doit.c \ - pk7_lib.c pk7_mime.c pk7_smime.c pkcs7err.c +SRCS+= pk7_asn1.c pk7_lib.c pkcs7err.c pk7_doit.c pk7_smime.c pk7_attr.c \ + pk7_mime.c bio_pk7.c INCS+= pkcs7.h # pqueue @@ -262,26 +292,26 @@ SRCS+= pqueue.c INCS+= pqueue.h # rand -SRCS+= md_rand.c rand_egd.c \ - rand_err.c rand_lib.c rand_nw.c rand_unix.c \ - randfile.c +SRCS+= md_rand.c randfile.c rand_lib.c rand_err.c rand_egd.c \ + rand_unix.c INCS+= rand.h # rc2 -SRCS+= rc2_cbc.c rc2_ecb.c rc2_skey.c rc2cfb64.c rc2ofb64.c +SRCS+= rc2_ecb.c rc2_skey.c rc2_cbc.c rc2cfb64.c rc2ofb64.c INCS+= rc2.h # rc4 +SRCS+= rc4_utl.c .if ${MACHINE_ARCH} == "i386" SRCS+= rc4-586.s .elif ${MACHINE_ARCH} == "x86_64" -SRCS+= rc4-x86_64.s +SRCS+= rc4-x86_64.s rc4-md5-x86_64.s .endif INCS+= rc4.h # rc5 # .. is patented, so don't compile by default -#SRCS+= rc5_ecb.c rc5_enc.c rc5_skey.c rc5cfb64.c rc5ofb64.c +#SRCS+= rc5_skey.c rc5_ecb.c rc5_enc.c rc5cfb64.c rc5ofb64.c #INCS+= rc5.h # ripemd @@ -292,19 +322,18 @@ SRCS+= rmd-586.s INCS+= ripemd.h # rsa -SRCS+= rsa_ameth.c rsa_asn1.c rsa_chk.c rsa_depr.c rsa_eay.c \ - rsa_err.c rsa_gen.c \ - rsa_lib.c rsa_none.c rsa_null.c rsa_oaep.c \ - rsa_pk1.c rsa_pmeth.c rsa_prn.c rsa_pss.c \ - rsa_saos.c rsa_sign.c rsa_ssl.c rsa_x931.c +SRCS+= rsa_eay.c rsa_gen.c rsa_lib.c rsa_sign.c rsa_saos.c rsa_err.c \ + rsa_pk1.c rsa_ssl.c rsa_none.c rsa_oaep.c rsa_chk.c rsa_null.c \ + rsa_pss.c rsa_x931.c rsa_asn1.c rsa_depr.c rsa_ameth.c rsa_prn.c \ + rsa_pmeth.c rsa_crpt.c INCS+= rsa.h # seed -SRCS+= seed.c seed_cbc.c seed_cfb.c seed_ecb.c seed_ofb.c +SRCS+= seed.c seed_ecb.c seed_cbc.c seed_cfb.c seed_ofb.c INCS+= seed.h # sha -SRCS+= sha1_one.c sha1dgst.c sha256.c sha512.c sha_dgst.c sha_one.c +SRCS+= sha_dgst.c sha1dgst.c sha_one.c sha1_one.c sha256.c sha512.c .if ${MACHINE_ARCH} == "i386" SRCS+= sha1-586.s sha256-586.s sha512-586.s .elif ${MACHINE_ARCH} == "x86_64" @@ -312,6 +341,10 @@ SRCS+= sha1-x86_64.s sha256-x86_64.s sha512-x86_64.s .endif INCS+= sha.h +# srp +SRCS+= srp_lib.c srp_vfy.c +INCS+= srp.h + # stack SRCS+= stack.c INCS+= stack.h safestack.h @@ -321,9 +354,9 @@ INCS+= stack.h safestack.h #INCS+= store.h # ts -SRCS+= ts_asn1.c ts_conf.c ts_err.c ts_lib.c \ - ts_req_print.c ts_req_utils.c ts_rsp_print.c ts_rsp_sign.c \ - ts_rsp_utils.c ts_rsp_verify.c ts_verify_ctx.c +SRCS+= ts_err.c ts_req_utils.c ts_req_print.c ts_rsp_utils.c ts_rsp_print.c \ + ts_rsp_sign.c ts_rsp_verify.c ts_verify_ctx.c ts_lib.c ts_conf.c \ + ts_asn1.c INCS+= ts.h # txt_db @@ -331,8 +364,8 @@ SRCS+= txt_db.c INCS+= txt_db.h # ui -SRCS+= ui_compat.c ui_err.c ui_lib.c ui_openssl.c ui_util.c -INCS+= ui.h ui_compat.h ui_locl.h +SRCS+= ui_err.c ui_lib.c ui_openssl.c ui_util.c ui_compat.c +INCS+= ui.h ui_compat.h # whrlpool SRCS+= wp_dgst.c @@ -344,97 +377,80 @@ SRCS+= wp-x86_64.s INCS+= whrlpool.h # x509 -SRCS+= by_dir.c by_file.c x509_att.c x509_cmp.c x509_d2.c \ - x509_def.c x509_err.c x509_ext.c x509_lu.c x509_obj.c \ - x509_r2x.c x509_req.c x509_set.c x509_trs.c x509_txt.c \ - x509_v3.c x509_vfy.c x509_vpm.c x509cset.c x509name.c x509rset.c \ - x509spki.c x509type.c x_all.c +SRCS+= x509_def.c x509_d2.c x509_r2x.c x509_cmp.c \ + x509_obj.c x509_req.c x509spki.c x509_vfy.c \ + x509_set.c x509cset.c x509rset.c x509_err.c \ + x509name.c x509_v3.c x509_ext.c x509_att.c \ + x509type.c x509_lu.c x_all.c x509_txt.c \ + x509_trs.c 
by_file.c by_dir.c x509_vpm.c INCS+= x509.h x509_vfy.h # x509v3 -SRCS+= pcy_cache.c pcy_data.c pcy_lib.c pcy_map.c pcy_node.c pcy_tree.c \ - v3_addr.c v3_akey.c v3_akeya.c v3_asid.c v3_alt.c v3_bcons.c v3_bitst.c \ - v3_conf.c v3_cpols.c v3_crld.c v3_enum.c v3_extku.c \ - v3_genn.c v3_ia5.c v3_info.c v3_int.c v3_lib.c v3_ncons.c v3_ocsp.c \ - v3_pci.c v3_pcia.c v3_pcons.c v3_pmaps.c v3_pku.c v3_prn.c v3_purp.c \ - v3_skey.c v3_sxnet.c v3_utl.c v3err.c +SRCS+= v3_bcons.c v3_bitst.c v3_conf.c v3_extku.c v3_ia5.c v3_lib.c \ + v3_prn.c v3_utl.c v3err.c v3_genn.c v3_alt.c v3_skey.c v3_akey.c v3_pku.c \ + v3_int.c v3_enum.c v3_sxnet.c v3_cpols.c v3_crld.c v3_purp.c v3_info.c \ + v3_ocsp.c v3_akeya.c v3_pmaps.c v3_pcons.c v3_ncons.c v3_pcia.c v3_pci.c \ + pcy_cache.c pcy_node.c pcy_data.c pcy_map.c pcy_tree.c pcy_lib.c \ + v3_asid.c v3_addr.c INCS+= x509v3.h -SRCS+= buildinf.h -INCS+= opensslconf.h -INCSDIR= ${INCLUDEDIR}/openssl - -CLEANFILES+= buildinf.h opensslconf.h - -buildinf.h: - ( echo "#ifndef MK1MF_BUILD"; \ - echo " /* auto-generated by crypto/Makefile.ssl for crypto/cversion.c */"; \ - echo " #define CFLAGS \"$(CC)\""; \ - echo " #define PLATFORM \"`uname -s`-`uname -m`\""; \ - echo " #define DATE \"`LC_ALL=C date`\""; \ - echo "#endif" ) > ${.TARGET} - -opensslconf.h: opensslconf-${MACHINE_ARCH}.h -.if defined(WANT_IDEA) - sed '/^# define OPENSSL_NO_IDEA$$/d;/^# define NO_IDEA$$/d' ${.ALLSRC} > ${.TARGET} -.else - cp ${.ALLSRC} ${.TARGET} -.endif - .include +# The crypto subdirs are listed in the order of the vendor's Makefile +# to aid future imports. .PATH: \ ${.CURDIR}/asm \ ${LCRYPTO_SRC}/crypto \ + ${LCRYPTO_SRC}/crypto/objects \ + ${LCRYPTO_SRC}/crypto/md4 \ + ${LCRYPTO_SRC}/crypto/md5 \ + ${LCRYPTO_SRC}/crypto/sha \ + ${LCRYPTO_SRC}/crypto/mdc2 \ + ${LCRYPTO_SRC}/crypto/hmac \ + ${LCRYPTO_SRC}/crypto/ripemd \ + ${LCRYPTO_SRC}/crypto/whrlpool \ + ${LCRYPTO_SRC}/crypto/des \ ${LCRYPTO_SRC}/crypto/aes \ - ${LCRYPTO_SRC}/crypto/asn1 \ + ${LCRYPTO_SRC}/crypto/rc2 \ + ${LCRYPTO_SRC}/crypto/rc4 \ ${LCRYPTO_SRC}/crypto/bf \ - ${LCRYPTO_SRC}/crypto/bio \ + ${LCRYPTO_SRC}/crypto/cast \ + ${LCRYPTO_SRC}/crypto/camellia \ + ${LCRYPTO_SRC}/crypto/seed \ + ${LCRYPTO_SRC}/crypto/modes \ ${LCRYPTO_SRC}/crypto/bn \ ${LCRYPTO_SRC}/crypto/bn/asm \ - ${LCRYPTO_SRC}/crypto/buffer \ - ${LCRYPTO_SRC}/crypto/camellia \ - ${LCRYPTO_SRC}/crypto/cast \ - ${LCRYPTO_SRC}/crypto/cms \ - ${LCRYPTO_SRC}/crypto/comp \ - ${LCRYPTO_SRC}/crypto/conf \ - ${LCRYPTO_SRC}/crypto/des \ - ${LCRYPTO_SRC}/crypto/dh \ - ${LCRYPTO_SRC}/crypto/dsa \ - ${LCRYPTO_SRC}/crypto/dso \ ${LCRYPTO_SRC}/crypto/ec \ - ${LCRYPTO_SRC}/crypto/ecdh \ + ${LCRYPTO_SRC}/crypto/rsa \ + ${LCRYPTO_SRC}/crypto/dsa \ ${LCRYPTO_SRC}/crypto/ecdsa \ + ${LCRYPTO_SRC}/crypto/dh \ + ${LCRYPTO_SRC}/crypto/ecdh \ + ${LCRYPTO_SRC}/crypto/dso \ ${LCRYPTO_SRC}/crypto/engine \ + ${LCRYPTO_SRC}/crypto/buffer \ + ${LCRYPTO_SRC}/crypto/bio \ + ${LCRYPTO_SRC}/crypto/stack \ + ${LCRYPTO_SRC}/crypto/lhash \ + ${LCRYPTO_SRC}/crypto/rand \ ${LCRYPTO_SRC}/crypto/err \ ${LCRYPTO_SRC}/crypto/evp \ - ${LCRYPTO_SRC}/crypto/hmac \ - ${_ideapath} \ - ${LCRYPTO_SRC}/crypto/lhash \ - ${LCRYPTO_SRC}/crypto/md4 \ - ${LCRYPTO_SRC}/crypto/md5 \ - ${LCRYPTO_SRC}/crypto/mdc2 \ - ${LCRYPTO_SRC}/crypto/modes \ - ${LCRYPTO_SRC}/crypto/objects \ - ${LCRYPTO_SRC}/crypto/ocsp \ + ${LCRYPTO_SRC}/crypto/asn1 \ ${LCRYPTO_SRC}/crypto/pem \ - ${LCRYPTO_SRC}/crypto/pkcs12 \ + ${LCRYPTO_SRC}/crypto/x509 \ + ${LCRYPTO_SRC}/crypto/x509v3 \ + ${LCRYPTO_SRC}/crypto/conf \ + 
${LCRYPTO_SRC}/crypto/txt_db \ ${LCRYPTO_SRC}/crypto/pkcs7 \ + ${LCRYPTO_SRC}/crypto/pkcs12 \ + ${LCRYPTO_SRC}/crypto/comp \ + ${LCRYPTO_SRC}/crypto/ocsp \ + ${LCRYPTO_SRC}/crypto/ui \ + ${LCRYPTO_SRC}/crypto/cms \ ${LCRYPTO_SRC}/crypto/pqueue \ - ${LCRYPTO_SRC}/crypto/rand \ - ${LCRYPTO_SRC}/crypto/rc2 \ - ${LCRYPTO_SRC}/crypto/rc4 \ - ${LCRYPTO_SRC}/crypto/ripemd \ - ${LCRYPTO_SRC}/crypto/rsa \ - ${LCRYPTO_SRC}/crypto/seed \ - ${LCRYPTO_SRC}/crypto/sha \ - ${LCRYPTO_SRC}/crypto/stack \ - ${LCRYPTO_SRC}/crypto/threads \ ${LCRYPTO_SRC}/crypto/ts \ - ${LCRYPTO_SRC}/crypto/txt_db \ - ${LCRYPTO_SRC}/crypto/ui \ - ${LCRYPTO_SRC}/crypto/whrlpool \ - ${LCRYPTO_SRC}/crypto/x509 \ - ${LCRYPTO_SRC}/crypto/x509v3 \ + ${LCRYPTO_SRC}/crypto/srp \ + ${LCRYPTO_SRC}/crypto/cmac \ + ${_ideapath} \ ${LCRYPTO_SRC} \ ${.CURDIR}/man diff --git a/secure/lib/libcrypto/Makefile.inc b/secure/lib/libcrypto/Makefile.inc index 81679b2738..c6fdf6b07d 100644 --- a/secure/lib/libcrypto/Makefile.inc +++ b/secure/lib/libcrypto/Makefile.inc @@ -1,20 +1,25 @@ # $FreeBSD: src/secure/lib/libcrypto/Makefile.inc,v 1.7.2.11 2003/02/20 15:07:32 nectar Exp $ # $DragonFly: src/secure/lib/libcrypto/Makefile.inc,v 1.18 2008/09/27 21:04:45 pavalos Exp $ -OSSLVERSION= 1.0.0g -OSSLDATE= 2012-01-18 +OSSLVERSION= 1.0.1 +OSSLDATE= 2012-03-14 LCRYPTO_SRC= ${.CURDIR}/../../../crypto/openssl LCRYPTO_DOC= ${LCRYPTO_SRC}/doc CFLAGS+= -DDSO_DLFCN -DHAVE_DLFCN_H -DL_ENDIAN -DTERMIOS CFLAGS+= -DOPENSSL_THREADS -CFLAGS+= -DOPENSSL_IA32_SSE2 -DOPENSSL_BN_ASM_MONT -DSHA1_ASM \ - -DSHA256_ASM -DSHA512_ASM -DMD5_ASM -DAES_ASM -DWHIRLPOOL_ASM +CFLAGS+= -DOPENSSL_IA32_SSE2 -DOPENSSL_BN_ASM_MONT \ + -DOPENSSL_BN_ASM_GF2m -DSHA1_ASM \ + -DSHA256_ASM -DSHA512_ASM -DMD5_ASM -DAES_ASM -DVPAES_ASM \ + -DWHIRLPOOL_ASM -DGHASH_ASM .if ${MACHINE_ARCH} == "i386" -CFLAGS+= -DOPENSSL_BN_ASM_PART_WORDS -DRMD160_ASM +CFLAGS+= -DOPENSSL_BN_ASM_PART_WORDS -DRMD160_ASM +.elif ${MACHINE_ARCH} == "x86_64" +CFLAGS+= -DOPENSSL_BN_ASM_MONT5 -DBSAES_ASM .endif CFLAGS+= -I${LCRYPTO_SRC} -I${LCRYPTO_SRC}/crypto \ -I${LCRYPTO_SRC}/crypto/asn1 -I${LCRYPTO_SRC}/crypto/evp \ + -I${LCRYPTO_SRC}/crypto/modes \ -I${LCRYPTO_SRC}/crypto/engine -I${.OBJDIR} MANDIR= ${SHAREDIR}/openssl/man/man diff --git a/secure/lib/libcrypto/asm/Makefile b/secure/lib/libcrypto/asm/Makefile index 9bba3dd5d1..0838b56fba 100644 --- a/secure/lib/libcrypto/asm/Makefile +++ b/secure/lib/libcrypto/asm/Makefile @@ -11,6 +11,7 @@ OPENSSL_SRC= ../../../../crypto/openssl ${OPENSSL_SRC}/crypto/camellia/asm \ ${OPENSSL_SRC}/crypto/des/asm \ ${OPENSSL_SRC}/crypto/md5/asm \ + ${OPENSSL_SRC}/crypto/modes/asm \ ${OPENSSL_SRC}/crypto/perlasm \ ${OPENSSL_SRC}/crypto/rc4/asm \ ${OPENSSL_SRC}/crypto/ripemd/asm \ @@ -21,63 +22,70 @@ OPENSSL_SRC= ../../../../crypto/openssl # cpuid SRCS= x86cpuid.pl -# aes -SRCS+= aes-586.pl - -# bf -SRCS+= bf-586.pl - # bn -SRCS+= bn-586.pl co-586.pl x86-mont.pl - -# camellia -SRCS+= cmll-x86.pl +SRCS+= bn-586.pl co-586.pl x86-mont.pl x86-gf2m.pl # des SRCS+= des-586.pl crypt586.pl -# md5 -SRCS+= md5-586.pl +# aes +SRCS+= aes-586.pl vpaes-x86.pl aesni-x86.pl + +# bf +SRCS+= bf-586.pl # rc4 SRCS+= rc4-586.pl -# ripemd -SRCS+= rmd-586.pl +# md5 +SRCS+= md5-586.pl # sha SRCS+= sha1-586.pl sha256-586.pl sha512-586.pl +# ripemd +SRCS+= rmd-586.pl + # whrlpool SRCS+= wp-mmx.pl +# camellia +SRCS+= cmll-x86.pl + +# modes +SRCS+= ghash-x86.pl + PERLFLAGS= ${CFLAGS} .elif ${MACHINE_ARCH} == "x86_64" # cpuid SRCS= x86_64cpuid.pl -# aes -SRCS+= aes-x86_64.pl - # bn -SRCS+= x86_64-mont.pl +SRCS+= 
x86_64-mont.pl x86_64-mont5.pl x86_64-gf2m.pl modexp512-x86_64.pl -# camellia -SRCS+= cmll-x86_64.pl +# aes +SRCS+= aes-x86_64.pl vpaes-x86_64.pl bsaes-x86_64.pl \ + aesni-x86_64.pl aesni-sha1-x86_64.pl + +# rc4 +SRCS+= rc4-x86_64.pl rc4-md5-x86_64.pl # md5 SRCS+= md5-x86_64.pl -# rc4 -SRCS+= rc4-x86_64.pl - # sha SRCS+= sha1-x86_64.pl sha256-x86_64.s sha512-x86_64.pl # whrlpool SRCS+= wp-x86_64.pl +# camellia +SRCS+= cmll-x86_64.pl + +# modes +SRCS+= ghash-x86_64.pl + PERLFLAGS= .endif diff --git a/secure/lib/libcrypto/asm/aes-586.s b/secure/lib/libcrypto/asm/aes-586.s index dca6d0d203..f69b7d543d 100644 --- a/secure/lib/libcrypto/asm/aes-586.s +++ b/secure/lib/libcrypto/asm/aes-586.s @@ -2988,19 +2988,19 @@ _x86_AES_set_encrypt_key: popl %ebp ret .size _x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key -.globl AES_set_encrypt_key -.type AES_set_encrypt_key,@function +.globl private_AES_set_encrypt_key +.type private_AES_set_encrypt_key,@function .align 16 -AES_set_encrypt_key: -.L_AES_set_encrypt_key_begin: +private_AES_set_encrypt_key: +.L_private_AES_set_encrypt_key_begin: call _x86_AES_set_encrypt_key ret -.size AES_set_encrypt_key,.-.L_AES_set_encrypt_key_begin -.globl AES_set_decrypt_key -.type AES_set_decrypt_key,@function +.size private_AES_set_encrypt_key,.-.L_private_AES_set_encrypt_key_begin +.globl private_AES_set_decrypt_key +.type private_AES_set_decrypt_key,@function .align 16 -AES_set_decrypt_key: -.L_AES_set_decrypt_key_begin: +private_AES_set_decrypt_key: +.L_private_AES_set_decrypt_key_begin: call _x86_AES_set_encrypt_key cmpl $0,%eax je .L054proceed @@ -3229,8 +3229,8 @@ AES_set_decrypt_key: popl %ebx popl %ebp ret -.size AES_set_decrypt_key,.-.L_AES_set_decrypt_key_begin +.size private_AES_set_decrypt_key,.-.L_private_AES_set_decrypt_key_begin .byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89 .byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114 .byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 -.comm OPENSSL_ia32cap_P,4,4 +.comm OPENSSL_ia32cap_P,8,4 diff --git a/secure/lib/libcrypto/asm/aes-x86_64.s b/secure/lib/libcrypto/asm/aes-x86_64.s index d000a57213..e385566f08 100644 --- a/secure/lib/libcrypto/asm/aes-x86_64.s +++ b/secure/lib/libcrypto/asm/aes-x86_64.s @@ -330,6 +330,9 @@ _x86_64_AES_encrypt_compact: .globl AES_encrypt .type AES_encrypt,@function .align 16 +.globl asm_AES_encrypt +.hidden asm_AES_encrypt +asm_AES_encrypt: AES_encrypt: pushq %rbx pushq %rbp @@ -775,6 +778,9 @@ _x86_64_AES_decrypt_compact: .globl AES_decrypt .type AES_decrypt,@function .align 16 +.globl asm_AES_decrypt +.hidden asm_AES_decrypt +asm_AES_decrypt: AES_decrypt: pushq %rbx pushq %rbp @@ -838,10 +844,10 @@ AES_decrypt: .Ldec_epilogue: .byte 0xf3,0xc3 .size AES_decrypt,.-AES_decrypt -.globl AES_set_encrypt_key -.type AES_set_encrypt_key,@function +.globl private_AES_set_encrypt_key +.type private_AES_set_encrypt_key,@function .align 16 -AES_set_encrypt_key: +private_AES_set_encrypt_key: pushq %rbx pushq %rbp pushq %r12 @@ -862,7 +868,7 @@ AES_set_encrypt_key: addq $56,%rsp .Lenc_key_epilogue: .byte 0xf3,0xc3 -.size AES_set_encrypt_key,.-AES_set_encrypt_key +.size private_AES_set_encrypt_key,.-private_AES_set_encrypt_key .type _x86_64_AES_set_encrypt_key,@function .align 16 @@ -1103,10 +1109,10 @@ _x86_64_AES_set_encrypt_key: .Lexit: .byte 0xf3,0xc3 .size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key -.globl AES_set_decrypt_key -.type AES_set_decrypt_key,@function +.globl private_AES_set_decrypt_key +.type private_AES_set_decrypt_key,@function 
.align 16 -AES_set_decrypt_key: +private_AES_set_decrypt_key: pushq %rbx pushq %rbp pushq %r12 @@ -1289,11 +1295,14 @@ AES_set_decrypt_key: addq $56,%rsp .Ldec_key_epilogue: .byte 0xf3,0xc3 -.size AES_set_decrypt_key,.-AES_set_decrypt_key +.size private_AES_set_decrypt_key,.-private_AES_set_decrypt_key .globl AES_cbc_encrypt .type AES_cbc_encrypt,@function .align 16 +.globl asm_AES_cbc_encrypt +.hidden asm_AES_cbc_encrypt +asm_AES_cbc_encrypt: AES_cbc_encrypt: cmpq $0,%rdx je .Lcbc_epilogue diff --git a/secure/lib/libcrypto/asm/aesni-sha1-x86_64.s b/secure/lib/libcrypto/asm/aesni-sha1-x86_64.s new file mode 100644 index 0000000000..32fd600b92 --- /dev/null +++ b/secure/lib/libcrypto/asm/aesni-sha1-x86_64.s @@ -0,0 +1,1396 @@ +.text + + +.globl aesni_cbc_sha1_enc +.type aesni_cbc_sha1_enc,@function +.align 16 +aesni_cbc_sha1_enc: + + movl OPENSSL_ia32cap_P+0(%rip),%r10d + movl OPENSSL_ia32cap_P+4(%rip),%r11d + jmp aesni_cbc_sha1_enc_ssse3 + .byte 0xf3,0xc3 +.size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc +.type aesni_cbc_sha1_enc_ssse3,@function +.align 16 +aesni_cbc_sha1_enc_ssse3: + movq 8(%rsp),%r10 + + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + leaq -104(%rsp),%rsp + + + movq %rdi,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %rcx,%r15 + movdqu (%r8),%xmm11 + movq %r8,88(%rsp) + shlq $6,%r14 + subq %r12,%r13 + movl 240(%r15),%r8d + addq %r10,%r14 + + leaq K_XX_XX(%rip),%r11 + movl 0(%r9),%eax + movl 4(%r9),%ebx + movl 8(%r9),%ecx + movl 12(%r9),%edx + movl %ebx,%esi + movl 16(%r9),%ebp + + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r10),%xmm0 + movdqu 16(%r10),%xmm1 + movdqu 32(%r10),%xmm2 + movdqu 48(%r10),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r10 +.byte 102,15,56,0,206 +.byte 102,15,56,0,214 +.byte 102,15,56,0,222 + paddd %xmm9,%xmm0 + paddd %xmm9,%xmm1 + paddd %xmm9,%xmm2 + movdqa %xmm0,0(%rsp) + psubd %xmm9,%xmm0 + movdqa %xmm1,16(%rsp) + psubd %xmm9,%xmm1 + movdqa %xmm2,32(%rsp) + psubd %xmm9,%xmm2 + movups (%r15),%xmm13 + movups 16(%r15),%xmm14 + jmp .Loop_ssse3 +.align 16 +.Loop_ssse3: + movdqa %xmm1,%xmm4 + addl 0(%rsp),%ebp + movups 0(%r12),%xmm12 + xorps %xmm13,%xmm12 + xorps %xmm12,%xmm11 +.byte 102,69,15,56,220,222 + movups 32(%r15),%xmm15 + xorl %edx,%ecx + movdqa %xmm3,%xmm8 +.byte 102,15,58,15,224,8 + movl %eax,%edi + roll $5,%eax + paddd %xmm3,%xmm9 + andl %ecx,%esi + xorl %edx,%ecx + psrldq $4,%xmm8 + xorl %edx,%esi + addl %eax,%ebp + pxor %xmm0,%xmm4 + rorl $2,%ebx + addl %esi,%ebp + pxor %xmm2,%xmm8 + addl 4(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pxor %xmm8,%xmm4 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm9,48(%rsp) + xorl %ecx,%edi +.byte 102,69,15,56,220,223 + movups 48(%r15),%xmm14 + addl %ebp,%edx + movdqa %xmm4,%xmm10 + movdqa %xmm4,%xmm8 + rorl $7,%eax + addl %edi,%edx + addl 8(%rsp),%ecx + xorl %ebx,%eax + pslldq $12,%xmm10 + paddd %xmm4,%xmm4 + movl %edx,%edi + roll $5,%edx + andl %eax,%esi + xorl %ebx,%eax + psrld $31,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + movdqa %xmm10,%xmm9 + rorl $7,%ebp + addl %esi,%ecx + psrld $30,%xmm10 + por %xmm8,%xmm4 + addl 12(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx +.byte 102,69,15,56,220,222 + movups 64(%r15),%xmm15 + pslld $2,%xmm9 + pxor %xmm10,%xmm4 + andl %ebp,%edi + xorl %eax,%ebp + movdqa 0(%r11),%xmm10 + xorl %eax,%edi + addl %ecx,%ebx + pxor %xmm9,%xmm4 + rorl $7,%edx + addl %edi,%ebx + movdqa %xmm2,%xmm5 + addl 16(%rsp),%eax + xorl %ebp,%edx + movdqa %xmm4,%xmm9 +.byte 102,15,58,15,233,8 + movl %ebx,%edi + roll $5,%ebx + 
paddd %xmm4,%xmm10 + andl %edx,%esi + xorl %ebp,%edx + psrldq $4,%xmm9 + xorl %ebp,%esi + addl %ebx,%eax + pxor %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm3,%xmm9 + addl 20(%rsp),%ebp +.byte 102,69,15,56,220,223 + movups 80(%r15),%xmm14 + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pxor %xmm9,%xmm5 + andl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm10,0(%rsp) + xorl %edx,%edi + addl %eax,%ebp + movdqa %xmm5,%xmm8 + movdqa %xmm5,%xmm9 + rorl $7,%ebx + addl %edi,%ebp + addl 24(%rsp),%edx + xorl %ecx,%ebx + pslldq $12,%xmm8 + paddd %xmm5,%xmm5 + movl %ebp,%edi + roll $5,%ebp + andl %ebx,%esi + xorl %ecx,%ebx + psrld $31,%xmm9 + xorl %ecx,%esi +.byte 102,69,15,56,220,222 + movups 96(%r15),%xmm15 + addl %ebp,%edx + movdqa %xmm8,%xmm10 + rorl $7,%eax + addl %esi,%edx + psrld $30,%xmm8 + por %xmm9,%xmm5 + addl 28(%rsp),%ecx + xorl %ebx,%eax + movl %edx,%esi + roll $5,%edx + pslld $2,%xmm10 + pxor %xmm8,%xmm5 + andl %eax,%edi + xorl %ebx,%eax + movdqa 16(%r11),%xmm8 + xorl %ebx,%edi + addl %edx,%ecx + pxor %xmm10,%xmm5 + rorl $7,%ebp + addl %edi,%ecx + movdqa %xmm3,%xmm6 + addl 32(%rsp),%ebx + xorl %eax,%ebp + movdqa %xmm5,%xmm10 +.byte 102,15,58,15,242,8 + movl %ecx,%edi + roll $5,%ecx +.byte 102,69,15,56,220,223 + movups 112(%r15),%xmm14 + paddd %xmm5,%xmm8 + andl %ebp,%esi + xorl %eax,%ebp + psrldq $4,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + pxor %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm4,%xmm10 + addl 36(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + pxor %xmm10,%xmm6 + andl %edx,%edi + xorl %ebp,%edx + movdqa %xmm8,16(%rsp) + xorl %ebp,%edi + addl %ebx,%eax + movdqa %xmm6,%xmm9 + movdqa %xmm6,%xmm10 + rorl $7,%ecx + addl %edi,%eax + addl 40(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 128(%r15),%xmm15 + xorl %edx,%ecx + pslldq $12,%xmm9 + paddd %xmm6,%xmm6 + movl %eax,%edi + roll $5,%eax + andl %ecx,%esi + xorl %edx,%ecx + psrld $31,%xmm10 + xorl %edx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + psrld $30,%xmm9 + por %xmm10,%xmm6 + addl 44(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pslld $2,%xmm8 + pxor %xmm9,%xmm6 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa 16(%r11),%xmm9 + xorl %ecx,%edi +.byte 102,69,15,56,220,223 + movups 144(%r15),%xmm14 + addl %ebp,%edx + pxor %xmm8,%xmm6 + rorl $7,%eax + addl %edi,%edx + movdqa %xmm4,%xmm7 + addl 48(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm6,%xmm8 +.byte 102,15,58,15,251,8 + movl %edx,%edi + roll $5,%edx + paddd %xmm6,%xmm9 + andl %eax,%esi + xorl %ebx,%eax + psrldq $4,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + pxor %xmm3,%xmm7 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm5,%xmm8 + addl 52(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx +.byte 102,69,15,56,220,222 + movups 160(%r15),%xmm15 + pxor %xmm8,%xmm7 + andl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm9,32(%rsp) + xorl %eax,%edi + addl %ecx,%ebx + movdqa %xmm7,%xmm10 + movdqa %xmm7,%xmm8 + rorl $7,%edx + addl %edi,%ebx + addl 56(%rsp),%eax + xorl %ebp,%edx + pslldq $12,%xmm10 + paddd %xmm7,%xmm7 + movl %ebx,%edi + roll $5,%ebx + andl %edx,%esi + xorl %ebp,%edx + psrld $31,%xmm8 + xorl %ebp,%esi + addl %ebx,%eax + movdqa %xmm10,%xmm9 + rorl $7,%ecx + addl %esi,%eax + psrld $30,%xmm10 + por %xmm8,%xmm7 + addl 60(%rsp),%ebp + cmpl $11,%r8d + jb .Laesenclast1 + movups 176(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 192(%r15),%xmm15 +.byte 102,69,15,56,220,222 + je .Laesenclast1 + movups 208(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 224(%r15),%xmm15 +.byte 102,69,15,56,220,222 
+.Laesenclast1: +.byte 102,69,15,56,221,223 + movups 16(%r15),%xmm14 + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pslld $2,%xmm9 + pxor %xmm10,%xmm7 + andl %ecx,%edi + xorl %edx,%ecx + movdqa 16(%r11),%xmm10 + xorl %edx,%edi + addl %eax,%ebp + pxor %xmm9,%xmm7 + rorl $7,%ebx + addl %edi,%ebp + movdqa %xmm7,%xmm9 + addl 0(%rsp),%edx + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,206,8 + xorl %ecx,%ebx + movl %ebp,%edi + roll $5,%ebp + pxor %xmm1,%xmm0 + andl %ebx,%esi + xorl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm7,%xmm10 + xorl %ecx,%esi + movups 16(%r12),%xmm12 + xorps %xmm13,%xmm12 + movups %xmm11,0(%r13,%r12,1) + xorps %xmm12,%xmm11 +.byte 102,69,15,56,220,222 + movups 32(%r15),%xmm15 + addl %ebp,%edx + pxor %xmm9,%xmm0 + rorl $7,%eax + addl %esi,%edx + addl 4(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm0,%xmm9 + movdqa %xmm10,48(%rsp) + movl %edx,%esi + roll $5,%edx + andl %eax,%edi + xorl %ebx,%eax + pslld $2,%xmm0 + xorl %ebx,%edi + addl %edx,%ecx + psrld $30,%xmm9 + rorl $7,%ebp + addl %edi,%ecx + addl 8(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%edi + roll $5,%ecx +.byte 102,69,15,56,220,223 + movups 48(%r15),%xmm14 + por %xmm9,%xmm0 + andl %ebp,%esi + xorl %eax,%ebp + movdqa %xmm0,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 12(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + andl %edx,%edi + xorl %ebp,%edx + xorl %ebp,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 16(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 64(%r15),%xmm15 + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,215,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm2,%xmm1 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm8,%xmm9 + paddd %xmm0,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm10,%xmm1 + addl 20(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm1,%xmm10 + movdqa %xmm8,0(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm1 + addl 24(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm10 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi +.byte 102,69,15,56,220,223 + movups 80(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm10,%xmm1 + addl 28(%rsp),%ebx + xorl %eax,%edi + movdqa %xmm1,%xmm8 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 32(%rsp),%eax + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,192,8 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + pxor %xmm3,%xmm2 + xorl %edx,%esi + addl %ebx,%eax + movdqa 32(%r11),%xmm10 + paddd %xmm1,%xmm9 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm8,%xmm2 + addl 36(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 96(%r15),%xmm15 + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + movdqa %xmm2,%xmm8 + movdqa %xmm9,16(%rsp) + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + pslld $2,%xmm2 + addl 40(%rsp),%edx + xorl %ecx,%esi + psrld $30,%xmm8 + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + por %xmm8,%xmm2 + addl 44(%rsp),%ecx + xorl %ebx,%edi + movdqa %xmm2,%xmm9 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi +.byte 102,69,15,56,220,223 + movups 112(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 48(%rsp),%ebx + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,201,8 + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + pxor %xmm4,%xmm3 + xorl %ebp,%esi + addl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm2,%xmm10 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm9,%xmm3 + addl 52(%rsp),%eax + xorl 
%ebp,%edi + movl %ebx,%esi + roll $5,%ebx + movdqa %xmm3,%xmm9 + movdqa %xmm10,32(%rsp) + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + pslld $2,%xmm3 + addl 56(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 128(%r15),%xmm15 + xorl %edx,%esi + psrld $30,%xmm9 + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + por %xmm9,%xmm3 + addl 60(%rsp),%edx + xorl %ecx,%edi + movdqa %xmm3,%xmm10 + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 0(%rsp),%ecx + pxor %xmm0,%xmm4 +.byte 102,68,15,58,15,210,8 + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + pxor %xmm5,%xmm4 + xorl %eax,%esi +.byte 102,69,15,56,220,223 + movups 144(%r15),%xmm14 + addl %edx,%ecx + movdqa %xmm8,%xmm9 + paddd %xmm3,%xmm8 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm10,%xmm4 + addl 4(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + movdqa %xmm4,%xmm10 + movdqa %xmm8,48(%rsp) + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + pslld $2,%xmm4 + addl 8(%rsp),%eax + xorl %ebp,%esi + psrld $30,%xmm10 + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + por %xmm10,%xmm4 + addl 12(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 160(%r15),%xmm15 + xorl %edx,%edi + movdqa %xmm4,%xmm8 + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 16(%rsp),%edx + pxor %xmm1,%xmm5 +.byte 102,68,15,58,15,195,8 + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + pxor %xmm6,%xmm5 + xorl %ebx,%esi + addl %ebp,%edx + movdqa %xmm9,%xmm10 + paddd %xmm4,%xmm9 + rorl $7,%eax + addl %esi,%edx + pxor %xmm8,%xmm5 + addl 20(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + movdqa %xmm5,%xmm8 + movdqa %xmm9,0(%rsp) + xorl %eax,%edi + cmpl $11,%r8d + jb .Laesenclast2 + movups 176(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 192(%r15),%xmm15 +.byte 102,69,15,56,220,222 + je .Laesenclast2 + movups 208(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 224(%r15),%xmm15 +.byte 102,69,15,56,220,222 +.Laesenclast2: +.byte 102,69,15,56,221,223 + movups 16(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + pslld $2,%xmm5 + addl 24(%rsp),%ebx + xorl %eax,%esi + psrld $30,%xmm8 + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + por %xmm8,%xmm5 + addl 28(%rsp),%eax + xorl %ebp,%edi + movdqa %xmm5,%xmm9 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movl %ecx,%edi + movups 32(%r12),%xmm12 + xorps %xmm13,%xmm12 + movups %xmm11,16(%r13,%r12,1) + xorps %xmm12,%xmm11 +.byte 102,69,15,56,220,222 + movups 32(%r15),%xmm15 + pxor %xmm2,%xmm6 +.byte 102,68,15,58,15,204,8 + xorl %edx,%ecx + addl 32(%rsp),%ebp + andl %edx,%edi + pxor %xmm7,%xmm6 + andl %ecx,%esi + rorl $7,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm5,%xmm10 + addl %edi,%ebp + movl %eax,%edi + pxor %xmm9,%xmm6 + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movdqa %xmm6,%xmm9 + movdqa %xmm10,16(%rsp) + movl %ebx,%esi + xorl %ecx,%ebx + addl 36(%rsp),%edx + andl %ecx,%esi + pslld $2,%xmm6 + andl %ebx,%edi + rorl $7,%eax + psrld $30,%xmm9 + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp +.byte 102,69,15,56,220,223 + movups 48(%r15),%xmm14 + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + por %xmm9,%xmm6 + movl %eax,%edi + xorl %ebx,%eax + movdqa %xmm6,%xmm10 + addl 40(%rsp),%ecx + andl %ebx,%edi + andl %eax,%esi + rorl $7,%ebp + 
addl %edi,%ecx + movl %edx,%edi + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%esi + xorl %eax,%ebp + addl 44(%rsp),%ebx + andl %eax,%esi + andl %ebp,%edi +.byte 102,69,15,56,220,222 + movups 64(%r15),%xmm15 + rorl $7,%edx + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%edi + pxor %xmm3,%xmm7 +.byte 102,68,15,58,15,213,8 + xorl %ebp,%edx + addl 48(%rsp),%eax + andl %ebp,%edi + pxor %xmm0,%xmm7 + andl %edx,%esi + rorl $7,%ecx + movdqa 48(%r11),%xmm9 + paddd %xmm6,%xmm8 + addl %edi,%eax + movl %ebx,%edi + pxor %xmm10,%xmm7 + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movdqa %xmm7,%xmm10 + movdqa %xmm8,32(%rsp) + movl %ecx,%esi +.byte 102,69,15,56,220,223 + movups 80(%r15),%xmm14 + xorl %edx,%ecx + addl 52(%rsp),%ebp + andl %edx,%esi + pslld $2,%xmm7 + andl %ecx,%edi + rorl $7,%ebx + psrld $30,%xmm10 + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + por %xmm10,%xmm7 + movl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm7,%xmm8 + addl 56(%rsp),%edx + andl %ecx,%edi + andl %ebx,%esi + rorl $7,%eax + addl %edi,%edx + movl %ebp,%edi + roll $5,%ebp +.byte 102,69,15,56,220,222 + movups 96(%r15),%xmm15 + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%esi + xorl %ebx,%eax + addl 60(%rsp),%ecx + andl %ebx,%esi + andl %eax,%edi + rorl $7,%ebp + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%edi + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,198,8 + xorl %eax,%ebp + addl 0(%rsp),%ebx + andl %eax,%edi + pxor %xmm1,%xmm0 + andl %ebp,%esi +.byte 102,69,15,56,220,223 + movups 112(%r15),%xmm14 + rorl $7,%edx + movdqa %xmm9,%xmm10 + paddd %xmm7,%xmm9 + addl %edi,%ebx + movl %ecx,%edi + pxor %xmm8,%xmm0 + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movdqa %xmm0,%xmm8 + movdqa %xmm9,48(%rsp) + movl %edx,%esi + xorl %ebp,%edx + addl 4(%rsp),%eax + andl %ebp,%esi + pslld $2,%xmm0 + andl %edx,%edi + rorl $7,%ecx + psrld $30,%xmm8 + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + por %xmm8,%xmm0 + movl %ecx,%edi +.byte 102,69,15,56,220,222 + movups 128(%r15),%xmm15 + xorl %edx,%ecx + movdqa %xmm0,%xmm9 + addl 8(%rsp),%ebp + andl %edx,%edi + andl %ecx,%esi + rorl $7,%ebx + addl %edi,%ebp + movl %eax,%edi + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%esi + xorl %ecx,%ebx + addl 12(%rsp),%edx + andl %ecx,%esi + andl %ebx,%edi + rorl $7,%eax + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp +.byte 102,69,15,56,220,223 + movups 144(%r15),%xmm14 + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%edi + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,207,8 + xorl %ebx,%eax + addl 16(%rsp),%ecx + andl %ebx,%edi + pxor %xmm2,%xmm1 + andl %eax,%esi + rorl $7,%ebp + movdqa %xmm10,%xmm8 + paddd %xmm0,%xmm10 + addl %edi,%ecx + movl %edx,%edi + pxor %xmm9,%xmm1 + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movdqa %xmm1,%xmm9 + movdqa %xmm10,0(%rsp) + movl %ebp,%esi + xorl %eax,%ebp + addl 20(%rsp),%ebx + andl %eax,%esi + pslld $2,%xmm1 + andl %ebp,%edi +.byte 102,69,15,56,220,222 + movups 160(%r15),%xmm15 + rorl $7,%edx + psrld $30,%xmm9 + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + por %xmm9,%xmm1 + movl %edx,%edi + xorl %ebp,%edx + movdqa %xmm1,%xmm10 + addl 24(%rsp),%eax + andl %ebp,%edi + andl %edx,%esi + 
rorl $7,%ecx + addl %edi,%eax + movl %ebx,%edi + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movl %ecx,%esi + cmpl $11,%r8d + jb .Laesenclast3 + movups 176(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 192(%r15),%xmm15 +.byte 102,69,15,56,220,222 + je .Laesenclast3 + movups 208(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 224(%r15),%xmm15 +.byte 102,69,15,56,220,222 +.Laesenclast3: +.byte 102,69,15,56,221,223 + movups 16(%r15),%xmm14 + xorl %edx,%ecx + addl 28(%rsp),%ebp + andl %edx,%esi + andl %ecx,%edi + rorl $7,%ebx + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%edi + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,208,8 + xorl %ecx,%ebx + addl 32(%rsp),%edx + andl %ecx,%edi + pxor %xmm3,%xmm2 + andl %ebx,%esi + rorl $7,%eax + movdqa %xmm8,%xmm9 + paddd %xmm1,%xmm8 + addl %edi,%edx + movl %ebp,%edi + pxor %xmm10,%xmm2 + roll $5,%ebp + movups 48(%r12),%xmm12 + xorps %xmm13,%xmm12 + movups %xmm11,32(%r13,%r12,1) + xorps %xmm12,%xmm11 +.byte 102,69,15,56,220,222 + movups 32(%r15),%xmm15 + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movdqa %xmm2,%xmm10 + movdqa %xmm8,16(%rsp) + movl %eax,%esi + xorl %ebx,%eax + addl 36(%rsp),%ecx + andl %ebx,%esi + pslld $2,%xmm2 + andl %eax,%edi + rorl $7,%ebp + psrld $30,%xmm10 + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + por %xmm10,%xmm2 + movl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm2,%xmm8 + addl 40(%rsp),%ebx + andl %eax,%edi + andl %ebp,%esi +.byte 102,69,15,56,220,223 + movups 48(%r15),%xmm14 + rorl $7,%edx + addl %edi,%ebx + movl %ecx,%edi + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%esi + xorl %ebp,%edx + addl 44(%rsp),%eax + andl %ebp,%esi + andl %edx,%edi + rorl $7,%ecx + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + addl 48(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 64(%r15),%xmm15 + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,193,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm4,%xmm3 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm10 + paddd %xmm2,%xmm9 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm8,%xmm3 + addl 52(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm3,%xmm8 + movdqa %xmm9,32(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm3 + addl 56(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm8 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi +.byte 102,69,15,56,220,223 + movups 80(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm8,%xmm3 + addl 60(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 0(%rsp),%eax + paddd %xmm3,%xmm10 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + movdqa %xmm10,48(%rsp) + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 4(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 96(%r15),%xmm15 + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 8(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 12(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi +.byte 102,69,15,56,220,223 + movups 112(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + cmpq %r14,%r10 + je .Ldone_ssse3 + movdqa 
64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r10),%xmm0 + movdqu 16(%r10),%xmm1 + movdqu 32(%r10),%xmm2 + movdqu 48(%r10),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r10 + addl 16(%rsp),%ebx + xorl %eax,%esi +.byte 102,15,56,0,206 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm9,%xmm0 + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + movdqa %xmm0,0(%rsp) + addl 20(%rsp),%eax + xorl %ebp,%edi + psubd %xmm9,%xmm0 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 128(%r15),%xmm15 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi +.byte 102,15,56,0,214 + movl %edx,%edi + roll $5,%edx + paddd %xmm9,%xmm1 + xorl %eax,%esi +.byte 102,69,15,56,220,223 + movups 144(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + movdqa %xmm1,16(%rsp) + addl 36(%rsp),%ebx + xorl %eax,%edi + psubd %xmm9,%xmm1 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 160(%r15),%xmm15 + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi +.byte 102,15,56,0,222 + movl %ebp,%edi + roll $5,%ebp + paddd %xmm9,%xmm2 + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + movdqa %xmm2,32(%rsp) + addl 52(%rsp),%ecx + xorl %ebx,%edi + psubd %xmm9,%xmm2 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + cmpl $11,%r8d + jb .Laesenclast4 + movups 176(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 192(%r15),%xmm15 +.byte 102,69,15,56,220,222 + je .Laesenclast4 + movups 208(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 224(%r15),%xmm15 +.byte 102,69,15,56,220,222 +.Laesenclast4: +.byte 102,69,15,56,221,223 + movups 16(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movups %xmm11,48(%r13,%r12,1) + leaq 64(%r12),%r12 + + addl 0(%r9),%eax + addl 4(%r9),%esi + addl 8(%r9),%ecx + addl 12(%r9),%edx + movl %eax,0(%r9) + addl 16(%r9),%ebp + movl %esi,4(%r9) + movl %esi,%ebx + movl %ecx,8(%r9) + movl %edx,12(%r9) + movl %ebp,16(%r9) + jmp .Loop_ssse3 + +.align 16 +.Ldone_ssse3: + addl 16(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 20(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 128(%r15),%xmm15 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + xorl 
%eax,%esi +.byte 102,69,15,56,220,223 + movups 144(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + addl 36(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp +.byte 102,69,15,56,220,222 + movups 160(%r15),%xmm15 + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 52(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + cmpl $11,%r8d + jb .Laesenclast5 + movups 176(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 192(%r15),%xmm15 +.byte 102,69,15,56,220,222 + je .Laesenclast5 + movups 208(%r15),%xmm14 +.byte 102,69,15,56,220,223 + movups 224(%r15),%xmm15 +.byte 102,69,15,56,220,222 +.Laesenclast5: +.byte 102,69,15,56,221,223 + movups 16(%r15),%xmm14 + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movups %xmm11,48(%r13,%r12,1) + movq 88(%rsp),%r8 + + addl 0(%r9),%eax + addl 4(%r9),%esi + addl 8(%r9),%ecx + movl %eax,0(%r9) + addl 12(%r9),%edx + movl %esi,4(%r9) + addl 16(%r9),%ebp + movl %ecx,8(%r9) + movl %edx,12(%r9) + movl %ebp,16(%r9) + movups %xmm11,(%r8) + leaq 104(%rsp),%rsi + movq 0(%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lepilogue_ssse3: + .byte 0xf3,0xc3 +.size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3 +.align 64 +K_XX_XX: +.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 +.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 +.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc +.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 +.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f + +.byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 64 diff --git a/secure/lib/libcrypto/asm/aesni-x86.s b/secure/lib/libcrypto/asm/aesni-x86.s new file mode 100644 index 0000000000..4ceefd4947 --- /dev/null +++ b/secure/lib/libcrypto/asm/aesni-x86.s @@ -0,0 +1,2143 @@ +.file "../../../../crypto/openssl/crypto/aes/asm/aesni-x86.s" +.text +.globl aesni_encrypt +.type aesni_encrypt,@function +.align 16 +aesni_encrypt: +.L_aesni_encrypt_begin: + movl 4(%esp),%eax + movl 12(%esp),%edx + movups (%eax),%xmm2 + movl 240(%edx),%ecx + movl 8(%esp),%eax + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L000enc1_loop_1: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L000enc1_loop_1 +.byte 102,15,56,221,209 + movups %xmm2,(%eax) + ret +.size aesni_encrypt,.-.L_aesni_encrypt_begin +.globl aesni_decrypt +.type aesni_decrypt,@function +.align 16 +aesni_decrypt: +.L_aesni_decrypt_begin: + movl 4(%esp),%eax + movl 12(%esp),%edx + movups (%eax),%xmm2 + movl 240(%edx),%ecx + movl 8(%esp),%eax + movups (%edx),%xmm0 + 
movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L001dec1_loop_2: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L001dec1_loop_2 +.byte 102,15,56,223,209 + movups %xmm2,(%eax) + ret +.size aesni_decrypt,.-.L_aesni_decrypt_begin +.type _aesni_encrypt3,@function +.align 16 +_aesni_encrypt3: + movups (%edx),%xmm0 + shrl $1,%ecx + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 + pxor %xmm0,%xmm4 + movups (%edx),%xmm0 +.L002enc3_loop: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %ecx +.byte 102,15,56,220,225 + movups 16(%edx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leal 32(%edx),%edx +.byte 102,15,56,220,224 + movups (%edx),%xmm0 + jnz .L002enc3_loop +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 + ret +.size _aesni_encrypt3,.-_aesni_encrypt3 +.type _aesni_decrypt3,@function +.align 16 +_aesni_decrypt3: + movups (%edx),%xmm0 + shrl $1,%ecx + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 + pxor %xmm0,%xmm4 + movups (%edx),%xmm0 +.L003dec3_loop: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %ecx +.byte 102,15,56,222,225 + movups 16(%edx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leal 32(%edx),%edx +.byte 102,15,56,222,224 + movups (%edx),%xmm0 + jnz .L003dec3_loop +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 + ret +.size _aesni_decrypt3,.-_aesni_decrypt3 +.type _aesni_encrypt4,@function +.align 16 +_aesni_encrypt4: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + shrl $1,%ecx + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 + pxor %xmm0,%xmm4 + pxor %xmm0,%xmm5 + movups (%edx),%xmm0 +.L004enc4_loop: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %ecx +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 + movups 16(%edx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leal 32(%edx),%edx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 + movups (%edx),%xmm0 + jnz .L004enc4_loop +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 +.byte 102,15,56,221,232 + ret +.size _aesni_encrypt4,.-_aesni_encrypt4 +.type _aesni_decrypt4,@function +.align 16 +_aesni_decrypt4: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + shrl $1,%ecx + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 + pxor %xmm0,%xmm4 + pxor %xmm0,%xmm5 + movups (%edx),%xmm0 +.L005dec4_loop: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %ecx +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 + movups 16(%edx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leal 32(%edx),%edx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 + movups (%edx),%xmm0 + jnz .L005dec4_loop +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 +.byte 102,15,56,223,232 + ret +.size _aesni_decrypt4,.-_aesni_decrypt4 +.type _aesni_encrypt6,@function +.align 16 +_aesni_encrypt6: + movups (%edx),%xmm0 + shrl $1,%ecx + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 +.byte 102,15,56,220,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,220,217 + pxor %xmm0,%xmm5 + decl 
%ecx +.byte 102,15,56,220,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 +.byte 102,15,56,220,241 + movups (%edx),%xmm0 +.byte 102,15,56,220,249 + jmp .L_aesni_encrypt6_enter +.align 16 +.L006enc6_loop: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %ecx +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.align 16 +.L_aesni_encrypt6_enter: + movups 16(%edx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leal 32(%edx),%edx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 + movups (%edx),%xmm0 + jnz .L006enc6_loop +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 +.byte 102,15,56,221,232 +.byte 102,15,56,221,240 +.byte 102,15,56,221,248 + ret +.size _aesni_encrypt6,.-_aesni_encrypt6 +.type _aesni_decrypt6,@function +.align 16 +_aesni_decrypt6: + movups (%edx),%xmm0 + shrl $1,%ecx + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 +.byte 102,15,56,222,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,222,217 + pxor %xmm0,%xmm5 + decl %ecx +.byte 102,15,56,222,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 +.byte 102,15,56,222,241 + movups (%edx),%xmm0 +.byte 102,15,56,222,249 + jmp .L_aesni_decrypt6_enter +.align 16 +.L007dec6_loop: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %ecx +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.align 16 +.L_aesni_decrypt6_enter: + movups 16(%edx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leal 32(%edx),%edx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 +.byte 102,15,56,222,240 +.byte 102,15,56,222,248 + movups (%edx),%xmm0 + jnz .L007dec6_loop +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 +.byte 102,15,56,223,232 +.byte 102,15,56,223,240 +.byte 102,15,56,223,248 + ret +.size _aesni_decrypt6,.-_aesni_decrypt6 +.globl aesni_ecb_encrypt +.type aesni_ecb_encrypt,@function +.align 16 +aesni_ecb_encrypt: +.L_aesni_ecb_encrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl 36(%esp),%ebx + andl $-16,%eax + jz .L008ecb_ret + movl 240(%edx),%ecx + testl %ebx,%ebx + jz .L009ecb_decrypt + movl %edx,%ebp + movl %ecx,%ebx + cmpl $96,%eax + jb .L010ecb_enc_tail + movdqu (%esi),%xmm2 + movdqu 16(%esi),%xmm3 + movdqu 32(%esi),%xmm4 + movdqu 48(%esi),%xmm5 + movdqu 64(%esi),%xmm6 + movdqu 80(%esi),%xmm7 + leal 96(%esi),%esi + subl $96,%eax + jmp .L011ecb_enc_loop6_enter +.align 16 +.L012ecb_enc_loop6: + movups %xmm2,(%edi) + movdqu (%esi),%xmm2 + movups %xmm3,16(%edi) + movdqu 16(%esi),%xmm3 + movups %xmm4,32(%edi) + movdqu 32(%esi),%xmm4 + movups %xmm5,48(%edi) + movdqu 48(%esi),%xmm5 + movups %xmm6,64(%edi) + movdqu 64(%esi),%xmm6 + movups %xmm7,80(%edi) + leal 96(%edi),%edi + movdqu 80(%esi),%xmm7 + leal 96(%esi),%esi +.L011ecb_enc_loop6_enter: + call _aesni_encrypt6 + movl %ebp,%edx + movl %ebx,%ecx + subl $96,%eax + jnc .L012ecb_enc_loop6 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + 
movups %xmm7,80(%edi) + leal 96(%edi),%edi + addl $96,%eax + jz .L008ecb_ret +.L010ecb_enc_tail: + movups (%esi),%xmm2 + cmpl $32,%eax + jb .L013ecb_enc_one + movups 16(%esi),%xmm3 + je .L014ecb_enc_two + movups 32(%esi),%xmm4 + cmpl $64,%eax + jb .L015ecb_enc_three + movups 48(%esi),%xmm5 + je .L016ecb_enc_four + movups 64(%esi),%xmm6 + xorps %xmm7,%xmm7 + call _aesni_encrypt6 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + jmp .L008ecb_ret +.align 16 +.L013ecb_enc_one: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L017enc1_loop_3: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L017enc1_loop_3 +.byte 102,15,56,221,209 + movups %xmm2,(%edi) + jmp .L008ecb_ret +.align 16 +.L014ecb_enc_two: + xorps %xmm4,%xmm4 + call _aesni_encrypt3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + jmp .L008ecb_ret +.align 16 +.L015ecb_enc_three: + call _aesni_encrypt3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + jmp .L008ecb_ret +.align 16 +.L016ecb_enc_four: + call _aesni_encrypt4 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + jmp .L008ecb_ret +.align 16 +.L009ecb_decrypt: + movl %edx,%ebp + movl %ecx,%ebx + cmpl $96,%eax + jb .L018ecb_dec_tail + movdqu (%esi),%xmm2 + movdqu 16(%esi),%xmm3 + movdqu 32(%esi),%xmm4 + movdqu 48(%esi),%xmm5 + movdqu 64(%esi),%xmm6 + movdqu 80(%esi),%xmm7 + leal 96(%esi),%esi + subl $96,%eax + jmp .L019ecb_dec_loop6_enter +.align 16 +.L020ecb_dec_loop6: + movups %xmm2,(%edi) + movdqu (%esi),%xmm2 + movups %xmm3,16(%edi) + movdqu 16(%esi),%xmm3 + movups %xmm4,32(%edi) + movdqu 32(%esi),%xmm4 + movups %xmm5,48(%edi) + movdqu 48(%esi),%xmm5 + movups %xmm6,64(%edi) + movdqu 64(%esi),%xmm6 + movups %xmm7,80(%edi) + leal 96(%edi),%edi + movdqu 80(%esi),%xmm7 + leal 96(%esi),%esi +.L019ecb_dec_loop6_enter: + call _aesni_decrypt6 + movl %ebp,%edx + movl %ebx,%ecx + subl $96,%eax + jnc .L020ecb_dec_loop6 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + movups %xmm7,80(%edi) + leal 96(%edi),%edi + addl $96,%eax + jz .L008ecb_ret +.L018ecb_dec_tail: + movups (%esi),%xmm2 + cmpl $32,%eax + jb .L021ecb_dec_one + movups 16(%esi),%xmm3 + je .L022ecb_dec_two + movups 32(%esi),%xmm4 + cmpl $64,%eax + jb .L023ecb_dec_three + movups 48(%esi),%xmm5 + je .L024ecb_dec_four + movups 64(%esi),%xmm6 + xorps %xmm7,%xmm7 + call _aesni_decrypt6 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + jmp .L008ecb_ret +.align 16 +.L021ecb_dec_one: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L025dec1_loop_4: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L025dec1_loop_4 +.byte 102,15,56,223,209 + movups %xmm2,(%edi) + jmp .L008ecb_ret +.align 16 +.L022ecb_dec_two: + xorps %xmm4,%xmm4 + call _aesni_decrypt3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + jmp .L008ecb_ret +.align 16 +.L023ecb_dec_three: + call _aesni_decrypt3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + jmp .L008ecb_ret +.align 16 +.L024ecb_dec_four: + call _aesni_decrypt4 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) +.L008ecb_ret: + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size 
aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin +.globl aesni_ccm64_encrypt_blocks +.type aesni_ccm64_encrypt_blocks,@function +.align 16 +aesni_ccm64_encrypt_blocks: +.L_aesni_ccm64_encrypt_blocks_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl 36(%esp),%ebx + movl 40(%esp),%ecx + movl %esp,%ebp + subl $60,%esp + andl $-16,%esp + movl %ebp,48(%esp) + movdqu (%ebx),%xmm7 + movdqu (%ecx),%xmm3 + movl 240(%edx),%ecx + movl $202182159,(%esp) + movl $134810123,4(%esp) + movl $67438087,8(%esp) + movl $66051,12(%esp) + movl $1,%ebx + xorl %ebp,%ebp + movl %ebx,16(%esp) + movl %ebp,20(%esp) + movl %ebp,24(%esp) + movl %ebp,28(%esp) + shrl $1,%ecx + leal (%edx),%ebp + movdqa (%esp),%xmm5 + movdqa %xmm7,%xmm2 + movl %ecx,%ebx +.byte 102,15,56,0,253 +.L026ccm64_enc_outer: + movups (%ebp),%xmm0 + movl %ebx,%ecx + movups (%esi),%xmm6 + xorps %xmm0,%xmm2 + movups 16(%ebp),%xmm1 + xorps %xmm6,%xmm0 + leal 32(%ebp),%edx + xorps %xmm0,%xmm3 + movups (%edx),%xmm0 +.L027ccm64_enc2_loop: +.byte 102,15,56,220,209 + decl %ecx +.byte 102,15,56,220,217 + movups 16(%edx),%xmm1 +.byte 102,15,56,220,208 + leal 32(%edx),%edx +.byte 102,15,56,220,216 + movups (%edx),%xmm0 + jnz .L027ccm64_enc2_loop +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + paddq 16(%esp),%xmm7 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 + decl %eax + leal 16(%esi),%esi + xorps %xmm2,%xmm6 + movdqa %xmm7,%xmm2 + movups %xmm6,(%edi) + leal 16(%edi),%edi +.byte 102,15,56,0,213 + jnz .L026ccm64_enc_outer + movl 48(%esp),%esp + movl 40(%esp),%edi + movups %xmm3,(%edi) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin +.globl aesni_ccm64_decrypt_blocks +.type aesni_ccm64_decrypt_blocks,@function +.align 16 +aesni_ccm64_decrypt_blocks: +.L_aesni_ccm64_decrypt_blocks_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl 36(%esp),%ebx + movl 40(%esp),%ecx + movl %esp,%ebp + subl $60,%esp + andl $-16,%esp + movl %ebp,48(%esp) + movdqu (%ebx),%xmm7 + movdqu (%ecx),%xmm3 + movl 240(%edx),%ecx + movl $202182159,(%esp) + movl $134810123,4(%esp) + movl $67438087,8(%esp) + movl $66051,12(%esp) + movl $1,%ebx + xorl %ebp,%ebp + movl %ebx,16(%esp) + movl %ebp,20(%esp) + movl %ebp,24(%esp) + movl %ebp,28(%esp) + movdqa (%esp),%xmm5 + movdqa %xmm7,%xmm2 + movl %edx,%ebp + movl %ecx,%ebx +.byte 102,15,56,0,253 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L028enc1_loop_5: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L028enc1_loop_5 +.byte 102,15,56,221,209 + movups (%esi),%xmm6 + paddq 16(%esp),%xmm7 + leal 16(%esi),%esi + jmp .L029ccm64_dec_outer +.align 16 +.L029ccm64_dec_outer: + xorps %xmm2,%xmm6 + movdqa %xmm7,%xmm2 + movl %ebx,%ecx + movups %xmm6,(%edi) + leal 16(%edi),%edi +.byte 102,15,56,0,213 + subl $1,%eax + jz .L030ccm64_dec_break + movups (%ebp),%xmm0 + shrl $1,%ecx + movups 16(%ebp),%xmm1 + xorps %xmm0,%xmm6 + leal 32(%ebp),%edx + xorps %xmm0,%xmm2 + xorps %xmm6,%xmm3 + movups (%edx),%xmm0 +.L031ccm64_dec2_loop: +.byte 102,15,56,220,209 + decl %ecx +.byte 102,15,56,220,217 + movups 16(%edx),%xmm1 +.byte 102,15,56,220,208 + leal 32(%edx),%edx +.byte 102,15,56,220,216 + movups (%edx),%xmm0 + jnz .L031ccm64_dec2_loop + movups (%esi),%xmm6 + paddq 16(%esp),%xmm7 +.byte 102,15,56,220,209 +.byte 
102,15,56,220,217 + leal 16(%esi),%esi +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 + jmp .L029ccm64_dec_outer +.align 16 +.L030ccm64_dec_break: + movl %ebp,%edx + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + xorps %xmm0,%xmm6 + leal 32(%edx),%edx + xorps %xmm6,%xmm3 +.L032enc1_loop_6: +.byte 102,15,56,220,217 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L032enc1_loop_6 +.byte 102,15,56,221,217 + movl 48(%esp),%esp + movl 40(%esp),%edi + movups %xmm3,(%edi) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin +.globl aesni_ctr32_encrypt_blocks +.type aesni_ctr32_encrypt_blocks,@function +.align 16 +aesni_ctr32_encrypt_blocks: +.L_aesni_ctr32_encrypt_blocks_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl 36(%esp),%ebx + movl %esp,%ebp + subl $88,%esp + andl $-16,%esp + movl %ebp,80(%esp) + cmpl $1,%eax + je .L033ctr32_one_shortcut + movdqu (%ebx),%xmm7 + movl $202182159,(%esp) + movl $134810123,4(%esp) + movl $67438087,8(%esp) + movl $66051,12(%esp) + movl $6,%ecx + xorl %ebp,%ebp + movl %ecx,16(%esp) + movl %ecx,20(%esp) + movl %ecx,24(%esp) + movl %ebp,28(%esp) +.byte 102,15,58,22,251,3 +.byte 102,15,58,34,253,3 + movl 240(%edx),%ecx + bswap %ebx + pxor %xmm1,%xmm1 + pxor %xmm0,%xmm0 + movdqa (%esp),%xmm2 +.byte 102,15,58,34,203,0 + leal 3(%ebx),%ebp +.byte 102,15,58,34,197,0 + incl %ebx +.byte 102,15,58,34,203,1 + incl %ebp +.byte 102,15,58,34,197,1 + incl %ebx +.byte 102,15,58,34,203,2 + incl %ebp +.byte 102,15,58,34,197,2 + movdqa %xmm1,48(%esp) +.byte 102,15,56,0,202 + movdqa %xmm0,64(%esp) +.byte 102,15,56,0,194 + pshufd $192,%xmm1,%xmm2 + pshufd $128,%xmm1,%xmm3 + cmpl $6,%eax + jb .L034ctr32_tail + movdqa %xmm7,32(%esp) + shrl $1,%ecx + movl %edx,%ebp + movl %ecx,%ebx + subl $6,%eax + jmp .L035ctr32_loop6 +.align 16 +.L035ctr32_loop6: + pshufd $64,%xmm1,%xmm4 + movdqa 32(%esp),%xmm1 + pshufd $192,%xmm0,%xmm5 + por %xmm1,%xmm2 + pshufd $128,%xmm0,%xmm6 + por %xmm1,%xmm3 + pshufd $64,%xmm0,%xmm7 + por %xmm1,%xmm4 + por %xmm1,%xmm5 + por %xmm1,%xmm6 + por %xmm1,%xmm7 + movups (%ebp),%xmm0 + movups 16(%ebp),%xmm1 + leal 32(%ebp),%edx + decl %ecx + pxor %xmm0,%xmm2 + pxor %xmm0,%xmm3 +.byte 102,15,56,220,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,220,217 + pxor %xmm0,%xmm5 +.byte 102,15,56,220,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 +.byte 102,15,56,220,241 + movups (%edx),%xmm0 +.byte 102,15,56,220,249 + call .L_aesni_encrypt6_enter + movups (%esi),%xmm1 + movups 16(%esi),%xmm0 + xorps %xmm1,%xmm2 + movups 32(%esi),%xmm1 + xorps %xmm0,%xmm3 + movups %xmm2,(%edi) + movdqa 16(%esp),%xmm0 + xorps %xmm1,%xmm4 + movdqa 48(%esp),%xmm1 + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + paddd %xmm0,%xmm1 + paddd 64(%esp),%xmm0 + movdqa (%esp),%xmm2 + movups 48(%esi),%xmm3 + movups 64(%esi),%xmm4 + xorps %xmm3,%xmm5 + movups 80(%esi),%xmm3 + leal 96(%esi),%esi + movdqa %xmm1,48(%esp) +.byte 102,15,56,0,202 + xorps %xmm4,%xmm6 + movups %xmm5,48(%edi) + xorps %xmm3,%xmm7 + movdqa %xmm0,64(%esp) +.byte 102,15,56,0,194 + movups %xmm6,64(%edi) + pshufd $192,%xmm1,%xmm2 + movups %xmm7,80(%edi) + leal 96(%edi),%edi + movl %ebx,%ecx + pshufd $128,%xmm1,%xmm3 + subl $6,%eax + jnc .L035ctr32_loop6 + addl $6,%eax + jz .L036ctr32_ret + movl %ebp,%edx + leal 1(,%ecx,2),%ecx + movdqa 32(%esp),%xmm7 +.L034ctr32_tail: + por %xmm7,%xmm2 + cmpl $2,%eax + jb .L037ctr32_one + pshufd 
$64,%xmm1,%xmm4 + por %xmm7,%xmm3 + je .L038ctr32_two + pshufd $192,%xmm0,%xmm5 + por %xmm7,%xmm4 + cmpl $4,%eax + jb .L039ctr32_three + pshufd $128,%xmm0,%xmm6 + por %xmm7,%xmm5 + je .L040ctr32_four + por %xmm7,%xmm6 + call _aesni_encrypt6 + movups (%esi),%xmm1 + movups 16(%esi),%xmm0 + xorps %xmm1,%xmm2 + movups 32(%esi),%xmm1 + xorps %xmm0,%xmm3 + movups 48(%esi),%xmm0 + xorps %xmm1,%xmm4 + movups 64(%esi),%xmm1 + xorps %xmm0,%xmm5 + movups %xmm2,(%edi) + xorps %xmm1,%xmm6 + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + jmp .L036ctr32_ret +.align 16 +.L033ctr32_one_shortcut: + movups (%ebx),%xmm2 + movl 240(%edx),%ecx +.L037ctr32_one: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L041enc1_loop_7: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L041enc1_loop_7 +.byte 102,15,56,221,209 + movups (%esi),%xmm6 + xorps %xmm2,%xmm6 + movups %xmm6,(%edi) + jmp .L036ctr32_ret +.align 16 +.L038ctr32_two: + call _aesni_encrypt3 + movups (%esi),%xmm5 + movups 16(%esi),%xmm6 + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + jmp .L036ctr32_ret +.align 16 +.L039ctr32_three: + call _aesni_encrypt3 + movups (%esi),%xmm5 + movups 16(%esi),%xmm6 + xorps %xmm5,%xmm2 + movups 32(%esi),%xmm7 + xorps %xmm6,%xmm3 + movups %xmm2,(%edi) + xorps %xmm7,%xmm4 + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + jmp .L036ctr32_ret +.align 16 +.L040ctr32_four: + call _aesni_encrypt4 + movups (%esi),%xmm6 + movups 16(%esi),%xmm7 + movups 32(%esi),%xmm1 + xorps %xmm6,%xmm2 + movups 48(%esi),%xmm0 + xorps %xmm7,%xmm3 + movups %xmm2,(%edi) + xorps %xmm1,%xmm4 + movups %xmm3,16(%edi) + xorps %xmm0,%xmm5 + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) +.L036ctr32_ret: + movl 80(%esp),%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin +.globl aesni_xts_encrypt +.type aesni_xts_encrypt,@function +.align 16 +aesni_xts_encrypt: +.L_aesni_xts_encrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 36(%esp),%edx + movl 40(%esp),%esi + movl 240(%edx),%ecx + movups (%esi),%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L042enc1_loop_8: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L042enc1_loop_8 +.byte 102,15,56,221,209 + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl %esp,%ebp + subl $120,%esp + movl 240(%edx),%ecx + andl $-16,%esp + movl $135,96(%esp) + movl $0,100(%esp) + movl $1,104(%esp) + movl $0,108(%esp) + movl %eax,112(%esp) + movl %ebp,116(%esp) + movdqa %xmm2,%xmm1 + pxor %xmm0,%xmm0 + movdqa 96(%esp),%xmm3 + pcmpgtd %xmm1,%xmm0 + andl $-16,%eax + movl %edx,%ebp + movl %ecx,%ebx + subl $96,%eax + jc .L043xts_enc_short + shrl $1,%ecx + movl %ecx,%ebx + jmp .L044xts_enc_loop6 +.align 16 +.L044xts_enc_loop6: + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,16(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,32(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,48(%esp) + paddq %xmm1,%xmm1 + pand 
%xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm7 + movdqa %xmm1,64(%esp) + paddq %xmm1,%xmm1 + movups (%ebp),%xmm0 + pand %xmm3,%xmm7 + movups (%esi),%xmm2 + pxor %xmm1,%xmm7 + movdqu 16(%esi),%xmm3 + xorps %xmm0,%xmm2 + movdqu 32(%esi),%xmm4 + pxor %xmm0,%xmm3 + movdqu 48(%esi),%xmm5 + pxor %xmm0,%xmm4 + movdqu 64(%esi),%xmm6 + pxor %xmm0,%xmm5 + movdqu 80(%esi),%xmm1 + pxor %xmm0,%xmm6 + leal 96(%esi),%esi + pxor (%esp),%xmm2 + movdqa %xmm7,80(%esp) + pxor %xmm1,%xmm7 + movups 16(%ebp),%xmm1 + leal 32(%ebp),%edx + pxor 16(%esp),%xmm3 +.byte 102,15,56,220,209 + pxor 32(%esp),%xmm4 +.byte 102,15,56,220,217 + pxor 48(%esp),%xmm5 + decl %ecx +.byte 102,15,56,220,225 + pxor 64(%esp),%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 +.byte 102,15,56,220,241 + movups (%edx),%xmm0 +.byte 102,15,56,220,249 + call .L_aesni_encrypt6_enter + movdqa 80(%esp),%xmm1 + pxor %xmm0,%xmm0 + xorps (%esp),%xmm2 + pcmpgtd %xmm1,%xmm0 + xorps 16(%esp),%xmm3 + movups %xmm2,(%edi) + xorps 32(%esp),%xmm4 + movups %xmm3,16(%edi) + xorps 48(%esp),%xmm5 + movups %xmm4,32(%edi) + xorps 64(%esp),%xmm6 + movups %xmm5,48(%edi) + xorps %xmm1,%xmm7 + movups %xmm6,64(%edi) + pshufd $19,%xmm0,%xmm2 + movups %xmm7,80(%edi) + leal 96(%edi),%edi + movdqa 96(%esp),%xmm3 + pxor %xmm0,%xmm0 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + movl %ebx,%ecx + pxor %xmm2,%xmm1 + subl $96,%eax + jnc .L044xts_enc_loop6 + leal 1(,%ecx,2),%ecx + movl %ebp,%edx + movl %ecx,%ebx +.L043xts_enc_short: + addl $96,%eax + jz .L045xts_enc_done6x + movdqa %xmm1,%xmm5 + cmpl $32,%eax + jb .L046xts_enc_one + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + je .L047xts_enc_two + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,%xmm6 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + cmpl $64,%eax + jb .L048xts_enc_three + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,%xmm7 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + movdqa %xmm5,(%esp) + movdqa %xmm6,16(%esp) + je .L049xts_enc_four + movdqa %xmm7,32(%esp) + pshufd $19,%xmm0,%xmm7 + movdqa %xmm1,48(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm7 + pxor %xmm1,%xmm7 + movdqu (%esi),%xmm2 + movdqu 16(%esi),%xmm3 + movdqu 32(%esi),%xmm4 + pxor (%esp),%xmm2 + movdqu 48(%esi),%xmm5 + pxor 16(%esp),%xmm3 + movdqu 64(%esi),%xmm6 + pxor 32(%esp),%xmm4 + leal 80(%esi),%esi + pxor 48(%esp),%xmm5 + movdqa %xmm7,64(%esp) + pxor %xmm7,%xmm6 + call _aesni_encrypt6 + movaps 64(%esp),%xmm1 + xorps (%esp),%xmm2 + xorps 16(%esp),%xmm3 + xorps 32(%esp),%xmm4 + movups %xmm2,(%edi) + xorps 48(%esp),%xmm5 + movups %xmm3,16(%edi) + xorps %xmm1,%xmm6 + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + leal 80(%edi),%edi + jmp .L050xts_enc_done +.align 16 +.L046xts_enc_one: + movups (%esi),%xmm2 + leal 16(%esi),%esi + xorps %xmm5,%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L051enc1_loop_9: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L051enc1_loop_9 +.byte 102,15,56,221,209 + xorps %xmm5,%xmm2 + movups %xmm2,(%edi) + leal 16(%edi),%edi + movdqa %xmm5,%xmm1 + jmp .L050xts_enc_done +.align 16 +.L047xts_enc_two: + movaps %xmm1,%xmm6 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + leal 32(%esi),%esi + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm4,%xmm4 + call _aesni_encrypt3 + xorps %xmm5,%xmm2 + xorps 
%xmm6,%xmm3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + leal 32(%edi),%edi + movdqa %xmm6,%xmm1 + jmp .L050xts_enc_done +.align 16 +.L048xts_enc_three: + movaps %xmm1,%xmm7 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + movups 32(%esi),%xmm4 + leal 48(%esi),%esi + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm4 + call _aesni_encrypt3 + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm4 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + leal 48(%edi),%edi + movdqa %xmm7,%xmm1 + jmp .L050xts_enc_done +.align 16 +.L049xts_enc_four: + movaps %xmm1,%xmm6 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + movups 32(%esi),%xmm4 + xorps (%esp),%xmm2 + movups 48(%esi),%xmm5 + leal 64(%esi),%esi + xorps 16(%esp),%xmm3 + xorps %xmm7,%xmm4 + xorps %xmm6,%xmm5 + call _aesni_encrypt4 + xorps (%esp),%xmm2 + xorps 16(%esp),%xmm3 + xorps %xmm7,%xmm4 + movups %xmm2,(%edi) + xorps %xmm6,%xmm5 + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + leal 64(%edi),%edi + movdqa %xmm6,%xmm1 + jmp .L050xts_enc_done +.align 16 +.L045xts_enc_done6x: + movl 112(%esp),%eax + andl $15,%eax + jz .L052xts_enc_ret + movdqa %xmm1,%xmm5 + movl %eax,112(%esp) + jmp .L053xts_enc_steal +.align 16 +.L050xts_enc_done: + movl 112(%esp),%eax + pxor %xmm0,%xmm0 + andl $15,%eax + jz .L052xts_enc_ret + pcmpgtd %xmm1,%xmm0 + movl %eax,112(%esp) + pshufd $19,%xmm0,%xmm5 + paddq %xmm1,%xmm1 + pand 96(%esp),%xmm5 + pxor %xmm1,%xmm5 +.L053xts_enc_steal: + movzbl (%esi),%ecx + movzbl -16(%edi),%edx + leal 1(%esi),%esi + movb %cl,-16(%edi) + movb %dl,(%edi) + leal 1(%edi),%edi + subl $1,%eax + jnz .L053xts_enc_steal + subl 112(%esp),%edi + movl %ebp,%edx + movl %ebx,%ecx + movups -16(%edi),%xmm2 + xorps %xmm5,%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L054enc1_loop_10: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L054enc1_loop_10 +.byte 102,15,56,221,209 + xorps %xmm5,%xmm2 + movups %xmm2,-16(%edi) +.L052xts_enc_ret: + movl 116(%esp),%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin +.globl aesni_xts_decrypt +.type aesni_xts_decrypt,@function +.align 16 +aesni_xts_decrypt: +.L_aesni_xts_decrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 36(%esp),%edx + movl 40(%esp),%esi + movl 240(%edx),%ecx + movups (%esi),%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L055enc1_loop_11: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L055enc1_loop_11 +.byte 102,15,56,221,209 + movl 20(%esp),%esi + movl 24(%esp),%edi + movl 28(%esp),%eax + movl 32(%esp),%edx + movl %esp,%ebp + subl $120,%esp + andl $-16,%esp + xorl %ebx,%ebx + testl $15,%eax + setnz %bl + shll $4,%ebx + subl %ebx,%eax + movl $135,96(%esp) + movl $0,100(%esp) + movl $1,104(%esp) + movl $0,108(%esp) + movl %eax,112(%esp) + movl %ebp,116(%esp) + movl 240(%edx),%ecx + movl %edx,%ebp + movl %ecx,%ebx + movdqa %xmm2,%xmm1 + pxor %xmm0,%xmm0 + movdqa 96(%esp),%xmm3 + pcmpgtd %xmm1,%xmm0 + andl $-16,%eax + subl $96,%eax + jc .L056xts_dec_short + shrl $1,%ecx + movl %ecx,%ebx + jmp .L057xts_dec_loop6 +.align 16 +.L057xts_dec_loop6: + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,16(%esp) + paddq %xmm1,%xmm1 + pand 
%xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,32(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,48(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + pshufd $19,%xmm0,%xmm7 + movdqa %xmm1,64(%esp) + paddq %xmm1,%xmm1 + movups (%ebp),%xmm0 + pand %xmm3,%xmm7 + movups (%esi),%xmm2 + pxor %xmm1,%xmm7 + movdqu 16(%esi),%xmm3 + xorps %xmm0,%xmm2 + movdqu 32(%esi),%xmm4 + pxor %xmm0,%xmm3 + movdqu 48(%esi),%xmm5 + pxor %xmm0,%xmm4 + movdqu 64(%esi),%xmm6 + pxor %xmm0,%xmm5 + movdqu 80(%esi),%xmm1 + pxor %xmm0,%xmm6 + leal 96(%esi),%esi + pxor (%esp),%xmm2 + movdqa %xmm7,80(%esp) + pxor %xmm1,%xmm7 + movups 16(%ebp),%xmm1 + leal 32(%ebp),%edx + pxor 16(%esp),%xmm3 +.byte 102,15,56,222,209 + pxor 32(%esp),%xmm4 +.byte 102,15,56,222,217 + pxor 48(%esp),%xmm5 + decl %ecx +.byte 102,15,56,222,225 + pxor 64(%esp),%xmm6 +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 +.byte 102,15,56,222,241 + movups (%edx),%xmm0 +.byte 102,15,56,222,249 + call .L_aesni_decrypt6_enter + movdqa 80(%esp),%xmm1 + pxor %xmm0,%xmm0 + xorps (%esp),%xmm2 + pcmpgtd %xmm1,%xmm0 + xorps 16(%esp),%xmm3 + movups %xmm2,(%edi) + xorps 32(%esp),%xmm4 + movups %xmm3,16(%edi) + xorps 48(%esp),%xmm5 + movups %xmm4,32(%edi) + xorps 64(%esp),%xmm6 + movups %xmm5,48(%edi) + xorps %xmm1,%xmm7 + movups %xmm6,64(%edi) + pshufd $19,%xmm0,%xmm2 + movups %xmm7,80(%edi) + leal 96(%edi),%edi + movdqa 96(%esp),%xmm3 + pxor %xmm0,%xmm0 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + movl %ebx,%ecx + pxor %xmm2,%xmm1 + subl $96,%eax + jnc .L057xts_dec_loop6 + leal 1(,%ecx,2),%ecx + movl %ebp,%edx + movl %ecx,%ebx +.L056xts_dec_short: + addl $96,%eax + jz .L058xts_dec_done6x + movdqa %xmm1,%xmm5 + cmpl $32,%eax + jb .L059xts_dec_one + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + je .L060xts_dec_two + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,%xmm6 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + cmpl $64,%eax + jb .L061xts_dec_three + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa %xmm1,%xmm7 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 + movdqa %xmm5,(%esp) + movdqa %xmm6,16(%esp) + je .L062xts_dec_four + movdqa %xmm7,32(%esp) + pshufd $19,%xmm0,%xmm7 + movdqa %xmm1,48(%esp) + paddq %xmm1,%xmm1 + pand %xmm3,%xmm7 + pxor %xmm1,%xmm7 + movdqu (%esi),%xmm2 + movdqu 16(%esi),%xmm3 + movdqu 32(%esi),%xmm4 + pxor (%esp),%xmm2 + movdqu 48(%esi),%xmm5 + pxor 16(%esp),%xmm3 + movdqu 64(%esi),%xmm6 + pxor 32(%esp),%xmm4 + leal 80(%esi),%esi + pxor 48(%esp),%xmm5 + movdqa %xmm7,64(%esp) + pxor %xmm7,%xmm6 + call _aesni_decrypt6 + movaps 64(%esp),%xmm1 + xorps (%esp),%xmm2 + xorps 16(%esp),%xmm3 + xorps 32(%esp),%xmm4 + movups %xmm2,(%edi) + xorps 48(%esp),%xmm5 + movups %xmm3,16(%edi) + xorps %xmm1,%xmm6 + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + movups %xmm6,64(%edi) + leal 80(%edi),%edi + jmp .L063xts_dec_done +.align 16 +.L059xts_dec_one: + movups (%esi),%xmm2 + leal 16(%esi),%esi + xorps %xmm5,%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L064dec1_loop_12: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L064dec1_loop_12 +.byte 102,15,56,223,209 + xorps %xmm5,%xmm2 + movups %xmm2,(%edi) + leal 
16(%edi),%edi + movdqa %xmm5,%xmm1 + jmp .L063xts_dec_done +.align 16 +.L060xts_dec_two: + movaps %xmm1,%xmm6 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + leal 32(%esi),%esi + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + call _aesni_decrypt3 + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + leal 32(%edi),%edi + movdqa %xmm6,%xmm1 + jmp .L063xts_dec_done +.align 16 +.L061xts_dec_three: + movaps %xmm1,%xmm7 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + movups 32(%esi),%xmm4 + leal 48(%esi),%esi + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm4 + call _aesni_decrypt3 + xorps %xmm5,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm4 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + leal 48(%edi),%edi + movdqa %xmm7,%xmm1 + jmp .L063xts_dec_done +.align 16 +.L062xts_dec_four: + movaps %xmm1,%xmm6 + movups (%esi),%xmm2 + movups 16(%esi),%xmm3 + movups 32(%esi),%xmm4 + xorps (%esp),%xmm2 + movups 48(%esi),%xmm5 + leal 64(%esi),%esi + xorps 16(%esp),%xmm3 + xorps %xmm7,%xmm4 + xorps %xmm6,%xmm5 + call _aesni_decrypt4 + xorps (%esp),%xmm2 + xorps 16(%esp),%xmm3 + xorps %xmm7,%xmm4 + movups %xmm2,(%edi) + xorps %xmm6,%xmm5 + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + leal 64(%edi),%edi + movdqa %xmm6,%xmm1 + jmp .L063xts_dec_done +.align 16 +.L058xts_dec_done6x: + movl 112(%esp),%eax + andl $15,%eax + jz .L065xts_dec_ret + movl %eax,112(%esp) + jmp .L066xts_dec_only_one_more +.align 16 +.L063xts_dec_done: + movl 112(%esp),%eax + pxor %xmm0,%xmm0 + andl $15,%eax + jz .L065xts_dec_ret + pcmpgtd %xmm1,%xmm0 + movl %eax,112(%esp) + pshufd $19,%xmm0,%xmm2 + pxor %xmm0,%xmm0 + movdqa 96(%esp),%xmm3 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm2 + pcmpgtd %xmm1,%xmm0 + pxor %xmm2,%xmm1 +.L066xts_dec_only_one_more: + pshufd $19,%xmm0,%xmm5 + movdqa %xmm1,%xmm6 + paddq %xmm1,%xmm1 + pand %xmm3,%xmm5 + pxor %xmm1,%xmm5 + movl %ebp,%edx + movl %ebx,%ecx + movups (%esi),%xmm2 + xorps %xmm5,%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L067dec1_loop_13: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L067dec1_loop_13 +.byte 102,15,56,223,209 + xorps %xmm5,%xmm2 + movups %xmm2,(%edi) +.L068xts_dec_steal: + movzbl 16(%esi),%ecx + movzbl (%edi),%edx + leal 1(%esi),%esi + movb %cl,(%edi) + movb %dl,16(%edi) + leal 1(%edi),%edi + subl $1,%eax + jnz .L068xts_dec_steal + subl 112(%esp),%edi + movl %ebp,%edx + movl %ebx,%ecx + movups (%edi),%xmm2 + xorps %xmm6,%xmm2 + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L069dec1_loop_14: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L069dec1_loop_14 +.byte 102,15,56,223,209 + xorps %xmm6,%xmm2 + movups %xmm2,(%edi) +.L065xts_dec_ret: + movl 116(%esp),%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin +.globl aesni_cbc_encrypt +.type aesni_cbc_encrypt,@function +.align 16 +aesni_cbc_encrypt: +.L_aesni_cbc_encrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl %esp,%ebx + movl 24(%esp),%edi + subl $24,%ebx + movl 28(%esp),%eax + andl $-16,%ebx + movl 32(%esp),%edx + movl 36(%esp),%ebp + testl %eax,%eax + jz .L070cbc_abort + cmpl $0,40(%esp) + xchgl %esp,%ebx + movups (%ebp),%xmm7 + movl 240(%edx),%ecx + movl %edx,%ebp + movl %ebx,16(%esp) + movl %ecx,%ebx + je .L071cbc_decrypt + movaps %xmm7,%xmm2 + cmpl $16,%eax + jb 
.L072cbc_enc_tail + subl $16,%eax + jmp .L073cbc_enc_loop +.align 16 +.L073cbc_enc_loop: + movups (%esi),%xmm7 + leal 16(%esi),%esi + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + xorps %xmm0,%xmm7 + leal 32(%edx),%edx + xorps %xmm7,%xmm2 +.L074enc1_loop_15: +.byte 102,15,56,220,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L074enc1_loop_15 +.byte 102,15,56,221,209 + movl %ebx,%ecx + movl %ebp,%edx + movups %xmm2,(%edi) + leal 16(%edi),%edi + subl $16,%eax + jnc .L073cbc_enc_loop + addl $16,%eax + jnz .L072cbc_enc_tail + movaps %xmm2,%xmm7 + jmp .L075cbc_ret +.L072cbc_enc_tail: + movl %eax,%ecx +.long 2767451785 + movl $16,%ecx + subl %eax,%ecx + xorl %eax,%eax +.long 2868115081 + leal -16(%edi),%edi + movl %ebx,%ecx + movl %edi,%esi + movl %ebp,%edx + jmp .L073cbc_enc_loop +.align 16 +.L071cbc_decrypt: + cmpl $80,%eax + jbe .L076cbc_dec_tail + movaps %xmm7,(%esp) + subl $80,%eax + jmp .L077cbc_dec_loop6_enter +.align 16 +.L078cbc_dec_loop6: + movaps %xmm0,(%esp) + movups %xmm7,(%edi) + leal 16(%edi),%edi +.L077cbc_dec_loop6_enter: + movdqu (%esi),%xmm2 + movdqu 16(%esi),%xmm3 + movdqu 32(%esi),%xmm4 + movdqu 48(%esi),%xmm5 + movdqu 64(%esi),%xmm6 + movdqu 80(%esi),%xmm7 + call _aesni_decrypt6 + movups (%esi),%xmm1 + movups 16(%esi),%xmm0 + xorps (%esp),%xmm2 + xorps %xmm1,%xmm3 + movups 32(%esi),%xmm1 + xorps %xmm0,%xmm4 + movups 48(%esi),%xmm0 + xorps %xmm1,%xmm5 + movups 64(%esi),%xmm1 + xorps %xmm0,%xmm6 + movups 80(%esi),%xmm0 + xorps %xmm1,%xmm7 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + leal 96(%esi),%esi + movups %xmm4,32(%edi) + movl %ebx,%ecx + movups %xmm5,48(%edi) + movl %ebp,%edx + movups %xmm6,64(%edi) + leal 80(%edi),%edi + subl $96,%eax + ja .L078cbc_dec_loop6 + movaps %xmm7,%xmm2 + movaps %xmm0,%xmm7 + addl $80,%eax + jle .L079cbc_dec_tail_collected + movups %xmm2,(%edi) + leal 16(%edi),%edi +.L076cbc_dec_tail: + movups (%esi),%xmm2 + movaps %xmm2,%xmm6 + cmpl $16,%eax + jbe .L080cbc_dec_one + movups 16(%esi),%xmm3 + movaps %xmm3,%xmm5 + cmpl $32,%eax + jbe .L081cbc_dec_two + movups 32(%esi),%xmm4 + cmpl $48,%eax + jbe .L082cbc_dec_three + movups 48(%esi),%xmm5 + cmpl $64,%eax + jbe .L083cbc_dec_four + movups 64(%esi),%xmm6 + movaps %xmm7,(%esp) + movups (%esi),%xmm2 + xorps %xmm7,%xmm7 + call _aesni_decrypt6 + movups (%esi),%xmm1 + movups 16(%esi),%xmm0 + xorps (%esp),%xmm2 + xorps %xmm1,%xmm3 + movups 32(%esi),%xmm1 + xorps %xmm0,%xmm4 + movups 48(%esi),%xmm0 + xorps %xmm1,%xmm5 + movups 64(%esi),%xmm7 + xorps %xmm0,%xmm6 + movups %xmm2,(%edi) + movups %xmm3,16(%edi) + movups %xmm4,32(%edi) + movups %xmm5,48(%edi) + leal 64(%edi),%edi + movaps %xmm6,%xmm2 + subl $80,%eax + jmp .L079cbc_dec_tail_collected +.align 16 +.L080cbc_dec_one: + movups (%edx),%xmm0 + movups 16(%edx),%xmm1 + leal 32(%edx),%edx + xorps %xmm0,%xmm2 +.L084dec1_loop_16: +.byte 102,15,56,222,209 + decl %ecx + movups (%edx),%xmm1 + leal 16(%edx),%edx + jnz .L084dec1_loop_16 +.byte 102,15,56,223,209 + xorps %xmm7,%xmm2 + movaps %xmm6,%xmm7 + subl $16,%eax + jmp .L079cbc_dec_tail_collected +.align 16 +.L081cbc_dec_two: + xorps %xmm4,%xmm4 + call _aesni_decrypt3 + xorps %xmm7,%xmm2 + xorps %xmm6,%xmm3 + movups %xmm2,(%edi) + movaps %xmm3,%xmm2 + leal 16(%edi),%edi + movaps %xmm5,%xmm7 + subl $32,%eax + jmp .L079cbc_dec_tail_collected +.align 16 +.L082cbc_dec_three: + call _aesni_decrypt3 + xorps %xmm7,%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm5,%xmm4 + movups %xmm2,(%edi) + movaps %xmm4,%xmm2 + movups %xmm3,16(%edi) + leal 32(%edi),%edi + movups 32(%esi),%xmm7 + subl $48,%eax + jmp 
.L079cbc_dec_tail_collected +.align 16 +.L083cbc_dec_four: + call _aesni_decrypt4 + movups 16(%esi),%xmm1 + movups 32(%esi),%xmm0 + xorps %xmm7,%xmm2 + movups 48(%esi),%xmm7 + xorps %xmm6,%xmm3 + movups %xmm2,(%edi) + xorps %xmm1,%xmm4 + movups %xmm3,16(%edi) + xorps %xmm0,%xmm5 + movups %xmm4,32(%edi) + leal 48(%edi),%edi + movaps %xmm5,%xmm2 + subl $64,%eax +.L079cbc_dec_tail_collected: + andl $15,%eax + jnz .L085cbc_dec_tail_partial + movups %xmm2,(%edi) + jmp .L075cbc_ret +.align 16 +.L085cbc_dec_tail_partial: + movaps %xmm2,(%esp) + movl $16,%ecx + movl %esp,%esi + subl %eax,%ecx +.long 2767451785 +.L075cbc_ret: + movl 16(%esp),%esp + movl 36(%esp),%ebp + movups %xmm7,(%ebp) +.L070cbc_abort: + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin +.type _aesni_set_encrypt_key,@function +.align 16 +_aesni_set_encrypt_key: + testl %eax,%eax + jz .L086bad_pointer + testl %edx,%edx + jz .L086bad_pointer + movups (%eax),%xmm0 + xorps %xmm4,%xmm4 + leal 16(%edx),%edx + cmpl $256,%ecx + je .L08714rounds + cmpl $192,%ecx + je .L08812rounds + cmpl $128,%ecx + jne .L089bad_keybits +.align 16 +.L09010rounds: + movl $9,%ecx + movups %xmm0,-16(%edx) +.byte 102,15,58,223,200,1 + call .L091key_128_cold +.byte 102,15,58,223,200,2 + call .L092key_128 +.byte 102,15,58,223,200,4 + call .L092key_128 +.byte 102,15,58,223,200,8 + call .L092key_128 +.byte 102,15,58,223,200,16 + call .L092key_128 +.byte 102,15,58,223,200,32 + call .L092key_128 +.byte 102,15,58,223,200,64 + call .L092key_128 +.byte 102,15,58,223,200,128 + call .L092key_128 +.byte 102,15,58,223,200,27 + call .L092key_128 +.byte 102,15,58,223,200,54 + call .L092key_128 + movups %xmm0,(%edx) + movl %ecx,80(%edx) + xorl %eax,%eax + ret +.align 16 +.L092key_128: + movups %xmm0,(%edx) + leal 16(%edx),%edx +.L091key_128_cold: + shufps $16,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $255,%xmm1,%xmm1 + xorps %xmm1,%xmm0 + ret +.align 16 +.L08812rounds: + movq 16(%eax),%xmm2 + movl $11,%ecx + movups %xmm0,-16(%edx) +.byte 102,15,58,223,202,1 + call .L093key_192a_cold +.byte 102,15,58,223,202,2 + call .L094key_192b +.byte 102,15,58,223,202,4 + call .L095key_192a +.byte 102,15,58,223,202,8 + call .L094key_192b +.byte 102,15,58,223,202,16 + call .L095key_192a +.byte 102,15,58,223,202,32 + call .L094key_192b +.byte 102,15,58,223,202,64 + call .L095key_192a +.byte 102,15,58,223,202,128 + call .L094key_192b + movups %xmm0,(%edx) + movl %ecx,48(%edx) + xorl %eax,%eax + ret +.align 16 +.L095key_192a: + movups %xmm0,(%edx) + leal 16(%edx),%edx +.align 16 +.L093key_192a_cold: + movaps %xmm2,%xmm5 +.L096key_192b_warm: + shufps $16,%xmm0,%xmm4 + movdqa %xmm2,%xmm3 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + pslldq $4,%xmm3 + xorps %xmm4,%xmm0 + pshufd $85,%xmm1,%xmm1 + pxor %xmm3,%xmm2 + pxor %xmm1,%xmm0 + pshufd $255,%xmm0,%xmm3 + pxor %xmm3,%xmm2 + ret +.align 16 +.L094key_192b: + movaps %xmm0,%xmm3 + shufps $68,%xmm0,%xmm5 + movups %xmm5,(%edx) + shufps $78,%xmm2,%xmm3 + movups %xmm3,16(%edx) + leal 32(%edx),%edx + jmp .L096key_192b_warm +.align 16 +.L08714rounds: + movups 16(%eax),%xmm2 + movl $13,%ecx + leal 16(%edx),%edx + movups %xmm0,-32(%edx) + movups %xmm2,-16(%edx) +.byte 102,15,58,223,202,1 + call .L097key_256a_cold +.byte 102,15,58,223,200,1 + call .L098key_256b +.byte 102,15,58,223,202,2 + call .L099key_256a +.byte 102,15,58,223,200,2 + call .L098key_256b +.byte 102,15,58,223,202,4 + call .L099key_256a +.byte 102,15,58,223,200,4 + call .L098key_256b 
+.byte 102,15,58,223,202,8 + call .L099key_256a +.byte 102,15,58,223,200,8 + call .L098key_256b +.byte 102,15,58,223,202,16 + call .L099key_256a +.byte 102,15,58,223,200,16 + call .L098key_256b +.byte 102,15,58,223,202,32 + call .L099key_256a +.byte 102,15,58,223,200,32 + call .L098key_256b +.byte 102,15,58,223,202,64 + call .L099key_256a + movups %xmm0,(%edx) + movl %ecx,16(%edx) + xorl %eax,%eax + ret +.align 16 +.L099key_256a: + movups %xmm2,(%edx) + leal 16(%edx),%edx +.L097key_256a_cold: + shufps $16,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $255,%xmm1,%xmm1 + xorps %xmm1,%xmm0 + ret +.align 16 +.L098key_256b: + movups %xmm0,(%edx) + leal 16(%edx),%edx + shufps $16,%xmm2,%xmm4 + xorps %xmm4,%xmm2 + shufps $140,%xmm2,%xmm4 + xorps %xmm4,%xmm2 + shufps $170,%xmm1,%xmm1 + xorps %xmm1,%xmm2 + ret +.align 4 +.L086bad_pointer: + movl $-1,%eax + ret +.align 4 +.L089bad_keybits: + movl $-2,%eax + ret +.size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key +.globl aesni_set_encrypt_key +.type aesni_set_encrypt_key,@function +.align 16 +aesni_set_encrypt_key: +.L_aesni_set_encrypt_key_begin: + movl 4(%esp),%eax + movl 8(%esp),%ecx + movl 12(%esp),%edx + call _aesni_set_encrypt_key + ret +.size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin +.globl aesni_set_decrypt_key +.type aesni_set_decrypt_key,@function +.align 16 +aesni_set_decrypt_key: +.L_aesni_set_decrypt_key_begin: + movl 4(%esp),%eax + movl 8(%esp),%ecx + movl 12(%esp),%edx + call _aesni_set_encrypt_key + movl 12(%esp),%edx + shll $4,%ecx + testl %eax,%eax + jnz .L100dec_key_ret + leal 16(%edx,%ecx,1),%eax + movups (%edx),%xmm0 + movups (%eax),%xmm1 + movups %xmm0,(%eax) + movups %xmm1,(%edx) + leal 16(%edx),%edx + leal -16(%eax),%eax +.L101dec_key_inverse: + movups (%edx),%xmm0 + movups (%eax),%xmm1 +.byte 102,15,56,219,192 +.byte 102,15,56,219,201 + leal 16(%edx),%edx + leal -16(%eax),%eax + movups %xmm0,16(%eax) + movups %xmm1,-16(%edx) + cmpl %edx,%eax + ja .L101dec_key_inverse + movups (%edx),%xmm0 +.byte 102,15,56,219,192 + movups %xmm0,(%edx) + xorl %eax,%eax +.L100dec_key_ret: + ret +.size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin +.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 +.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 +.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 +.byte 115,108,46,111,114,103,62,0 diff --git a/secure/lib/libcrypto/asm/aesni-x86_64.s b/secure/lib/libcrypto/asm/aesni-x86_64.s new file mode 100644 index 0000000000..917c832354 --- /dev/null +++ b/secure/lib/libcrypto/asm/aesni-x86_64.s @@ -0,0 +1,2535 @@ +.text +.globl aesni_encrypt +.type aesni_encrypt,@function +.align 16 +aesni_encrypt: + movups (%rdi),%xmm2 + movl 240(%rdx),%eax + movups (%rdx),%xmm0 + movups 16(%rdx),%xmm1 + leaq 32(%rdx),%rdx + xorps %xmm0,%xmm2 +.Loop_enc1_1: +.byte 102,15,56,220,209 + decl %eax + movups (%rdx),%xmm1 + leaq 16(%rdx),%rdx + jnz .Loop_enc1_1 +.byte 102,15,56,221,209 + movups %xmm2,(%rsi) + .byte 0xf3,0xc3 +.size aesni_encrypt,.-aesni_encrypt + +.globl aesni_decrypt +.type aesni_decrypt,@function +.align 16 +aesni_decrypt: + movups (%rdi),%xmm2 + movl 240(%rdx),%eax + movups (%rdx),%xmm0 + movups 16(%rdx),%xmm1 + leaq 32(%rdx),%rdx + xorps %xmm0,%xmm2 +.Loop_dec1_2: +.byte 102,15,56,222,209 + decl %eax + movups (%rdx),%xmm1 + leaq 16(%rdx),%rdx + jnz .Loop_dec1_2 +.byte 102,15,56,223,209 + movups %xmm2,(%rsi) + .byte 0xf3,0xc3 +.size aesni_decrypt, .-aesni_decrypt +.type _aesni_encrypt3,@function +.align 16 
+_aesni_encrypt3: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 + xorps %xmm0,%xmm4 + movups (%rcx),%xmm0 + +.Lenc_loop3: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax +.byte 102,15,56,220,225 + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 + movups (%rcx),%xmm0 + jnz .Lenc_loop3 + +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 + .byte 0xf3,0xc3 +.size _aesni_encrypt3,.-_aesni_encrypt3 +.type _aesni_decrypt3,@function +.align 16 +_aesni_decrypt3: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 + xorps %xmm0,%xmm4 + movups (%rcx),%xmm0 + +.Ldec_loop3: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %eax +.byte 102,15,56,222,225 + movups 16(%rcx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,222,224 + movups (%rcx),%xmm0 + jnz .Ldec_loop3 + +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 + .byte 0xf3,0xc3 +.size _aesni_decrypt3,.-_aesni_decrypt3 +.type _aesni_encrypt4,@function +.align 16 +_aesni_encrypt4: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 + xorps %xmm0,%xmm4 + xorps %xmm0,%xmm5 + movups (%rcx),%xmm0 + +.Lenc_loop4: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 + movups (%rcx),%xmm0 + jnz .Lenc_loop4 + +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 +.byte 102,15,56,221,232 + .byte 0xf3,0xc3 +.size _aesni_encrypt4,.-_aesni_encrypt4 +.type _aesni_decrypt4,@function +.align 16 +_aesni_decrypt4: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 + xorps %xmm0,%xmm4 + xorps %xmm0,%xmm5 + movups (%rcx),%xmm0 + +.Ldec_loop4: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %eax +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 + movups 16(%rcx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 + movups (%rcx),%xmm0 + jnz .Ldec_loop4 + +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 +.byte 102,15,56,223,232 + .byte 0xf3,0xc3 +.size _aesni_decrypt4,.-_aesni_decrypt4 +.type _aesni_encrypt6,@function +.align 16 +_aesni_encrypt6: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 +.byte 102,15,56,220,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,220,217 + pxor %xmm0,%xmm5 +.byte 102,15,56,220,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 + decl %eax +.byte 102,15,56,220,241 + movups (%rcx),%xmm0 +.byte 102,15,56,220,249 + jmp .Lenc_loop6_enter +.align 16 +.Lenc_loop6: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax 
+.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.Lenc_loop6_enter: + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 + movups (%rcx),%xmm0 + jnz .Lenc_loop6 + +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 +.byte 102,15,56,221,232 +.byte 102,15,56,221,240 +.byte 102,15,56,221,248 + .byte 0xf3,0xc3 +.size _aesni_encrypt6,.-_aesni_encrypt6 +.type _aesni_decrypt6,@function +.align 16 +_aesni_decrypt6: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + pxor %xmm0,%xmm3 +.byte 102,15,56,222,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,222,217 + pxor %xmm0,%xmm5 +.byte 102,15,56,222,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 + decl %eax +.byte 102,15,56,222,241 + movups (%rcx),%xmm0 +.byte 102,15,56,222,249 + jmp .Ldec_loop6_enter +.align 16 +.Ldec_loop6: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %eax +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.Ldec_loop6_enter: + movups 16(%rcx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 +.byte 102,15,56,222,240 +.byte 102,15,56,222,248 + movups (%rcx),%xmm0 + jnz .Ldec_loop6 + +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 +.byte 102,15,56,223,232 +.byte 102,15,56,223,240 +.byte 102,15,56,223,248 + .byte 0xf3,0xc3 +.size _aesni_decrypt6,.-_aesni_decrypt6 +.type _aesni_encrypt8,@function +.align 16 +_aesni_encrypt8: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 +.byte 102,15,56,220,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,220,217 + pxor %xmm0,%xmm5 +.byte 102,15,56,220,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 + decl %eax +.byte 102,15,56,220,241 + pxor %xmm0,%xmm8 +.byte 102,15,56,220,249 + pxor %xmm0,%xmm9 + movups (%rcx),%xmm0 +.byte 102,68,15,56,220,193 +.byte 102,68,15,56,220,201 + movups 16(%rcx),%xmm1 + jmp .Lenc_loop8_enter +.align 16 +.Lenc_loop8: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.byte 102,68,15,56,220,193 +.byte 102,68,15,56,220,201 + movups 16(%rcx),%xmm1 +.Lenc_loop8_enter: +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 +.byte 102,68,15,56,220,192 +.byte 102,68,15,56,220,200 + movups (%rcx),%xmm0 + jnz .Lenc_loop8 + +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.byte 102,68,15,56,220,193 +.byte 102,68,15,56,220,201 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 +.byte 102,15,56,221,224 +.byte 102,15,56,221,232 +.byte 102,15,56,221,240 +.byte 102,15,56,221,248 +.byte 102,68,15,56,221,192 +.byte 102,68,15,56,221,200 + .byte 
0xf3,0xc3 +.size _aesni_encrypt8,.-_aesni_encrypt8 +.type _aesni_decrypt8,@function +.align 16 +_aesni_decrypt8: + movups (%rcx),%xmm0 + shrl $1,%eax + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 + xorps %xmm0,%xmm3 +.byte 102,15,56,222,209 + pxor %xmm0,%xmm4 +.byte 102,15,56,222,217 + pxor %xmm0,%xmm5 +.byte 102,15,56,222,225 + pxor %xmm0,%xmm6 +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 + decl %eax +.byte 102,15,56,222,241 + pxor %xmm0,%xmm8 +.byte 102,15,56,222,249 + pxor %xmm0,%xmm9 + movups (%rcx),%xmm0 +.byte 102,68,15,56,222,193 +.byte 102,68,15,56,222,201 + movups 16(%rcx),%xmm1 + jmp .Ldec_loop8_enter +.align 16 +.Ldec_loop8: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %eax +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.byte 102,68,15,56,222,193 +.byte 102,68,15,56,222,201 + movups 16(%rcx),%xmm1 +.Ldec_loop8_enter: +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 +.byte 102,15,56,222,240 +.byte 102,15,56,222,248 +.byte 102,68,15,56,222,192 +.byte 102,68,15,56,222,200 + movups (%rcx),%xmm0 + jnz .Ldec_loop8 + +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.byte 102,68,15,56,222,193 +.byte 102,68,15,56,222,201 +.byte 102,15,56,223,208 +.byte 102,15,56,223,216 +.byte 102,15,56,223,224 +.byte 102,15,56,223,232 +.byte 102,15,56,223,240 +.byte 102,15,56,223,248 +.byte 102,68,15,56,223,192 +.byte 102,68,15,56,223,200 + .byte 0xf3,0xc3 +.size _aesni_decrypt8,.-_aesni_decrypt8 +.globl aesni_ecb_encrypt +.type aesni_ecb_encrypt,@function +.align 16 +aesni_ecb_encrypt: + andq $-16,%rdx + jz .Lecb_ret + + movl 240(%rcx),%eax + movups (%rcx),%xmm0 + movq %rcx,%r11 + movl %eax,%r10d + testl %r8d,%r8d + jz .Lecb_decrypt + + cmpq $128,%rdx + jb .Lecb_enc_tail + + movdqu (%rdi),%xmm2 + movdqu 16(%rdi),%xmm3 + movdqu 32(%rdi),%xmm4 + movdqu 48(%rdi),%xmm5 + movdqu 64(%rdi),%xmm6 + movdqu 80(%rdi),%xmm7 + movdqu 96(%rdi),%xmm8 + movdqu 112(%rdi),%xmm9 + leaq 128(%rdi),%rdi + subq $128,%rdx + jmp .Lecb_enc_loop8_enter +.align 16 +.Lecb_enc_loop8: + movups %xmm2,(%rsi) + movq %r11,%rcx + movdqu (%rdi),%xmm2 + movl %r10d,%eax + movups %xmm3,16(%rsi) + movdqu 16(%rdi),%xmm3 + movups %xmm4,32(%rsi) + movdqu 32(%rdi),%xmm4 + movups %xmm5,48(%rsi) + movdqu 48(%rdi),%xmm5 + movups %xmm6,64(%rsi) + movdqu 64(%rdi),%xmm6 + movups %xmm7,80(%rsi) + movdqu 80(%rdi),%xmm7 + movups %xmm8,96(%rsi) + movdqu 96(%rdi),%xmm8 + movups %xmm9,112(%rsi) + leaq 128(%rsi),%rsi + movdqu 112(%rdi),%xmm9 + leaq 128(%rdi),%rdi +.Lecb_enc_loop8_enter: + + call _aesni_encrypt8 + + subq $128,%rdx + jnc .Lecb_enc_loop8 + + movups %xmm2,(%rsi) + movq %r11,%rcx + movups %xmm3,16(%rsi) + movl %r10d,%eax + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + movups %xmm8,96(%rsi) + movups %xmm9,112(%rsi) + leaq 128(%rsi),%rsi + addq $128,%rdx + jz .Lecb_ret + +.Lecb_enc_tail: + movups (%rdi),%xmm2 + cmpq $32,%rdx + jb .Lecb_enc_one + movups 16(%rdi),%xmm3 + je .Lecb_enc_two + movups 32(%rdi),%xmm4 + cmpq $64,%rdx + jb .Lecb_enc_three + movups 48(%rdi),%xmm5 + je .Lecb_enc_four + movups 64(%rdi),%xmm6 + cmpq $96,%rdx + jb .Lecb_enc_five + movups 80(%rdi),%xmm7 + je .Lecb_enc_six + movdqu 96(%rdi),%xmm8 + call _aesni_encrypt8 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups 
%xmm6,64(%rsi) + movups %xmm7,80(%rsi) + movups %xmm8,96(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_one: + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_enc1_3: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_3 +.byte 102,15,56,221,209 + movups %xmm2,(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_two: + xorps %xmm4,%xmm4 + call _aesni_encrypt3 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_three: + call _aesni_encrypt3 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_four: + call _aesni_encrypt4 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_five: + xorps %xmm7,%xmm7 + call _aesni_encrypt6 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_enc_six: + call _aesni_encrypt6 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + jmp .Lecb_ret + +.align 16 +.Lecb_decrypt: + cmpq $128,%rdx + jb .Lecb_dec_tail + + movdqu (%rdi),%xmm2 + movdqu 16(%rdi),%xmm3 + movdqu 32(%rdi),%xmm4 + movdqu 48(%rdi),%xmm5 + movdqu 64(%rdi),%xmm6 + movdqu 80(%rdi),%xmm7 + movdqu 96(%rdi),%xmm8 + movdqu 112(%rdi),%xmm9 + leaq 128(%rdi),%rdi + subq $128,%rdx + jmp .Lecb_dec_loop8_enter +.align 16 +.Lecb_dec_loop8: + movups %xmm2,(%rsi) + movq %r11,%rcx + movdqu (%rdi),%xmm2 + movl %r10d,%eax + movups %xmm3,16(%rsi) + movdqu 16(%rdi),%xmm3 + movups %xmm4,32(%rsi) + movdqu 32(%rdi),%xmm4 + movups %xmm5,48(%rsi) + movdqu 48(%rdi),%xmm5 + movups %xmm6,64(%rsi) + movdqu 64(%rdi),%xmm6 + movups %xmm7,80(%rsi) + movdqu 80(%rdi),%xmm7 + movups %xmm8,96(%rsi) + movdqu 96(%rdi),%xmm8 + movups %xmm9,112(%rsi) + leaq 128(%rsi),%rsi + movdqu 112(%rdi),%xmm9 + leaq 128(%rdi),%rdi +.Lecb_dec_loop8_enter: + + call _aesni_decrypt8 + + movups (%r11),%xmm0 + subq $128,%rdx + jnc .Lecb_dec_loop8 + + movups %xmm2,(%rsi) + movq %r11,%rcx + movups %xmm3,16(%rsi) + movl %r10d,%eax + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + movups %xmm8,96(%rsi) + movups %xmm9,112(%rsi) + leaq 128(%rsi),%rsi + addq $128,%rdx + jz .Lecb_ret + +.Lecb_dec_tail: + movups (%rdi),%xmm2 + cmpq $32,%rdx + jb .Lecb_dec_one + movups 16(%rdi),%xmm3 + je .Lecb_dec_two + movups 32(%rdi),%xmm4 + cmpq $64,%rdx + jb .Lecb_dec_three + movups 48(%rdi),%xmm5 + je .Lecb_dec_four + movups 64(%rdi),%xmm6 + cmpq $96,%rdx + jb .Lecb_dec_five + movups 80(%rdi),%xmm7 + je .Lecb_dec_six + movups 96(%rdi),%xmm8 + movups (%rcx),%xmm0 + call _aesni_decrypt8 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + movups %xmm8,96(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_dec_one: + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_dec1_4: +.byte 102,15,56,222,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_dec1_4 +.byte 102,15,56,223,209 + movups %xmm2,(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_dec_two: + xorps %xmm4,%xmm4 + call _aesni_decrypt3 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_dec_three: + call _aesni_decrypt3 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + jmp 
.Lecb_ret +.align 16 +.Lecb_dec_four: + call _aesni_decrypt4 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_dec_five: + xorps %xmm7,%xmm7 + call _aesni_decrypt6 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + jmp .Lecb_ret +.align 16 +.Lecb_dec_six: + call _aesni_decrypt6 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + +.Lecb_ret: + .byte 0xf3,0xc3 +.size aesni_ecb_encrypt,.-aesni_ecb_encrypt +.globl aesni_ccm64_encrypt_blocks +.type aesni_ccm64_encrypt_blocks,@function +.align 16 +aesni_ccm64_encrypt_blocks: + movl 240(%rcx),%eax + movdqu (%r8),%xmm9 + movdqa .Lincrement64(%rip),%xmm6 + movdqa .Lbswap_mask(%rip),%xmm7 + + shrl $1,%eax + leaq 0(%rcx),%r11 + movdqu (%r9),%xmm3 + movdqa %xmm9,%xmm2 + movl %eax,%r10d +.byte 102,68,15,56,0,207 + jmp .Lccm64_enc_outer +.align 16 +.Lccm64_enc_outer: + movups (%r11),%xmm0 + movl %r10d,%eax + movups (%rdi),%xmm8 + + xorps %xmm0,%xmm2 + movups 16(%r11),%xmm1 + xorps %xmm8,%xmm0 + leaq 32(%r11),%rcx + xorps %xmm0,%xmm3 + movups (%rcx),%xmm0 + +.Lccm64_enc2_loop: +.byte 102,15,56,220,209 + decl %eax +.byte 102,15,56,220,217 + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,216 + movups 0(%rcx),%xmm0 + jnz .Lccm64_enc2_loop +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + paddq %xmm6,%xmm9 +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 + + decq %rdx + leaq 16(%rdi),%rdi + xorps %xmm2,%xmm8 + movdqa %xmm9,%xmm2 + movups %xmm8,(%rsi) + leaq 16(%rsi),%rsi +.byte 102,15,56,0,215 + jnz .Lccm64_enc_outer + + movups %xmm3,(%r9) + .byte 0xf3,0xc3 +.size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks +.globl aesni_ccm64_decrypt_blocks +.type aesni_ccm64_decrypt_blocks,@function +.align 16 +aesni_ccm64_decrypt_blocks: + movl 240(%rcx),%eax + movups (%r8),%xmm9 + movdqu (%r9),%xmm3 + movdqa .Lincrement64(%rip),%xmm6 + movdqa .Lbswap_mask(%rip),%xmm7 + + movaps %xmm9,%xmm2 + movl %eax,%r10d + movq %rcx,%r11 +.byte 102,68,15,56,0,207 + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_enc1_5: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_5 +.byte 102,15,56,221,209 + movups (%rdi),%xmm8 + paddq %xmm6,%xmm9 + leaq 16(%rdi),%rdi + jmp .Lccm64_dec_outer +.align 16 +.Lccm64_dec_outer: + xorps %xmm2,%xmm8 + movdqa %xmm9,%xmm2 + movl %r10d,%eax + movups %xmm8,(%rsi) + leaq 16(%rsi),%rsi +.byte 102,15,56,0,215 + + subq $1,%rdx + jz .Lccm64_dec_break + + movups (%r11),%xmm0 + shrl $1,%eax + movups 16(%r11),%xmm1 + xorps %xmm0,%xmm8 + leaq 32(%r11),%rcx + xorps %xmm0,%xmm2 + xorps %xmm8,%xmm3 + movups (%rcx),%xmm0 + +.Lccm64_dec2_loop: +.byte 102,15,56,220,209 + decl %eax +.byte 102,15,56,220,217 + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,216 + movups 0(%rcx),%xmm0 + jnz .Lccm64_dec2_loop + movups (%rdi),%xmm8 + paddq %xmm6,%xmm9 +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + leaq 16(%rdi),%rdi +.byte 102,15,56,221,208 +.byte 102,15,56,221,216 + jmp .Lccm64_dec_outer + +.align 16 +.Lccm64_dec_break: + + movups (%r11),%xmm0 + movups 16(%r11),%xmm1 + xorps %xmm0,%xmm8 + leaq 32(%r11),%r11 + xorps %xmm8,%xmm3 +.Loop_enc1_6: +.byte 102,15,56,220,217 + decl %eax + movups (%r11),%xmm1 + leaq 16(%r11),%r11 + jnz .Loop_enc1_6 +.byte 
102,15,56,221,217 + movups %xmm3,(%r9) + .byte 0xf3,0xc3 +.size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks +.globl aesni_ctr32_encrypt_blocks +.type aesni_ctr32_encrypt_blocks,@function +.align 16 +aesni_ctr32_encrypt_blocks: + cmpq $1,%rdx + je .Lctr32_one_shortcut + + movdqu (%r8),%xmm14 + movdqa .Lbswap_mask(%rip),%xmm15 + xorl %eax,%eax +.byte 102,69,15,58,22,242,3 +.byte 102,68,15,58,34,240,3 + + movl 240(%rcx),%eax + bswapl %r10d + pxor %xmm12,%xmm12 + pxor %xmm13,%xmm13 +.byte 102,69,15,58,34,226,0 + leaq 3(%r10),%r11 +.byte 102,69,15,58,34,235,0 + incl %r10d +.byte 102,69,15,58,34,226,1 + incq %r11 +.byte 102,69,15,58,34,235,1 + incl %r10d +.byte 102,69,15,58,34,226,2 + incq %r11 +.byte 102,69,15,58,34,235,2 + movdqa %xmm12,-40(%rsp) +.byte 102,69,15,56,0,231 + movdqa %xmm13,-24(%rsp) +.byte 102,69,15,56,0,239 + + pshufd $192,%xmm12,%xmm2 + pshufd $128,%xmm12,%xmm3 + pshufd $64,%xmm12,%xmm4 + cmpq $6,%rdx + jb .Lctr32_tail + shrl $1,%eax + movq %rcx,%r11 + movl %eax,%r10d + subq $6,%rdx + jmp .Lctr32_loop6 + +.align 16 +.Lctr32_loop6: + pshufd $192,%xmm13,%xmm5 + por %xmm14,%xmm2 + movups (%r11),%xmm0 + pshufd $128,%xmm13,%xmm6 + por %xmm14,%xmm3 + movups 16(%r11),%xmm1 + pshufd $64,%xmm13,%xmm7 + por %xmm14,%xmm4 + por %xmm14,%xmm5 + xorps %xmm0,%xmm2 + por %xmm14,%xmm6 + por %xmm14,%xmm7 + + + + + pxor %xmm0,%xmm3 +.byte 102,15,56,220,209 + leaq 32(%r11),%rcx + pxor %xmm0,%xmm4 +.byte 102,15,56,220,217 + movdqa .Lincrement32(%rip),%xmm13 + pxor %xmm0,%xmm5 +.byte 102,15,56,220,225 + movdqa -40(%rsp),%xmm12 + pxor %xmm0,%xmm6 +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 + movups (%rcx),%xmm0 + decl %eax +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 + jmp .Lctr32_enc_loop6_enter +.align 16 +.Lctr32_enc_loop6: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.Lctr32_enc_loop6_enter: + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 + movups (%rcx),%xmm0 + jnz .Lctr32_enc_loop6 + +.byte 102,15,56,220,209 + paddd %xmm13,%xmm12 +.byte 102,15,56,220,217 + paddd -24(%rsp),%xmm13 +.byte 102,15,56,220,225 + movdqa %xmm12,-40(%rsp) +.byte 102,15,56,220,233 + movdqa %xmm13,-24(%rsp) +.byte 102,15,56,220,241 +.byte 102,69,15,56,0,231 +.byte 102,15,56,220,249 +.byte 102,69,15,56,0,239 + +.byte 102,15,56,221,208 + movups (%rdi),%xmm8 +.byte 102,15,56,221,216 + movups 16(%rdi),%xmm9 +.byte 102,15,56,221,224 + movups 32(%rdi),%xmm10 +.byte 102,15,56,221,232 + movups 48(%rdi),%xmm11 +.byte 102,15,56,221,240 + movups 64(%rdi),%xmm1 +.byte 102,15,56,221,248 + movups 80(%rdi),%xmm0 + leaq 96(%rdi),%rdi + + xorps %xmm2,%xmm8 + pshufd $192,%xmm12,%xmm2 + xorps %xmm3,%xmm9 + pshufd $128,%xmm12,%xmm3 + movups %xmm8,(%rsi) + xorps %xmm4,%xmm10 + pshufd $64,%xmm12,%xmm4 + movups %xmm9,16(%rsi) + xorps %xmm5,%xmm11 + movups %xmm10,32(%rsi) + xorps %xmm6,%xmm1 + movups %xmm11,48(%rsi) + xorps %xmm7,%xmm0 + movups %xmm1,64(%rsi) + movups %xmm0,80(%rsi) + leaq 96(%rsi),%rsi + movl %r10d,%eax + subq $6,%rdx + jnc .Lctr32_loop6 + + addq $6,%rdx + jz .Lctr32_done + movq %r11,%rcx + leal 1(%rax,%rax,1),%eax + +.Lctr32_tail: + por %xmm14,%xmm2 + movups (%rdi),%xmm8 + cmpq $2,%rdx + jb .Lctr32_one + + por %xmm14,%xmm3 + movups 16(%rdi),%xmm9 + je .Lctr32_two + + pshufd $192,%xmm13,%xmm5 + por %xmm14,%xmm4 + movups 32(%rdi),%xmm10 + cmpq $4,%rdx + 
jb .Lctr32_three + + pshufd $128,%xmm13,%xmm6 + por %xmm14,%xmm5 + movups 48(%rdi),%xmm11 + je .Lctr32_four + + por %xmm14,%xmm6 + xorps %xmm7,%xmm7 + + call _aesni_encrypt6 + + movups 64(%rdi),%xmm1 + xorps %xmm2,%xmm8 + xorps %xmm3,%xmm9 + movups %xmm8,(%rsi) + xorps %xmm4,%xmm10 + movups %xmm9,16(%rsi) + xorps %xmm5,%xmm11 + movups %xmm10,32(%rsi) + xorps %xmm6,%xmm1 + movups %xmm11,48(%rsi) + movups %xmm1,64(%rsi) + jmp .Lctr32_done + +.align 16 +.Lctr32_one_shortcut: + movups (%r8),%xmm2 + movups (%rdi),%xmm8 + movl 240(%rcx),%eax +.Lctr32_one: + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_enc1_7: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_7 +.byte 102,15,56,221,209 + xorps %xmm2,%xmm8 + movups %xmm8,(%rsi) + jmp .Lctr32_done + +.align 16 +.Lctr32_two: + xorps %xmm4,%xmm4 + call _aesni_encrypt3 + xorps %xmm2,%xmm8 + xorps %xmm3,%xmm9 + movups %xmm8,(%rsi) + movups %xmm9,16(%rsi) + jmp .Lctr32_done + +.align 16 +.Lctr32_three: + call _aesni_encrypt3 + xorps %xmm2,%xmm8 + xorps %xmm3,%xmm9 + movups %xmm8,(%rsi) + xorps %xmm4,%xmm10 + movups %xmm9,16(%rsi) + movups %xmm10,32(%rsi) + jmp .Lctr32_done + +.align 16 +.Lctr32_four: + call _aesni_encrypt4 + xorps %xmm2,%xmm8 + xorps %xmm3,%xmm9 + movups %xmm8,(%rsi) + xorps %xmm4,%xmm10 + movups %xmm9,16(%rsi) + xorps %xmm5,%xmm11 + movups %xmm10,32(%rsi) + movups %xmm11,48(%rsi) + +.Lctr32_done: + .byte 0xf3,0xc3 +.size aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks +.globl aesni_xts_encrypt +.type aesni_xts_encrypt,@function +.align 16 +aesni_xts_encrypt: + leaq -104(%rsp),%rsp + movups (%r9),%xmm15 + movl 240(%r8),%eax + movl 240(%rcx),%r10d + movups (%r8),%xmm0 + movups 16(%r8),%xmm1 + leaq 32(%r8),%r8 + xorps %xmm0,%xmm15 +.Loop_enc1_8: +.byte 102,68,15,56,220,249 + decl %eax + movups (%r8),%xmm1 + leaq 16(%r8),%r8 + jnz .Loop_enc1_8 +.byte 102,68,15,56,221,249 + movq %rcx,%r11 + movl %r10d,%eax + movq %rdx,%r9 + andq $-16,%rdx + + movdqa .Lxts_magic(%rip),%xmm8 + pxor %xmm14,%xmm14 + pcmpgtd %xmm15,%xmm14 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm10 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm11 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm12 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm13 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + subq $96,%rdx + jc .Lxts_enc_short + + shrl $1,%eax + subl $1,%eax + movl %eax,%r10d + jmp .Lxts_enc_grandloop + +.align 16 +.Lxts_enc_grandloop: + pshufd $19,%xmm14,%xmm9 + movdqa %xmm15,%xmm14 + paddq %xmm15,%xmm15 + movdqu 0(%rdi),%xmm2 + pand %xmm8,%xmm9 + movdqu 16(%rdi),%xmm3 + pxor %xmm9,%xmm15 + + movdqu 32(%rdi),%xmm4 + pxor %xmm10,%xmm2 + movdqu 48(%rdi),%xmm5 + pxor %xmm11,%xmm3 + movdqu 64(%rdi),%xmm6 + pxor %xmm12,%xmm4 + movdqu 80(%rdi),%xmm7 + leaq 96(%rdi),%rdi + pxor %xmm13,%xmm5 + movups (%r11),%xmm0 + pxor %xmm14,%xmm6 + pxor %xmm15,%xmm7 + + + + movups 16(%r11),%xmm1 + pxor %xmm0,%xmm2 + pxor %xmm0,%xmm3 + movdqa %xmm10,0(%rsp) +.byte 102,15,56,220,209 + leaq 32(%r11),%rcx + pxor %xmm0,%xmm4 + movdqa %xmm11,16(%rsp) +.byte 102,15,56,220,217 + pxor %xmm0,%xmm5 + movdqa %xmm12,32(%rsp) +.byte 
102,15,56,220,225 + pxor %xmm0,%xmm6 + movdqa %xmm13,48(%rsp) +.byte 102,15,56,220,233 + pxor %xmm0,%xmm7 + movups (%rcx),%xmm0 + decl %eax + movdqa %xmm14,64(%rsp) +.byte 102,15,56,220,241 + movdqa %xmm15,80(%rsp) +.byte 102,15,56,220,249 + pxor %xmm14,%xmm14 + pcmpgtd %xmm15,%xmm14 + jmp .Lxts_enc_loop6_enter + +.align 16 +.Lxts_enc_loop6: +.byte 102,15,56,220,209 +.byte 102,15,56,220,217 + decl %eax +.byte 102,15,56,220,225 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 +.Lxts_enc_loop6_enter: + movups 16(%rcx),%xmm1 +.byte 102,15,56,220,208 +.byte 102,15,56,220,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,220,224 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 + movups (%rcx),%xmm0 + jnz .Lxts_enc_loop6 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + paddq %xmm15,%xmm15 +.byte 102,15,56,220,209 + pand %xmm8,%xmm9 +.byte 102,15,56,220,217 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,220,225 + pxor %xmm9,%xmm15 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 + movups 16(%rcx),%xmm1 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm10 + paddq %xmm15,%xmm15 +.byte 102,15,56,220,208 + pand %xmm8,%xmm9 +.byte 102,15,56,220,216 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,220,224 + pxor %xmm9,%xmm15 +.byte 102,15,56,220,232 +.byte 102,15,56,220,240 +.byte 102,15,56,220,248 + movups 32(%rcx),%xmm0 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm11 + paddq %xmm15,%xmm15 +.byte 102,15,56,220,209 + pand %xmm8,%xmm9 +.byte 102,15,56,220,217 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,220,225 + pxor %xmm9,%xmm15 +.byte 102,15,56,220,233 +.byte 102,15,56,220,241 +.byte 102,15,56,220,249 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm12 + paddq %xmm15,%xmm15 +.byte 102,15,56,221,208 + pand %xmm8,%xmm9 +.byte 102,15,56,221,216 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,221,224 + pxor %xmm9,%xmm15 +.byte 102,15,56,221,232 +.byte 102,15,56,221,240 +.byte 102,15,56,221,248 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm13 + paddq %xmm15,%xmm15 + xorps 0(%rsp),%xmm2 + pand %xmm8,%xmm9 + xorps 16(%rsp),%xmm3 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + + xorps 32(%rsp),%xmm4 + movups %xmm2,0(%rsi) + xorps 48(%rsp),%xmm5 + movups %xmm3,16(%rsi) + xorps 64(%rsp),%xmm6 + movups %xmm4,32(%rsi) + xorps 80(%rsp),%xmm7 + movups %xmm5,48(%rsi) + movl %r10d,%eax + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + leaq 96(%rsi),%rsi + subq $96,%rdx + jnc .Lxts_enc_grandloop + + leal 3(%rax,%rax,1),%eax + movq %r11,%rcx + movl %eax,%r10d + +.Lxts_enc_short: + addq $96,%rdx + jz .Lxts_enc_done + + cmpq $32,%rdx + jb .Lxts_enc_one + je .Lxts_enc_two + + cmpq $64,%rdx + jb .Lxts_enc_three + je .Lxts_enc_four + + pshufd $19,%xmm14,%xmm9 + movdqa %xmm15,%xmm14 + paddq %xmm15,%xmm15 + movdqu (%rdi),%xmm2 + pand %xmm8,%xmm9 + movdqu 16(%rdi),%xmm3 + pxor %xmm9,%xmm15 + + movdqu 32(%rdi),%xmm4 + pxor %xmm10,%xmm2 + movdqu 48(%rdi),%xmm5 + pxor %xmm11,%xmm3 + movdqu 64(%rdi),%xmm6 + leaq 80(%rdi),%rdi + pxor %xmm12,%xmm4 + pxor %xmm13,%xmm5 + pxor %xmm14,%xmm6 + + call _aesni_encrypt6 + + xorps %xmm10,%xmm2 + movdqa %xmm15,%xmm10 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + movdqu %xmm2,(%rsi) + xorps %xmm13,%xmm5 + movdqu %xmm3,16(%rsi) + xorps %xmm14,%xmm6 + movdqu %xmm4,32(%rsi) + movdqu %xmm5,48(%rsi) + movdqu %xmm6,64(%rsi) + leaq 80(%rsi),%rsi + jmp .Lxts_enc_done + +.align 16 +.Lxts_enc_one: + movups (%rdi),%xmm2 + leaq 16(%rdi),%rdi + xorps %xmm10,%xmm2 + movups 
(%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_enc1_9: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_9 +.byte 102,15,56,221,209 + xorps %xmm10,%xmm2 + movdqa %xmm11,%xmm10 + movups %xmm2,(%rsi) + leaq 16(%rsi),%rsi + jmp .Lxts_enc_done + +.align 16 +.Lxts_enc_two: + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + leaq 32(%rdi),%rdi + xorps %xmm10,%xmm2 + xorps %xmm11,%xmm3 + + call _aesni_encrypt3 + + xorps %xmm10,%xmm2 + movdqa %xmm12,%xmm10 + xorps %xmm11,%xmm3 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + leaq 32(%rsi),%rsi + jmp .Lxts_enc_done + +.align 16 +.Lxts_enc_three: + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + movups 32(%rdi),%xmm4 + leaq 48(%rdi),%rdi + xorps %xmm10,%xmm2 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + + call _aesni_encrypt3 + + xorps %xmm10,%xmm2 + movdqa %xmm13,%xmm10 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + leaq 48(%rsi),%rsi + jmp .Lxts_enc_done + +.align 16 +.Lxts_enc_four: + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + movups 32(%rdi),%xmm4 + xorps %xmm10,%xmm2 + movups 48(%rdi),%xmm5 + leaq 64(%rdi),%rdi + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + xorps %xmm13,%xmm5 + + call _aesni_encrypt4 + + xorps %xmm10,%xmm2 + movdqa %xmm15,%xmm10 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + movups %xmm2,(%rsi) + xorps %xmm13,%xmm5 + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + leaq 64(%rsi),%rsi + jmp .Lxts_enc_done + +.align 16 +.Lxts_enc_done: + andq $15,%r9 + jz .Lxts_enc_ret + movq %r9,%rdx + +.Lxts_enc_steal: + movzbl (%rdi),%eax + movzbl -16(%rsi),%ecx + leaq 1(%rdi),%rdi + movb %al,-16(%rsi) + movb %cl,0(%rsi) + leaq 1(%rsi),%rsi + subq $1,%rdx + jnz .Lxts_enc_steal + + subq %r9,%rsi + movq %r11,%rcx + movl %r10d,%eax + + movups -16(%rsi),%xmm2 + xorps %xmm10,%xmm2 + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_enc1_10: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_10 +.byte 102,15,56,221,209 + xorps %xmm10,%xmm2 + movups %xmm2,-16(%rsi) + +.Lxts_enc_ret: + leaq 104(%rsp),%rsp +.Lxts_enc_epilogue: + .byte 0xf3,0xc3 +.size aesni_xts_encrypt,.-aesni_xts_encrypt +.globl aesni_xts_decrypt +.type aesni_xts_decrypt,@function +.align 16 +aesni_xts_decrypt: + leaq -104(%rsp),%rsp + movups (%r9),%xmm15 + movl 240(%r8),%eax + movl 240(%rcx),%r10d + movups (%r8),%xmm0 + movups 16(%r8),%xmm1 + leaq 32(%r8),%r8 + xorps %xmm0,%xmm15 +.Loop_enc1_11: +.byte 102,68,15,56,220,249 + decl %eax + movups (%r8),%xmm1 + leaq 16(%r8),%r8 + jnz .Loop_enc1_11 +.byte 102,68,15,56,221,249 + xorl %eax,%eax + testq $15,%rdx + setnz %al + shlq $4,%rax + subq %rax,%rdx + + movq %rcx,%r11 + movl %r10d,%eax + movq %rdx,%r9 + andq $-16,%rdx + + movdqa .Lxts_magic(%rip),%xmm8 + pxor %xmm14,%xmm14 + pcmpgtd %xmm15,%xmm14 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm10 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm11 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm12 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm13 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm9 + pcmpgtd %xmm15,%xmm14 + 
pxor %xmm9,%xmm15 + subq $96,%rdx + jc .Lxts_dec_short + + shrl $1,%eax + subl $1,%eax + movl %eax,%r10d + jmp .Lxts_dec_grandloop + +.align 16 +.Lxts_dec_grandloop: + pshufd $19,%xmm14,%xmm9 + movdqa %xmm15,%xmm14 + paddq %xmm15,%xmm15 + movdqu 0(%rdi),%xmm2 + pand %xmm8,%xmm9 + movdqu 16(%rdi),%xmm3 + pxor %xmm9,%xmm15 + + movdqu 32(%rdi),%xmm4 + pxor %xmm10,%xmm2 + movdqu 48(%rdi),%xmm5 + pxor %xmm11,%xmm3 + movdqu 64(%rdi),%xmm6 + pxor %xmm12,%xmm4 + movdqu 80(%rdi),%xmm7 + leaq 96(%rdi),%rdi + pxor %xmm13,%xmm5 + movups (%r11),%xmm0 + pxor %xmm14,%xmm6 + pxor %xmm15,%xmm7 + + + + movups 16(%r11),%xmm1 + pxor %xmm0,%xmm2 + pxor %xmm0,%xmm3 + movdqa %xmm10,0(%rsp) +.byte 102,15,56,222,209 + leaq 32(%r11),%rcx + pxor %xmm0,%xmm4 + movdqa %xmm11,16(%rsp) +.byte 102,15,56,222,217 + pxor %xmm0,%xmm5 + movdqa %xmm12,32(%rsp) +.byte 102,15,56,222,225 + pxor %xmm0,%xmm6 + movdqa %xmm13,48(%rsp) +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 + movups (%rcx),%xmm0 + decl %eax + movdqa %xmm14,64(%rsp) +.byte 102,15,56,222,241 + movdqa %xmm15,80(%rsp) +.byte 102,15,56,222,249 + pxor %xmm14,%xmm14 + pcmpgtd %xmm15,%xmm14 + jmp .Lxts_dec_loop6_enter + +.align 16 +.Lxts_dec_loop6: +.byte 102,15,56,222,209 +.byte 102,15,56,222,217 + decl %eax +.byte 102,15,56,222,225 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 +.Lxts_dec_loop6_enter: + movups 16(%rcx),%xmm1 +.byte 102,15,56,222,208 +.byte 102,15,56,222,216 + leaq 32(%rcx),%rcx +.byte 102,15,56,222,224 +.byte 102,15,56,222,232 +.byte 102,15,56,222,240 +.byte 102,15,56,222,248 + movups (%rcx),%xmm0 + jnz .Lxts_dec_loop6 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + paddq %xmm15,%xmm15 +.byte 102,15,56,222,209 + pand %xmm8,%xmm9 +.byte 102,15,56,222,217 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,222,225 + pxor %xmm9,%xmm15 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 + movups 16(%rcx),%xmm1 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm10 + paddq %xmm15,%xmm15 +.byte 102,15,56,222,208 + pand %xmm8,%xmm9 +.byte 102,15,56,222,216 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,222,224 + pxor %xmm9,%xmm15 +.byte 102,15,56,222,232 +.byte 102,15,56,222,240 +.byte 102,15,56,222,248 + movups 32(%rcx),%xmm0 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm11 + paddq %xmm15,%xmm15 +.byte 102,15,56,222,209 + pand %xmm8,%xmm9 +.byte 102,15,56,222,217 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,222,225 + pxor %xmm9,%xmm15 +.byte 102,15,56,222,233 +.byte 102,15,56,222,241 +.byte 102,15,56,222,249 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm12 + paddq %xmm15,%xmm15 +.byte 102,15,56,223,208 + pand %xmm8,%xmm9 +.byte 102,15,56,223,216 + pcmpgtd %xmm15,%xmm14 +.byte 102,15,56,223,224 + pxor %xmm9,%xmm15 +.byte 102,15,56,223,232 +.byte 102,15,56,223,240 +.byte 102,15,56,223,248 + + pshufd $19,%xmm14,%xmm9 + pxor %xmm14,%xmm14 + movdqa %xmm15,%xmm13 + paddq %xmm15,%xmm15 + xorps 0(%rsp),%xmm2 + pand %xmm8,%xmm9 + xorps 16(%rsp),%xmm3 + pcmpgtd %xmm15,%xmm14 + pxor %xmm9,%xmm15 + + xorps 32(%rsp),%xmm4 + movups %xmm2,0(%rsi) + xorps 48(%rsp),%xmm5 + movups %xmm3,16(%rsi) + xorps 64(%rsp),%xmm6 + movups %xmm4,32(%rsi) + xorps 80(%rsp),%xmm7 + movups %xmm5,48(%rsi) + movl %r10d,%eax + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + leaq 96(%rsi),%rsi + subq $96,%rdx + jnc .Lxts_dec_grandloop + + leal 3(%rax,%rax,1),%eax + movq %r11,%rcx + movl %eax,%r10d + +.Lxts_dec_short: + addq $96,%rdx + jz .Lxts_dec_done + + cmpq $32,%rdx + jb .Lxts_dec_one + je 
.Lxts_dec_two + + cmpq $64,%rdx + jb .Lxts_dec_three + je .Lxts_dec_four + + pshufd $19,%xmm14,%xmm9 + movdqa %xmm15,%xmm14 + paddq %xmm15,%xmm15 + movdqu (%rdi),%xmm2 + pand %xmm8,%xmm9 + movdqu 16(%rdi),%xmm3 + pxor %xmm9,%xmm15 + + movdqu 32(%rdi),%xmm4 + pxor %xmm10,%xmm2 + movdqu 48(%rdi),%xmm5 + pxor %xmm11,%xmm3 + movdqu 64(%rdi),%xmm6 + leaq 80(%rdi),%rdi + pxor %xmm12,%xmm4 + pxor %xmm13,%xmm5 + pxor %xmm14,%xmm6 + + call _aesni_decrypt6 + + xorps %xmm10,%xmm2 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + movdqu %xmm2,(%rsi) + xorps %xmm13,%xmm5 + movdqu %xmm3,16(%rsi) + xorps %xmm14,%xmm6 + movdqu %xmm4,32(%rsi) + pxor %xmm14,%xmm14 + movdqu %xmm5,48(%rsi) + pcmpgtd %xmm15,%xmm14 + movdqu %xmm6,64(%rsi) + leaq 80(%rsi),%rsi + pshufd $19,%xmm14,%xmm11 + andq $15,%r9 + jz .Lxts_dec_ret + + movdqa %xmm15,%xmm10 + paddq %xmm15,%xmm15 + pand %xmm8,%xmm11 + pxor %xmm15,%xmm11 + jmp .Lxts_dec_done2 + +.align 16 +.Lxts_dec_one: + movups (%rdi),%xmm2 + leaq 16(%rdi),%rdi + xorps %xmm10,%xmm2 + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_dec1_12: +.byte 102,15,56,222,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_dec1_12 +.byte 102,15,56,223,209 + xorps %xmm10,%xmm2 + movdqa %xmm11,%xmm10 + movups %xmm2,(%rsi) + movdqa %xmm12,%xmm11 + leaq 16(%rsi),%rsi + jmp .Lxts_dec_done + +.align 16 +.Lxts_dec_two: + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + leaq 32(%rdi),%rdi + xorps %xmm10,%xmm2 + xorps %xmm11,%xmm3 + + call _aesni_decrypt3 + + xorps %xmm10,%xmm2 + movdqa %xmm12,%xmm10 + xorps %xmm11,%xmm3 + movdqa %xmm13,%xmm11 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + leaq 32(%rsi),%rsi + jmp .Lxts_dec_done + +.align 16 +.Lxts_dec_three: + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + movups 32(%rdi),%xmm4 + leaq 48(%rdi),%rdi + xorps %xmm10,%xmm2 + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + + call _aesni_decrypt3 + + xorps %xmm10,%xmm2 + movdqa %xmm13,%xmm10 + xorps %xmm11,%xmm3 + movdqa %xmm15,%xmm11 + xorps %xmm12,%xmm4 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + leaq 48(%rsi),%rsi + jmp .Lxts_dec_done + +.align 16 +.Lxts_dec_four: + pshufd $19,%xmm14,%xmm9 + movdqa %xmm15,%xmm14 + paddq %xmm15,%xmm15 + movups (%rdi),%xmm2 + pand %xmm8,%xmm9 + movups 16(%rdi),%xmm3 + pxor %xmm9,%xmm15 + + movups 32(%rdi),%xmm4 + xorps %xmm10,%xmm2 + movups 48(%rdi),%xmm5 + leaq 64(%rdi),%rdi + xorps %xmm11,%xmm3 + xorps %xmm12,%xmm4 + xorps %xmm13,%xmm5 + + call _aesni_decrypt4 + + xorps %xmm10,%xmm2 + movdqa %xmm14,%xmm10 + xorps %xmm11,%xmm3 + movdqa %xmm15,%xmm11 + xorps %xmm12,%xmm4 + movups %xmm2,(%rsi) + xorps %xmm13,%xmm5 + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + leaq 64(%rsi),%rsi + jmp .Lxts_dec_done + +.align 16 +.Lxts_dec_done: + andq $15,%r9 + jz .Lxts_dec_ret +.Lxts_dec_done2: + movq %r9,%rdx + movq %r11,%rcx + movl %r10d,%eax + + movups (%rdi),%xmm2 + xorps %xmm11,%xmm2 + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_dec1_13: +.byte 102,15,56,222,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_dec1_13 +.byte 102,15,56,223,209 + xorps %xmm11,%xmm2 + movups %xmm2,(%rsi) + +.Lxts_dec_steal: + movzbl 16(%rdi),%eax + movzbl (%rsi),%ecx + leaq 1(%rdi),%rdi + movb %al,(%rsi) + movb %cl,16(%rsi) + leaq 1(%rsi),%rsi + subq $1,%rdx + jnz .Lxts_dec_steal + + subq %r9,%rsi + movq %r11,%rcx + movl %r10d,%eax + + movups (%rsi),%xmm2 + xorps %xmm10,%xmm2 + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 
32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_dec1_14: +.byte 102,15,56,222,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_dec1_14 +.byte 102,15,56,223,209 + xorps %xmm10,%xmm2 + movups %xmm2,(%rsi) + +.Lxts_dec_ret: + leaq 104(%rsp),%rsp +.Lxts_dec_epilogue: + .byte 0xf3,0xc3 +.size aesni_xts_decrypt,.-aesni_xts_decrypt +.globl aesni_cbc_encrypt +.type aesni_cbc_encrypt,@function +.align 16 +aesni_cbc_encrypt: + testq %rdx,%rdx + jz .Lcbc_ret + + movl 240(%rcx),%r10d + movq %rcx,%r11 + testl %r9d,%r9d + jz .Lcbc_decrypt + + movups (%r8),%xmm2 + movl %r10d,%eax + cmpq $16,%rdx + jb .Lcbc_enc_tail + subq $16,%rdx + jmp .Lcbc_enc_loop +.align 16 +.Lcbc_enc_loop: + movups (%rdi),%xmm3 + leaq 16(%rdi),%rdi + + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + xorps %xmm0,%xmm3 + leaq 32(%rcx),%rcx + xorps %xmm3,%xmm2 +.Loop_enc1_15: +.byte 102,15,56,220,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_enc1_15 +.byte 102,15,56,221,209 + movl %r10d,%eax + movq %r11,%rcx + movups %xmm2,0(%rsi) + leaq 16(%rsi),%rsi + subq $16,%rdx + jnc .Lcbc_enc_loop + addq $16,%rdx + jnz .Lcbc_enc_tail + movups %xmm2,(%r8) + jmp .Lcbc_ret + +.Lcbc_enc_tail: + movq %rdx,%rcx + xchgq %rdi,%rsi +.long 0x9066A4F3 + movl $16,%ecx + subq %rdx,%rcx + xorl %eax,%eax +.long 0x9066AAF3 + leaq -16(%rdi),%rdi + movl %r10d,%eax + movq %rdi,%rsi + movq %r11,%rcx + xorq %rdx,%rdx + jmp .Lcbc_enc_loop + +.align 16 +.Lcbc_decrypt: + movups (%r8),%xmm9 + movl %r10d,%eax + cmpq $112,%rdx + jbe .Lcbc_dec_tail + shrl $1,%r10d + subq $112,%rdx + movl %r10d,%eax + movaps %xmm9,-24(%rsp) + jmp .Lcbc_dec_loop8_enter +.align 16 +.Lcbc_dec_loop8: + movaps %xmm0,-24(%rsp) + movups %xmm9,(%rsi) + leaq 16(%rsi),%rsi +.Lcbc_dec_loop8_enter: + movups (%rcx),%xmm0 + movups (%rdi),%xmm2 + movups 16(%rdi),%xmm3 + movups 16(%rcx),%xmm1 + + leaq 32(%rcx),%rcx + movdqu 32(%rdi),%xmm4 + xorps %xmm0,%xmm2 + movdqu 48(%rdi),%xmm5 + xorps %xmm0,%xmm3 + movdqu 64(%rdi),%xmm6 +.byte 102,15,56,222,209 + pxor %xmm0,%xmm4 + movdqu 80(%rdi),%xmm7 +.byte 102,15,56,222,217 + pxor %xmm0,%xmm5 + movdqu 96(%rdi),%xmm8 +.byte 102,15,56,222,225 + pxor %xmm0,%xmm6 + movdqu 112(%rdi),%xmm9 +.byte 102,15,56,222,233 + pxor %xmm0,%xmm7 + decl %eax +.byte 102,15,56,222,241 + pxor %xmm0,%xmm8 +.byte 102,15,56,222,249 + pxor %xmm0,%xmm9 + movups (%rcx),%xmm0 +.byte 102,68,15,56,222,193 +.byte 102,68,15,56,222,201 + movups 16(%rcx),%xmm1 + + call .Ldec_loop8_enter + + movups (%rdi),%xmm1 + movups 16(%rdi),%xmm0 + xorps -24(%rsp),%xmm2 + xorps %xmm1,%xmm3 + movups 32(%rdi),%xmm1 + xorps %xmm0,%xmm4 + movups 48(%rdi),%xmm0 + xorps %xmm1,%xmm5 + movups 64(%rdi),%xmm1 + xorps %xmm0,%xmm6 + movups 80(%rdi),%xmm0 + xorps %xmm1,%xmm7 + movups 96(%rdi),%xmm1 + xorps %xmm0,%xmm8 + movups 112(%rdi),%xmm0 + xorps %xmm1,%xmm9 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movl %r10d,%eax + movups %xmm6,64(%rsi) + movq %r11,%rcx + movups %xmm7,80(%rsi) + leaq 128(%rdi),%rdi + movups %xmm8,96(%rsi) + leaq 112(%rsi),%rsi + subq $128,%rdx + ja .Lcbc_dec_loop8 + + movaps %xmm9,%xmm2 + movaps %xmm0,%xmm9 + addq $112,%rdx + jle .Lcbc_dec_tail_collected + movups %xmm2,(%rsi) + leal 1(%r10,%r10,1),%eax + leaq 16(%rsi),%rsi +.Lcbc_dec_tail: + movups (%rdi),%xmm2 + movaps %xmm2,%xmm8 + cmpq $16,%rdx + jbe .Lcbc_dec_one + + movups 16(%rdi),%xmm3 + movaps %xmm3,%xmm7 + cmpq $32,%rdx + jbe .Lcbc_dec_two + + movups 32(%rdi),%xmm4 + movaps %xmm4,%xmm6 + cmpq $48,%rdx + jbe .Lcbc_dec_three + + movups 48(%rdi),%xmm5 + 
cmpq $64,%rdx + jbe .Lcbc_dec_four + + movups 64(%rdi),%xmm6 + cmpq $80,%rdx + jbe .Lcbc_dec_five + + movups 80(%rdi),%xmm7 + cmpq $96,%rdx + jbe .Lcbc_dec_six + + movups 96(%rdi),%xmm8 + movaps %xmm9,-24(%rsp) + call _aesni_decrypt8 + movups (%rdi),%xmm1 + movups 16(%rdi),%xmm0 + xorps -24(%rsp),%xmm2 + xorps %xmm1,%xmm3 + movups 32(%rdi),%xmm1 + xorps %xmm0,%xmm4 + movups 48(%rdi),%xmm0 + xorps %xmm1,%xmm5 + movups 64(%rdi),%xmm1 + xorps %xmm0,%xmm6 + movups 80(%rdi),%xmm0 + xorps %xmm1,%xmm7 + movups 96(%rdi),%xmm9 + xorps %xmm0,%xmm8 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + movups %xmm7,80(%rsi) + leaq 96(%rsi),%rsi + movaps %xmm8,%xmm2 + subq $112,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_one: + movups (%rcx),%xmm0 + movups 16(%rcx),%xmm1 + leaq 32(%rcx),%rcx + xorps %xmm0,%xmm2 +.Loop_dec1_16: +.byte 102,15,56,222,209 + decl %eax + movups (%rcx),%xmm1 + leaq 16(%rcx),%rcx + jnz .Loop_dec1_16 +.byte 102,15,56,223,209 + xorps %xmm9,%xmm2 + movaps %xmm8,%xmm9 + subq $16,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_two: + xorps %xmm4,%xmm4 + call _aesni_decrypt3 + xorps %xmm9,%xmm2 + xorps %xmm8,%xmm3 + movups %xmm2,(%rsi) + movaps %xmm7,%xmm9 + movaps %xmm3,%xmm2 + leaq 16(%rsi),%rsi + subq $32,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_three: + call _aesni_decrypt3 + xorps %xmm9,%xmm2 + xorps %xmm8,%xmm3 + movups %xmm2,(%rsi) + xorps %xmm7,%xmm4 + movups %xmm3,16(%rsi) + movaps %xmm6,%xmm9 + movaps %xmm4,%xmm2 + leaq 32(%rsi),%rsi + subq $48,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_four: + call _aesni_decrypt4 + xorps %xmm9,%xmm2 + movups 48(%rdi),%xmm9 + xorps %xmm8,%xmm3 + movups %xmm2,(%rsi) + xorps %xmm7,%xmm4 + movups %xmm3,16(%rsi) + xorps %xmm6,%xmm5 + movups %xmm4,32(%rsi) + movaps %xmm5,%xmm2 + leaq 48(%rsi),%rsi + subq $64,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_five: + xorps %xmm7,%xmm7 + call _aesni_decrypt6 + movups 16(%rdi),%xmm1 + movups 32(%rdi),%xmm0 + xorps %xmm9,%xmm2 + xorps %xmm8,%xmm3 + xorps %xmm1,%xmm4 + movups 48(%rdi),%xmm1 + xorps %xmm0,%xmm5 + movups 64(%rdi),%xmm9 + xorps %xmm1,%xmm6 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + leaq 64(%rsi),%rsi + movaps %xmm6,%xmm2 + subq $80,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_six: + call _aesni_decrypt6 + movups 16(%rdi),%xmm1 + movups 32(%rdi),%xmm0 + xorps %xmm9,%xmm2 + xorps %xmm8,%xmm3 + xorps %xmm1,%xmm4 + movups 48(%rdi),%xmm1 + xorps %xmm0,%xmm5 + movups 64(%rdi),%xmm0 + xorps %xmm1,%xmm6 + movups 80(%rdi),%xmm9 + xorps %xmm0,%xmm7 + movups %xmm2,(%rsi) + movups %xmm3,16(%rsi) + movups %xmm4,32(%rsi) + movups %xmm5,48(%rsi) + movups %xmm6,64(%rsi) + leaq 80(%rsi),%rsi + movaps %xmm7,%xmm2 + subq $96,%rdx + jmp .Lcbc_dec_tail_collected +.align 16 +.Lcbc_dec_tail_collected: + andq $15,%rdx + movups %xmm9,(%r8) + jnz .Lcbc_dec_tail_partial + movups %xmm2,(%rsi) + jmp .Lcbc_dec_ret +.align 16 +.Lcbc_dec_tail_partial: + movaps %xmm2,-24(%rsp) + movq $16,%rcx + movq %rsi,%rdi + subq %rdx,%rcx + leaq -24(%rsp),%rsi +.long 0x9066A4F3 + +.Lcbc_dec_ret: +.Lcbc_ret: + .byte 0xf3,0xc3 +.size aesni_cbc_encrypt,.-aesni_cbc_encrypt +.globl aesni_set_decrypt_key +.type aesni_set_decrypt_key,@function +.align 16 +aesni_set_decrypt_key: +.byte 0x48,0x83,0xEC,0x08 + call __aesni_set_encrypt_key + shll $4,%esi + testl %eax,%eax + jnz .Ldec_key_ret + leaq 16(%rdx,%rsi,1),%rdi + + movups (%rdx),%xmm0 + 
movups (%rdi),%xmm1 + movups %xmm0,(%rdi) + movups %xmm1,(%rdx) + leaq 16(%rdx),%rdx + leaq -16(%rdi),%rdi + +.Ldec_key_inverse: + movups (%rdx),%xmm0 + movups (%rdi),%xmm1 +.byte 102,15,56,219,192 +.byte 102,15,56,219,201 + leaq 16(%rdx),%rdx + leaq -16(%rdi),%rdi + movups %xmm0,16(%rdi) + movups %xmm1,-16(%rdx) + cmpq %rdx,%rdi + ja .Ldec_key_inverse + + movups (%rdx),%xmm0 +.byte 102,15,56,219,192 + movups %xmm0,(%rdi) +.Ldec_key_ret: + addq $8,%rsp + .byte 0xf3,0xc3 +.LSEH_end_set_decrypt_key: +.size aesni_set_decrypt_key,.-aesni_set_decrypt_key +.globl aesni_set_encrypt_key +.type aesni_set_encrypt_key,@function +.align 16 +aesni_set_encrypt_key: +__aesni_set_encrypt_key: +.byte 0x48,0x83,0xEC,0x08 + movq $-1,%rax + testq %rdi,%rdi + jz .Lenc_key_ret + testq %rdx,%rdx + jz .Lenc_key_ret + + movups (%rdi),%xmm0 + xorps %xmm4,%xmm4 + leaq 16(%rdx),%rax + cmpl $256,%esi + je .L14rounds + cmpl $192,%esi + je .L12rounds + cmpl $128,%esi + jne .Lbad_keybits + +.L10rounds: + movl $9,%esi + movups %xmm0,(%rdx) +.byte 102,15,58,223,200,1 + call .Lkey_expansion_128_cold +.byte 102,15,58,223,200,2 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,4 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,8 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,16 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,32 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,64 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,128 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,27 + call .Lkey_expansion_128 +.byte 102,15,58,223,200,54 + call .Lkey_expansion_128 + movups %xmm0,(%rax) + movl %esi,80(%rax) + xorl %eax,%eax + jmp .Lenc_key_ret + +.align 16 +.L12rounds: + movq 16(%rdi),%xmm2 + movl $11,%esi + movups %xmm0,(%rdx) +.byte 102,15,58,223,202,1 + call .Lkey_expansion_192a_cold +.byte 102,15,58,223,202,2 + call .Lkey_expansion_192b +.byte 102,15,58,223,202,4 + call .Lkey_expansion_192a +.byte 102,15,58,223,202,8 + call .Lkey_expansion_192b +.byte 102,15,58,223,202,16 + call .Lkey_expansion_192a +.byte 102,15,58,223,202,32 + call .Lkey_expansion_192b +.byte 102,15,58,223,202,64 + call .Lkey_expansion_192a +.byte 102,15,58,223,202,128 + call .Lkey_expansion_192b + movups %xmm0,(%rax) + movl %esi,48(%rax) + xorq %rax,%rax + jmp .Lenc_key_ret + +.align 16 +.L14rounds: + movups 16(%rdi),%xmm2 + movl $13,%esi + leaq 16(%rax),%rax + movups %xmm0,(%rdx) + movups %xmm2,16(%rdx) +.byte 102,15,58,223,202,1 + call .Lkey_expansion_256a_cold +.byte 102,15,58,223,200,1 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,2 + call .Lkey_expansion_256a +.byte 102,15,58,223,200,2 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,4 + call .Lkey_expansion_256a +.byte 102,15,58,223,200,4 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,8 + call .Lkey_expansion_256a +.byte 102,15,58,223,200,8 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,16 + call .Lkey_expansion_256a +.byte 102,15,58,223,200,16 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,32 + call .Lkey_expansion_256a +.byte 102,15,58,223,200,32 + call .Lkey_expansion_256b +.byte 102,15,58,223,202,64 + call .Lkey_expansion_256a + movups %xmm0,(%rax) + movl %esi,16(%rax) + xorq %rax,%rax + jmp .Lenc_key_ret + +.align 16 +.Lbad_keybits: + movq $-2,%rax +.Lenc_key_ret: + addq $8,%rsp + .byte 0xf3,0xc3 +.LSEH_end_set_encrypt_key: + +.align 16 +.Lkey_expansion_128: + movups %xmm0,(%rax) + leaq 16(%rax),%rax +.Lkey_expansion_128_cold: + shufps $16,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $255,%xmm1,%xmm1 + 
xorps %xmm1,%xmm0 + .byte 0xf3,0xc3 + +.align 16 +.Lkey_expansion_192a: + movups %xmm0,(%rax) + leaq 16(%rax),%rax +.Lkey_expansion_192a_cold: + movaps %xmm2,%xmm5 +.Lkey_expansion_192b_warm: + shufps $16,%xmm0,%xmm4 + movdqa %xmm2,%xmm3 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + pslldq $4,%xmm3 + xorps %xmm4,%xmm0 + pshufd $85,%xmm1,%xmm1 + pxor %xmm3,%xmm2 + pxor %xmm1,%xmm0 + pshufd $255,%xmm0,%xmm3 + pxor %xmm3,%xmm2 + .byte 0xf3,0xc3 + +.align 16 +.Lkey_expansion_192b: + movaps %xmm0,%xmm3 + shufps $68,%xmm0,%xmm5 + movups %xmm5,(%rax) + shufps $78,%xmm2,%xmm3 + movups %xmm3,16(%rax) + leaq 32(%rax),%rax + jmp .Lkey_expansion_192b_warm + +.align 16 +.Lkey_expansion_256a: + movups %xmm2,(%rax) + leaq 16(%rax),%rax +.Lkey_expansion_256a_cold: + shufps $16,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $140,%xmm0,%xmm4 + xorps %xmm4,%xmm0 + shufps $255,%xmm1,%xmm1 + xorps %xmm1,%xmm0 + .byte 0xf3,0xc3 + +.align 16 +.Lkey_expansion_256b: + movups %xmm0,(%rax) + leaq 16(%rax),%rax + + shufps $16,%xmm2,%xmm4 + xorps %xmm4,%xmm2 + shufps $140,%xmm2,%xmm4 + xorps %xmm4,%xmm2 + shufps $170,%xmm1,%xmm1 + xorps %xmm1,%xmm2 + .byte 0xf3,0xc3 +.size aesni_set_encrypt_key,.-aesni_set_encrypt_key +.size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key +.align 64 +.Lbswap_mask: +.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 +.Lincrement32: +.long 6,6,6,0 +.Lincrement64: +.long 1,0,0,0 +.Lxts_magic: +.long 0x87,0,1,0 + +.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 64 diff --git a/secure/lib/libcrypto/asm/bsaes-x86_64.s b/secure/lib/libcrypto/asm/bsaes-x86_64.s new file mode 100644 index 0000000000..c50be0fd55 --- /dev/null +++ b/secure/lib/libcrypto/asm/bsaes-x86_64.s @@ -0,0 +1,2576 @@ +.text + + + + +.type _bsaes_encrypt8,@function +.align 64 +_bsaes_encrypt8: + leaq .LBS0(%rip),%r11 + + movdqa (%rax),%xmm8 + leaq 16(%rax),%rax + movdqa 96(%r11),%xmm7 + pxor %xmm8,%xmm15 + pxor %xmm8,%xmm0 +.byte 102,68,15,56,0,255 + pxor %xmm8,%xmm1 +.byte 102,15,56,0,199 + pxor %xmm8,%xmm2 +.byte 102,15,56,0,207 + pxor %xmm8,%xmm3 +.byte 102,15,56,0,215 + pxor %xmm8,%xmm4 +.byte 102,15,56,0,223 + pxor %xmm8,%xmm5 +.byte 102,15,56,0,231 + pxor %xmm8,%xmm6 +.byte 102,15,56,0,239 +.byte 102,15,56,0,247 +_bsaes_encrypt8_bitslice: + movdqa 0(%r11),%xmm7 + movdqa 16(%r11),%xmm8 + movdqa %xmm5,%xmm9 + psrlq $1,%xmm5 + movdqa %xmm3,%xmm10 + psrlq $1,%xmm3 + pxor %xmm6,%xmm5 + pxor %xmm4,%xmm3 + pand %xmm7,%xmm5 + pand %xmm7,%xmm3 + pxor %xmm5,%xmm6 + psllq $1,%xmm5 + pxor %xmm3,%xmm4 + psllq $1,%xmm3 + pxor %xmm9,%xmm5 + pxor %xmm10,%xmm3 + movdqa %xmm1,%xmm9 + psrlq $1,%xmm1 + movdqa %xmm15,%xmm10 + psrlq $1,%xmm15 + pxor %xmm2,%xmm1 + pxor %xmm0,%xmm15 + pand %xmm7,%xmm1 + pand %xmm7,%xmm15 + pxor %xmm1,%xmm2 + psllq $1,%xmm1 + pxor %xmm15,%xmm0 + psllq $1,%xmm15 + pxor %xmm9,%xmm1 + pxor %xmm10,%xmm15 + movdqa 32(%r11),%xmm7 + movdqa %xmm4,%xmm9 + psrlq $2,%xmm4 + movdqa %xmm3,%xmm10 + psrlq $2,%xmm3 + pxor %xmm6,%xmm4 + pxor %xmm5,%xmm3 + pand %xmm8,%xmm4 + pand %xmm8,%xmm3 + pxor %xmm4,%xmm6 + psllq $2,%xmm4 + pxor %xmm3,%xmm5 + psllq $2,%xmm3 + pxor %xmm9,%xmm4 + pxor %xmm10,%xmm3 + movdqa %xmm0,%xmm9 + psrlq $2,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $2,%xmm15 + pxor %xmm2,%xmm0 + pxor %xmm1,%xmm15 + pand %xmm8,%xmm0 + pand %xmm8,%xmm15 + pxor %xmm0,%xmm2 + psllq $2,%xmm0 + pxor %xmm15,%xmm1 + psllq $2,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa 
%xmm2,%xmm9 + psrlq $4,%xmm2 + movdqa %xmm1,%xmm10 + psrlq $4,%xmm1 + pxor %xmm6,%xmm2 + pxor %xmm5,%xmm1 + pand %xmm7,%xmm2 + pand %xmm7,%xmm1 + pxor %xmm2,%xmm6 + psllq $4,%xmm2 + pxor %xmm1,%xmm5 + psllq $4,%xmm1 + pxor %xmm9,%xmm2 + pxor %xmm10,%xmm1 + movdqa %xmm0,%xmm9 + psrlq $4,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $4,%xmm15 + pxor %xmm4,%xmm0 + pxor %xmm3,%xmm15 + pand %xmm7,%xmm0 + pand %xmm7,%xmm15 + pxor %xmm0,%xmm4 + psllq $4,%xmm0 + pxor %xmm15,%xmm3 + psllq $4,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + decl %r10d + jmp .Lenc_sbox +.align 16 +.Lenc_loop: + pxor 0(%rax),%xmm15 + pxor 16(%rax),%xmm0 +.byte 102,68,15,56,0,255 + pxor 32(%rax),%xmm1 +.byte 102,15,56,0,199 + pxor 48(%rax),%xmm2 +.byte 102,15,56,0,207 + pxor 64(%rax),%xmm3 +.byte 102,15,56,0,215 + pxor 80(%rax),%xmm4 +.byte 102,15,56,0,223 + pxor 96(%rax),%xmm5 +.byte 102,15,56,0,231 + pxor 112(%rax),%xmm6 +.byte 102,15,56,0,239 + leaq 128(%rax),%rax +.byte 102,15,56,0,247 +.Lenc_sbox: + pxor %xmm5,%xmm4 + pxor %xmm0,%xmm1 + pxor %xmm15,%xmm2 + pxor %xmm1,%xmm5 + pxor %xmm15,%xmm4 + + pxor %xmm2,%xmm5 + pxor %xmm6,%xmm2 + pxor %xmm4,%xmm6 + pxor %xmm3,%xmm2 + pxor %xmm4,%xmm3 + pxor %xmm0,%xmm2 + + pxor %xmm6,%xmm1 + pxor %xmm4,%xmm0 + movdqa %xmm6,%xmm10 + movdqa %xmm0,%xmm9 + movdqa %xmm4,%xmm8 + movdqa %xmm1,%xmm12 + movdqa %xmm5,%xmm11 + + pxor %xmm3,%xmm10 + pxor %xmm1,%xmm9 + pxor %xmm2,%xmm8 + movdqa %xmm10,%xmm13 + pxor %xmm3,%xmm12 + movdqa %xmm9,%xmm7 + pxor %xmm15,%xmm11 + movdqa %xmm10,%xmm14 + + por %xmm8,%xmm9 + por %xmm11,%xmm10 + pxor %xmm7,%xmm14 + pand %xmm11,%xmm13 + pxor %xmm8,%xmm11 + pand %xmm8,%xmm7 + pand %xmm11,%xmm14 + movdqa %xmm2,%xmm11 + pxor %xmm15,%xmm11 + pand %xmm11,%xmm12 + pxor %xmm12,%xmm10 + pxor %xmm12,%xmm9 + movdqa %xmm6,%xmm12 + movdqa %xmm4,%xmm11 + pxor %xmm0,%xmm12 + pxor %xmm5,%xmm11 + movdqa %xmm12,%xmm8 + pand %xmm11,%xmm12 + por %xmm11,%xmm8 + pxor %xmm12,%xmm7 + pxor %xmm14,%xmm10 + pxor %xmm13,%xmm9 + pxor %xmm14,%xmm8 + movdqa %xmm1,%xmm11 + pxor %xmm13,%xmm7 + movdqa %xmm3,%xmm12 + pxor %xmm13,%xmm8 + movdqa %xmm0,%xmm13 + pand %xmm2,%xmm11 + movdqa %xmm6,%xmm14 + pand %xmm15,%xmm12 + pand %xmm4,%xmm13 + por %xmm5,%xmm14 + pxor %xmm11,%xmm10 + pxor %xmm12,%xmm9 + pxor %xmm13,%xmm8 + pxor %xmm14,%xmm7 + + + + + + movdqa %xmm10,%xmm11 + pand %xmm8,%xmm10 + pxor %xmm9,%xmm11 + + movdqa %xmm7,%xmm13 + movdqa %xmm11,%xmm14 + pxor %xmm10,%xmm13 + pand %xmm13,%xmm14 + + movdqa %xmm8,%xmm12 + pxor %xmm9,%xmm14 + pxor %xmm7,%xmm12 + + pxor %xmm9,%xmm10 + + pand %xmm10,%xmm12 + + movdqa %xmm13,%xmm9 + pxor %xmm7,%xmm12 + + pxor %xmm12,%xmm9 + pxor %xmm12,%xmm8 + + pand %xmm7,%xmm9 + + pxor %xmm9,%xmm13 + pxor %xmm9,%xmm8 + + pand %xmm14,%xmm13 + + pxor %xmm11,%xmm13 + movdqa %xmm5,%xmm11 + movdqa %xmm4,%xmm7 + movdqa %xmm14,%xmm9 + pxor %xmm13,%xmm9 + pand %xmm5,%xmm9 + pxor %xmm4,%xmm5 + pand %xmm14,%xmm4 + pand %xmm13,%xmm5 + pxor %xmm4,%xmm5 + pxor %xmm9,%xmm4 + pxor %xmm15,%xmm11 + pxor %xmm2,%xmm7 + pxor %xmm12,%xmm14 + pxor %xmm8,%xmm13 + movdqa %xmm14,%xmm10 + movdqa %xmm12,%xmm9 + pxor %xmm13,%xmm10 + pxor %xmm8,%xmm9 + pand %xmm11,%xmm10 + pand %xmm15,%xmm9 + pxor %xmm7,%xmm11 + pxor %xmm2,%xmm15 + pand %xmm14,%xmm7 + pand %xmm12,%xmm2 + pand %xmm13,%xmm11 + pand %xmm8,%xmm15 + pxor %xmm11,%xmm7 + pxor %xmm2,%xmm15 + pxor %xmm10,%xmm11 + pxor %xmm9,%xmm2 + pxor %xmm11,%xmm5 + pxor %xmm11,%xmm15 + pxor %xmm7,%xmm4 + pxor %xmm7,%xmm2 + + movdqa %xmm6,%xmm11 + movdqa %xmm0,%xmm7 + pxor %xmm3,%xmm11 + pxor %xmm1,%xmm7 + movdqa %xmm14,%xmm10 + movdqa %xmm12,%xmm9 + 
pxor %xmm13,%xmm10 + pxor %xmm8,%xmm9 + pand %xmm11,%xmm10 + pand %xmm3,%xmm9 + pxor %xmm7,%xmm11 + pxor %xmm1,%xmm3 + pand %xmm14,%xmm7 + pand %xmm12,%xmm1 + pand %xmm13,%xmm11 + pand %xmm8,%xmm3 + pxor %xmm11,%xmm7 + pxor %xmm1,%xmm3 + pxor %xmm10,%xmm11 + pxor %xmm9,%xmm1 + pxor %xmm12,%xmm14 + pxor %xmm8,%xmm13 + movdqa %xmm14,%xmm10 + pxor %xmm13,%xmm10 + pand %xmm6,%xmm10 + pxor %xmm0,%xmm6 + pand %xmm14,%xmm0 + pand %xmm13,%xmm6 + pxor %xmm0,%xmm6 + pxor %xmm10,%xmm0 + pxor %xmm11,%xmm6 + pxor %xmm11,%xmm3 + pxor %xmm7,%xmm0 + pxor %xmm7,%xmm1 + pxor %xmm15,%xmm6 + pxor %xmm5,%xmm0 + pxor %xmm6,%xmm3 + pxor %xmm15,%xmm5 + pxor %xmm0,%xmm15 + + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + pxor %xmm2,%xmm1 + pxor %xmm4,%xmm2 + pxor %xmm4,%xmm3 + + pxor %xmm2,%xmm5 + decl %r10d + jl .Lenc_done + pshufd $147,%xmm15,%xmm7 + pshufd $147,%xmm0,%xmm8 + pxor %xmm7,%xmm15 + pshufd $147,%xmm3,%xmm9 + pxor %xmm8,%xmm0 + pshufd $147,%xmm5,%xmm10 + pxor %xmm9,%xmm3 + pshufd $147,%xmm2,%xmm11 + pxor %xmm10,%xmm5 + pshufd $147,%xmm6,%xmm12 + pxor %xmm11,%xmm2 + pshufd $147,%xmm1,%xmm13 + pxor %xmm12,%xmm6 + pshufd $147,%xmm4,%xmm14 + pxor %xmm13,%xmm1 + pxor %xmm14,%xmm4 + + pxor %xmm15,%xmm8 + pxor %xmm4,%xmm7 + pxor %xmm4,%xmm8 + pshufd $78,%xmm15,%xmm15 + pxor %xmm0,%xmm9 + pshufd $78,%xmm0,%xmm0 + pxor %xmm2,%xmm12 + pxor %xmm7,%xmm15 + pxor %xmm6,%xmm13 + pxor %xmm8,%xmm0 + pxor %xmm5,%xmm11 + pshufd $78,%xmm2,%xmm7 + pxor %xmm1,%xmm14 + pshufd $78,%xmm6,%xmm8 + pxor %xmm3,%xmm10 + pshufd $78,%xmm5,%xmm2 + pxor %xmm4,%xmm10 + pshufd $78,%xmm4,%xmm6 + pxor %xmm4,%xmm11 + pshufd $78,%xmm1,%xmm5 + pxor %xmm11,%xmm7 + pshufd $78,%xmm3,%xmm1 + pxor %xmm12,%xmm8 + + pxor %xmm10,%xmm2 + pxor %xmm14,%xmm6 + pxor %xmm13,%xmm5 + movdqa %xmm7,%xmm3 + pxor %xmm9,%xmm1 + movdqa %xmm8,%xmm4 + movdqa 48(%r11),%xmm7 + jnz .Lenc_loop + movdqa 64(%r11),%xmm7 + jmp .Lenc_loop +.align 16 +.Lenc_done: + movdqa 0(%r11),%xmm7 + movdqa 16(%r11),%xmm8 + movdqa %xmm1,%xmm9 + psrlq $1,%xmm1 + movdqa %xmm2,%xmm10 + psrlq $1,%xmm2 + pxor %xmm4,%xmm1 + pxor %xmm6,%xmm2 + pand %xmm7,%xmm1 + pand %xmm7,%xmm2 + pxor %xmm1,%xmm4 + psllq $1,%xmm1 + pxor %xmm2,%xmm6 + psllq $1,%xmm2 + pxor %xmm9,%xmm1 + pxor %xmm10,%xmm2 + movdqa %xmm3,%xmm9 + psrlq $1,%xmm3 + movdqa %xmm15,%xmm10 + psrlq $1,%xmm15 + pxor %xmm5,%xmm3 + pxor %xmm0,%xmm15 + pand %xmm7,%xmm3 + pand %xmm7,%xmm15 + pxor %xmm3,%xmm5 + psllq $1,%xmm3 + pxor %xmm15,%xmm0 + psllq $1,%xmm15 + pxor %xmm9,%xmm3 + pxor %xmm10,%xmm15 + movdqa 32(%r11),%xmm7 + movdqa %xmm6,%xmm9 + psrlq $2,%xmm6 + movdqa %xmm2,%xmm10 + psrlq $2,%xmm2 + pxor %xmm4,%xmm6 + pxor %xmm1,%xmm2 + pand %xmm8,%xmm6 + pand %xmm8,%xmm2 + pxor %xmm6,%xmm4 + psllq $2,%xmm6 + pxor %xmm2,%xmm1 + psllq $2,%xmm2 + pxor %xmm9,%xmm6 + pxor %xmm10,%xmm2 + movdqa %xmm0,%xmm9 + psrlq $2,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $2,%xmm15 + pxor %xmm5,%xmm0 + pxor %xmm3,%xmm15 + pand %xmm8,%xmm0 + pand %xmm8,%xmm15 + pxor %xmm0,%xmm5 + psllq $2,%xmm0 + pxor %xmm15,%xmm3 + psllq $2,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa %xmm5,%xmm9 + psrlq $4,%xmm5 + movdqa %xmm3,%xmm10 + psrlq $4,%xmm3 + pxor %xmm4,%xmm5 + pxor %xmm1,%xmm3 + pand %xmm7,%xmm5 + pand %xmm7,%xmm3 + pxor %xmm5,%xmm4 + psllq $4,%xmm5 + pxor %xmm3,%xmm1 + psllq $4,%xmm3 + pxor %xmm9,%xmm5 + pxor %xmm10,%xmm3 + movdqa %xmm0,%xmm9 + psrlq $4,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $4,%xmm15 + pxor %xmm6,%xmm0 + pxor %xmm2,%xmm15 + pand %xmm7,%xmm0 + pand %xmm7,%xmm15 + pxor %xmm0,%xmm6 + psllq $4,%xmm0 + pxor %xmm15,%xmm2 + psllq $4,%xmm15 + pxor 
%xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa (%rax),%xmm7 + pxor %xmm7,%xmm3 + pxor %xmm7,%xmm5 + pxor %xmm7,%xmm2 + pxor %xmm7,%xmm6 + pxor %xmm7,%xmm1 + pxor %xmm7,%xmm4 + pxor %xmm7,%xmm15 + pxor %xmm7,%xmm0 + .byte 0xf3,0xc3 +.size _bsaes_encrypt8,.-_bsaes_encrypt8 + +.type _bsaes_decrypt8,@function +.align 64 +_bsaes_decrypt8: + leaq .LBS0(%rip),%r11 + + movdqa (%rax),%xmm8 + leaq 16(%rax),%rax + movdqa -48(%r11),%xmm7 + pxor %xmm8,%xmm15 + pxor %xmm8,%xmm0 +.byte 102,68,15,56,0,255 + pxor %xmm8,%xmm1 +.byte 102,15,56,0,199 + pxor %xmm8,%xmm2 +.byte 102,15,56,0,207 + pxor %xmm8,%xmm3 +.byte 102,15,56,0,215 + pxor %xmm8,%xmm4 +.byte 102,15,56,0,223 + pxor %xmm8,%xmm5 +.byte 102,15,56,0,231 + pxor %xmm8,%xmm6 +.byte 102,15,56,0,239 +.byte 102,15,56,0,247 + movdqa 0(%r11),%xmm7 + movdqa 16(%r11),%xmm8 + movdqa %xmm5,%xmm9 + psrlq $1,%xmm5 + movdqa %xmm3,%xmm10 + psrlq $1,%xmm3 + pxor %xmm6,%xmm5 + pxor %xmm4,%xmm3 + pand %xmm7,%xmm5 + pand %xmm7,%xmm3 + pxor %xmm5,%xmm6 + psllq $1,%xmm5 + pxor %xmm3,%xmm4 + psllq $1,%xmm3 + pxor %xmm9,%xmm5 + pxor %xmm10,%xmm3 + movdqa %xmm1,%xmm9 + psrlq $1,%xmm1 + movdqa %xmm15,%xmm10 + psrlq $1,%xmm15 + pxor %xmm2,%xmm1 + pxor %xmm0,%xmm15 + pand %xmm7,%xmm1 + pand %xmm7,%xmm15 + pxor %xmm1,%xmm2 + psllq $1,%xmm1 + pxor %xmm15,%xmm0 + psllq $1,%xmm15 + pxor %xmm9,%xmm1 + pxor %xmm10,%xmm15 + movdqa 32(%r11),%xmm7 + movdqa %xmm4,%xmm9 + psrlq $2,%xmm4 + movdqa %xmm3,%xmm10 + psrlq $2,%xmm3 + pxor %xmm6,%xmm4 + pxor %xmm5,%xmm3 + pand %xmm8,%xmm4 + pand %xmm8,%xmm3 + pxor %xmm4,%xmm6 + psllq $2,%xmm4 + pxor %xmm3,%xmm5 + psllq $2,%xmm3 + pxor %xmm9,%xmm4 + pxor %xmm10,%xmm3 + movdqa %xmm0,%xmm9 + psrlq $2,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $2,%xmm15 + pxor %xmm2,%xmm0 + pxor %xmm1,%xmm15 + pand %xmm8,%xmm0 + pand %xmm8,%xmm15 + pxor %xmm0,%xmm2 + psllq $2,%xmm0 + pxor %xmm15,%xmm1 + psllq $2,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa %xmm2,%xmm9 + psrlq $4,%xmm2 + movdqa %xmm1,%xmm10 + psrlq $4,%xmm1 + pxor %xmm6,%xmm2 + pxor %xmm5,%xmm1 + pand %xmm7,%xmm2 + pand %xmm7,%xmm1 + pxor %xmm2,%xmm6 + psllq $4,%xmm2 + pxor %xmm1,%xmm5 + psllq $4,%xmm1 + pxor %xmm9,%xmm2 + pxor %xmm10,%xmm1 + movdqa %xmm0,%xmm9 + psrlq $4,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $4,%xmm15 + pxor %xmm4,%xmm0 + pxor %xmm3,%xmm15 + pand %xmm7,%xmm0 + pand %xmm7,%xmm15 + pxor %xmm0,%xmm4 + psllq $4,%xmm0 + pxor %xmm15,%xmm3 + psllq $4,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + decl %r10d + jmp .Ldec_sbox +.align 16 +.Ldec_loop: + pxor 0(%rax),%xmm15 + pxor 16(%rax),%xmm0 +.byte 102,68,15,56,0,255 + pxor 32(%rax),%xmm1 +.byte 102,15,56,0,199 + pxor 48(%rax),%xmm2 +.byte 102,15,56,0,207 + pxor 64(%rax),%xmm3 +.byte 102,15,56,0,215 + pxor 80(%rax),%xmm4 +.byte 102,15,56,0,223 + pxor 96(%rax),%xmm5 +.byte 102,15,56,0,231 + pxor 112(%rax),%xmm6 +.byte 102,15,56,0,239 + leaq 128(%rax),%rax +.byte 102,15,56,0,247 +.Ldec_sbox: + pxor %xmm3,%xmm2 + + pxor %xmm6,%xmm3 + pxor %xmm6,%xmm1 + pxor %xmm3,%xmm5 + pxor %xmm5,%xmm6 + pxor %xmm6,%xmm0 + + pxor %xmm0,%xmm15 + pxor %xmm4,%xmm1 + pxor %xmm15,%xmm2 + pxor %xmm15,%xmm4 + pxor %xmm2,%xmm0 + movdqa %xmm2,%xmm10 + movdqa %xmm6,%xmm9 + movdqa %xmm0,%xmm8 + movdqa %xmm3,%xmm12 + movdqa %xmm4,%xmm11 + + pxor %xmm15,%xmm10 + pxor %xmm3,%xmm9 + pxor %xmm5,%xmm8 + movdqa %xmm10,%xmm13 + pxor %xmm15,%xmm12 + movdqa %xmm9,%xmm7 + pxor %xmm1,%xmm11 + movdqa %xmm10,%xmm14 + + por %xmm8,%xmm9 + por %xmm11,%xmm10 + pxor %xmm7,%xmm14 + pand %xmm11,%xmm13 + pxor %xmm8,%xmm11 + pand %xmm8,%xmm7 + pand %xmm11,%xmm14 + movdqa %xmm5,%xmm11 + 
pxor %xmm1,%xmm11 + pand %xmm11,%xmm12 + pxor %xmm12,%xmm10 + pxor %xmm12,%xmm9 + movdqa %xmm2,%xmm12 + movdqa %xmm0,%xmm11 + pxor %xmm6,%xmm12 + pxor %xmm4,%xmm11 + movdqa %xmm12,%xmm8 + pand %xmm11,%xmm12 + por %xmm11,%xmm8 + pxor %xmm12,%xmm7 + pxor %xmm14,%xmm10 + pxor %xmm13,%xmm9 + pxor %xmm14,%xmm8 + movdqa %xmm3,%xmm11 + pxor %xmm13,%xmm7 + movdqa %xmm15,%xmm12 + pxor %xmm13,%xmm8 + movdqa %xmm6,%xmm13 + pand %xmm5,%xmm11 + movdqa %xmm2,%xmm14 + pand %xmm1,%xmm12 + pand %xmm0,%xmm13 + por %xmm4,%xmm14 + pxor %xmm11,%xmm10 + pxor %xmm12,%xmm9 + pxor %xmm13,%xmm8 + pxor %xmm14,%xmm7 + + + + + + movdqa %xmm10,%xmm11 + pand %xmm8,%xmm10 + pxor %xmm9,%xmm11 + + movdqa %xmm7,%xmm13 + movdqa %xmm11,%xmm14 + pxor %xmm10,%xmm13 + pand %xmm13,%xmm14 + + movdqa %xmm8,%xmm12 + pxor %xmm9,%xmm14 + pxor %xmm7,%xmm12 + + pxor %xmm9,%xmm10 + + pand %xmm10,%xmm12 + + movdqa %xmm13,%xmm9 + pxor %xmm7,%xmm12 + + pxor %xmm12,%xmm9 + pxor %xmm12,%xmm8 + + pand %xmm7,%xmm9 + + pxor %xmm9,%xmm13 + pxor %xmm9,%xmm8 + + pand %xmm14,%xmm13 + + pxor %xmm11,%xmm13 + movdqa %xmm4,%xmm11 + movdqa %xmm0,%xmm7 + movdqa %xmm14,%xmm9 + pxor %xmm13,%xmm9 + pand %xmm4,%xmm9 + pxor %xmm0,%xmm4 + pand %xmm14,%xmm0 + pand %xmm13,%xmm4 + pxor %xmm0,%xmm4 + pxor %xmm9,%xmm0 + pxor %xmm1,%xmm11 + pxor %xmm5,%xmm7 + pxor %xmm12,%xmm14 + pxor %xmm8,%xmm13 + movdqa %xmm14,%xmm10 + movdqa %xmm12,%xmm9 + pxor %xmm13,%xmm10 + pxor %xmm8,%xmm9 + pand %xmm11,%xmm10 + pand %xmm1,%xmm9 + pxor %xmm7,%xmm11 + pxor %xmm5,%xmm1 + pand %xmm14,%xmm7 + pand %xmm12,%xmm5 + pand %xmm13,%xmm11 + pand %xmm8,%xmm1 + pxor %xmm11,%xmm7 + pxor %xmm5,%xmm1 + pxor %xmm10,%xmm11 + pxor %xmm9,%xmm5 + pxor %xmm11,%xmm4 + pxor %xmm11,%xmm1 + pxor %xmm7,%xmm0 + pxor %xmm7,%xmm5 + + movdqa %xmm2,%xmm11 + movdqa %xmm6,%xmm7 + pxor %xmm15,%xmm11 + pxor %xmm3,%xmm7 + movdqa %xmm14,%xmm10 + movdqa %xmm12,%xmm9 + pxor %xmm13,%xmm10 + pxor %xmm8,%xmm9 + pand %xmm11,%xmm10 + pand %xmm15,%xmm9 + pxor %xmm7,%xmm11 + pxor %xmm3,%xmm15 + pand %xmm14,%xmm7 + pand %xmm12,%xmm3 + pand %xmm13,%xmm11 + pand %xmm8,%xmm15 + pxor %xmm11,%xmm7 + pxor %xmm3,%xmm15 + pxor %xmm10,%xmm11 + pxor %xmm9,%xmm3 + pxor %xmm12,%xmm14 + pxor %xmm8,%xmm13 + movdqa %xmm14,%xmm10 + pxor %xmm13,%xmm10 + pand %xmm2,%xmm10 + pxor %xmm6,%xmm2 + pand %xmm14,%xmm6 + pand %xmm13,%xmm2 + pxor %xmm6,%xmm2 + pxor %xmm10,%xmm6 + pxor %xmm11,%xmm2 + pxor %xmm11,%xmm15 + pxor %xmm7,%xmm6 + pxor %xmm7,%xmm3 + pxor %xmm6,%xmm0 + pxor %xmm4,%xmm5 + + pxor %xmm0,%xmm3 + pxor %xmm6,%xmm1 + pxor %xmm6,%xmm4 + pxor %xmm1,%xmm3 + pxor %xmm15,%xmm6 + pxor %xmm4,%xmm3 + pxor %xmm5,%xmm2 + pxor %xmm0,%xmm5 + pxor %xmm3,%xmm2 + + pxor %xmm15,%xmm3 + pxor %xmm2,%xmm6 + decl %r10d + jl .Ldec_done + + pshufd $147,%xmm4,%xmm14 + movdqa %xmm5,%xmm9 + pxor %xmm6,%xmm4 + pxor %xmm6,%xmm5 + pshufd $147,%xmm15,%xmm7 + movdqa %xmm6,%xmm12 + pxor %xmm15,%xmm6 + pxor %xmm0,%xmm15 + pshufd $147,%xmm0,%xmm8 + pxor %xmm5,%xmm0 + pxor %xmm2,%xmm15 + pxor %xmm3,%xmm0 + pshufd $147,%xmm3,%xmm10 + pxor %xmm15,%xmm5 + pxor %xmm4,%xmm3 + pxor %xmm2,%xmm4 + pshufd $147,%xmm2,%xmm13 + movdqa %xmm1,%xmm11 + pxor %xmm1,%xmm2 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm3 + pxor %xmm12,%xmm2 + pxor %xmm9,%xmm3 + pxor %xmm11,%xmm3 + pshufd $147,%xmm12,%xmm12 + + pxor %xmm4,%xmm6 + pxor %xmm7,%xmm4 + pxor %xmm8,%xmm6 + pshufd $147,%xmm9,%xmm9 + pxor %xmm12,%xmm4 + pxor %xmm13,%xmm6 + pxor %xmm14,%xmm4 + pshufd $147,%xmm11,%xmm11 + pxor %xmm13,%xmm14 + pxor %xmm4,%xmm6 + + pxor %xmm7,%xmm5 + pshufd $147,%xmm7,%xmm7 + pxor %xmm8,%xmm15 + pxor 
%xmm8,%xmm0 + pxor %xmm9,%xmm15 + pshufd $147,%xmm8,%xmm8 + pxor %xmm9,%xmm5 + pxor %xmm9,%xmm3 + pxor %xmm14,%xmm15 + pshufd $147,%xmm9,%xmm9 + pxor %xmm10,%xmm5 + pxor %xmm10,%xmm1 + pxor %xmm10,%xmm0 + pshufd $147,%xmm10,%xmm10 + pxor %xmm11,%xmm2 + pxor %xmm11,%xmm3 + pxor %xmm14,%xmm2 + pxor %xmm12,%xmm5 + pxor %xmm11,%xmm0 + pxor %xmm12,%xmm14 + + pxor %xmm14,%xmm3 + pshufd $147,%xmm11,%xmm11 + pxor %xmm14,%xmm1 + pxor %xmm14,%xmm0 + + pxor %xmm12,%xmm14 + pshufd $147,%xmm12,%xmm12 + pxor %xmm13,%xmm14 + + + pxor %xmm2,%xmm0 + pxor %xmm11,%xmm2 + pshufd $147,%xmm13,%xmm13 + pxor %xmm7,%xmm15 + pxor %xmm12,%xmm2 + pxor %xmm9,%xmm15 + pshufd $147,%xmm14,%xmm14 + + pxor %xmm6,%xmm5 + pxor %xmm8,%xmm6 + pxor %xmm7,%xmm4 + pxor %xmm7,%xmm5 + pxor %xmm12,%xmm6 + pxor %xmm12,%xmm4 + pxor %xmm14,%xmm6 + pshufd $147,%xmm7,%xmm7 + pxor %xmm13,%xmm4 + pxor %xmm6,%xmm5 + pxor %xmm8,%xmm0 + pshufd $147,%xmm8,%xmm8 + + pxor %xmm14,%xmm2 + pxor %xmm9,%xmm0 + pxor %xmm9,%xmm3 + pshufd $147,%xmm9,%xmm9 + pxor %xmm13,%xmm15 + pxor %xmm10,%xmm13 + pxor %xmm2,%xmm0 + pxor %xmm13,%xmm5 + + pxor %xmm13,%xmm1 + pxor %xmm12,%xmm3 + pxor %xmm11,%xmm1 + pshufd $147,%xmm11,%xmm11 + pxor %xmm13,%xmm3 + pxor %xmm14,%xmm1 + pxor %xmm10,%xmm13 + + pshufd $147,%xmm12,%xmm12 + pshufd $147,%xmm13,%xmm13 + pshufd $147,%xmm14,%xmm14 + pshufd $147,%xmm10,%xmm10 + + + pxor %xmm6,%xmm0 + pxor %xmm6,%xmm8 + pxor %xmm12,%xmm7 + pxor %xmm12,%xmm8 + pxor %xmm7,%xmm5 + pxor %xmm4,%xmm7 + pxor %xmm13,%xmm8 + pxor %xmm14,%xmm13 + pxor %xmm8,%xmm0 + pxor %xmm11,%xmm2 + pxor %xmm0,%xmm11 + pxor %xmm10,%xmm1 + pxor %xmm5,%xmm10 + pxor %xmm9,%xmm3 + pxor %xmm15,%xmm9 + pxor %xmm14,%xmm10 + pxor %xmm3,%xmm12 + pxor %xmm13,%xmm9 + pxor %xmm13,%xmm12 + pxor %xmm1,%xmm13 + pxor %xmm2,%xmm14 + + movdqa %xmm7,%xmm15 + movdqa %xmm8,%xmm0 + movdqa %xmm9,%xmm1 + movdqa %xmm10,%xmm2 + movdqa %xmm11,%xmm3 + movdqa %xmm12,%xmm4 + movdqa %xmm13,%xmm5 + movdqa %xmm14,%xmm6 + movdqa -16(%r11),%xmm7 + jnz .Ldec_loop + movdqa -32(%r11),%xmm7 + jmp .Ldec_loop +.align 16 +.Ldec_done: + movdqa 0(%r11),%xmm7 + movdqa 16(%r11),%xmm8 + movdqa %xmm2,%xmm9 + psrlq $1,%xmm2 + movdqa %xmm1,%xmm10 + psrlq $1,%xmm1 + pxor %xmm4,%xmm2 + pxor %xmm6,%xmm1 + pand %xmm7,%xmm2 + pand %xmm7,%xmm1 + pxor %xmm2,%xmm4 + psllq $1,%xmm2 + pxor %xmm1,%xmm6 + psllq $1,%xmm1 + pxor %xmm9,%xmm2 + pxor %xmm10,%xmm1 + movdqa %xmm5,%xmm9 + psrlq $1,%xmm5 + movdqa %xmm15,%xmm10 + psrlq $1,%xmm15 + pxor %xmm3,%xmm5 + pxor %xmm0,%xmm15 + pand %xmm7,%xmm5 + pand %xmm7,%xmm15 + pxor %xmm5,%xmm3 + psllq $1,%xmm5 + pxor %xmm15,%xmm0 + psllq $1,%xmm15 + pxor %xmm9,%xmm5 + pxor %xmm10,%xmm15 + movdqa 32(%r11),%xmm7 + movdqa %xmm6,%xmm9 + psrlq $2,%xmm6 + movdqa %xmm1,%xmm10 + psrlq $2,%xmm1 + pxor %xmm4,%xmm6 + pxor %xmm2,%xmm1 + pand %xmm8,%xmm6 + pand %xmm8,%xmm1 + pxor %xmm6,%xmm4 + psllq $2,%xmm6 + pxor %xmm1,%xmm2 + psllq $2,%xmm1 + pxor %xmm9,%xmm6 + pxor %xmm10,%xmm1 + movdqa %xmm0,%xmm9 + psrlq $2,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $2,%xmm15 + pxor %xmm3,%xmm0 + pxor %xmm5,%xmm15 + pand %xmm8,%xmm0 + pand %xmm8,%xmm15 + pxor %xmm0,%xmm3 + psllq $2,%xmm0 + pxor %xmm15,%xmm5 + psllq $2,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa %xmm3,%xmm9 + psrlq $4,%xmm3 + movdqa %xmm5,%xmm10 + psrlq $4,%xmm5 + pxor %xmm4,%xmm3 + pxor %xmm2,%xmm5 + pand %xmm7,%xmm3 + pand %xmm7,%xmm5 + pxor %xmm3,%xmm4 + psllq $4,%xmm3 + pxor %xmm5,%xmm2 + psllq $4,%xmm5 + pxor %xmm9,%xmm3 + pxor %xmm10,%xmm5 + movdqa %xmm0,%xmm9 + psrlq $4,%xmm0 + movdqa %xmm15,%xmm10 + psrlq $4,%xmm15 + pxor 
%xmm6,%xmm0 + pxor %xmm1,%xmm15 + pand %xmm7,%xmm0 + pand %xmm7,%xmm15 + pxor %xmm0,%xmm6 + psllq $4,%xmm0 + pxor %xmm15,%xmm1 + psllq $4,%xmm15 + pxor %xmm9,%xmm0 + pxor %xmm10,%xmm15 + movdqa (%rax),%xmm7 + pxor %xmm7,%xmm5 + pxor %xmm7,%xmm3 + pxor %xmm7,%xmm1 + pxor %xmm7,%xmm6 + pxor %xmm7,%xmm2 + pxor %xmm7,%xmm4 + pxor %xmm7,%xmm15 + pxor %xmm7,%xmm0 + .byte 0xf3,0xc3 +.size _bsaes_decrypt8,.-_bsaes_decrypt8 +.type _bsaes_key_convert,@function +.align 16 +_bsaes_key_convert: + leaq .LBS1(%rip),%r11 + movdqu (%rcx),%xmm7 + movdqa -16(%r11),%xmm8 + movdqa 0(%r11),%xmm9 + movdqa 16(%r11),%xmm10 + movdqa 64(%r11),%xmm13 + movdqa 96(%r11),%xmm14 + + movdqu 16(%rcx),%xmm6 + leaq 16(%rcx),%rcx + movdqa %xmm7,(%rax) + leaq 16(%rax),%rax + decl %r10d + jmp .Lkey_loop +.align 16 +.Lkey_loop: +.byte 102,65,15,56,0,245 + movdqa %xmm6,%xmm7 + movdqa %xmm6,%xmm11 + psrlq $1,%xmm6 + pxor %xmm7,%xmm6 + pand %xmm8,%xmm6 + pxor %xmm6,%xmm7 + psllq $1,%xmm6 + pxor %xmm11,%xmm6 + + movdqa %xmm7,%xmm5 + movdqa %xmm6,%xmm4 + movdqa %xmm5,%xmm11 + psrlq $2,%xmm5 + movdqa %xmm4,%xmm12 + psrlq $2,%xmm4 + pxor %xmm7,%xmm5 + pxor %xmm6,%xmm4 + pand %xmm9,%xmm5 + pand %xmm9,%xmm4 + pxor %xmm5,%xmm7 + psllq $2,%xmm5 + pxor %xmm4,%xmm6 + psllq $2,%xmm4 + pxor %xmm11,%xmm5 + pxor %xmm12,%xmm4 + + movdqa %xmm7,%xmm3 + movdqa %xmm5,%xmm1 + movdqa %xmm6,%xmm2 + movdqa %xmm4,%xmm0 + movdqa %xmm3,%xmm11 + psrlq $4,%xmm3 + movdqa %xmm2,%xmm12 + psrlq $4,%xmm2 + pxor %xmm7,%xmm3 + pxor %xmm6,%xmm2 + pand %xmm10,%xmm3 + pand %xmm10,%xmm2 + pxor %xmm3,%xmm7 + psllq $4,%xmm3 + pxor %xmm2,%xmm6 + psllq $4,%xmm2 + pxor %xmm11,%xmm3 + pxor %xmm12,%xmm2 + movdqa %xmm1,%xmm11 + psrlq $4,%xmm1 + movdqa %xmm0,%xmm12 + psrlq $4,%xmm0 + pxor %xmm5,%xmm1 + pxor %xmm4,%xmm0 + pand %xmm10,%xmm1 + pand %xmm10,%xmm0 + pxor %xmm1,%xmm5 + psllq $4,%xmm1 + pxor %xmm0,%xmm4 + psllq $4,%xmm0 + pxor %xmm11,%xmm1 + pxor %xmm12,%xmm0 + pxor %xmm14,%xmm5 + pxor %xmm14,%xmm6 + pxor %xmm14,%xmm0 + pxor %xmm14,%xmm1 + leaq 16(%rcx),%rcx + movdqa %xmm0,0(%rax) + movdqa %xmm1,16(%rax) + movdqa %xmm2,32(%rax) + movdqa %xmm3,48(%rax) + movdqa %xmm4,64(%rax) + movdqa %xmm5,80(%rax) + movdqa %xmm6,96(%rax) + movdqa %xmm7,112(%rax) + leaq 128(%rax),%rax + movdqu (%rcx),%xmm6 + decl %r10d + jnz .Lkey_loop + + movdqa 112(%r11),%xmm7 + + .byte 0xf3,0xc3 +.size _bsaes_key_convert,.-_bsaes_key_convert + +.globl bsaes_cbc_encrypt +.type bsaes_cbc_encrypt,@function +.align 16 +bsaes_cbc_encrypt: + cmpl $0,%r9d + jne asm_AES_cbc_encrypt + cmpq $128,%rdx + jb asm_AES_cbc_encrypt + + movq %rsp,%rax +.Lcbc_dec_prologue: + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + leaq -72(%rsp),%rsp + movq %rsp,%rbp + movl 240(%rcx),%eax + movq %rdi,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %rcx,%r15 + movq %r8,%rbx + shrq $4,%r14 + + movl %eax,%edx + shlq $7,%rax + subq $96,%rax + subq %rax,%rsp + + movq %rsp,%rax + movq %r15,%rcx + movl %edx,%r10d + call _bsaes_key_convert + pxor (%rsp),%xmm7 + movdqa %xmm6,(%rax) + movdqa %xmm7,(%rsp) + + movdqu (%rbx),%xmm14 + subq $8,%r14 +.Lcbc_dec_loop: + movdqu 0(%r12),%xmm15 + movdqu 16(%r12),%xmm0 + movdqu 32(%r12),%xmm1 + movdqu 48(%r12),%xmm2 + movdqu 64(%r12),%xmm3 + movdqu 80(%r12),%xmm4 + movq %rsp,%rax + movdqu 96(%r12),%xmm5 + movl %edx,%r10d + movdqu 112(%r12),%xmm6 + movdqa %xmm14,32(%rbp) + + call _bsaes_decrypt8 + + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm5 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm3 + 
movdqu 64(%r12),%xmm11 + pxor %xmm10,%xmm1 + movdqu 80(%r12),%xmm12 + pxor %xmm11,%xmm6 + movdqu 96(%r12),%xmm13 + pxor %xmm12,%xmm2 + movdqu 112(%r12),%xmm14 + pxor %xmm13,%xmm4 + movdqu %xmm15,0(%r13) + leaq 128(%r12),%r12 + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + movdqu %xmm1,64(%r13) + movdqu %xmm6,80(%r13) + movdqu %xmm2,96(%r13) + movdqu %xmm4,112(%r13) + leaq 128(%r13),%r13 + subq $8,%r14 + jnc .Lcbc_dec_loop + + addq $8,%r14 + jz .Lcbc_dec_done + + movdqu 0(%r12),%xmm15 + movq %rsp,%rax + movl %edx,%r10d + cmpq $2,%r14 + jb .Lcbc_dec_one + movdqu 16(%r12),%xmm0 + je .Lcbc_dec_two + movdqu 32(%r12),%xmm1 + cmpq $4,%r14 + jb .Lcbc_dec_three + movdqu 48(%r12),%xmm2 + je .Lcbc_dec_four + movdqu 64(%r12),%xmm3 + cmpq $6,%r14 + jb .Lcbc_dec_five + movdqu 80(%r12),%xmm4 + je .Lcbc_dec_six + movdqu 96(%r12),%xmm5 + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm5 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm3 + movdqu 64(%r12),%xmm11 + pxor %xmm10,%xmm1 + movdqu 80(%r12),%xmm12 + pxor %xmm11,%xmm6 + movdqu 96(%r12),%xmm14 + pxor %xmm12,%xmm2 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + movdqu %xmm1,64(%r13) + movdqu %xmm6,80(%r13) + movdqu %xmm2,96(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_six: + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm5 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm3 + movdqu 64(%r12),%xmm11 + pxor %xmm10,%xmm1 + movdqu 80(%r12),%xmm14 + pxor %xmm11,%xmm6 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + movdqu %xmm1,64(%r13) + movdqu %xmm6,80(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_five: + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm5 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm3 + movdqu 64(%r12),%xmm14 + pxor %xmm10,%xmm1 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + movdqu %xmm1,64(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_four: + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm5 + movdqu 48(%r12),%xmm14 + pxor %xmm9,%xmm3 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_three: + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm0 + movdqu 32(%r12),%xmm14 + pxor %xmm8,%xmm5 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_two: + movdqa %xmm14,32(%rbp) + call _bsaes_decrypt8 + pxor 32(%rbp),%xmm15 + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm14 + pxor %xmm7,%xmm0 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + jmp .Lcbc_dec_done +.align 16 +.Lcbc_dec_one: + leaq (%r12),%rdi + leaq 32(%rbp),%rsi + leaq (%r15),%rdx + call asm_AES_decrypt + pxor 32(%rbp),%xmm14 + movdqu %xmm14,(%r13) + movdqa %xmm15,%xmm14 + +.Lcbc_dec_done: + movdqu %xmm14,(%rbx) + leaq (%rsp),%rax + pxor %xmm0,%xmm0 +.Lcbc_dec_bzero: + movdqa %xmm0,0(%rax) 
+ movdqa %xmm0,16(%rax) + leaq 32(%rax),%rax + cmpq %rax,%rbp + ja .Lcbc_dec_bzero + + leaq (%rbp),%rsp + movq 72(%rsp),%r15 + movq 80(%rsp),%r14 + movq 88(%rsp),%r13 + movq 96(%rsp),%r12 + movq 104(%rsp),%rbx + movq 112(%rsp),%rax + leaq 120(%rsp),%rsp + movq %rax,%rbp +.Lcbc_dec_epilogue: + .byte 0xf3,0xc3 +.size bsaes_cbc_encrypt,.-bsaes_cbc_encrypt + +.globl bsaes_ctr32_encrypt_blocks +.type bsaes_ctr32_encrypt_blocks,@function +.align 16 +bsaes_ctr32_encrypt_blocks: + movq %rsp,%rax +.Lctr_enc_prologue: + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + leaq -72(%rsp),%rsp + movq %rsp,%rbp + movdqu (%r8),%xmm0 + movl 240(%rcx),%eax + movq %rdi,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %rcx,%r15 + movdqa %xmm0,32(%rbp) + cmpq $8,%rdx + jb .Lctr_enc_short + + movl %eax,%ebx + shlq $7,%rax + subq $96,%rax + subq %rax,%rsp + + movq %rsp,%rax + movq %r15,%rcx + movl %ebx,%r10d + call _bsaes_key_convert + pxor %xmm6,%xmm7 + movdqa %xmm7,(%rax) + + movdqa (%rsp),%xmm8 + leaq .LADD1(%rip),%r11 + movdqa 32(%rbp),%xmm15 + movdqa -32(%r11),%xmm7 +.byte 102,68,15,56,0,199 +.byte 102,68,15,56,0,255 + movdqa %xmm8,(%rsp) + jmp .Lctr_enc_loop +.align 16 +.Lctr_enc_loop: + movdqa %xmm15,32(%rbp) + movdqa %xmm15,%xmm0 + movdqa %xmm15,%xmm1 + paddd 0(%r11),%xmm0 + movdqa %xmm15,%xmm2 + paddd 16(%r11),%xmm1 + movdqa %xmm15,%xmm3 + paddd 32(%r11),%xmm2 + movdqa %xmm15,%xmm4 + paddd 48(%r11),%xmm3 + movdqa %xmm15,%xmm5 + paddd 64(%r11),%xmm4 + movdqa %xmm15,%xmm6 + paddd 80(%r11),%xmm5 + paddd 96(%r11),%xmm6 + + + + movdqa (%rsp),%xmm8 + leaq 16(%rsp),%rax + movdqa -16(%r11),%xmm7 + pxor %xmm8,%xmm15 + pxor %xmm8,%xmm0 +.byte 102,68,15,56,0,255 + pxor %xmm8,%xmm1 +.byte 102,15,56,0,199 + pxor %xmm8,%xmm2 +.byte 102,15,56,0,207 + pxor %xmm8,%xmm3 +.byte 102,15,56,0,215 + pxor %xmm8,%xmm4 +.byte 102,15,56,0,223 + pxor %xmm8,%xmm5 +.byte 102,15,56,0,231 + pxor %xmm8,%xmm6 +.byte 102,15,56,0,239 + leaq .LBS0(%rip),%r11 +.byte 102,15,56,0,247 + movl %ebx,%r10d + + call _bsaes_encrypt8_bitslice + + subq $8,%r14 + jc .Lctr_enc_loop_done + + movdqu 0(%r12),%xmm7 + movdqu 16(%r12),%xmm8 + movdqu 32(%r12),%xmm9 + movdqu 48(%r12),%xmm10 + movdqu 64(%r12),%xmm11 + movdqu 80(%r12),%xmm12 + movdqu 96(%r12),%xmm13 + movdqu 112(%r12),%xmm14 + leaq 128(%r12),%r12 + pxor %xmm15,%xmm7 + movdqa 32(%rbp),%xmm15 + pxor %xmm8,%xmm0 + movdqu %xmm7,0(%r13) + pxor %xmm9,%xmm3 + movdqu %xmm0,16(%r13) + pxor %xmm10,%xmm5 + movdqu %xmm3,32(%r13) + pxor %xmm11,%xmm2 + movdqu %xmm5,48(%r13) + pxor %xmm12,%xmm6 + movdqu %xmm2,64(%r13) + pxor %xmm13,%xmm1 + movdqu %xmm6,80(%r13) + pxor %xmm14,%xmm4 + movdqu %xmm1,96(%r13) + leaq .LADD1(%rip),%r11 + movdqu %xmm4,112(%r13) + leaq 128(%r13),%r13 + paddd 112(%r11),%xmm15 + jnz .Lctr_enc_loop + + jmp .Lctr_enc_done +.align 16 +.Lctr_enc_loop_done: + addq $8,%r14 + movdqu 0(%r12),%xmm7 + pxor %xmm7,%xmm15 + movdqu %xmm15,0(%r13) + cmpq $2,%r14 + jb .Lctr_enc_done + movdqu 16(%r12),%xmm8 + pxor %xmm8,%xmm0 + movdqu %xmm0,16(%r13) + je .Lctr_enc_done + movdqu 32(%r12),%xmm9 + pxor %xmm9,%xmm3 + movdqu %xmm3,32(%r13) + cmpq $4,%r14 + jb .Lctr_enc_done + movdqu 48(%r12),%xmm10 + pxor %xmm10,%xmm5 + movdqu %xmm5,48(%r13) + je .Lctr_enc_done + movdqu 64(%r12),%xmm11 + pxor %xmm11,%xmm2 + movdqu %xmm2,64(%r13) + cmpq $6,%r14 + jb .Lctr_enc_done + movdqu 80(%r12),%xmm12 + pxor %xmm12,%xmm6 + movdqu %xmm6,80(%r13) + je .Lctr_enc_done + movdqu 96(%r12),%xmm13 + pxor %xmm13,%xmm1 + movdqu %xmm1,96(%r13) + jmp .Lctr_enc_done + +.align 16 +.Lctr_enc_short: + leaq 
32(%rbp),%rdi + leaq 48(%rbp),%rsi + leaq (%r15),%rdx + call asm_AES_encrypt + movdqu (%r12),%xmm0 + leaq 16(%r12),%r12 + movl 44(%rbp),%eax + bswapl %eax + pxor 48(%rbp),%xmm0 + incl %eax + movdqu %xmm0,(%r13) + bswapl %eax + leaq 16(%r13),%r13 + movl %eax,44(%rsp) + decq %r14 + jnz .Lctr_enc_short + +.Lctr_enc_done: + leaq (%rsp),%rax + pxor %xmm0,%xmm0 +.Lctr_enc_bzero: + movdqa %xmm0,0(%rax) + movdqa %xmm0,16(%rax) + leaq 32(%rax),%rax + cmpq %rax,%rbp + ja .Lctr_enc_bzero + + leaq (%rbp),%rsp + movq 72(%rsp),%r15 + movq 80(%rsp),%r14 + movq 88(%rsp),%r13 + movq 96(%rsp),%r12 + movq 104(%rsp),%rbx + movq 112(%rsp),%rax + leaq 120(%rsp),%rsp + movq %rax,%rbp +.Lctr_enc_epilogue: + .byte 0xf3,0xc3 +.size bsaes_ctr32_encrypt_blocks,.-bsaes_ctr32_encrypt_blocks +.globl bsaes_xts_encrypt +.type bsaes_xts_encrypt,@function +.align 16 +bsaes_xts_encrypt: + movq %rsp,%rax +.Lxts_enc_prologue: + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + leaq -72(%rsp),%rsp + movq %rsp,%rbp + movq %rdi,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %rcx,%r15 + + leaq (%r9),%rdi + leaq 32(%rbp),%rsi + leaq (%r8),%rdx + call asm_AES_encrypt + + movl 240(%r15),%eax + movq %r14,%rbx + + movl %eax,%edx + shlq $7,%rax + subq $96,%rax + subq %rax,%rsp + + movq %rsp,%rax + movq %r15,%rcx + movl %edx,%r10d + call _bsaes_key_convert + pxor %xmm6,%xmm7 + movdqa %xmm7,(%rax) + + andq $-16,%r14 + subq $128,%rsp + movdqa 32(%rbp),%xmm6 + + pxor %xmm14,%xmm14 + movdqa .Lxts_magic(%rip),%xmm12 + pcmpgtd %xmm6,%xmm14 + + subq $128,%r14 + jc .Lxts_enc_short + jmp .Lxts_enc_loop + +.align 16 +.Lxts_enc_loop: + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm15 + movdqa %xmm6,0(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm0 + movdqa %xmm6,16(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 0(%r12),%xmm7 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm1 + movdqa %xmm6,32(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm15 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm2 + movdqa %xmm6,48(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm0 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm3 + movdqa %xmm6,64(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm4 + movdqa %xmm6,80(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 64(%r12),%xmm11 + pxor %xmm10,%xmm2 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm5 + movdqa %xmm6,96(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 80(%r12),%xmm12 + pxor %xmm11,%xmm3 + movdqu 96(%r12),%xmm13 + pxor %xmm12,%xmm4 + movdqu 112(%r12),%xmm14 + leaq 128(%r12),%r12 + movdqa %xmm6,112(%rsp) + pxor %xmm13,%xmm5 + leaq 128(%rsp),%rax + pxor %xmm14,%xmm6 + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm5 + movdqu %xmm3,32(%r13) + pxor 64(%rsp),%xmm2 + movdqu %xmm5,48(%r13) + pxor 
80(%rsp),%xmm6 + movdqu %xmm2,64(%r13) + pxor 96(%rsp),%xmm1 + movdqu %xmm6,80(%r13) + pxor 112(%rsp),%xmm4 + movdqu %xmm1,96(%r13) + movdqu %xmm4,112(%r13) + leaq 128(%r13),%r13 + + movdqa 112(%rsp),%xmm6 + pxor %xmm14,%xmm14 + movdqa .Lxts_magic(%rip),%xmm12 + pcmpgtd %xmm6,%xmm14 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + + subq $128,%r14 + jnc .Lxts_enc_loop + +.Lxts_enc_short: + addq $128,%r14 + jz .Lxts_enc_done + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm15 + movdqa %xmm6,0(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm0 + movdqa %xmm6,16(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 0(%r12),%xmm7 + cmpq $16,%r14 + je .Lxts_enc_1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm1 + movdqa %xmm6,32(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 16(%r12),%xmm8 + cmpq $32,%r14 + je .Lxts_enc_2 + pxor %xmm7,%xmm15 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm2 + movdqa %xmm6,48(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 32(%r12),%xmm9 + cmpq $48,%r14 + je .Lxts_enc_3 + pxor %xmm8,%xmm0 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm3 + movdqa %xmm6,64(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 48(%r12),%xmm10 + cmpq $64,%r14 + je .Lxts_enc_4 + pxor %xmm9,%xmm1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm4 + movdqa %xmm6,80(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 64(%r12),%xmm11 + cmpq $80,%r14 + je .Lxts_enc_5 + pxor %xmm10,%xmm2 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm5 + movdqa %xmm6,96(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 80(%r12),%xmm12 + cmpq $96,%r14 + je .Lxts_enc_6 + pxor %xmm11,%xmm3 + movdqu 96(%r12),%xmm13 + pxor %xmm12,%xmm4 + movdqa %xmm6,112(%rsp) + leaq 112(%r12),%r12 + pxor %xmm13,%xmm5 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm5 + movdqu %xmm3,32(%r13) + pxor 64(%rsp),%xmm2 + movdqu %xmm5,48(%r13) + pxor 80(%rsp),%xmm6 + movdqu %xmm2,64(%r13) + pxor 96(%rsp),%xmm1 + movdqu %xmm6,80(%r13) + movdqu %xmm1,96(%r13) + leaq 112(%r13),%r13 + + movdqa 112(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_6: + pxor %xmm11,%xmm3 + leaq 96(%r12),%r12 + pxor %xmm12,%xmm4 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm5 + movdqu %xmm3,32(%r13) + pxor 64(%rsp),%xmm2 + movdqu %xmm5,48(%r13) + pxor 80(%rsp),%xmm6 + movdqu %xmm2,64(%r13) + movdqu %xmm6,80(%r13) + leaq 96(%r13),%r13 + + movdqa 96(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_5: + pxor %xmm10,%xmm2 + leaq 80(%r12),%r12 + pxor %xmm11,%xmm3 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm5 + 
movdqu %xmm3,32(%r13) + pxor 64(%rsp),%xmm2 + movdqu %xmm5,48(%r13) + movdqu %xmm2,64(%r13) + leaq 80(%r13),%r13 + + movdqa 80(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_4: + pxor %xmm9,%xmm1 + leaq 64(%r12),%r12 + pxor %xmm10,%xmm2 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm5 + movdqu %xmm3,32(%r13) + movdqu %xmm5,48(%r13) + leaq 64(%r13),%r13 + + movdqa 64(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_3: + pxor %xmm8,%xmm0 + leaq 48(%r12),%r12 + pxor %xmm9,%xmm1 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm3 + movdqu %xmm0,16(%r13) + movdqu %xmm3,32(%r13) + leaq 48(%r13),%r13 + + movdqa 48(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_2: + pxor %xmm7,%xmm15 + leaq 32(%r12),%r12 + pxor %xmm8,%xmm0 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_encrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + leaq 32(%r13),%r13 + + movdqa 32(%rsp),%xmm6 + jmp .Lxts_enc_done +.align 16 +.Lxts_enc_1: + pxor %xmm15,%xmm7 + leaq 16(%r12),%r12 + movdqa %xmm7,32(%rbp) + leaq 32(%rbp),%rdi + leaq 32(%rbp),%rsi + leaq (%r15),%rdx + call asm_AES_encrypt + pxor 32(%rbp),%xmm15 + + + + + + movdqu %xmm15,0(%r13) + leaq 16(%r13),%r13 + + movdqa 16(%rsp),%xmm6 + +.Lxts_enc_done: + andl $15,%ebx + jz .Lxts_enc_ret + movq %r13,%rdx + +.Lxts_enc_steal: + movzbl (%r12),%eax + movzbl -16(%rdx),%ecx + leaq 1(%r12),%r12 + movb %al,-16(%rdx) + movb %cl,0(%rdx) + leaq 1(%rdx),%rdx + subl $1,%ebx + jnz .Lxts_enc_steal + + movdqu -16(%r13),%xmm15 + leaq 32(%rbp),%rdi + pxor %xmm6,%xmm15 + leaq 32(%rbp),%rsi + movdqa %xmm15,32(%rbp) + leaq (%r15),%rdx + call asm_AES_encrypt + pxor 32(%rbp),%xmm6 + movdqu %xmm6,-16(%r13) + +.Lxts_enc_ret: + leaq (%rsp),%rax + pxor %xmm0,%xmm0 +.Lxts_enc_bzero: + movdqa %xmm0,0(%rax) + movdqa %xmm0,16(%rax) + leaq 32(%rax),%rax + cmpq %rax,%rbp + ja .Lxts_enc_bzero + + leaq (%rbp),%rsp + movq 72(%rsp),%r15 + movq 80(%rsp),%r14 + movq 88(%rsp),%r13 + movq 96(%rsp),%r12 + movq 104(%rsp),%rbx + movq 112(%rsp),%rax + leaq 120(%rsp),%rsp + movq %rax,%rbp +.Lxts_enc_epilogue: + .byte 0xf3,0xc3 +.size bsaes_xts_encrypt,.-bsaes_xts_encrypt + +.globl bsaes_xts_decrypt +.type bsaes_xts_decrypt,@function +.align 16 +bsaes_xts_decrypt: + movq %rsp,%rax +.Lxts_dec_prologue: + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + leaq -72(%rsp),%rsp + movq %rsp,%rbp + movq %rdi,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %rcx,%r15 + + leaq (%r9),%rdi + leaq 32(%rbp),%rsi + leaq (%r8),%rdx + call asm_AES_encrypt + + movl 240(%r15),%eax + movq %r14,%rbx + + movl %eax,%edx + shlq $7,%rax + subq $96,%rax + subq %rax,%rsp + + movq %rsp,%rax + movq %r15,%rcx + movl %edx,%r10d + call _bsaes_key_convert + pxor (%rsp),%xmm7 + movdqa %xmm6,(%rax) + movdqa %xmm7,(%rsp) + + xorl %eax,%eax + andq $-16,%r14 + testl $15,%ebx + setnz %al + shlq $4,%rax + subq %rax,%r14 + + subq $128,%rsp + movdqa 32(%rbp),%xmm6 + + pxor %xmm14,%xmm14 + movdqa .Lxts_magic(%rip),%xmm12 + pcmpgtd %xmm6,%xmm14 + + subq $128,%r14 + jc .Lxts_dec_short + jmp .Lxts_dec_loop + +.align 16 +.Lxts_dec_loop: + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm15 + movdqa %xmm6,0(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor 
%xmm13,%xmm6 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm0 + movdqa %xmm6,16(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 0(%r12),%xmm7 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm1 + movdqa %xmm6,32(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 16(%r12),%xmm8 + pxor %xmm7,%xmm15 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm2 + movdqa %xmm6,48(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 32(%r12),%xmm9 + pxor %xmm8,%xmm0 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm3 + movdqa %xmm6,64(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 48(%r12),%xmm10 + pxor %xmm9,%xmm1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm4 + movdqa %xmm6,80(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 64(%r12),%xmm11 + pxor %xmm10,%xmm2 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm5 + movdqa %xmm6,96(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 80(%r12),%xmm12 + pxor %xmm11,%xmm3 + movdqu 96(%r12),%xmm13 + pxor %xmm12,%xmm4 + movdqu 112(%r12),%xmm14 + leaq 128(%r12),%r12 + movdqa %xmm6,112(%rsp) + pxor %xmm13,%xmm5 + leaq 128(%rsp),%rax + pxor %xmm14,%xmm6 + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm3 + movdqu %xmm5,32(%r13) + pxor 64(%rsp),%xmm1 + movdqu %xmm3,48(%r13) + pxor 80(%rsp),%xmm6 + movdqu %xmm1,64(%r13) + pxor 96(%rsp),%xmm2 + movdqu %xmm6,80(%r13) + pxor 112(%rsp),%xmm4 + movdqu %xmm2,96(%r13) + movdqu %xmm4,112(%r13) + leaq 128(%r13),%r13 + + movdqa 112(%rsp),%xmm6 + pxor %xmm14,%xmm14 + movdqa .Lxts_magic(%rip),%xmm12 + pcmpgtd %xmm6,%xmm14 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + + subq $128,%r14 + jnc .Lxts_dec_loop + +.Lxts_dec_short: + addq $128,%r14 + jz .Lxts_dec_done + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm15 + movdqa %xmm6,0(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm0 + movdqa %xmm6,16(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 0(%r12),%xmm7 + cmpq $16,%r14 + je .Lxts_dec_1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm1 + movdqa %xmm6,32(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 16(%r12),%xmm8 + cmpq $32,%r14 + je .Lxts_dec_2 + pxor %xmm7,%xmm15 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm2 + movdqa %xmm6,48(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 32(%r12),%xmm9 + cmpq $48,%r14 + je .Lxts_dec_3 + pxor %xmm8,%xmm0 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm3 + movdqa %xmm6,64(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 48(%r12),%xmm10 + cmpq $64,%r14 + je .Lxts_dec_4 + pxor %xmm9,%xmm1 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm4 + movdqa %xmm6,80(%rsp) + paddq %xmm6,%xmm6 
+ pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 64(%r12),%xmm11 + cmpq $80,%r14 + je .Lxts_dec_5 + pxor %xmm10,%xmm2 + pshufd $19,%xmm14,%xmm13 + pxor %xmm14,%xmm14 + movdqa %xmm6,%xmm5 + movdqa %xmm6,96(%rsp) + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + pcmpgtd %xmm6,%xmm14 + pxor %xmm13,%xmm6 + movdqu 80(%r12),%xmm12 + cmpq $96,%r14 + je .Lxts_dec_6 + pxor %xmm11,%xmm3 + movdqu 96(%r12),%xmm13 + pxor %xmm12,%xmm4 + movdqa %xmm6,112(%rsp) + leaq 112(%r12),%r12 + pxor %xmm13,%xmm5 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm3 + movdqu %xmm5,32(%r13) + pxor 64(%rsp),%xmm1 + movdqu %xmm3,48(%r13) + pxor 80(%rsp),%xmm6 + movdqu %xmm1,64(%r13) + pxor 96(%rsp),%xmm2 + movdqu %xmm6,80(%r13) + movdqu %xmm2,96(%r13) + leaq 112(%r13),%r13 + + movdqa 112(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_6: + pxor %xmm11,%xmm3 + leaq 96(%r12),%r12 + pxor %xmm12,%xmm4 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm3 + movdqu %xmm5,32(%r13) + pxor 64(%rsp),%xmm1 + movdqu %xmm3,48(%r13) + pxor 80(%rsp),%xmm6 + movdqu %xmm1,64(%r13) + movdqu %xmm6,80(%r13) + leaq 96(%r13),%r13 + + movdqa 96(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_5: + pxor %xmm10,%xmm2 + leaq 80(%r12),%r12 + pxor %xmm11,%xmm3 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm3 + movdqu %xmm5,32(%r13) + pxor 64(%rsp),%xmm1 + movdqu %xmm3,48(%r13) + movdqu %xmm1,64(%r13) + leaq 80(%r13),%r13 + + movdqa 80(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_4: + pxor %xmm9,%xmm1 + leaq 64(%r12),%r12 + pxor %xmm10,%xmm2 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + pxor 48(%rsp),%xmm3 + movdqu %xmm5,32(%r13) + movdqu %xmm3,48(%r13) + leaq 64(%r13),%r13 + + movdqa 64(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_3: + pxor %xmm8,%xmm0 + leaq 48(%r12),%r12 + pxor %xmm9,%xmm1 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + pxor 32(%rsp),%xmm5 + movdqu %xmm0,16(%r13) + movdqu %xmm5,32(%r13) + leaq 48(%r13),%r13 + + movdqa 48(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_2: + pxor %xmm7,%xmm15 + leaq 32(%r12),%r12 + pxor %xmm8,%xmm0 + leaq 128(%rsp),%rax + movl %edx,%r10d + + call _bsaes_decrypt8 + + pxor 0(%rsp),%xmm15 + pxor 16(%rsp),%xmm0 + movdqu %xmm15,0(%r13) + movdqu %xmm0,16(%r13) + leaq 32(%r13),%r13 + + movdqa 32(%rsp),%xmm6 + jmp .Lxts_dec_done +.align 16 +.Lxts_dec_1: + pxor %xmm15,%xmm7 + leaq 16(%r12),%r12 + movdqa %xmm7,32(%rbp) + leaq 32(%rbp),%rdi + leaq 32(%rbp),%rsi + leaq (%r15),%rdx + call asm_AES_decrypt + pxor 32(%rbp),%xmm15 + + + + + + movdqu %xmm15,0(%r13) + leaq 16(%r13),%r13 + + movdqa 16(%rsp),%xmm6 + +.Lxts_dec_done: + andl $15,%ebx + jz .Lxts_dec_ret + + pxor %xmm14,%xmm14 + movdqa .Lxts_magic(%rip),%xmm12 + pcmpgtd %xmm6,%xmm14 + pshufd $19,%xmm14,%xmm13 + movdqa %xmm6,%xmm5 + paddq %xmm6,%xmm6 + pand %xmm12,%xmm13 + movdqu (%r12),%xmm15 + pxor %xmm13,%xmm6 + + leaq 
32(%rbp),%rdi + pxor %xmm6,%xmm15 + leaq 32(%rbp),%rsi + movdqa %xmm15,32(%rbp) + leaq (%r15),%rdx + call asm_AES_decrypt + pxor 32(%rbp),%xmm6 + movq %r13,%rdx + movdqu %xmm6,(%r13) + +.Lxts_dec_steal: + movzbl 16(%r12),%eax + movzbl (%rdx),%ecx + leaq 1(%r12),%r12 + movb %al,(%rdx) + movb %cl,16(%rdx) + leaq 1(%rdx),%rdx + subl $1,%ebx + jnz .Lxts_dec_steal + + movdqu (%r13),%xmm15 + leaq 32(%rbp),%rdi + pxor %xmm5,%xmm15 + leaq 32(%rbp),%rsi + movdqa %xmm15,32(%rbp) + leaq (%r15),%rdx + call asm_AES_decrypt + pxor 32(%rbp),%xmm5 + movdqu %xmm5,(%r13) + +.Lxts_dec_ret: + leaq (%rsp),%rax + pxor %xmm0,%xmm0 +.Lxts_dec_bzero: + movdqa %xmm0,0(%rax) + movdqa %xmm0,16(%rax) + leaq 32(%rax),%rax + cmpq %rax,%rbp + ja .Lxts_dec_bzero + + leaq (%rbp),%rsp + movq 72(%rsp),%r15 + movq 80(%rsp),%r14 + movq 88(%rsp),%r13 + movq 96(%rsp),%r12 + movq 104(%rsp),%rbx + movq 112(%rsp),%rax + leaq 120(%rsp),%rsp + movq %rax,%rbp +.Lxts_dec_epilogue: + .byte 0xf3,0xc3 +.size bsaes_xts_decrypt,.-bsaes_xts_decrypt +.type _bsaes_const,@object +.align 64 +_bsaes_const: +.LM0ISR: +.quad 0x0a0e0206070b0f03, 0x0004080c0d010509 +.LISRM0: +.quad 0x01040b0e0205080f, 0x0306090c00070a0d +.LISR: +.quad 0x0504070602010003, 0x0f0e0d0c080b0a09 +.LBS0: +.quad 0x5555555555555555, 0x5555555555555555 +.LBS1: +.quad 0x3333333333333333, 0x3333333333333333 +.LBS2: +.quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f +.LSR: +.quad 0x0504070600030201, 0x0f0e0d0c0a09080b +.LSRM0: +.quad 0x0304090e00050a0f, 0x01060b0c0207080d +.LM0: +.quad 0x02060a0e03070b0f, 0x0004080c0105090d +.LM0SR: +.quad 0x0a0e02060f03070b, 0x0004080c05090d01 +.LNOT: +.quad 0xffffffffffffffff, 0xffffffffffffffff +.L63: +.quad 0x6363636363636363, 0x6363636363636363 +.LSWPUP: +.quad 0x0706050403020100, 0x0c0d0e0f0b0a0908 +.LSWPUPM0SR: +.quad 0x0a0d02060c03070b, 0x0004080f05090e01 +.LADD1: +.quad 0x0000000000000000, 0x0000000100000000 +.LADD2: +.quad 0x0000000000000000, 0x0000000200000000 +.LADD3: +.quad 0x0000000000000000, 0x0000000300000000 +.LADD4: +.quad 0x0000000000000000, 0x0000000400000000 +.LADD5: +.quad 0x0000000000000000, 0x0000000500000000 +.LADD6: +.quad 0x0000000000000000, 0x0000000600000000 +.LADD7: +.quad 0x0000000000000000, 0x0000000700000000 +.LADD8: +.quad 0x0000000000000000, 0x0000000800000000 +.Lxts_magic: +.long 0x87,0,1,0 +.byte 66,105,116,45,115,108,105,99,101,100,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,69,109,105,108,105,97,32,75,195,164,115,112,101,114,44,32,80,101,116,101,114,32,83,99,104,119,97,98,101,44,32,65,110,100,121,32,80,111,108,121,97,107,111,118,0 +.align 64 +.size _bsaes_const,.-_bsaes_const diff --git a/secure/lib/libcrypto/asm/cmll-x86.s b/secure/lib/libcrypto/asm/cmll-x86.s index a896314cfe..5c87910e34 100644 --- a/secure/lib/libcrypto/asm/cmll-x86.s +++ b/secure/lib/libcrypto/asm/cmll-x86.s @@ -1537,11 +1537,11 @@ Camellia_Ekeygen: popl %ebp ret .size Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin -.globl Camellia_set_key -.type Camellia_set_key,@function +.globl private_Camellia_set_key +.type private_Camellia_set_key,@function .align 16 -Camellia_set_key: -.L_Camellia_set_key_begin: +private_Camellia_set_key: +.L_private_Camellia_set_key_begin: pushl %ebx movl 8(%esp),%ecx movl 12(%esp),%ebx @@ -1571,7 +1571,7 @@ Camellia_set_key: .L014done: popl %ebx ret -.size Camellia_set_key,.-.L_Camellia_set_key_begin +.size private_Camellia_set_key,.-.L_private_Camellia_set_key_begin .align 64 .LCamellia_SIGMA: .long 
2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0 diff --git a/secure/lib/libcrypto/asm/ghash-x86.s b/secure/lib/libcrypto/asm/ghash-x86.s new file mode 100644 index 0000000000..50473201ee --- /dev/null +++ b/secure/lib/libcrypto/asm/ghash-x86.s @@ -0,0 +1,1269 @@ +.file "ghash-x86.s" +.text +.globl gcm_gmult_4bit_x86 +.type gcm_gmult_4bit_x86,@function +.align 16 +gcm_gmult_4bit_x86: +.L_gcm_gmult_4bit_x86_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + subl $84,%esp + movl 104(%esp),%edi + movl 108(%esp),%esi + movl (%edi),%ebp + movl 4(%edi),%edx + movl 8(%edi),%ecx + movl 12(%edi),%ebx + movl $0,16(%esp) + movl $471859200,20(%esp) + movl $943718400,24(%esp) + movl $610271232,28(%esp) + movl $1887436800,32(%esp) + movl $1822425088,36(%esp) + movl $1220542464,40(%esp) + movl $1423966208,44(%esp) + movl $3774873600,48(%esp) + movl $4246732800,52(%esp) + movl $3644850176,56(%esp) + movl $3311403008,60(%esp) + movl $2441084928,64(%esp) + movl $2376073216,68(%esp) + movl $2847932416,72(%esp) + movl $3051356160,76(%esp) + movl %ebp,(%esp) + movl %edx,4(%esp) + movl %ecx,8(%esp) + movl %ebx,12(%esp) + shrl $20,%ebx + andl $240,%ebx + movl 4(%esi,%ebx,1),%ebp + movl (%esi,%ebx,1),%edx + movl 12(%esi,%ebx,1),%ecx + movl 8(%esi,%ebx,1),%ebx + xorl %eax,%eax + movl $15,%edi + jmp .L000x86_loop +.align 16 +.L000x86_loop: + movb %bl,%al + shrdl $4,%ecx,%ebx + andb $15,%al + shrdl $4,%edx,%ecx + shrdl $4,%ebp,%edx + shrl $4,%ebp + xorl 16(%esp,%eax,4),%ebp + movb (%esp,%edi,1),%al + andb $240,%al + xorl 8(%esi,%eax,1),%ebx + xorl 12(%esi,%eax,1),%ecx + xorl (%esi,%eax,1),%edx + xorl 4(%esi,%eax,1),%ebp + decl %edi + js .L001x86_break + movb %bl,%al + shrdl $4,%ecx,%ebx + andb $15,%al + shrdl $4,%edx,%ecx + shrdl $4,%ebp,%edx + shrl $4,%ebp + xorl 16(%esp,%eax,4),%ebp + movb (%esp,%edi,1),%al + shlb $4,%al + xorl 8(%esi,%eax,1),%ebx + xorl 12(%esi,%eax,1),%ecx + xorl (%esi,%eax,1),%edx + xorl 4(%esi,%eax,1),%ebp + jmp .L000x86_loop +.align 16 +.L001x86_break: + bswap %ebx + bswap %ecx + bswap %edx + bswap %ebp + movl 104(%esp),%edi + movl %ebx,12(%edi) + movl %ecx,8(%edi) + movl %edx,4(%edi) + movl %ebp,(%edi) + addl $84,%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin +.globl gcm_ghash_4bit_x86 +.type gcm_ghash_4bit_x86,@function +.align 16 +gcm_ghash_4bit_x86: +.L_gcm_ghash_4bit_x86_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + subl $84,%esp + movl 104(%esp),%ebx + movl 108(%esp),%esi + movl 112(%esp),%edi + movl 116(%esp),%ecx + addl %edi,%ecx + movl %ecx,116(%esp) + movl (%ebx),%ebp + movl 4(%ebx),%edx + movl 8(%ebx),%ecx + movl 12(%ebx),%ebx + movl $0,16(%esp) + movl $471859200,20(%esp) + movl $943718400,24(%esp) + movl $610271232,28(%esp) + movl $1887436800,32(%esp) + movl $1822425088,36(%esp) + movl $1220542464,40(%esp) + movl $1423966208,44(%esp) + movl $3774873600,48(%esp) + movl $4246732800,52(%esp) + movl $3644850176,56(%esp) + movl $3311403008,60(%esp) + movl $2441084928,64(%esp) + movl $2376073216,68(%esp) + movl $2847932416,72(%esp) + movl $3051356160,76(%esp) +.align 16 +.L002x86_outer_loop: + xorl 12(%edi),%ebx + xorl 8(%edi),%ecx + xorl 4(%edi),%edx + xorl (%edi),%ebp + movl %ebx,12(%esp) + movl %ecx,8(%esp) + movl %edx,4(%esp) + movl %ebp,(%esp) + shrl $20,%ebx + andl $240,%ebx + movl 4(%esi,%ebx,1),%ebp + movl (%esi,%ebx,1),%edx + movl 12(%esi,%ebx,1),%ecx + movl 8(%esi,%ebx,1),%ebx + xorl %eax,%eax + 
movl $15,%edi + jmp .L003x86_loop +.align 16 +.L003x86_loop: + movb %bl,%al + shrdl $4,%ecx,%ebx + andb $15,%al + shrdl $4,%edx,%ecx + shrdl $4,%ebp,%edx + shrl $4,%ebp + xorl 16(%esp,%eax,4),%ebp + movb (%esp,%edi,1),%al + andb $240,%al + xorl 8(%esi,%eax,1),%ebx + xorl 12(%esi,%eax,1),%ecx + xorl (%esi,%eax,1),%edx + xorl 4(%esi,%eax,1),%ebp + decl %edi + js .L004x86_break + movb %bl,%al + shrdl $4,%ecx,%ebx + andb $15,%al + shrdl $4,%edx,%ecx + shrdl $4,%ebp,%edx + shrl $4,%ebp + xorl 16(%esp,%eax,4),%ebp + movb (%esp,%edi,1),%al + shlb $4,%al + xorl 8(%esi,%eax,1),%ebx + xorl 12(%esi,%eax,1),%ecx + xorl (%esi,%eax,1),%edx + xorl 4(%esi,%eax,1),%ebp + jmp .L003x86_loop +.align 16 +.L004x86_break: + bswap %ebx + bswap %ecx + bswap %edx + bswap %ebp + movl 112(%esp),%edi + leal 16(%edi),%edi + cmpl 116(%esp),%edi + movl %edi,112(%esp) + jb .L002x86_outer_loop + movl 104(%esp),%edi + movl %ebx,12(%edi) + movl %ecx,8(%edi) + movl %edx,4(%edi) + movl %ebp,(%edi) + addl $84,%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin +.globl gcm_gmult_4bit_mmx +.type gcm_gmult_4bit_mmx,@function +.align 16 +gcm_gmult_4bit_mmx: +.L_gcm_gmult_4bit_mmx_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%edi + movl 24(%esp),%esi + call .L005pic_point +.L005pic_point: + popl %eax + leal .Lrem_4bit-.L005pic_point(%eax),%eax + movzbl 15(%edi),%ebx + xorl %ecx,%ecx + movl %ebx,%edx + movb %dl,%cl + movl $14,%ebp + shlb $4,%cl + andl $240,%edx + movq 8(%esi,%ecx,1),%mm0 + movq (%esi,%ecx,1),%mm1 + movd %mm0,%ebx + jmp .L006mmx_loop +.align 16 +.L006mmx_loop: + psrlq $4,%mm0 + andl $15,%ebx + movq %mm1,%mm2 + psrlq $4,%mm1 + pxor 8(%esi,%edx,1),%mm0 + movb (%edi,%ebp,1),%cl + psllq $60,%mm2 + pxor (%eax,%ebx,8),%mm1 + decl %ebp + movd %mm0,%ebx + pxor (%esi,%edx,1),%mm1 + movl %ecx,%edx + pxor %mm2,%mm0 + js .L007mmx_break + shlb $4,%cl + andl $15,%ebx + psrlq $4,%mm0 + andl $240,%edx + movq %mm1,%mm2 + psrlq $4,%mm1 + pxor 8(%esi,%ecx,1),%mm0 + psllq $60,%mm2 + pxor (%eax,%ebx,8),%mm1 + movd %mm0,%ebx + pxor (%esi,%ecx,1),%mm1 + pxor %mm2,%mm0 + jmp .L006mmx_loop +.align 16 +.L007mmx_break: + shlb $4,%cl + andl $15,%ebx + psrlq $4,%mm0 + andl $240,%edx + movq %mm1,%mm2 + psrlq $4,%mm1 + pxor 8(%esi,%ecx,1),%mm0 + psllq $60,%mm2 + pxor (%eax,%ebx,8),%mm1 + movd %mm0,%ebx + pxor (%esi,%ecx,1),%mm1 + pxor %mm2,%mm0 + psrlq $4,%mm0 + andl $15,%ebx + movq %mm1,%mm2 + psrlq $4,%mm1 + pxor 8(%esi,%edx,1),%mm0 + psllq $60,%mm2 + pxor (%eax,%ebx,8),%mm1 + movd %mm0,%ebx + pxor (%esi,%edx,1),%mm1 + pxor %mm2,%mm0 + psrlq $32,%mm0 + movd %mm1,%edx + psrlq $32,%mm1 + movd %mm0,%ecx + movd %mm1,%ebp + bswap %ebx + bswap %edx + bswap %ecx + bswap %ebp + emms + movl %ebx,12(%edi) + movl %edx,4(%edi) + movl %ecx,8(%edi) + movl %ebp,(%edi) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin +.globl gcm_ghash_4bit_mmx +.type gcm_ghash_4bit_mmx,@function +.align 16 +gcm_ghash_4bit_mmx: +.L_gcm_ghash_4bit_mmx_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%eax + movl 24(%esp),%ebx + movl 28(%esp),%ecx + movl 32(%esp),%edx + movl %esp,%ebp + call .L008pic_point +.L008pic_point: + popl %esi + leal .Lrem_8bit-.L008pic_point(%esi),%esi + subl $544,%esp + andl $-64,%esp + subl $16,%esp + addl %ecx,%edx + movl %eax,544(%esp) + movl %edx,552(%esp) + movl %ebp,556(%esp) + addl $128,%ebx + leal 144(%esp),%edi + leal 400(%esp),%ebp + movl -120(%ebx),%edx + movq 
-120(%ebx),%mm0 + movq -128(%ebx),%mm3 + shll $4,%edx + movb %dl,(%esp) + movl -104(%ebx),%edx + movq -104(%ebx),%mm2 + movq -112(%ebx),%mm5 + movq %mm0,-128(%edi) + psrlq $4,%mm0 + movq %mm3,(%edi) + movq %mm3,%mm7 + psrlq $4,%mm3 + shll $4,%edx + movb %dl,1(%esp) + movl -88(%ebx),%edx + movq -88(%ebx),%mm1 + psllq $60,%mm7 + movq -96(%ebx),%mm4 + por %mm7,%mm0 + movq %mm2,-120(%edi) + psrlq $4,%mm2 + movq %mm5,8(%edi) + movq %mm5,%mm6 + movq %mm0,-128(%ebp) + psrlq $4,%mm5 + movq %mm3,(%ebp) + shll $4,%edx + movb %dl,2(%esp) + movl -72(%ebx),%edx + movq -72(%ebx),%mm0 + psllq $60,%mm6 + movq -80(%ebx),%mm3 + por %mm6,%mm2 + movq %mm1,-112(%edi) + psrlq $4,%mm1 + movq %mm4,16(%edi) + movq %mm4,%mm7 + movq %mm2,-120(%ebp) + psrlq $4,%mm4 + movq %mm5,8(%ebp) + shll $4,%edx + movb %dl,3(%esp) + movl -56(%ebx),%edx + movq -56(%ebx),%mm2 + psllq $60,%mm7 + movq -64(%ebx),%mm5 + por %mm7,%mm1 + movq %mm0,-104(%edi) + psrlq $4,%mm0 + movq %mm3,24(%edi) + movq %mm3,%mm6 + movq %mm1,-112(%ebp) + psrlq $4,%mm3 + movq %mm4,16(%ebp) + shll $4,%edx + movb %dl,4(%esp) + movl -40(%ebx),%edx + movq -40(%ebx),%mm1 + psllq $60,%mm6 + movq -48(%ebx),%mm4 + por %mm6,%mm0 + movq %mm2,-96(%edi) + psrlq $4,%mm2 + movq %mm5,32(%edi) + movq %mm5,%mm7 + movq %mm0,-104(%ebp) + psrlq $4,%mm5 + movq %mm3,24(%ebp) + shll $4,%edx + movb %dl,5(%esp) + movl -24(%ebx),%edx + movq -24(%ebx),%mm0 + psllq $60,%mm7 + movq -32(%ebx),%mm3 + por %mm7,%mm2 + movq %mm1,-88(%edi) + psrlq $4,%mm1 + movq %mm4,40(%edi) + movq %mm4,%mm6 + movq %mm2,-96(%ebp) + psrlq $4,%mm4 + movq %mm5,32(%ebp) + shll $4,%edx + movb %dl,6(%esp) + movl -8(%ebx),%edx + movq -8(%ebx),%mm2 + psllq $60,%mm6 + movq -16(%ebx),%mm5 + por %mm6,%mm1 + movq %mm0,-80(%edi) + psrlq $4,%mm0 + movq %mm3,48(%edi) + movq %mm3,%mm7 + movq %mm1,-88(%ebp) + psrlq $4,%mm3 + movq %mm4,40(%ebp) + shll $4,%edx + movb %dl,7(%esp) + movl 8(%ebx),%edx + movq 8(%ebx),%mm1 + psllq $60,%mm7 + movq (%ebx),%mm4 + por %mm7,%mm0 + movq %mm2,-72(%edi) + psrlq $4,%mm2 + movq %mm5,56(%edi) + movq %mm5,%mm6 + movq %mm0,-80(%ebp) + psrlq $4,%mm5 + movq %mm3,48(%ebp) + shll $4,%edx + movb %dl,8(%esp) + movl 24(%ebx),%edx + movq 24(%ebx),%mm0 + psllq $60,%mm6 + movq 16(%ebx),%mm3 + por %mm6,%mm2 + movq %mm1,-64(%edi) + psrlq $4,%mm1 + movq %mm4,64(%edi) + movq %mm4,%mm7 + movq %mm2,-72(%ebp) + psrlq $4,%mm4 + movq %mm5,56(%ebp) + shll $4,%edx + movb %dl,9(%esp) + movl 40(%ebx),%edx + movq 40(%ebx),%mm2 + psllq $60,%mm7 + movq 32(%ebx),%mm5 + por %mm7,%mm1 + movq %mm0,-56(%edi) + psrlq $4,%mm0 + movq %mm3,72(%edi) + movq %mm3,%mm6 + movq %mm1,-64(%ebp) + psrlq $4,%mm3 + movq %mm4,64(%ebp) + shll $4,%edx + movb %dl,10(%esp) + movl 56(%ebx),%edx + movq 56(%ebx),%mm1 + psllq $60,%mm6 + movq 48(%ebx),%mm4 + por %mm6,%mm0 + movq %mm2,-48(%edi) + psrlq $4,%mm2 + movq %mm5,80(%edi) + movq %mm5,%mm7 + movq %mm0,-56(%ebp) + psrlq $4,%mm5 + movq %mm3,72(%ebp) + shll $4,%edx + movb %dl,11(%esp) + movl 72(%ebx),%edx + movq 72(%ebx),%mm0 + psllq $60,%mm7 + movq 64(%ebx),%mm3 + por %mm7,%mm2 + movq %mm1,-40(%edi) + psrlq $4,%mm1 + movq %mm4,88(%edi) + movq %mm4,%mm6 + movq %mm2,-48(%ebp) + psrlq $4,%mm4 + movq %mm5,80(%ebp) + shll $4,%edx + movb %dl,12(%esp) + movl 88(%ebx),%edx + movq 88(%ebx),%mm2 + psllq $60,%mm6 + movq 80(%ebx),%mm5 + por %mm6,%mm1 + movq %mm0,-32(%edi) + psrlq $4,%mm0 + movq %mm3,96(%edi) + movq %mm3,%mm7 + movq %mm1,-40(%ebp) + psrlq $4,%mm3 + movq %mm4,88(%ebp) + shll $4,%edx + movb %dl,13(%esp) + movl 104(%ebx),%edx + movq 104(%ebx),%mm1 + psllq $60,%mm7 + movq 96(%ebx),%mm4 + por 
%mm7,%mm0 + movq %mm2,-24(%edi) + psrlq $4,%mm2 + movq %mm5,104(%edi) + movq %mm5,%mm6 + movq %mm0,-32(%ebp) + psrlq $4,%mm5 + movq %mm3,96(%ebp) + shll $4,%edx + movb %dl,14(%esp) + movl 120(%ebx),%edx + movq 120(%ebx),%mm0 + psllq $60,%mm6 + movq 112(%ebx),%mm3 + por %mm6,%mm2 + movq %mm1,-16(%edi) + psrlq $4,%mm1 + movq %mm4,112(%edi) + movq %mm4,%mm7 + movq %mm2,-24(%ebp) + psrlq $4,%mm4 + movq %mm5,104(%ebp) + shll $4,%edx + movb %dl,15(%esp) + psllq $60,%mm7 + por %mm7,%mm1 + movq %mm0,-8(%edi) + psrlq $4,%mm0 + movq %mm3,120(%edi) + movq %mm3,%mm6 + movq %mm1,-16(%ebp) + psrlq $4,%mm3 + movq %mm4,112(%ebp) + psllq $60,%mm6 + por %mm6,%mm0 + movq %mm0,-8(%ebp) + movq %mm3,120(%ebp) + movq (%eax),%mm6 + movl 8(%eax),%ebx + movl 12(%eax),%edx +.align 16 +.L009outer: + xorl 12(%ecx),%edx + xorl 8(%ecx),%ebx + pxor (%ecx),%mm6 + leal 16(%ecx),%ecx + movl %ebx,536(%esp) + movq %mm6,528(%esp) + movl %ecx,548(%esp) + xorl %eax,%eax + roll $8,%edx + movb %dl,%al + movl %eax,%ebp + andb $15,%al + shrl $4,%ebp + pxor %mm0,%mm0 + roll $8,%edx + pxor %mm1,%mm1 + pxor %mm2,%mm2 + movq 16(%esp,%eax,8),%mm7 + movq 144(%esp,%eax,8),%mm6 + movb %dl,%al + movd %mm7,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + shrl $4,%edi + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm2 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movl 536(%esp),%edx + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm2,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm1 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm1,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm0 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm0,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm2 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm2,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm1 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movl 532(%esp),%edx + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + 
andb $15,%al + psllq $56,%mm3 + pxor %mm1,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm0 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm0,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm2 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm2,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm1 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm1,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm0 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movl 528(%esp),%edx + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm0,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm2 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm2,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm1 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm1,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm0 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + xorb (%esp,%ebp,1),%bl + movb %dl,%al + movd %mm7,%ecx + movzbl %bl,%ebx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%ebp + psrlq $8,%mm6 + pxor 272(%esp,%edi,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm0,%mm6 + shrl $4,%ebp + pinsrw $2,(%esi,%ebx,2),%mm2 + pxor 16(%esp,%eax,8),%mm7 + roll $8,%edx + pxor 144(%esp,%eax,8),%mm6 + pxor %mm3,%mm7 + pxor 400(%esp,%edi,8),%mm6 + xorb (%esp,%edi,1),%cl + movb %dl,%al + movl 524(%esp),%edx + movd %mm7,%ebx + movzbl %cl,%ecx + psrlq $8,%mm7 + movq %mm6,%mm3 + movl %eax,%edi + psrlq $8,%mm6 + pxor 272(%esp,%ebp,8),%mm7 + andb $15,%al + psllq $56,%mm3 + pxor %mm2,%mm6 + shrl $4,%edi + pinsrw $2,(%esi,%ecx,2),%mm1 + pxor 16(%esp,%eax,8),%mm7 + pxor 144(%esp,%eax,8),%mm6 + xorb (%esp,%ebp,1),%bl + pxor %mm3,%mm7 + pxor 400(%esp,%ebp,8),%mm6 + movzbl %bl,%ebx + pxor %mm2,%mm2 + psllq $4,%mm1 + movd %mm7,%ecx + psrlq $4,%mm7 + movq %mm6,%mm3 + psrlq $4,%mm6 + shll $4,%ecx + pxor 16(%esp,%edi,8),%mm7 + psllq $60,%mm3 + movzbl %cl,%ecx + pxor %mm3,%mm7 + pxor 
144(%esp,%edi,8),%mm6 + pinsrw $2,(%esi,%ebx,2),%mm0 + pxor %mm1,%mm6 + movd %mm7,%edx + pinsrw $3,(%esi,%ecx,2),%mm2 + psllq $12,%mm0 + pxor %mm0,%mm6 + psrlq $32,%mm7 + pxor %mm2,%mm6 + movl 548(%esp),%ecx + movd %mm7,%ebx + movq %mm6,%mm3 + psllw $8,%mm6 + psrlw $8,%mm3 + por %mm3,%mm6 + bswap %edx + pshufw $27,%mm6,%mm6 + bswap %ebx + cmpl 552(%esp),%ecx + jne .L009outer + movl 544(%esp),%eax + movl %edx,12(%eax) + movl %ebx,8(%eax) + movq %mm6,(%eax) + movl 556(%esp),%esp + emms + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin +.globl gcm_init_clmul +.type gcm_init_clmul,@function +.align 16 +gcm_init_clmul: +.L_gcm_init_clmul_begin: + movl 4(%esp),%edx + movl 8(%esp),%eax + call .L010pic +.L010pic: + popl %ecx + leal .Lbswap-.L010pic(%ecx),%ecx + movdqu (%eax),%xmm2 + pshufd $78,%xmm2,%xmm2 + pshufd $255,%xmm2,%xmm4 + movdqa %xmm2,%xmm3 + psllq $1,%xmm2 + pxor %xmm5,%xmm5 + psrlq $63,%xmm3 + pcmpgtd %xmm4,%xmm5 + pslldq $8,%xmm3 + por %xmm3,%xmm2 + pand 16(%ecx),%xmm5 + pxor %xmm5,%xmm2 + movdqa %xmm2,%xmm0 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + xorps %xmm0,%xmm3 + xorps %xmm1,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + movdqu %xmm2,(%edx) + movdqu %xmm0,16(%edx) + ret +.size gcm_init_clmul,.-.L_gcm_init_clmul_begin +.globl gcm_gmult_clmul +.type gcm_gmult_clmul,@function +.align 16 +gcm_gmult_clmul: +.L_gcm_gmult_clmul_begin: + movl 4(%esp),%eax + movl 8(%esp),%edx + call .L011pic +.L011pic: + popl %ecx + leal .Lbswap-.L011pic(%ecx),%ecx + movdqu (%eax),%xmm0 + movdqa (%ecx),%xmm5 + movups (%edx),%xmm2 +.byte 102,15,56,0,197 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + xorps %xmm0,%xmm3 + xorps %xmm1,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,197 + movdqu %xmm0,(%eax) + ret +.size gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin +.globl gcm_ghash_clmul +.type gcm_ghash_clmul,@function +.align 16 +gcm_ghash_clmul: +.L_gcm_ghash_clmul_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%eax + movl 24(%esp),%edx + movl 28(%esp),%esi + movl 32(%esp),%ebx + call .L012pic +.L012pic: + popl %ecx + leal .Lbswap-.L012pic(%ecx),%ecx + movdqu (%eax),%xmm0 + movdqa (%ecx),%xmm5 + movdqu (%edx),%xmm2 +.byte 102,15,56,0,197 + subl $16,%ebx + jz .L013odd_tail + movdqu (%esi),%xmm3 + movdqu 16(%esi),%xmm6 +.byte 102,15,56,0,221 +.byte 102,15,56,0,245 + pxor %xmm3,%xmm0 + movdqa %xmm6,%xmm7 + pshufd 
$78,%xmm6,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm6,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,242,0 +.byte 102,15,58,68,250,17 +.byte 102,15,58,68,220,0 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm7 + pxor %xmm4,%xmm6 + movups 16(%edx),%xmm2 + leal 32(%esi),%esi + subl $32,%ebx + jbe .L014even_tail +.L015mod_loop: + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + xorps %xmm0,%xmm3 + xorps %xmm1,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + movdqu (%esi),%xmm3 + movups (%edx),%xmm2 + pxor %xmm6,%xmm0 + pxor %xmm7,%xmm1 + movdqu 16(%esi),%xmm6 +.byte 102,15,56,0,221 +.byte 102,15,56,0,245 + movdqa %xmm6,%xmm5 + movdqa %xmm6,%xmm7 + pxor %xmm3,%xmm1 + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 +.byte 102,15,58,68,242,0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pshufd $78,%xmm5,%xmm3 + pxor %xmm4,%xmm1 + pxor %xmm5,%xmm3 + pshufd $78,%xmm2,%xmm5 + pxor %xmm2,%xmm5 +.byte 102,15,58,68,250,17 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 +.byte 102,15,58,68,221,0 + movups 16(%edx),%xmm2 + xorps %xmm6,%xmm3 + xorps %xmm7,%xmm3 + movdqa %xmm3,%xmm5 + psrldq $8,%xmm3 + pslldq $8,%xmm5 + pxor %xmm3,%xmm7 + pxor %xmm5,%xmm6 + movdqa (%ecx),%xmm5 + leal 32(%esi),%esi + subl $32,%ebx + ja .L015mod_loop +.L014even_tail: + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + xorps %xmm0,%xmm3 + xorps %xmm1,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + pxor %xmm6,%xmm0 + pxor %xmm7,%xmm1 + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + testl %ebx,%ebx + jnz .L016done + movups (%edx),%xmm2 +.L013odd_tail: + movdqu (%esi),%xmm3 +.byte 102,15,56,0,221 + pxor %xmm3,%xmm0 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + xorps %xmm0,%xmm3 + xorps %xmm1,%xmm3 + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 +.L016done: +.byte 102,15,56,0,197 + movdqu %xmm0,(%eax) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin +.align 64 +.Lbswap: +.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 +.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194 +.align 64 +.Lrem_4bit: +.long 
0,0,0,471859200,0,943718400,0,610271232 +.long 0,1887436800,0,1822425088,0,1220542464,0,1423966208 +.long 0,3774873600,0,4246732800,0,3644850176,0,3311403008 +.long 0,2441084928,0,2376073216,0,2847932416,0,3051356160 +.align 64 +.Lrem_8bit: +.value 0,450,900,582,1800,1738,1164,1358 +.value 3600,4050,3476,3158,2328,2266,2716,2910 +.value 7200,7650,8100,7782,6952,6890,6316,6510 +.value 4656,5106,4532,4214,5432,5370,5820,6014 +.value 14400,14722,15300,14854,16200,16010,15564,15630 +.value 13904,14226,13780,13334,12632,12442,13020,13086 +.value 9312,9634,10212,9766,9064,8874,8428,8494 +.value 10864,11186,10740,10294,11640,11450,12028,12094 +.value 28800,28994,29444,29382,30600,30282,29708,30158 +.value 32400,32594,32020,31958,31128,30810,31260,31710 +.value 27808,28002,28452,28390,27560,27242,26668,27118 +.value 25264,25458,24884,24822,26040,25722,26172,26622 +.value 18624,18690,19268,19078,20424,19978,19532,19854 +.value 18128,18194,17748,17558,16856,16410,16988,17310 +.value 21728,21794,22372,22182,21480,21034,20588,20910 +.value 23280,23346,22900,22710,24056,23610,24188,24510 +.value 57600,57538,57988,58182,58888,59338,58764,58446 +.value 61200,61138,60564,60758,59416,59866,60316,59998 +.value 64800,64738,65188,65382,64040,64490,63916,63598 +.value 62256,62194,61620,61814,62520,62970,63420,63102 +.value 55616,55426,56004,56070,56904,57226,56780,56334 +.value 55120,54930,54484,54550,53336,53658,54236,53790 +.value 50528,50338,50916,50982,49768,50090,49644,49198 +.value 52080,51890,51444,51510,52344,52666,53244,52798 +.value 37248,36930,37380,37830,38536,38730,38156,38094 +.value 40848,40530,39956,40406,39064,39258,39708,39646 +.value 36256,35938,36388,36838,35496,35690,35116,35054 +.value 33712,33394,32820,33270,33976,34170,34620,34558 +.value 43456,43010,43588,43910,44744,44810,44364,44174 +.value 42960,42514,42068,42390,41176,41242,41820,41630 +.value 46560,46114,46692,47014,45800,45866,45420,45230 +.value 48112,47666,47220,47542,48376,48442,49020,48830 +.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67 +.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112 +.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62 +.byte 0 diff --git a/secure/lib/libcrypto/asm/ghash-x86_64.s b/secure/lib/libcrypto/asm/ghash-x86_64.s new file mode 100644 index 0000000000..62d39c65f5 --- /dev/null +++ b/secure/lib/libcrypto/asm/ghash-x86_64.s @@ -0,0 +1,1026 @@ +.text + +.globl gcm_gmult_4bit +.type gcm_gmult_4bit,@function +.align 16 +gcm_gmult_4bit: + pushq %rbx + pushq %rbp + pushq %r12 +.Lgmult_prologue: + + movzbq 15(%rdi),%r8 + leaq .Lrem_4bit(%rip),%r11 + xorq %rax,%rax + xorq %rbx,%rbx + movb %r8b,%al + movb %r8b,%bl + shlb $4,%al + movq $14,%rcx + movq 8(%rsi,%rax,1),%r8 + movq (%rsi,%rax,1),%r9 + andb $240,%bl + movq %r8,%rdx + jmp .Loop1 + +.align 16 +.Loop1: + shrq $4,%r8 + andq $15,%rdx + movq %r9,%r10 + movb (%rdi,%rcx,1),%al + shrq $4,%r9 + xorq 8(%rsi,%rbx,1),%r8 + shlq $60,%r10 + xorq (%rsi,%rbx,1),%r9 + movb %al,%bl + xorq (%r11,%rdx,8),%r9 + movq %r8,%rdx + shlb $4,%al + xorq %r10,%r8 + decq %rcx + js .Lbreak1 + + shrq $4,%r8 + andq $15,%rdx + movq %r9,%r10 + shrq $4,%r9 + xorq 8(%rsi,%rax,1),%r8 + shlq $60,%r10 + xorq (%rsi,%rax,1),%r9 + andb $240,%bl + xorq (%r11,%rdx,8),%r9 + movq %r8,%rdx + xorq %r10,%r8 + jmp .Loop1 + +.align 16 +.Lbreak1: + shrq $4,%r8 + andq $15,%rdx + movq %r9,%r10 + shrq $4,%r9 + xorq 8(%rsi,%rax,1),%r8 + shlq $60,%r10 + xorq (%rsi,%rax,1),%r9 + andb $240,%bl + xorq (%r11,%rdx,8),%r9 + movq %r8,%rdx + xorq %r10,%r8 + + shrq $4,%r8 + andq 
$15,%rdx + movq %r9,%r10 + shrq $4,%r9 + xorq 8(%rsi,%rbx,1),%r8 + shlq $60,%r10 + xorq (%rsi,%rbx,1),%r9 + xorq %r10,%r8 + xorq (%r11,%rdx,8),%r9 + + bswapq %r8 + bswapq %r9 + movq %r8,8(%rdi) + movq %r9,(%rdi) + + movq 16(%rsp),%rbx + leaq 24(%rsp),%rsp +.Lgmult_epilogue: + .byte 0xf3,0xc3 +.size gcm_gmult_4bit,.-gcm_gmult_4bit +.globl gcm_ghash_4bit +.type gcm_ghash_4bit,@function +.align 16 +gcm_ghash_4bit: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $280,%rsp +.Lghash_prologue: + movq %rdx,%r14 + movq %rcx,%r15 + subq $-128,%rsi + leaq 16+128(%rsp),%rbp + xorl %edx,%edx + movq 0+0-128(%rsi),%r8 + movq 0+8-128(%rsi),%rax + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq 16+0-128(%rsi),%r9 + shlb $4,%dl + movq 16+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,0(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,0(%rbp) + movq 32+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,0-128(%rbp) + movq 32+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,1(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,8(%rbp) + movq 48+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,8-128(%rbp) + movq 48+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,2(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,16(%rbp) + movq 64+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,16-128(%rbp) + movq 64+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,3(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,24(%rbp) + movq 80+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,24-128(%rbp) + movq 80+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,4(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,32(%rbp) + movq 96+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,32-128(%rbp) + movq 96+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,5(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,40(%rbp) + movq 112+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,40-128(%rbp) + movq 112+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,6(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,48(%rbp) + movq 128+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,48-128(%rbp) + movq 128+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,7(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,56(%rbp) + movq 144+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,56-128(%rbp) + movq 144+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,8(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,64(%rbp) + movq 160+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,64-128(%rbp) + movq 160+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,9(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,72(%rbp) + movq 176+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,72-128(%rbp) + movq 176+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,10(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,80(%rbp) + movq 192+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,80-128(%rbp) + movq 192+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,11(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,88(%rbp) + movq 208+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,88-128(%rbp) + movq 208+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,12(%rsp) + orq %r10,%rax + movb 
%bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,96(%rbp) + movq 224+0-128(%rsi),%r8 + shlb $4,%dl + movq %rax,96-128(%rbp) + movq 224+8-128(%rsi),%rax + shlq $60,%r10 + movb %dl,13(%rsp) + orq %r10,%rbx + movb %al,%dl + shrq $4,%rax + movq %r8,%r10 + shrq $4,%r8 + movq %r9,104(%rbp) + movq 240+0-128(%rsi),%r9 + shlb $4,%dl + movq %rbx,104-128(%rbp) + movq 240+8-128(%rsi),%rbx + shlq $60,%r10 + movb %dl,14(%rsp) + orq %r10,%rax + movb %bl,%dl + shrq $4,%rbx + movq %r9,%r10 + shrq $4,%r9 + movq %r8,112(%rbp) + shlb $4,%dl + movq %rax,112-128(%rbp) + shlq $60,%r10 + movb %dl,15(%rsp) + orq %r10,%rbx + movq %r9,120(%rbp) + movq %rbx,120-128(%rbp) + addq $-128,%rsi + movq 8(%rdi),%r8 + movq 0(%rdi),%r9 + addq %r14,%r15 + leaq .Lrem_8bit(%rip),%r11 + jmp .Louter_loop +.align 16 +.Louter_loop: + xorq (%r14),%r9 + movq 8(%r14),%rdx + leaq 16(%r14),%r14 + xorq %r8,%rdx + movq %r9,(%rdi) + movq %rdx,8(%rdi) + shrq $32,%rdx + xorq %rax,%rax + roll $8,%edx + movb %dl,%al + movzbl %dl,%ebx + shlb $4,%al + shrl $4,%ebx + roll $8,%edx + movq 8(%rsi,%rax,1),%r8 + movq (%rsi,%rax,1),%r9 + movb %dl,%al + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + xorq %r8,%r12 + movq %r9,%r10 + shrq $8,%r8 + movzbq %r12b,%r12 + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + movl 8(%rdi),%edx + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + 
movzbq %r12b,%r12 + movl 4(%rdi),%edx + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + movl 0(%rdi),%edx + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + shrl $4,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r12,2),%r12 + movzbl %dl,%ebx + shlb $4,%al + movzbq (%rsp,%rcx,1),%r13 + shrl $4,%ebx + shlq $48,%r12 + xorq %r8,%r13 + movq %r9,%r10 + xorq %r12,%r9 + shrq $8,%r8 + movzbq %r13b,%r13 + shrq $8,%r9 + xorq -128(%rbp,%rcx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rcx,8),%r9 + roll $8,%edx + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + movb %dl,%al + xorq %r10,%r8 + movzwq (%r11,%r13,2),%r13 + movzbl %dl,%ecx + shlb $4,%al + movzbq (%rsp,%rbx,1),%r12 + andl $240,%ecx + shlq $48,%r13 + xorq %r8,%r12 + movq %r9,%r10 + xorq %r13,%r9 + shrq $8,%r8 + movzbq %r12b,%r12 + movl -4(%rdi),%edx + shrq $8,%r9 + xorq -128(%rbp,%rbx,8),%r8 + shlq $56,%r10 + xorq (%rbp,%rbx,8),%r9 + movzwq (%r11,%r12,2),%r12 + xorq 8(%rsi,%rax,1),%r8 + xorq (%rsi,%rax,1),%r9 + shlq $48,%r12 + xorq %r10,%r8 + xorq %r12,%r9 + movzbq %r8b,%r13 + shrq $4,%r8 + movq %r9,%r10 + shlb $4,%r13b + shrq $4,%r9 + xorq 8(%rsi,%rcx,1),%r8 + movzwq (%r11,%r13,2),%r13 + shlq $60,%r10 + xorq (%rsi,%rcx,1),%r9 + xorq %r10,%r8 + shlq $48,%r13 
+ bswapq %r8 + xorq %r13,%r9 + bswapq %r9 + cmpq %r15,%r14 + jb .Louter_loop + movq %r8,8(%rdi) + movq %r9,(%rdi) + + leaq 280(%rsp),%rsi + movq 0(%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lghash_epilogue: + .byte 0xf3,0xc3 +.size gcm_ghash_4bit,.-gcm_ghash_4bit +.globl gcm_init_clmul +.type gcm_init_clmul,@function +.align 16 +gcm_init_clmul: + movdqu (%rsi),%xmm2 + pshufd $78,%xmm2,%xmm2 + + + pshufd $255,%xmm2,%xmm4 + movdqa %xmm2,%xmm3 + psllq $1,%xmm2 + pxor %xmm5,%xmm5 + psrlq $63,%xmm3 + pcmpgtd %xmm4,%xmm5 + pslldq $8,%xmm3 + por %xmm3,%xmm2 + + + pand .L0x1c2_polynomial(%rip),%xmm5 + pxor %xmm5,%xmm2 + + + movdqa %xmm2,%xmm0 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + + + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + movdqu %xmm2,(%rdi) + movdqu %xmm0,16(%rdi) + .byte 0xf3,0xc3 +.size gcm_init_clmul,.-gcm_init_clmul +.globl gcm_gmult_clmul +.type gcm_gmult_clmul,@function +.align 16 +gcm_gmult_clmul: + movdqu (%rdi),%xmm0 + movdqa .Lbswap_mask(%rip),%xmm5 + movdqu (%rsi),%xmm2 +.byte 102,15,56,0,197 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + + + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,197 + movdqu %xmm0,(%rdi) + .byte 0xf3,0xc3 +.size gcm_gmult_clmul,.-gcm_gmult_clmul +.globl gcm_ghash_clmul +.type gcm_ghash_clmul,@function +.align 16 +gcm_ghash_clmul: + movdqa .Lbswap_mask(%rip),%xmm5 + + movdqu (%rdi),%xmm0 + movdqu (%rsi),%xmm2 +.byte 102,15,56,0,197 + + subq $16,%rcx + jz .Lodd_tail + + movdqu 16(%rsi),%xmm8 + + + + + + movdqu (%rdx),%xmm3 + movdqu 16(%rdx),%xmm6 +.byte 102,15,56,0,221 +.byte 102,15,56,0,245 + pxor %xmm3,%xmm0 + movdqa %xmm6,%xmm7 + pshufd $78,%xmm6,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm6,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,242,0 +.byte 102,15,58,68,250,17 +.byte 102,15,58,68,220,0 + pxor %xmm6,%xmm3 + pxor %xmm7,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm7 + pxor %xmm4,%xmm6 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm8,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm8,%xmm4 + + leaq 32(%rdx),%rdx + subq $32,%rcx + jbe .Leven_tail + +.Lmod_loop: +.byte 102,65,15,58,68,192,0 +.byte 102,65,15,58,68,200,17 +.byte 102,15,58,68,220,0 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 
+ pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + movdqu (%rdx),%xmm3 + pxor %xmm6,%xmm0 + pxor %xmm7,%xmm1 + + movdqu 16(%rdx),%xmm6 +.byte 102,15,56,0,221 +.byte 102,15,56,0,245 + + movdqa %xmm6,%xmm7 + pshufd $78,%xmm6,%xmm9 + pshufd $78,%xmm2,%xmm10 + pxor %xmm6,%xmm9 + pxor %xmm2,%xmm10 + pxor %xmm3,%xmm1 + + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 +.byte 102,15,58,68,242,0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + +.byte 102,15,58,68,250,17 + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + +.byte 102,69,15,58,68,202,0 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm8,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm8,%xmm4 + + pxor %xmm6,%xmm9 + pxor %xmm7,%xmm9 + movdqa %xmm9,%xmm10 + psrldq $8,%xmm9 + pslldq $8,%xmm10 + pxor %xmm9,%xmm7 + pxor %xmm10,%xmm6 + + leaq 32(%rdx),%rdx + subq $32,%rcx + ja .Lmod_loop + +.Leven_tail: +.byte 102,65,15,58,68,192,0 +.byte 102,65,15,58,68,200,17 +.byte 102,15,58,68,220,0 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + pxor %xmm6,%xmm0 + pxor %xmm7,%xmm1 + + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + + + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + testq %rcx,%rcx + jnz .Ldone + +.Lodd_tail: + movdqu (%rdx),%xmm3 +.byte 102,15,56,0,221 + pxor %xmm3,%xmm0 + movdqa %xmm0,%xmm1 + pshufd $78,%xmm0,%xmm3 + pshufd $78,%xmm2,%xmm4 + pxor %xmm0,%xmm3 + pxor %xmm2,%xmm4 +.byte 102,15,58,68,194,0 +.byte 102,15,58,68,202,17 +.byte 102,15,58,68,220,0 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + + movdqa %xmm3,%xmm4 + psrldq $8,%xmm3 + pslldq $8,%xmm4 + pxor %xmm3,%xmm1 + pxor %xmm4,%xmm0 + + movdqa %xmm0,%xmm3 + psllq $1,%xmm0 + pxor %xmm3,%xmm0 + psllq $5,%xmm0 + pxor %xmm3,%xmm0 + psllq $57,%xmm0 + movdqa %xmm0,%xmm4 + pslldq $8,%xmm0 + psrldq $8,%xmm4 + pxor %xmm3,%xmm0 + pxor %xmm4,%xmm1 + + + movdqa %xmm0,%xmm4 + psrlq $5,%xmm0 + pxor %xmm4,%xmm0 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 + pxor %xmm1,%xmm4 + psrlq $1,%xmm0 + pxor %xmm4,%xmm0 +.Ldone: +.byte 102,15,56,0,197 + movdqu %xmm0,(%rdi) + .byte 0xf3,0xc3 +.LSEH_end_gcm_ghash_clmul: +.size gcm_ghash_clmul,.-gcm_ghash_clmul +.align 64 +.Lbswap_mask: +.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 +.L0x1c2_polynomial: +.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 +.align 64 +.type .Lrem_4bit,@object +.Lrem_4bit: +.long 0,0,0,471859200,0,943718400,0,610271232 +.long 0,1887436800,0,1822425088,0,1220542464,0,1423966208 +.long 0,3774873600,0,4246732800,0,3644850176,0,3311403008 +.long 0,2441084928,0,2376073216,0,2847932416,0,3051356160 +.type .Lrem_8bit,@object +.Lrem_8bit: +.value 0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E +.value 0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E +.value 0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E +.value 0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E +.value 0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E +.value 0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E +.value 0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E +.value 
0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E +.value 0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE +.value 0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE +.value 0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE +.value 0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE +.value 0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E +.value 0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E +.value 0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE +.value 0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE +.value 0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E +.value 0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E +.value 0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E +.value 0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E +.value 0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E +.value 0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E +.value 0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E +.value 0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E +.value 0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE +.value 0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE +.value 0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE +.value 0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE +.value 0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E +.value 0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E +.value 0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE +.value 0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE + +.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 64 diff --git a/secure/lib/libcrypto/asm/modexp512-x86_64.s b/secure/lib/libcrypto/asm/modexp512-x86_64.s new file mode 100644 index 0000000000..6cccafb868 --- /dev/null +++ b/secure/lib/libcrypto/asm/modexp512-x86_64.s @@ -0,0 +1,1773 @@ +.text + +.type MULADD_128x512,@function +.align 16 +MULADD_128x512: + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + movq %r8,0(%rcx) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%r8 + movq 8(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + movq %r9,8(%rcx) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq 
$0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%r9 + .byte 0xf3,0xc3 +.size MULADD_128x512,.-MULADD_128x512 +.type mont_reduce,@function +.align 16 +mont_reduce: + leaq 192(%rsp),%rdi + movq 32(%rsp),%rsi + addq $576,%rsi + leaq 520(%rsp),%rcx + + movq 96(%rcx),%rbp + movq 0(%rsi),%rax + mulq %rbp + movq (%rcx),%r8 + addq %rax,%r8 + adcq $0,%rdx + movq %r8,0(%rdi) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + movq 8(%rcx),%r9 + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + movq 16(%rcx),%r10 + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + movq 24(%rcx),%r11 + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + movq 32(%rcx),%r12 + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + movq 40(%rcx),%r13 + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + movq 48(%rcx),%r14 + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + movq 56(%rcx),%r15 + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%r8 + movq 104(%rcx),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + movq %r9,8(%rdi) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%r9 + movq 112(%rcx),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + movq %r10,16(%rdi) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%r10 + movq 120(%rcx),%rbp + movq 
0(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + movq %r11,24(%rdi) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%r11 + xorq %rax,%rax + + addq 64(%rcx),%r8 + adcq 72(%rcx),%r9 + adcq 80(%rcx),%r10 + adcq 88(%rcx),%r11 + adcq $0,%rax + + + + + movq %r8,64(%rdi) + movq %r9,72(%rdi) + movq %r10,%rbp + movq %r11,88(%rdi) + + movq %rax,384(%rsp) + + movq 0(%rdi),%r8 + movq 8(%rdi),%r9 + movq 16(%rdi),%r10 + movq 24(%rdi),%r11 + + + + + + + + + addq $80,%rdi + + addq $64,%rsi + leaq 296(%rsp),%rcx + + call MULADD_128x512 + + movq 384(%rsp),%rax + + + addq -16(%rdi),%r8 + adcq -8(%rdi),%r9 + movq %r8,64(%rcx) + movq %r9,72(%rcx) + + adcq %rax,%rax + movq %rax,384(%rsp) + + leaq 192(%rsp),%rdi + addq $64,%rsi + + + + + + movq (%rsi),%r8 + movq 8(%rsi),%rbx + + movq (%rcx),%rax + mulq %r8 + movq %rax,%rbp + movq %rdx,%r9 + + movq 8(%rcx),%rax + mulq %r8 + addq %rax,%r9 + + movq (%rcx),%rax + mulq %rbx + addq %rax,%r9 + + movq %r9,8(%rdi) + + + subq $192,%rsi + + movq (%rcx),%r8 + movq 8(%rcx),%r9 + + call MULADD_128x512 + + + + + movq 0(%rsi),%rax + movq 8(%rsi),%rbx + movq 16(%rsi),%rdi + movq 24(%rsi),%rdx + + + movq 384(%rsp),%rbp + + addq 64(%rcx),%r8 + adcq 72(%rcx),%r9 + + + adcq %rbp,%rbp + + + + shlq $3,%rbp + movq 32(%rsp),%rcx + addq %rcx,%rbp + + + xorq %rsi,%rsi + + addq 0(%rbp),%r10 + adcq 64(%rbp),%r11 + adcq 128(%rbp),%r12 + adcq 192(%rbp),%r13 + adcq 256(%rbp),%r14 + adcq 320(%rbp),%r15 + adcq 384(%rbp),%r8 + adcq 448(%rbp),%r9 + + + + sbbq $0,%rsi + + + andq %rsi,%rax + andq %rsi,%rbx + andq %rsi,%rdi + andq %rsi,%rdx + + movq $1,%rbp + subq %rax,%r10 + sbbq %rbx,%r11 + sbbq %rdi,%r12 + sbbq %rdx,%r13 + + + + + sbbq $0,%rbp + + + + addq $512,%rcx + movq 32(%rcx),%rax + movq 40(%rcx),%rbx + movq 48(%rcx),%rdi + movq 56(%rcx),%rdx + + + + andq %rsi,%rax + andq %rsi,%rbx + andq %rsi,%rdi + andq %rsi,%rdx + + + + subq $1,%rbp + + sbbq %rax,%r14 + sbbq %rbx,%r15 + sbbq %rdi,%r8 + sbbq %rdx,%r9 + + + + movq 144(%rsp),%rsi + movq %r10,0(%rsi) + movq %r11,8(%rsi) + movq %r12,16(%rsi) + movq %r13,24(%rsi) + movq %r14,32(%rsi) + movq %r15,40(%rsi) + movq %r8,48(%rsi) + movq %r9,56(%rsi) + + .byte 0xf3,0xc3 +.size mont_reduce,.-mont_reduce +.type mont_mul_a3b,@function +.align 16 +mont_mul_a3b: + + + + + movq 0(%rdi),%rbp + + movq %r10,%rax + mulq %rbp + movq %rax,520(%rsp) + movq %rdx,%r10 + movq %r11,%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + movq %rdx,%r11 + movq %r12,%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + movq %rdx,%r12 + movq %r13,%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + movq %rdx,%r13 + movq %r14,%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + movq %rdx,%r14 + movq %r15,%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + movq %rdx,%r15 + movq %r8,%rax + mulq %rbp + addq %rax,%r15 + 
adcq $0,%rdx + movq %rdx,%r8 + movq %r9,%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + movq %rdx,%r9 + movq 8(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + movq %r10,528(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%r10 + movq 16(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + movq %r11,536(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%r11 + movq 24(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + movq %r12,544(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%r12 + movq 32(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + movq %r13,552(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq 
%rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%r13 + movq 40(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + movq %r14,560(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%r14 + movq 48(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + movq %r15,568(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + addq %rbx,%r8 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%r15 + movq 56(%rdi),%rbp + movq 0(%rsi),%rax + mulq %rbp + addq %rax,%r8 + adcq $0,%rdx + movq %r8,576(%rsp) + movq %rdx,%rbx + + movq 8(%rsi),%rax + mulq %rbp + addq %rax,%r9 + adcq $0,%rdx + addq %rbx,%r9 + adcq $0,%rdx + movq %rdx,%rbx + + movq 16(%rsi),%rax + mulq %rbp + addq %rax,%r10 + adcq $0,%rdx + addq %rbx,%r10 + adcq $0,%rdx + movq %rdx,%rbx + + movq 24(%rsi),%rax + mulq %rbp + addq %rax,%r11 + adcq $0,%rdx + addq %rbx,%r11 + adcq $0,%rdx + movq %rdx,%rbx + + movq 32(%rsi),%rax + mulq %rbp + addq %rax,%r12 + adcq $0,%rdx + addq %rbx,%r12 + adcq $0,%rdx + movq %rdx,%rbx + + movq 40(%rsi),%rax + mulq %rbp + addq %rax,%r13 + adcq $0,%rdx + addq %rbx,%r13 + adcq $0,%rdx + movq %rdx,%rbx + + movq 48(%rsi),%rax + mulq %rbp + addq %rax,%r14 + adcq $0,%rdx + addq %rbx,%r14 + adcq $0,%rdx + movq %rdx,%rbx + + movq 56(%rsi),%rax + mulq %rbp + addq %rax,%r15 + adcq $0,%rdx + addq %rbx,%r15 + adcq $0,%rdx + movq %rdx,%r8 + movq %r9,584(%rsp) + movq %r10,592(%rsp) + movq %r11,600(%rsp) + movq %r12,608(%rsp) + movq %r13,616(%rsp) + movq %r14,624(%rsp) + movq %r15,632(%rsp) + movq %r8,640(%rsp) + + + + + + jmp mont_reduce + + +.size mont_mul_a3b,.-mont_mul_a3b +.type sqr_reduce,@function +.align 16 +sqr_reduce: + movq 16(%rsp),%rcx + + + + movq %r10,%rbx + + movq %r11,%rax + mulq %rbx + movq %rax,528(%rsp) + movq %rdx,%r10 + movq %r12,%rax + mulq %rbx + addq %rax,%r10 + adcq $0,%rdx + movq %rdx,%r11 + 
movq %r13,%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + movq %rdx,%r12 + movq %r14,%rax + mulq %rbx + addq %rax,%r12 + adcq $0,%rdx + movq %rdx,%r13 + movq %r15,%rax + mulq %rbx + addq %rax,%r13 + adcq $0,%rdx + movq %rdx,%r14 + movq %r8,%rax + mulq %rbx + addq %rax,%r14 + adcq $0,%rdx + movq %rdx,%r15 + movq %r9,%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + movq %rdx,%rsi + + movq %r10,536(%rsp) + + + + + + movq 8(%rcx),%rbx + + movq 16(%rcx),%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + movq %r11,544(%rsp) + + movq %rdx,%r10 + movq 24(%rcx),%rax + mulq %rbx + addq %rax,%r12 + adcq $0,%rdx + addq %r10,%r12 + adcq $0,%rdx + movq %r12,552(%rsp) + + movq %rdx,%r10 + movq 32(%rcx),%rax + mulq %rbx + addq %rax,%r13 + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + + movq %rdx,%r10 + movq 40(%rcx),%rax + mulq %rbx + addq %rax,%r14 + adcq $0,%rdx + addq %r10,%r14 + adcq $0,%rdx + + movq %rdx,%r10 + movq %r8,%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + addq %r10,%r15 + adcq $0,%rdx + + movq %rdx,%r10 + movq %r9,%rax + mulq %rbx + addq %rax,%rsi + adcq $0,%rdx + addq %r10,%rsi + adcq $0,%rdx + + movq %rdx,%r11 + + + + + movq 16(%rcx),%rbx + + movq 24(%rcx),%rax + mulq %rbx + addq %rax,%r13 + adcq $0,%rdx + movq %r13,560(%rsp) + + movq %rdx,%r10 + movq 32(%rcx),%rax + mulq %rbx + addq %rax,%r14 + adcq $0,%rdx + addq %r10,%r14 + adcq $0,%rdx + movq %r14,568(%rsp) + + movq %rdx,%r10 + movq 40(%rcx),%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + addq %r10,%r15 + adcq $0,%rdx + + movq %rdx,%r10 + movq %r8,%rax + mulq %rbx + addq %rax,%rsi + adcq $0,%rdx + addq %r10,%rsi + adcq $0,%rdx + + movq %rdx,%r10 + movq %r9,%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + addq %r10,%r11 + adcq $0,%rdx + + movq %rdx,%r12 + + + + + + movq 24(%rcx),%rbx + + movq 32(%rcx),%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + movq %r15,576(%rsp) + + movq %rdx,%r10 + movq 40(%rcx),%rax + mulq %rbx + addq %rax,%rsi + adcq $0,%rdx + addq %r10,%rsi + adcq $0,%rdx + movq %rsi,584(%rsp) + + movq %rdx,%r10 + movq %r8,%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + addq %r10,%r11 + adcq $0,%rdx + + movq %rdx,%r10 + movq %r9,%rax + mulq %rbx + addq %rax,%r12 + adcq $0,%rdx + addq %r10,%r12 + adcq $0,%rdx + + movq %rdx,%r15 + + + + + movq 32(%rcx),%rbx + + movq 40(%rcx),%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + movq %r11,592(%rsp) + + movq %rdx,%r10 + movq %r8,%rax + mulq %rbx + addq %rax,%r12 + adcq $0,%rdx + addq %r10,%r12 + adcq $0,%rdx + movq %r12,600(%rsp) + + movq %rdx,%r10 + movq %r9,%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + addq %r10,%r15 + adcq $0,%rdx + + movq %rdx,%r11 + + + + + movq 40(%rcx),%rbx + + movq %r8,%rax + mulq %rbx + addq %rax,%r15 + adcq $0,%rdx + movq %r15,608(%rsp) + + movq %rdx,%r10 + movq %r9,%rax + mulq %rbx + addq %rax,%r11 + adcq $0,%rdx + addq %r10,%r11 + adcq $0,%rdx + movq %r11,616(%rsp) + + movq %rdx,%r12 + + + + + movq %r8,%rbx + + movq %r9,%rax + mulq %rbx + addq %rax,%r12 + adcq $0,%rdx + movq %r12,624(%rsp) + + movq %rdx,632(%rsp) + + + movq 528(%rsp),%r10 + movq 536(%rsp),%r11 + movq 544(%rsp),%r12 + movq 552(%rsp),%r13 + movq 560(%rsp),%r14 + movq 568(%rsp),%r15 + + movq 24(%rcx),%rax + mulq %rax + movq %rax,%rdi + movq %rdx,%r8 + + addq %r10,%r10 + adcq %r11,%r11 + adcq %r12,%r12 + adcq %r13,%r13 + adcq %r14,%r14 + adcq %r15,%r15 + adcq $0,%r8 + + movq 0(%rcx),%rax + mulq %rax + movq %rax,520(%rsp) + movq %rdx,%rbx + + movq 8(%rcx),%rax + mulq %rax + + addq %rbx,%r10 + adcq %rax,%r11 + adcq $0,%rdx + + movq %rdx,%rbx + movq %r10,528(%rsp) + 
movq %r11,536(%rsp) + + movq 16(%rcx),%rax + mulq %rax + + addq %rbx,%r12 + adcq %rax,%r13 + adcq $0,%rdx + + movq %rdx,%rbx + + movq %r12,544(%rsp) + movq %r13,552(%rsp) + + xorq %rbp,%rbp + addq %rbx,%r14 + adcq %rdi,%r15 + adcq $0,%rbp + + movq %r14,560(%rsp) + movq %r15,568(%rsp) + + + + + movq 576(%rsp),%r10 + movq 584(%rsp),%r11 + movq 592(%rsp),%r12 + movq 600(%rsp),%r13 + movq 608(%rsp),%r14 + movq 616(%rsp),%r15 + movq 624(%rsp),%rdi + movq 632(%rsp),%rsi + + movq %r9,%rax + mulq %rax + movq %rax,%r9 + movq %rdx,%rbx + + addq %r10,%r10 + adcq %r11,%r11 + adcq %r12,%r12 + adcq %r13,%r13 + adcq %r14,%r14 + adcq %r15,%r15 + adcq %rdi,%rdi + adcq %rsi,%rsi + adcq $0,%rbx + + addq %rbp,%r10 + + movq 32(%rcx),%rax + mulq %rax + + addq %r8,%r10 + adcq %rax,%r11 + adcq $0,%rdx + + movq %rdx,%rbp + + movq %r10,576(%rsp) + movq %r11,584(%rsp) + + movq 40(%rcx),%rax + mulq %rax + + addq %rbp,%r12 + adcq %rax,%r13 + adcq $0,%rdx + + movq %rdx,%rbp + + movq %r12,592(%rsp) + movq %r13,600(%rsp) + + movq 48(%rcx),%rax + mulq %rax + + addq %rbp,%r14 + adcq %rax,%r15 + adcq $0,%rdx + + movq %r14,608(%rsp) + movq %r15,616(%rsp) + + addq %rdx,%rdi + adcq %r9,%rsi + adcq $0,%rbx + + movq %rdi,624(%rsp) + movq %rsi,632(%rsp) + movq %rbx,640(%rsp) + + jmp mont_reduce + + +.size sqr_reduce,.-sqr_reduce +.globl mod_exp_512 +.type mod_exp_512,@function +mod_exp_512: + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + + movq %rsp,%r8 + subq $2688,%rsp + andq $-64,%rsp + + + movq %r8,0(%rsp) + movq %rdi,8(%rsp) + movq %rsi,16(%rsp) + movq %rcx,24(%rsp) +.Lbody: + + + + pxor %xmm4,%xmm4 + movdqu 0(%rsi),%xmm0 + movdqu 16(%rsi),%xmm1 + movdqu 32(%rsi),%xmm2 + movdqu 48(%rsi),%xmm3 + movdqa %xmm4,512(%rsp) + movdqa %xmm4,528(%rsp) + movdqa %xmm4,608(%rsp) + movdqa %xmm4,624(%rsp) + movdqa %xmm0,544(%rsp) + movdqa %xmm1,560(%rsp) + movdqa %xmm2,576(%rsp) + movdqa %xmm3,592(%rsp) + + + movdqu 0(%rdx),%xmm0 + movdqu 16(%rdx),%xmm1 + movdqu 32(%rdx),%xmm2 + movdqu 48(%rdx),%xmm3 + + leaq 384(%rsp),%rbx + movq %rbx,136(%rsp) + call mont_reduce + + + leaq 448(%rsp),%rcx + xorq %rax,%rax + movq %rax,0(%rcx) + movq %rax,8(%rcx) + movq %rax,24(%rcx) + movq %rax,32(%rcx) + movq %rax,40(%rcx) + movq %rax,48(%rcx) + movq %rax,56(%rcx) + movq %rax,128(%rsp) + movq $1,16(%rcx) + + leaq 640(%rsp),%rbp + movq %rcx,%rsi + movq %rbp,%rdi + movq $8,%rax +loop_0: + movq (%rcx),%rbx + movw %bx,(%rdi) + shrq $16,%rbx + movw %bx,64(%rdi) + shrq $16,%rbx + movw %bx,128(%rdi) + shrq $16,%rbx + movw %bx,192(%rdi) + leaq 8(%rcx),%rcx + leaq 256(%rdi),%rdi + decq %rax + jnz loop_0 + movq $31,%rax + movq %rax,32(%rsp) + movq %rbp,40(%rsp) + + movq %rsi,136(%rsp) + movq 0(%rsi),%r10 + movq 8(%rsi),%r11 + movq 16(%rsi),%r12 + movq 24(%rsi),%r13 + movq 32(%rsi),%r14 + movq 40(%rsi),%r15 + movq 48(%rsi),%r8 + movq 56(%rsi),%r9 +init_loop: + leaq 384(%rsp),%rdi + call mont_mul_a3b + leaq 448(%rsp),%rsi + movq 40(%rsp),%rbp + addq $2,%rbp + movq %rbp,40(%rsp) + movq %rsi,%rcx + movq $8,%rax +loop_1: + movq (%rcx),%rbx + movw %bx,(%rbp) + shrq $16,%rbx + movw %bx,64(%rbp) + shrq $16,%rbx + movw %bx,128(%rbp) + shrq $16,%rbx + movw %bx,192(%rbp) + leaq 8(%rcx),%rcx + leaq 256(%rbp),%rbp + decq %rax + jnz loop_1 + movq 32(%rsp),%rax + subq $1,%rax + movq %rax,32(%rsp) + jne init_loop + + + + movdqa %xmm0,64(%rsp) + movdqa %xmm1,80(%rsp) + movdqa %xmm2,96(%rsp) + movdqa %xmm3,112(%rsp) + + + + + + movl 126(%rsp),%eax + movq %rax,%rdx + shrq $11,%rax + andl $2047,%edx + movl %edx,126(%rsp) + leaq 640(%rsp,%rax,2),%rsi + 
movq 8(%rsp),%rdx + movq $4,%rbp +loop_2: + movzwq 192(%rsi),%rbx + movzwq 448(%rsi),%rax + shlq $16,%rbx + shlq $16,%rax + movw 128(%rsi),%bx + movw 384(%rsi),%ax + shlq $16,%rbx + shlq $16,%rax + movw 64(%rsi),%bx + movw 320(%rsi),%ax + shlq $16,%rbx + shlq $16,%rax + movw 0(%rsi),%bx + movw 256(%rsi),%ax + movq %rbx,0(%rdx) + movq %rax,8(%rdx) + leaq 512(%rsi),%rsi + leaq 16(%rdx),%rdx + subq $1,%rbp + jnz loop_2 + movq $505,48(%rsp) + + movq 8(%rsp),%rcx + movq %rcx,136(%rsp) + movq 0(%rcx),%r10 + movq 8(%rcx),%r11 + movq 16(%rcx),%r12 + movq 24(%rcx),%r13 + movq 32(%rcx),%r14 + movq 40(%rcx),%r15 + movq 48(%rcx),%r8 + movq 56(%rcx),%r9 + jmp sqr_2 + +main_loop_a3b: + call sqr_reduce + call sqr_reduce + call sqr_reduce +sqr_2: + call sqr_reduce + call sqr_reduce + + + + movq 48(%rsp),%rcx + movq %rcx,%rax + shrq $4,%rax + movl 64(%rsp,%rax,2),%edx + andq $15,%rcx + shrq %cl,%rdx + andq $31,%rdx + + leaq 640(%rsp,%rdx,2),%rsi + leaq 448(%rsp),%rdx + movq %rdx,%rdi + movq $4,%rbp +loop_3: + movzwq 192(%rsi),%rbx + movzwq 448(%rsi),%rax + shlq $16,%rbx + shlq $16,%rax + movw 128(%rsi),%bx + movw 384(%rsi),%ax + shlq $16,%rbx + shlq $16,%rax + movw 64(%rsi),%bx + movw 320(%rsi),%ax + shlq $16,%rbx + shlq $16,%rax + movw 0(%rsi),%bx + movw 256(%rsi),%ax + movq %rbx,0(%rdx) + movq %rax,8(%rdx) + leaq 512(%rsi),%rsi + leaq 16(%rdx),%rdx + subq $1,%rbp + jnz loop_3 + movq 8(%rsp),%rsi + call mont_mul_a3b + + + + movq 48(%rsp),%rcx + subq $5,%rcx + movq %rcx,48(%rsp) + jge main_loop_a3b + + + +end_main_loop_a3b: + + + movq 8(%rsp),%rdx + pxor %xmm4,%xmm4 + movdqu 0(%rdx),%xmm0 + movdqu 16(%rdx),%xmm1 + movdqu 32(%rdx),%xmm2 + movdqu 48(%rdx),%xmm3 + movdqa %xmm4,576(%rsp) + movdqa %xmm4,592(%rsp) + movdqa %xmm4,608(%rsp) + movdqa %xmm4,624(%rsp) + movdqa %xmm0,512(%rsp) + movdqa %xmm1,528(%rsp) + movdqa %xmm2,544(%rsp) + movdqa %xmm3,560(%rsp) + call mont_reduce + + + + movq 8(%rsp),%rax + movq 0(%rax),%r8 + movq 8(%rax),%r9 + movq 16(%rax),%r10 + movq 24(%rax),%r11 + movq 32(%rax),%r12 + movq 40(%rax),%r13 + movq 48(%rax),%r14 + movq 56(%rax),%r15 + + + movq 24(%rsp),%rbx + addq $512,%rbx + + subq 0(%rbx),%r8 + sbbq 8(%rbx),%r9 + sbbq 16(%rbx),%r10 + sbbq 24(%rbx),%r11 + sbbq 32(%rbx),%r12 + sbbq 40(%rbx),%r13 + sbbq 48(%rbx),%r14 + sbbq 56(%rbx),%r15 + + + movq 0(%rax),%rsi + movq 8(%rax),%rdi + movq 16(%rax),%rcx + movq 24(%rax),%rdx + cmovncq %r8,%rsi + cmovncq %r9,%rdi + cmovncq %r10,%rcx + cmovncq %r11,%rdx + movq %rsi,0(%rax) + movq %rdi,8(%rax) + movq %rcx,16(%rax) + movq %rdx,24(%rax) + + movq 32(%rax),%rsi + movq 40(%rax),%rdi + movq 48(%rax),%rcx + movq 56(%rax),%rdx + cmovncq %r12,%rsi + cmovncq %r13,%rdi + cmovncq %r14,%rcx + cmovncq %r15,%rdx + movq %rsi,32(%rax) + movq %rdi,40(%rax) + movq %rcx,48(%rax) + movq %rdx,56(%rax) + + movq 0(%rsp),%rsi + movq 0(%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbx + movq 40(%rsi),%rbp + leaq 48(%rsi),%rsp +.Lepilogue: + .byte 0xf3,0xc3 +.size mod_exp_512, . 
- mod_exp_512 diff --git a/secure/lib/libcrypto/asm/rc4-586.s b/secure/lib/libcrypto/asm/rc4-586.s index 9ba94e4b1a..513ce6a58b 100644 --- a/secure/lib/libcrypto/asm/rc4-586.s +++ b/secure/lib/libcrypto/asm/rc4-586.s @@ -29,11 +29,146 @@ RC4: movl (%edi,%eax,4),%ecx andl $-4,%edx jz .L002loop1 - leal -4(%esi,%edx,1),%edx - movl %edx,28(%esp) + testl $-8,%edx movl %ebp,32(%esp) + jz .L003go4loop4 + leal OPENSSL_ia32cap_P,%ebp + btl $26,(%ebp) + jnc .L003go4loop4 + movl 32(%esp),%ebp + andl $-8,%edx + leal -8(%esi,%edx,1),%edx + movl %edx,-4(%edi) + addb %cl,%bl + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + movq (%esi),%mm0 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm2 + jmp .L004loop_mmx_enter +.align 16 +.L005loop_mmx: + addb %cl,%bl + psllq $56,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movq (%esi),%mm0 + movq %mm2,-8(%ebp,%esi,1) + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm2 +.L004loop_mmx_enter: + addb %cl,%bl + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm0,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $8,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $16,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $24,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $32,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $40,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + addb %cl,%bl + psllq $48,%mm1 + movl (%edi,%ebx,4),%edx + movl %ecx,(%edi,%ebx,4) + movl %edx,(%edi,%eax,4) + incl %eax + addl %ecx,%edx + movzbl %al,%eax + movzbl %dl,%edx + pxor %mm1,%mm2 + movl (%edi,%eax,4),%ecx + movd (%edi,%edx,4),%mm1 + movl %ebx,%edx + xorl %ebx,%ebx + movb %dl,%bl + cmpl -4(%edi),%esi + leal 8(%esi),%esi + jb .L005loop_mmx + psllq $56,%mm1 + pxor %mm1,%mm2 + movq %mm2,-8(%ebp,%esi,1) + emms + cmpl 24(%esp),%esi + je .L006done + jmp .L002loop1 .align 16 -.L003loop4: +.L003go4loop4: + leal -4(%esi,%edx,1),%edx + movl %edx,28(%esp) +.L007loop4: addb %cl,%bl movl (%edi,%ebx,4),%edx movl %ecx,(%edi,%ebx,4) @@ -79,9 +214,9 @@ RC4: movl %ebp,(%ecx,%esi,1) leal 4(%esi),%esi movl (%edi,%eax,4),%ecx - jb .L003loop4 + jb .L007loop4 cmpl 24(%esp),%esi - je .L004done + je .L006done movl 32(%esp),%ebp .align 16 .L002loop1: @@ -99,11 +234,11 @@ RC4: cmpl 24(%esp),%esi movb %dl,-1(%ebp,%esi,1) jb .L002loop1 - jmp .L004done + jmp 
.L006done .align 16 .L001RC4_CHAR: movzbl (%edi,%eax,1),%ecx -.L005cloop1: +.L008cloop1: addb %cl,%bl movzbl (%edi,%ebx,1),%edx movb %cl,(%edi,%ebx,1) @@ -116,10 +251,10 @@ RC4: movzbl (%edi,%eax,1),%ecx cmpl 24(%esp),%esi movb %dl,-1(%ebp,%esi,1) - jb .L005cloop1 -.L004done: + jb .L008cloop1 +.L006done: decb %al - movb %bl,-4(%edi) + movl %ebx,-4(%edi) movb %al,-8(%edi) .L000abort: popl %edi @@ -128,11 +263,11 @@ RC4: popl %ebp ret .size RC4,.-.L_RC4_begin -.globl RC4_set_key -.type RC4_set_key,@function +.globl private_RC4_set_key +.type private_RC4_set_key,@function .align 16 -RC4_set_key: -.L_RC4_set_key_begin: +private_RC4_set_key: +.L_private_RC4_set_key_begin: pushl %ebp pushl %ebx pushl %esi @@ -147,53 +282,53 @@ RC4_set_key: xorl %eax,%eax movl %ebp,-4(%edi) btl $20,(%edx) - jc .L006c1stloop + jc .L009c1stloop .align 16 -.L007w1stloop: +.L010w1stloop: movl %eax,(%edi,%eax,4) addb $1,%al - jnc .L007w1stloop + jnc .L010w1stloop xorl %ecx,%ecx xorl %edx,%edx .align 16 -.L008w2ndloop: +.L011w2ndloop: movl (%edi,%ecx,4),%eax addb (%esi,%ebp,1),%dl addb %al,%dl addl $1,%ebp movl (%edi,%edx,4),%ebx - jnz .L009wnowrap + jnz .L012wnowrap movl -4(%edi),%ebp -.L009wnowrap: +.L012wnowrap: movl %eax,(%edi,%edx,4) movl %ebx,(%edi,%ecx,4) addb $1,%cl - jnc .L008w2ndloop - jmp .L010exit + jnc .L011w2ndloop + jmp .L013exit .align 16 -.L006c1stloop: +.L009c1stloop: movb %al,(%edi,%eax,1) addb $1,%al - jnc .L006c1stloop + jnc .L009c1stloop xorl %ecx,%ecx xorl %edx,%edx xorl %ebx,%ebx .align 16 -.L011c2ndloop: +.L014c2ndloop: movb (%edi,%ecx,1),%al addb (%esi,%ebp,1),%dl addb %al,%dl addl $1,%ebp movb (%edi,%edx,1),%bl - jnz .L012cnowrap + jnz .L015cnowrap movl -4(%edi),%ebp -.L012cnowrap: +.L015cnowrap: movb %al,(%edi,%edx,1) movb %bl,(%edi,%ecx,1) addb $1,%cl - jnc .L011c2ndloop + jnc .L014c2ndloop movl $-1,256(%edi) -.L010exit: +.L013exit: xorl %eax,%eax movl %eax,-8(%edi) movl %eax,-4(%edi) @@ -202,29 +337,36 @@ RC4_set_key: popl %ebx popl %ebp ret -.size RC4_set_key,.-.L_RC4_set_key_begin +.size private_RC4_set_key,.-.L_private_RC4_set_key_begin .globl RC4_options .type RC4_options,@function .align 16 RC4_options: .L_RC4_options_begin: - call .L013pic_point -.L013pic_point: + call .L016pic_point +.L016pic_point: popl %eax - leal .L014opts-.L013pic_point(%eax),%eax + leal .L017opts-.L016pic_point(%eax),%eax leal OPENSSL_ia32cap_P,%edx - btl $20,(%edx) - jnc .L015skip + movl (%edx),%edx + btl $20,%edx + jc .L0181xchar + btl $26,%edx + jnc .L019ret + addl $25,%eax + ret +.L0181xchar: addl $12,%eax -.L015skip: +.L019ret: ret .align 64 -.L014opts: +.L017opts: .byte 114,99,52,40,52,120,44,105,110,116,41,0 .byte 114,99,52,40,49,120,44,99,104,97,114,41,0 +.byte 114,99,52,40,56,120,44,109,109,120,41,0 .byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89 .byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114 .byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .size RC4_options,.-.L_RC4_options_begin -.comm OPENSSL_ia32cap_P,4,4 +.comm OPENSSL_ia32cap_P,8,4 diff --git a/secure/lib/libcrypto/asm/rc4-md5-x86_64.s b/secure/lib/libcrypto/asm/rc4-md5-x86_64.s new file mode 100644 index 0000000000..aab3c6db13 --- /dev/null +++ b/secure/lib/libcrypto/asm/rc4-md5-x86_64.s @@ -0,0 +1,1259 @@ +.text +.align 16 + +.globl rc4_md5_enc +.type rc4_md5_enc,@function +rc4_md5_enc: + cmpq $0,%r9 + je .Labort + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $40,%rsp +.Lbody: + movq %rcx,%r11 + movq %r9,%r12 + movq %rsi,%r13 + movq %rdx,%r14 + movq %r8,%r15 + xorq 
%rbp,%rbp + xorq %rcx,%rcx + + leaq 8(%rdi),%rdi + movb -8(%rdi),%bpl + movb -4(%rdi),%cl + + incb %bpl + subq %r13,%r14 + movl (%rdi,%rbp,4),%eax + addb %al,%cl + leaq (%rdi,%rbp,4),%rsi + shlq $6,%r12 + addq %r15,%r12 + movq %r12,16(%rsp) + + movq %r11,24(%rsp) + movl 0(%r11),%r8d + movl 4(%r11),%r9d + movl 8(%r11),%r10d + movl 12(%r11),%r11d + jmp .Loop + +.align 16 +.Loop: + movl %r8d,0(%rsp) + movl %r9d,4(%rsp) + movl %r10d,8(%rsp) + movl %r11d,%r12d + movl %r11d,12(%rsp) + pxor %xmm0,%xmm0 + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 0(%r15),%r8d + addb %dl,%al + movl 4(%rsi),%ebx + addl $3614090360,%r8d + xorl %r11d,%r12d + movzbl %al,%eax + movl %edx,0(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $7,%r8d + movl %r10d,%r12d + movd (%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + pxor %xmm1,%xmm1 + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 4(%r15),%r11d + addb %dl,%bl + movl 8(%rsi),%eax + addl $3905402710,%r11d + xorl %r10d,%r12d + movzbl %bl,%ebx + movl %edx,4(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $12,%r11d + movl %r9d,%r12d + movd (%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 8(%r15),%r10d + addb %dl,%al + movl 12(%rsi),%ebx + addl $606105819,%r10d + xorl %r9d,%r12d + movzbl %al,%eax + movl %edx,8(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $17,%r10d + movl %r8d,%r12d + pinsrw $1,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 12(%r15),%r9d + addb %dl,%bl + movl 16(%rsi),%eax + addl $3250441966,%r9d + xorl %r8d,%r12d + movzbl %bl,%ebx + movl %edx,12(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $22,%r9d + movl %r11d,%r12d + pinsrw $1,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 16(%r15),%r8d + addb %dl,%al + movl 20(%rsi),%ebx + addl $4118548399,%r8d + xorl %r11d,%r12d + movzbl %al,%eax + movl %edx,16(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $7,%r8d + movl %r10d,%r12d + pinsrw $2,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 20(%r15),%r11d + addb %dl,%bl + movl 24(%rsi),%eax + addl $1200080426,%r11d + xorl %r10d,%r12d + movzbl %bl,%ebx + movl %edx,20(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $12,%r11d + movl %r9d,%r12d + pinsrw $2,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 24(%r15),%r10d + addb %dl,%al + movl 28(%rsi),%ebx + addl $2821735955,%r10d + xorl %r9d,%r12d + movzbl %al,%eax + movl %edx,24(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $17,%r10d + movl %r8d,%r12d + pinsrw $3,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 28(%r15),%r9d + addb %dl,%bl + movl 32(%rsi),%eax + addl $4249261313,%r9d + xorl %r8d,%r12d + movzbl %bl,%ebx + movl %edx,28(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $22,%r9d + movl %r11d,%r12d + pinsrw $3,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 32(%r15),%r8d + addb %dl,%al + movl 36(%rsi),%ebx + addl $1770035416,%r8d + xorl %r11d,%r12d + movzbl %al,%eax + movl %edx,32(%rsi) + addl %r12d,%r8d + addb %bl,%cl + 
roll $7,%r8d + movl %r10d,%r12d + pinsrw $4,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 36(%r15),%r11d + addb %dl,%bl + movl 40(%rsi),%eax + addl $2336552879,%r11d + xorl %r10d,%r12d + movzbl %bl,%ebx + movl %edx,36(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $12,%r11d + movl %r9d,%r12d + pinsrw $4,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 40(%r15),%r10d + addb %dl,%al + movl 44(%rsi),%ebx + addl $4294925233,%r10d + xorl %r9d,%r12d + movzbl %al,%eax + movl %edx,40(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $17,%r10d + movl %r8d,%r12d + pinsrw $5,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 44(%r15),%r9d + addb %dl,%bl + movl 48(%rsi),%eax + addl $2304563134,%r9d + xorl %r8d,%r12d + movzbl %bl,%ebx + movl %edx,44(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $22,%r9d + movl %r11d,%r12d + pinsrw $5,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 48(%r15),%r8d + addb %dl,%al + movl 52(%rsi),%ebx + addl $1804603682,%r8d + xorl %r11d,%r12d + movzbl %al,%eax + movl %edx,48(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $7,%r8d + movl %r10d,%r12d + pinsrw $6,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 52(%r15),%r11d + addb %dl,%bl + movl 56(%rsi),%eax + addl $4254626195,%r11d + xorl %r10d,%r12d + movzbl %bl,%ebx + movl %edx,52(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $12,%r11d + movl %r9d,%r12d + pinsrw $6,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 56(%r15),%r10d + addb %dl,%al + movl 60(%rsi),%ebx + addl $2792965006,%r10d + xorl %r9d,%r12d + movzbl %al,%eax + movl %edx,56(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $17,%r10d + movl %r8d,%r12d + pinsrw $7,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movdqu (%r13),%xmm2 + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 60(%r15),%r9d + addb %dl,%bl + movl 64(%rsi),%eax + addl $1236535329,%r9d + xorl %r8d,%r12d + movzbl %bl,%ebx + movl %edx,60(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $22,%r9d + movl %r10d,%r12d + pinsrw $7,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + psllq $8,%xmm1 + pxor %xmm0,%xmm2 + pxor %xmm1,%xmm2 + pxor %xmm0,%xmm0 + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 4(%r15),%r8d + addb %dl,%al + movl 68(%rsi),%ebx + addl $4129170786,%r8d + xorl %r10d,%r12d + movzbl %al,%eax + movl %edx,64(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $5,%r8d + movl %r9d,%r12d + movd (%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + pxor %xmm1,%xmm1 + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 24(%r15),%r11d + addb %dl,%bl + movl 72(%rsi),%eax + addl $3225465664,%r11d + xorl %r9d,%r12d + movzbl %bl,%ebx + movl %edx,68(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $9,%r11d + movl %r8d,%r12d + movd (%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 44(%r15),%r10d + addb %dl,%al + movl 76(%rsi),%ebx + addl $643717713,%r10d + xorl %r8d,%r12d + movzbl %al,%eax + movl %edx,72(%rsi) + 
addl %r12d,%r10d + addb %bl,%cl + roll $14,%r10d + movl %r11d,%r12d + pinsrw $1,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 0(%r15),%r9d + addb %dl,%bl + movl 80(%rsi),%eax + addl $3921069994,%r9d + xorl %r11d,%r12d + movzbl %bl,%ebx + movl %edx,76(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $20,%r9d + movl %r10d,%r12d + pinsrw $1,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 20(%r15),%r8d + addb %dl,%al + movl 84(%rsi),%ebx + addl $3593408605,%r8d + xorl %r10d,%r12d + movzbl %al,%eax + movl %edx,80(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $5,%r8d + movl %r9d,%r12d + pinsrw $2,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 40(%r15),%r11d + addb %dl,%bl + movl 88(%rsi),%eax + addl $38016083,%r11d + xorl %r9d,%r12d + movzbl %bl,%ebx + movl %edx,84(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $9,%r11d + movl %r8d,%r12d + pinsrw $2,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 60(%r15),%r10d + addb %dl,%al + movl 92(%rsi),%ebx + addl $3634488961,%r10d + xorl %r8d,%r12d + movzbl %al,%eax + movl %edx,88(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $14,%r10d + movl %r11d,%r12d + pinsrw $3,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 16(%r15),%r9d + addb %dl,%bl + movl 96(%rsi),%eax + addl $3889429448,%r9d + xorl %r11d,%r12d + movzbl %bl,%ebx + movl %edx,92(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $20,%r9d + movl %r10d,%r12d + pinsrw $3,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 36(%r15),%r8d + addb %dl,%al + movl 100(%rsi),%ebx + addl $568446438,%r8d + xorl %r10d,%r12d + movzbl %al,%eax + movl %edx,96(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $5,%r8d + movl %r9d,%r12d + pinsrw $4,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 56(%r15),%r11d + addb %dl,%bl + movl 104(%rsi),%eax + addl $3275163606,%r11d + xorl %r9d,%r12d + movzbl %bl,%ebx + movl %edx,100(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $9,%r11d + movl %r8d,%r12d + pinsrw $4,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 12(%r15),%r10d + addb %dl,%al + movl 108(%rsi),%ebx + addl $4107603335,%r10d + xorl %r8d,%r12d + movzbl %al,%eax + movl %edx,104(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $14,%r10d + movl %r11d,%r12d + pinsrw $5,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 32(%r15),%r9d + addb %dl,%bl + movl 112(%rsi),%eax + addl $1163531501,%r9d + xorl %r11d,%r12d + movzbl %bl,%ebx + movl %edx,108(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $20,%r9d + movl %r10d,%r12d + pinsrw $5,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r11d,%r12d + addl 52(%r15),%r8d + addb %dl,%al + movl 116(%rsi),%ebx + addl $2850285829,%r8d + xorl %r10d,%r12d + movzbl %al,%eax + movl %edx,112(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $5,%r8d + movl 
%r9d,%r12d + pinsrw $6,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r10d,%r12d + addl 8(%r15),%r11d + addb %dl,%bl + movl 120(%rsi),%eax + addl $4243563512,%r11d + xorl %r9d,%r12d + movzbl %bl,%ebx + movl %edx,116(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $9,%r11d + movl %r8d,%r12d + pinsrw $6,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + andl %r9d,%r12d + addl 28(%r15),%r10d + addb %dl,%al + movl 124(%rsi),%ebx + addl $1735328473,%r10d + xorl %r8d,%r12d + movzbl %al,%eax + movl %edx,120(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $14,%r10d + movl %r11d,%r12d + pinsrw $7,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movdqu 16(%r13),%xmm3 + addb $32,%bpl + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + andl %r8d,%r12d + addl 48(%r15),%r9d + addb %dl,%bl + movl 0(%rdi,%rbp,4),%eax + addl $2368359562,%r9d + xorl %r11d,%r12d + movzbl %bl,%ebx + movl %edx,124(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $20,%r9d + movl %r11d,%r12d + pinsrw $7,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movq %rcx,%rsi + xorq %rcx,%rcx + movb %sil,%cl + leaq (%rdi,%rbp,4),%rsi + psllq $8,%xmm1 + pxor %xmm0,%xmm3 + pxor %xmm1,%xmm3 + pxor %xmm0,%xmm0 + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r9d,%r12d + addl 20(%r15),%r8d + addb %dl,%al + movl 4(%rsi),%ebx + addl $4294588738,%r8d + movzbl %al,%eax + addl %r12d,%r8d + movl %edx,0(%rsi) + addb %bl,%cl + roll $4,%r8d + movl %r10d,%r12d + movd (%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + pxor %xmm1,%xmm1 + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r8d,%r12d + addl 32(%r15),%r11d + addb %dl,%bl + movl 8(%rsi),%eax + addl $2272392833,%r11d + movzbl %bl,%ebx + addl %r12d,%r11d + movl %edx,4(%rsi) + addb %al,%cl + roll $11,%r11d + movl %r9d,%r12d + movd (%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r11d,%r12d + addl 44(%r15),%r10d + addb %dl,%al + movl 12(%rsi),%ebx + addl $1839030562,%r10d + movzbl %al,%eax + addl %r12d,%r10d + movl %edx,8(%rsi) + addb %bl,%cl + roll $16,%r10d + movl %r8d,%r12d + pinsrw $1,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r10d,%r12d + addl 56(%r15),%r9d + addb %dl,%bl + movl 16(%rsi),%eax + addl $4259657740,%r9d + movzbl %bl,%ebx + addl %r12d,%r9d + movl %edx,12(%rsi) + addb %al,%cl + roll $23,%r9d + movl %r11d,%r12d + pinsrw $1,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r9d,%r12d + addl 4(%r15),%r8d + addb %dl,%al + movl 20(%rsi),%ebx + addl $2763975236,%r8d + movzbl %al,%eax + addl %r12d,%r8d + movl %edx,16(%rsi) + addb %bl,%cl + roll $4,%r8d + movl %r10d,%r12d + pinsrw $2,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r8d,%r12d + addl 16(%r15),%r11d + addb %dl,%bl + movl 24(%rsi),%eax + addl $1272893353,%r11d + movzbl %bl,%ebx + addl %r12d,%r11d + movl %edx,20(%rsi) + addb %al,%cl + roll $11,%r11d + movl %r9d,%r12d + pinsrw $2,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r11d,%r12d + addl 28(%r15),%r10d + addb %dl,%al + movl 28(%rsi),%ebx + addl $4139469664,%r10d + movzbl %al,%eax + addl %r12d,%r10d + movl %edx,24(%rsi) + addb %bl,%cl + roll $16,%r10d 
+ movl %r8d,%r12d + pinsrw $3,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r10d,%r12d + addl 40(%r15),%r9d + addb %dl,%bl + movl 32(%rsi),%eax + addl $3200236656,%r9d + movzbl %bl,%ebx + addl %r12d,%r9d + movl %edx,28(%rsi) + addb %al,%cl + roll $23,%r9d + movl %r11d,%r12d + pinsrw $3,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r9d,%r12d + addl 52(%r15),%r8d + addb %dl,%al + movl 36(%rsi),%ebx + addl $681279174,%r8d + movzbl %al,%eax + addl %r12d,%r8d + movl %edx,32(%rsi) + addb %bl,%cl + roll $4,%r8d + movl %r10d,%r12d + pinsrw $4,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r8d,%r12d + addl 0(%r15),%r11d + addb %dl,%bl + movl 40(%rsi),%eax + addl $3936430074,%r11d + movzbl %bl,%ebx + addl %r12d,%r11d + movl %edx,36(%rsi) + addb %al,%cl + roll $11,%r11d + movl %r9d,%r12d + pinsrw $4,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r11d,%r12d + addl 12(%r15),%r10d + addb %dl,%al + movl 44(%rsi),%ebx + addl $3572445317,%r10d + movzbl %al,%eax + addl %r12d,%r10d + movl %edx,40(%rsi) + addb %bl,%cl + roll $16,%r10d + movl %r8d,%r12d + pinsrw $5,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r10d,%r12d + addl 24(%r15),%r9d + addb %dl,%bl + movl 48(%rsi),%eax + addl $76029189,%r9d + movzbl %bl,%ebx + addl %r12d,%r9d + movl %edx,44(%rsi) + addb %al,%cl + roll $23,%r9d + movl %r11d,%r12d + pinsrw $5,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r9d,%r12d + addl 36(%r15),%r8d + addb %dl,%al + movl 52(%rsi),%ebx + addl $3654602809,%r8d + movzbl %al,%eax + addl %r12d,%r8d + movl %edx,48(%rsi) + addb %bl,%cl + roll $4,%r8d + movl %r10d,%r12d + pinsrw $6,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r8d,%r12d + addl 48(%r15),%r11d + addb %dl,%bl + movl 56(%rsi),%eax + addl $3873151461,%r11d + movzbl %bl,%ebx + addl %r12d,%r11d + movl %edx,52(%rsi) + addb %al,%cl + roll $11,%r11d + movl %r9d,%r12d + pinsrw $6,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %eax,(%rdi,%rcx,4) + xorl %r11d,%r12d + addl 60(%r15),%r10d + addb %dl,%al + movl 60(%rsi),%ebx + addl $530742520,%r10d + movzbl %al,%eax + addl %r12d,%r10d + movl %edx,56(%rsi) + addb %bl,%cl + roll $16,%r10d + movl %r8d,%r12d + pinsrw $7,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movdqu 32(%r13),%xmm4 + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %ebx,(%rdi,%rcx,4) + xorl %r10d,%r12d + addl 8(%r15),%r9d + addb %dl,%bl + movl 64(%rsi),%eax + addl $3299628645,%r9d + movzbl %bl,%ebx + addl %r12d,%r9d + movl %edx,60(%rsi) + addb %al,%cl + roll $23,%r9d + movl $-1,%r12d + pinsrw $7,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + psllq $8,%xmm1 + pxor %xmm0,%xmm4 + pxor %xmm1,%xmm4 + pxor %xmm0,%xmm0 + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r9d,%r12d + addl 0(%r15),%r8d + addb %dl,%al + movl 68(%rsi),%ebx + addl $4096336452,%r8d + movzbl %al,%eax + xorl %r10d,%r12d + movl %edx,64(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $6,%r8d + movl $-1,%r12d + movd (%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + pxor %xmm1,%xmm1 + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl 
%ebx,(%rdi,%rcx,4) + orl %r8d,%r12d + addl 28(%r15),%r11d + addb %dl,%bl + movl 72(%rsi),%eax + addl $1126891415,%r11d + movzbl %bl,%ebx + xorl %r9d,%r12d + movl %edx,68(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $10,%r11d + movl $-1,%r12d + movd (%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r11d,%r12d + addl 56(%r15),%r10d + addb %dl,%al + movl 76(%rsi),%ebx + addl $2878612391,%r10d + movzbl %al,%eax + xorl %r8d,%r12d + movl %edx,72(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $15,%r10d + movl $-1,%r12d + pinsrw $1,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r10d,%r12d + addl 20(%r15),%r9d + addb %dl,%bl + movl 80(%rsi),%eax + addl $4237533241,%r9d + movzbl %bl,%ebx + xorl %r11d,%r12d + movl %edx,76(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $21,%r9d + movl $-1,%r12d + pinsrw $1,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r9d,%r12d + addl 48(%r15),%r8d + addb %dl,%al + movl 84(%rsi),%ebx + addl $1700485571,%r8d + movzbl %al,%eax + xorl %r10d,%r12d + movl %edx,80(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $6,%r8d + movl $-1,%r12d + pinsrw $2,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r8d,%r12d + addl 12(%r15),%r11d + addb %dl,%bl + movl 88(%rsi),%eax + addl $2399980690,%r11d + movzbl %bl,%ebx + xorl %r9d,%r12d + movl %edx,84(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $10,%r11d + movl $-1,%r12d + pinsrw $2,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r11d,%r12d + addl 40(%r15),%r10d + addb %dl,%al + movl 92(%rsi),%ebx + addl $4293915773,%r10d + movzbl %al,%eax + xorl %r8d,%r12d + movl %edx,88(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $15,%r10d + movl $-1,%r12d + pinsrw $3,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r10d,%r12d + addl 4(%r15),%r9d + addb %dl,%bl + movl 96(%rsi),%eax + addl $2240044497,%r9d + movzbl %bl,%ebx + xorl %r11d,%r12d + movl %edx,92(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $21,%r9d + movl $-1,%r12d + pinsrw $3,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r9d,%r12d + addl 32(%r15),%r8d + addb %dl,%al + movl 100(%rsi),%ebx + addl $1873313359,%r8d + movzbl %al,%eax + xorl %r10d,%r12d + movl %edx,96(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $6,%r8d + movl $-1,%r12d + pinsrw $4,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r8d,%r12d + addl 60(%r15),%r11d + addb %dl,%bl + movl 104(%rsi),%eax + addl $4264355552,%r11d + movzbl %bl,%ebx + xorl %r9d,%r12d + movl %edx,100(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $10,%r11d + movl $-1,%r12d + pinsrw $4,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r11d,%r12d + addl 24(%r15),%r10d + addb %dl,%al + movl 108(%rsi),%ebx + addl $2734768916,%r10d + movzbl %al,%eax + xorl %r8d,%r12d + movl %edx,104(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $15,%r10d + movl $-1,%r12d + pinsrw $5,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r10d,%r12d + addl 52(%r15),%r9d + addb %dl,%bl + movl 
112(%rsi),%eax + addl $1309151649,%r9d + movzbl %bl,%ebx + xorl %r11d,%r12d + movl %edx,108(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $21,%r9d + movl $-1,%r12d + pinsrw $5,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movl (%rdi,%rcx,4),%edx + xorl %r11d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r9d,%r12d + addl 16(%r15),%r8d + addb %dl,%al + movl 116(%rsi),%ebx + addl $4149444226,%r8d + movzbl %al,%eax + xorl %r10d,%r12d + movl %edx,112(%rsi) + addl %r12d,%r8d + addb %bl,%cl + roll $6,%r8d + movl $-1,%r12d + pinsrw $6,(%rdi,%rax,4),%xmm0 + + addl %r9d,%r8d + movl (%rdi,%rcx,4),%edx + xorl %r10d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r8d,%r12d + addl 44(%r15),%r11d + addb %dl,%bl + movl 120(%rsi),%eax + addl $3174756917,%r11d + movzbl %bl,%ebx + xorl %r9d,%r12d + movl %edx,116(%rsi) + addl %r12d,%r11d + addb %al,%cl + roll $10,%r11d + movl $-1,%r12d + pinsrw $6,(%rdi,%rbx,4),%xmm1 + + addl %r8d,%r11d + movl (%rdi,%rcx,4),%edx + xorl %r9d,%r12d + movl %eax,(%rdi,%rcx,4) + orl %r11d,%r12d + addl 8(%r15),%r10d + addb %dl,%al + movl 124(%rsi),%ebx + addl $718787259,%r10d + movzbl %al,%eax + xorl %r8d,%r12d + movl %edx,120(%rsi) + addl %r12d,%r10d + addb %bl,%cl + roll $15,%r10d + movl $-1,%r12d + pinsrw $7,(%rdi,%rax,4),%xmm0 + + addl %r11d,%r10d + movdqu 48(%r13),%xmm5 + addb $32,%bpl + movl (%rdi,%rcx,4),%edx + xorl %r8d,%r12d + movl %ebx,(%rdi,%rcx,4) + orl %r10d,%r12d + addl 36(%r15),%r9d + addb %dl,%bl + movl 0(%rdi,%rbp,4),%eax + addl $3951481745,%r9d + movzbl %bl,%ebx + xorl %r11d,%r12d + movl %edx,124(%rsi) + addl %r12d,%r9d + addb %al,%cl + roll $21,%r9d + movl $-1,%r12d + pinsrw $7,(%rdi,%rbx,4),%xmm1 + + addl %r10d,%r9d + movq %rbp,%rsi + xorq %rbp,%rbp + movb %sil,%bpl + movq %rcx,%rsi + xorq %rcx,%rcx + movb %sil,%cl + leaq (%rdi,%rbp,4),%rsi + psllq $8,%xmm1 + pxor %xmm0,%xmm5 + pxor %xmm1,%xmm5 + addl 0(%rsp),%r8d + addl 4(%rsp),%r9d + addl 8(%rsp),%r10d + addl 12(%rsp),%r11d + + movdqu %xmm2,(%r14,%r13,1) + movdqu %xmm3,16(%r14,%r13,1) + movdqu %xmm4,32(%r14,%r13,1) + movdqu %xmm5,48(%r14,%r13,1) + leaq 64(%r15),%r15 + leaq 64(%r13),%r13 + cmpq 16(%rsp),%r15 + jb .Loop + + movq 24(%rsp),%r12 + subb %al,%cl + movl %r8d,0(%r12) + movl %r9d,4(%r12) + movl %r10d,8(%r12) + movl %r11d,12(%r12) + subb $1,%bpl + movl %ebp,-8(%rdi) + movl %ecx,-4(%rdi) + + movq 40(%rsp),%r15 + movq 48(%rsp),%r14 + movq 56(%rsp),%r13 + movq 64(%rsp),%r12 + movq 72(%rsp),%rbp + movq 80(%rsp),%rbx + leaq 88(%rsp),%rsp +.Lepilogue: +.Labort: + .byte 0xf3,0xc3 +.size rc4_md5_enc,.-rc4_md5_enc diff --git a/secure/lib/libcrypto/asm/rc4-x86_64.s b/secure/lib/libcrypto/asm/rc4-x86_64.s index d018ffe1e7..af161582aa 100644 --- a/secure/lib/libcrypto/asm/rc4-x86_64.s +++ b/secure/lib/libcrypto/asm/rc4-x86_64.s @@ -1,5 +1,6 @@ .text + .globl RC4 .type RC4,@function .align 16 @@ -11,308 +12,503 @@ RC4: orq %rsi,%rsi pushq %r12 pushq %r13 .Lprologue: + movq %rsi,%r11 + movq %rdx,%r12 + movq %rcx,%r13 + xorq %r10,%r10 + xorq %rcx,%rcx - addq $8,%rdi - movl -8(%rdi),%r8d - movl -4(%rdi),%r12d + leaq 8(%rdi),%rdi + movb -8(%rdi),%r10b + movb -4(%rdi),%cl cmpl $-1,256(%rdi) je .LRC4_CHAR - incb %r8b - movl (%rdi,%r8,4),%r9d - testq $-8,%rsi - jz .Lloop1 - jmp .Lloop8 -.align 16 -.Lloop8: - addb %r9b,%r12b - movq %r8,%r10 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax - incb %r10b - movl (%rdi,%r10,4),%r11d - cmpq %r10,%r12 - movl %r9d,(%rdi,%r12,4) - cmoveq %r9,%r11 - movl %r13d,(%rdi,%r8,4) - addb %r9b,%r13b - movb (%rdi,%r13,4),%al - addb %r11b,%r12b - movq %r10,%r8 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax - incb %r8b - movl 
(%rdi,%r8,4),%r9d - cmpq %r8,%r12 - movl %r11d,(%rdi,%r12,4) - cmoveq %r11,%r9 - movl %r13d,(%rdi,%r10,4) - addb %r11b,%r13b - movb (%rdi,%r13,4),%al - addb %r9b,%r12b - movq %r8,%r10 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax + movl OPENSSL_ia32cap_P(%rip),%r8d + xorq %rbx,%rbx incb %r10b - movl (%rdi,%r10,4),%r11d - cmpq %r10,%r12 - movl %r9d,(%rdi,%r12,4) - cmoveq %r9,%r11 - movl %r13d,(%rdi,%r8,4) - addb %r9b,%r13b - movb (%rdi,%r13,4),%al - addb %r11b,%r12b - movq %r10,%r8 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax - incb %r8b - movl (%rdi,%r8,4),%r9d - cmpq %r8,%r12 - movl %r11d,(%rdi,%r12,4) - cmoveq %r11,%r9 - movl %r13d,(%rdi,%r10,4) - addb %r11b,%r13b - movb (%rdi,%r13,4),%al - addb %r9b,%r12b - movq %r8,%r10 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax + subq %r10,%rbx + subq %r12,%r13 + movl (%rdi,%r10,4),%eax + testq $-16,%r11 + jz .Lloop1 + btl $30,%r8d + jc .Lintel + andq $7,%rbx + leaq 1(%r10),%rsi + jz .Loop8 + subq %rbx,%r11 +.Loop8_warmup: + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl %edx,(%rdi,%r10,4) + addb %dl,%al incb %r10b - movl (%rdi,%r10,4),%r11d - cmpq %r10,%r12 - movl %r9d,(%rdi,%r12,4) - cmoveq %r9,%r11 - movl %r13d,(%rdi,%r8,4) - addb %r9b,%r13b - movb (%rdi,%r13,4),%al - addb %r11b,%r12b - movq %r10,%r8 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax - incb %r8b - movl (%rdi,%r8,4),%r9d - cmpq %r8,%r12 - movl %r11d,(%rdi,%r12,4) - cmoveq %r11,%r9 - movl %r13d,(%rdi,%r10,4) - addb %r11b,%r13b - movb (%rdi,%r13,4),%al - addb %r9b,%r12b - movq %r8,%r10 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax + movl (%rdi,%rax,4),%edx + movl (%rdi,%r10,4),%eax + xorb (%r12),%dl + movb %dl,(%r13,%r12,1) + leaq 1(%r12),%r12 + decq %rbx + jnz .Loop8_warmup + + leaq 1(%r10),%rsi + jmp .Loop8 +.align 16 +.Loop8: + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl 0(%rdi,%rsi,4),%ebx + rorq $8,%r8 + movl %edx,0(%rdi,%r10,4) + addb %al,%dl + movb (%rdi,%rdx,4),%r8b + addb %bl,%cl + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + movl 4(%rdi,%rsi,4),%eax + rorq $8,%r8 + movl %edx,4(%rdi,%r10,4) + addb %bl,%dl + movb (%rdi,%rdx,4),%r8b + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl 8(%rdi,%rsi,4),%ebx + rorq $8,%r8 + movl %edx,8(%rdi,%r10,4) + addb %al,%dl + movb (%rdi,%rdx,4),%r8b + addb %bl,%cl + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + movl 12(%rdi,%rsi,4),%eax + rorq $8,%r8 + movl %edx,12(%rdi,%r10,4) + addb %bl,%dl + movb (%rdi,%rdx,4),%r8b + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl 16(%rdi,%rsi,4),%ebx + rorq $8,%r8 + movl %edx,16(%rdi,%r10,4) + addb %al,%dl + movb (%rdi,%rdx,4),%r8b + addb %bl,%cl + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + movl 20(%rdi,%rsi,4),%eax + rorq $8,%r8 + movl %edx,20(%rdi,%r10,4) + addb %bl,%dl + movb (%rdi,%rdx,4),%r8b + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl 24(%rdi,%rsi,4),%ebx + rorq $8,%r8 + movl %edx,24(%rdi,%r10,4) + addb %al,%dl + movb (%rdi,%rdx,4),%r8b + addb $8,%sil + addb %bl,%cl + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + movl -4(%rdi,%rsi,4),%eax + rorq $8,%r8 + movl %edx,28(%rdi,%r10,4) + addb %bl,%dl + movb (%rdi,%rdx,4),%r8b + addb $8,%r10b + rorq $8,%r8 + subq $8,%r11 + + xorq (%r12),%r8 + movq %r8,(%r13,%r12,1) + leaq 8(%r12),%r12 + + testq $-8,%r11 + jnz .Loop8 + cmpq $0,%r11 + jne .Lloop1 + jmp .Lexit + +.align 16 +.Lintel: + testq $-32,%r11 + jz .Lloop1 + andq $15,%rbx + jz .Loop16_is_hot + subq %rbx,%r11 +.Loop16_warmup: + addb %al,%cl + movl (%rdi,%rcx,4),%edx + 
movl %eax,(%rdi,%rcx,4) + movl %edx,(%rdi,%r10,4) + addb %dl,%al incb %r10b - movl (%rdi,%r10,4),%r11d - cmpq %r10,%r12 - movl %r9d,(%rdi,%r12,4) - cmoveq %r9,%r11 - movl %r13d,(%rdi,%r8,4) - addb %r9b,%r13b - movb (%rdi,%r13,4),%al - addb %r11b,%r12b - movq %r10,%r8 - movl (%rdi,%r12,4),%r13d - rorq $8,%rax - incb %r8b - movl (%rdi,%r8,4),%r9d - cmpq %r8,%r12 - movl %r11d,(%rdi,%r12,4) - cmoveq %r11,%r9 - movl %r13d,(%rdi,%r10,4) - addb %r11b,%r13b - movb (%rdi,%r13,4),%al - rorq $8,%rax - subq $8,%rsi + movl (%rdi,%rax,4),%edx + movl (%rdi,%r10,4),%eax + xorb (%r12),%dl + movb %dl,(%r13,%r12,1) + leaq 1(%r12),%r12 + decq %rbx + jnz .Loop16_warmup - xorq (%rdx),%rax - addq $8,%rdx - movq %rax,(%rcx) - addq $8,%rcx + movq %rcx,%rbx + xorq %rcx,%rcx + movb %bl,%cl + +.Loop16_is_hot: + leaq (%rdi,%r10,4),%rsi + addb %al,%cl + movl (%rdi,%rcx,4),%edx + pxor %xmm0,%xmm0 + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 4(%rsi),%ebx + movzbl %al,%eax + movl %edx,0(%rsi) + addb %bl,%cl + pinsrw $0,(%rdi,%rax,4),%xmm0 + jmp .Loop16_enter +.align 16 +.Loop16: + addb %al,%cl + movl (%rdi,%rcx,4),%edx + pxor %xmm0,%xmm2 + psllq $8,%xmm1 + pxor %xmm0,%xmm0 + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 4(%rsi),%ebx + movzbl %al,%eax + movl %edx,0(%rsi) + pxor %xmm1,%xmm2 + addb %bl,%cl + pinsrw $0,(%rdi,%rax,4),%xmm0 + movdqu %xmm2,(%r13,%r12,1) + leaq 16(%r12),%r12 +.Loop16_enter: + movl (%rdi,%rcx,4),%edx + pxor %xmm1,%xmm1 + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 8(%rsi),%eax + movzbl %bl,%ebx + movl %edx,4(%rsi) + addb %al,%cl + pinsrw $0,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 12(%rsi),%ebx + movzbl %al,%eax + movl %edx,8(%rsi) + addb %bl,%cl + pinsrw $1,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 16(%rsi),%eax + movzbl %bl,%ebx + movl %edx,12(%rsi) + addb %al,%cl + pinsrw $1,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 20(%rsi),%ebx + movzbl %al,%eax + movl %edx,16(%rsi) + addb %bl,%cl + pinsrw $2,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 24(%rsi),%eax + movzbl %bl,%ebx + movl %edx,20(%rsi) + addb %al,%cl + pinsrw $2,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 28(%rsi),%ebx + movzbl %al,%eax + movl %edx,24(%rsi) + addb %bl,%cl + pinsrw $3,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 32(%rsi),%eax + movzbl %bl,%ebx + movl %edx,28(%rsi) + addb %al,%cl + pinsrw $3,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 36(%rsi),%ebx + movzbl %al,%eax + movl %edx,32(%rsi) + addb %bl,%cl + pinsrw $4,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 40(%rsi),%eax + movzbl %bl,%ebx + movl %edx,36(%rsi) + addb %al,%cl + pinsrw $4,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 44(%rsi),%ebx + movzbl %al,%eax + movl %edx,40(%rsi) + addb %bl,%cl + pinsrw $5,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movl 48(%rsi),%eax + movzbl %bl,%ebx + movl %edx,44(%rsi) + addb %al,%cl + pinsrw $5,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 52(%rsi),%ebx + movzbl %al,%eax + movl %edx,48(%rsi) + addb %bl,%cl + pinsrw $6,(%rdi,%rax,4),%xmm0 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + 
addb %dl,%bl + movl 56(%rsi),%eax + movzbl %bl,%ebx + movl %edx,52(%rsi) + addb %al,%cl + pinsrw $6,(%rdi,%rbx,4),%xmm1 + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + addb %dl,%al + movl 60(%rsi),%ebx + movzbl %al,%eax + movl %edx,56(%rsi) + addb %bl,%cl + pinsrw $7,(%rdi,%rax,4),%xmm0 + addb $16,%r10b + movdqu (%r12),%xmm2 + movl (%rdi,%rcx,4),%edx + movl %ebx,(%rdi,%rcx,4) + addb %dl,%bl + movzbl %bl,%ebx + movl %edx,60(%rsi) + leaq (%rdi,%r10,4),%rsi + pinsrw $7,(%rdi,%rbx,4),%xmm1 + movl (%rsi),%eax + movq %rcx,%rbx + xorq %rcx,%rcx + subq $16,%r11 + movb %bl,%cl + testq $-16,%r11 + jnz .Loop16 - testq $-8,%rsi - jnz .Lloop8 - cmpq $0,%rsi + psllq $8,%xmm1 + pxor %xmm0,%xmm2 + pxor %xmm1,%xmm2 + movdqu %xmm2,(%r13,%r12,1) + leaq 16(%r12),%r12 + + cmpq $0,%r11 jne .Lloop1 jmp .Lexit .align 16 .Lloop1: - addb %r9b,%r12b - movl (%rdi,%r12,4),%r13d - movl %r9d,(%rdi,%r12,4) - movl %r13d,(%rdi,%r8,4) - addb %r13b,%r9b - incb %r8b - movl (%rdi,%r9,4),%r13d - movl (%rdi,%r8,4),%r9d - xorb (%rdx),%r13b - incq %rdx - movb %r13b,(%rcx) - incq %rcx - decq %rsi + addb %al,%cl + movl (%rdi,%rcx,4),%edx + movl %eax,(%rdi,%rcx,4) + movl %edx,(%rdi,%r10,4) + addb %dl,%al + incb %r10b + movl (%rdi,%rax,4),%edx + movl (%rdi,%r10,4),%eax + xorb (%r12),%dl + movb %dl,(%r13,%r12,1) + leaq 1(%r12),%r12 + decq %r11 jnz .Lloop1 jmp .Lexit .align 16 .LRC4_CHAR: - addb $1,%r8b - movzbl (%rdi,%r8,1),%r9d - testq $-8,%rsi + addb $1,%r10b + movzbl (%rdi,%r10,1),%eax + testq $-8,%r11 jz .Lcloop1 - cmpl $0,260(%rdi) - jnz .Lcloop1 jmp .Lcloop8 .align 16 .Lcloop8: - movl (%rdx),%eax - movl 4(%rdx),%ebx - addb %r9b,%r12b - leaq 1(%r8),%r10 - movzbl (%rdi,%r12,1),%r13d - movzbl %r10b,%r10d - movzbl (%rdi,%r10,1),%r11d - movb %r9b,(%rdi,%r12,1) - cmpq %r10,%r12 - movb %r13b,(%rdi,%r8,1) + movl (%r12),%r8d + movl 4(%r12),%r9d + addb %al,%cl + leaq 1(%r10),%rsi + movzbl (%rdi,%rcx,1),%edx + movzbl %sil,%esi + movzbl (%rdi,%rsi,1),%ebx + movb %al,(%rdi,%rcx,1) + cmpq %rsi,%rcx + movb %dl,(%rdi,%r10,1) jne .Lcmov0 - movq %r9,%r11 + movq %rax,%rbx .Lcmov0: - addb %r9b,%r13b - xorb (%rdi,%r13,1),%al - rorl $8,%eax - addb %r11b,%r12b - leaq 1(%r10),%r8 - movzbl (%rdi,%r12,1),%r13d - movzbl %r8b,%r8d - movzbl (%rdi,%r8,1),%r9d - movb %r11b,(%rdi,%r12,1) - cmpq %r8,%r12 - movb %r13b,(%rdi,%r10,1) + addb %al,%dl + xorb (%rdi,%rdx,1),%r8b + rorl $8,%r8d + addb %bl,%cl + leaq 1(%rsi),%r10 + movzbl (%rdi,%rcx,1),%edx + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%eax + movb %bl,(%rdi,%rcx,1) + cmpq %r10,%rcx + movb %dl,(%rdi,%rsi,1) jne .Lcmov1 - movq %r11,%r9 + movq %rbx,%rax .Lcmov1: - addb %r11b,%r13b - xorb (%rdi,%r13,1),%al - rorl $8,%eax - addb %r9b,%r12b - leaq 1(%r8),%r10 - movzbl (%rdi,%r12,1),%r13d - movzbl %r10b,%r10d - movzbl (%rdi,%r10,1),%r11d - movb %r9b,(%rdi,%r12,1) - cmpq %r10,%r12 - movb %r13b,(%rdi,%r8,1) + addb %bl,%dl + xorb (%rdi,%rdx,1),%r8b + rorl $8,%r8d + addb %al,%cl + leaq 1(%r10),%rsi + movzbl (%rdi,%rcx,1),%edx + movzbl %sil,%esi + movzbl (%rdi,%rsi,1),%ebx + movb %al,(%rdi,%rcx,1) + cmpq %rsi,%rcx + movb %dl,(%rdi,%r10,1) jne .Lcmov2 - movq %r9,%r11 + movq %rax,%rbx .Lcmov2: - addb %r9b,%r13b - xorb (%rdi,%r13,1),%al - rorl $8,%eax - addb %r11b,%r12b - leaq 1(%r10),%r8 - movzbl (%rdi,%r12,1),%r13d - movzbl %r8b,%r8d - movzbl (%rdi,%r8,1),%r9d - movb %r11b,(%rdi,%r12,1) - cmpq %r8,%r12 - movb %r13b,(%rdi,%r10,1) + addb %al,%dl + xorb (%rdi,%rdx,1),%r8b + rorl $8,%r8d + addb %bl,%cl + leaq 1(%rsi),%r10 + movzbl (%rdi,%rcx,1),%edx + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%eax + movb 
%bl,(%rdi,%rcx,1) + cmpq %r10,%rcx + movb %dl,(%rdi,%rsi,1) jne .Lcmov3 - movq %r11,%r9 + movq %rbx,%rax .Lcmov3: - addb %r11b,%r13b - xorb (%rdi,%r13,1),%al - rorl $8,%eax - addb %r9b,%r12b - leaq 1(%r8),%r10 - movzbl (%rdi,%r12,1),%r13d - movzbl %r10b,%r10d - movzbl (%rdi,%r10,1),%r11d - movb %r9b,(%rdi,%r12,1) - cmpq %r10,%r12 - movb %r13b,(%rdi,%r8,1) + addb %bl,%dl + xorb (%rdi,%rdx,1),%r8b + rorl $8,%r8d + addb %al,%cl + leaq 1(%r10),%rsi + movzbl (%rdi,%rcx,1),%edx + movzbl %sil,%esi + movzbl (%rdi,%rsi,1),%ebx + movb %al,(%rdi,%rcx,1) + cmpq %rsi,%rcx + movb %dl,(%rdi,%r10,1) jne .Lcmov4 - movq %r9,%r11 + movq %rax,%rbx .Lcmov4: - addb %r9b,%r13b - xorb (%rdi,%r13,1),%bl - rorl $8,%ebx - addb %r11b,%r12b - leaq 1(%r10),%r8 - movzbl (%rdi,%r12,1),%r13d - movzbl %r8b,%r8d - movzbl (%rdi,%r8,1),%r9d - movb %r11b,(%rdi,%r12,1) - cmpq %r8,%r12 - movb %r13b,(%rdi,%r10,1) + addb %al,%dl + xorb (%rdi,%rdx,1),%r9b + rorl $8,%r9d + addb %bl,%cl + leaq 1(%rsi),%r10 + movzbl (%rdi,%rcx,1),%edx + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%eax + movb %bl,(%rdi,%rcx,1) + cmpq %r10,%rcx + movb %dl,(%rdi,%rsi,1) jne .Lcmov5 - movq %r11,%r9 + movq %rbx,%rax .Lcmov5: - addb %r11b,%r13b - xorb (%rdi,%r13,1),%bl - rorl $8,%ebx - addb %r9b,%r12b - leaq 1(%r8),%r10 - movzbl (%rdi,%r12,1),%r13d - movzbl %r10b,%r10d - movzbl (%rdi,%r10,1),%r11d - movb %r9b,(%rdi,%r12,1) - cmpq %r10,%r12 - movb %r13b,(%rdi,%r8,1) + addb %bl,%dl + xorb (%rdi,%rdx,1),%r9b + rorl $8,%r9d + addb %al,%cl + leaq 1(%r10),%rsi + movzbl (%rdi,%rcx,1),%edx + movzbl %sil,%esi + movzbl (%rdi,%rsi,1),%ebx + movb %al,(%rdi,%rcx,1) + cmpq %rsi,%rcx + movb %dl,(%rdi,%r10,1) jne .Lcmov6 - movq %r9,%r11 + movq %rax,%rbx .Lcmov6: - addb %r9b,%r13b - xorb (%rdi,%r13,1),%bl - rorl $8,%ebx - addb %r11b,%r12b - leaq 1(%r10),%r8 - movzbl (%rdi,%r12,1),%r13d - movzbl %r8b,%r8d - movzbl (%rdi,%r8,1),%r9d - movb %r11b,(%rdi,%r12,1) - cmpq %r8,%r12 - movb %r13b,(%rdi,%r10,1) + addb %al,%dl + xorb (%rdi,%rdx,1),%r9b + rorl $8,%r9d + addb %bl,%cl + leaq 1(%rsi),%r10 + movzbl (%rdi,%rcx,1),%edx + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%eax + movb %bl,(%rdi,%rcx,1) + cmpq %r10,%rcx + movb %dl,(%rdi,%rsi,1) jne .Lcmov7 - movq %r11,%r9 + movq %rbx,%rax .Lcmov7: - addb %r11b,%r13b - xorb (%rdi,%r13,1),%bl - rorl $8,%ebx - leaq -8(%rsi),%rsi - movl %eax,(%rcx) - leaq 8(%rdx),%rdx - movl %ebx,4(%rcx) - leaq 8(%rcx),%rcx + addb %bl,%dl + xorb (%rdi,%rdx,1),%r9b + rorl $8,%r9d + leaq -8(%r11),%r11 + movl %r8d,(%r13) + leaq 8(%r12),%r12 + movl %r9d,4(%r13) + leaq 8(%r13),%r13 - testq $-8,%rsi + testq $-8,%r11 jnz .Lcloop8 - cmpq $0,%rsi + cmpq $0,%r11 jne .Lcloop1 jmp .Lexit .align 16 .Lcloop1: - addb %r9b,%r12b - movzbl (%rdi,%r12,1),%r13d - movb %r9b,(%rdi,%r12,1) - movb %r13b,(%rdi,%r8,1) - addb %r9b,%r13b - addb $1,%r8b - movzbl %r13b,%r13d - movzbl %r8b,%r8d - movzbl (%rdi,%r13,1),%r13d - movzbl (%rdi,%r8,1),%r9d - xorb (%rdx),%r13b - leaq 1(%rdx),%rdx - movb %r13b,(%rcx) - leaq 1(%rcx),%rcx - subq $1,%rsi + addb %al,%cl + movzbl %cl,%ecx + movzbl (%rdi,%rcx,1),%edx + movb %al,(%rdi,%rcx,1) + movb %dl,(%rdi,%r10,1) + addb %al,%dl + addb $1,%r10b + movzbl %dl,%edx + movzbl %r10b,%r10d + movzbl (%rdi,%rdx,1),%edx + movzbl (%rdi,%r10,1),%eax + xorb (%r12),%dl + leaq 1(%r12),%r12 + movb %dl,(%r13) + leaq 1(%r13),%r13 + subq $1,%r11 jnz .Lcloop1 jmp .Lexit .align 16 .Lexit: - subb $1,%r8b - movl %r8d,-8(%rdi) - movl %r12d,-4(%rdi) + subb $1,%r10b + movl %r10d,-8(%rdi) + movl %ecx,-4(%rdi) movq (%rsp),%r13 movq 8(%rsp),%r12 @@ -321,11 +517,10 @@ RC4: orq 
%rsi,%rsi .Lepilogue: .byte 0xf3,0xc3 .size RC4,.-RC4 - -.globl RC4_set_key -.type RC4_set_key,@function +.globl private_RC4_set_key +.type private_RC4_set_key,@function .align 16 -RC4_set_key: +private_RC4_set_key: leaq 8(%rdi),%rdi leaq (%rdx,%rsi,1),%rdx negq %rsi @@ -337,11 +532,8 @@ RC4_set_key: movl OPENSSL_ia32cap_P(%rip),%r8d btl $20,%r8d - jnc .Lw1stloop - btl $30,%r8d - setc %r9b - movl %r9d,260(%rdi) - jmp .Lc1stloop + jc .Lc1stloop + jmp .Lw1stloop .align 16 .Lw1stloop: @@ -395,7 +587,7 @@ RC4_set_key: movl %eax,-8(%rdi) movl %eax,-4(%rdi) .byte 0xf3,0xc3 -.size RC4_set_key,.-RC4_set_key +.size private_RC4_set_key,.-private_RC4_set_key .globl RC4_options .type RC4_options,@function @@ -404,18 +596,20 @@ RC4_options: leaq .Lopts(%rip),%rax movl OPENSSL_ia32cap_P(%rip),%edx btl $20,%edx - jnc .Ldone - addq $12,%rax + jc .L8xchar btl $30,%edx jnc .Ldone - addq $13,%rax + addq $25,%rax + .byte 0xf3,0xc3 +.L8xchar: + addq $12,%rax .Ldone: .byte 0xf3,0xc3 .align 64 .Lopts: .byte 114,99,52,40,56,120,44,105,110,116,41,0 .byte 114,99,52,40,56,120,44,99,104,97,114,41,0 -.byte 114,99,52,40,49,120,44,99,104,97,114,41,0 +.byte 114,99,52,40,49,54,120,44,105,110,116,41,0 .byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .size RC4_options,.-RC4_options diff --git a/secure/lib/libcrypto/asm/sha1-586.s b/secure/lib/libcrypto/asm/sha1-586.s index cccb1aba85..75ad8f1cf8 100644 --- a/secure/lib/libcrypto/asm/sha1-586.s +++ b/secure/lib/libcrypto/asm/sha1-586.s @@ -9,16 +9,31 @@ sha1_block_data_order: pushl %ebx pushl %esi pushl %edi + call .L000pic_point +.L000pic_point: + popl %ebp + leal OPENSSL_ia32cap_P,%esi + leal .LK_XX_XX-.L000pic_point(%ebp),%ebp + movl (%esi),%eax + movl 4(%esi),%edx + testl $512,%edx + jz .L001x86 + testl $16777216,%eax + jz .L001x86 + jmp .Lssse3_shortcut +.align 16 +.L001x86: movl 20(%esp),%ebp movl 24(%esp),%esi movl 28(%esp),%eax - subl $64,%esp + subl $76,%esp shll $6,%eax addl %esi,%eax - movl %eax,92(%esp) + movl %eax,104(%esp) movl 16(%ebp),%edi + jmp .L002loop .align 16 -.L000loop: +.L002loop: movl (%esi),%eax movl 4(%esi),%ebx movl 8(%esi),%ecx @@ -67,7 +82,7 @@ sha1_block_data_order: movl %ebx,52(%esp) movl %ecx,56(%esp) movl %edx,60(%esp) - movl %esi,88(%esp) + movl %esi,100(%esp) movl (%ebp),%eax movl 4(%ebp),%ebx movl 8(%ebp),%ecx @@ -78,10 +93,10 @@ sha1_block_data_order: roll $5,%ebp xorl %edx,%esi addl %edi,%ebp - andl %ebx,%esi movl (%esp),%edi - xorl %edx,%esi + andl %ebx,%esi rorl $2,%ebx + xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp @@ -90,10 +105,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp - andl %eax,%edi movl 4(%esp),%edx - xorl %ecx,%edi + andl %eax,%edi rorl $2,%eax + xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp @@ -102,10 +117,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp - andl %esi,%edx movl 8(%esp),%ecx - xorl %ebx,%edx + andl %esi,%edx rorl $2,%esi + xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp @@ -114,10 +129,10 @@ sha1_block_data_order: roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp - andl %edi,%ecx movl 12(%esp),%ebx - xorl %eax,%ecx + andl %edi,%ecx rorl $2,%edi + xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp addl %ecx,%ebp @@ -126,10 +141,10 @@ sha1_block_data_order: roll $5,%ebp xorl %esi,%ebx addl %eax,%ebp - andl %edx,%ebx movl 16(%esp),%eax - xorl %esi,%ebx + andl %edx,%ebx rorl $2,%edx + 
xorl %esi,%ebx leal 1518500249(%ebp,%eax,1),%ebp addl %ebx,%ebp @@ -138,10 +153,10 @@ sha1_block_data_order: roll $5,%ebp xorl %edi,%eax addl %esi,%ebp - andl %ecx,%eax movl 20(%esp),%esi - xorl %edi,%eax + andl %ecx,%eax rorl $2,%ecx + xorl %edi,%eax leal 1518500249(%ebp,%esi,1),%ebp addl %eax,%ebp @@ -150,10 +165,10 @@ sha1_block_data_order: roll $5,%ebp xorl %edx,%esi addl %edi,%ebp - andl %ebx,%esi movl 24(%esp),%edi - xorl %edx,%esi + andl %ebx,%esi rorl $2,%ebx + xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp @@ -162,10 +177,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp - andl %eax,%edi movl 28(%esp),%edx - xorl %ecx,%edi + andl %eax,%edi rorl $2,%eax + xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp @@ -174,10 +189,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp - andl %esi,%edx movl 32(%esp),%ecx - xorl %ebx,%edx + andl %esi,%edx rorl $2,%esi + xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp @@ -186,10 +201,10 @@ sha1_block_data_order: roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp - andl %edi,%ecx movl 36(%esp),%ebx - xorl %eax,%ecx + andl %edi,%ecx rorl $2,%edi + xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp addl %ecx,%ebp @@ -198,10 +213,10 @@ sha1_block_data_order: roll $5,%ebp xorl %esi,%ebx addl %eax,%ebp - andl %edx,%ebx movl 40(%esp),%eax - xorl %esi,%ebx + andl %edx,%ebx rorl $2,%edx + xorl %esi,%ebx leal 1518500249(%ebp,%eax,1),%ebp addl %ebx,%ebp @@ -210,10 +225,10 @@ sha1_block_data_order: roll $5,%ebp xorl %edi,%eax addl %esi,%ebp - andl %ecx,%eax movl 44(%esp),%esi - xorl %edi,%eax + andl %ecx,%eax rorl $2,%ecx + xorl %edi,%eax leal 1518500249(%ebp,%esi,1),%ebp addl %eax,%ebp @@ -222,10 +237,10 @@ sha1_block_data_order: roll $5,%ebp xorl %edx,%esi addl %edi,%ebp - andl %ebx,%esi movl 48(%esp),%edi - xorl %edx,%esi + andl %ebx,%esi rorl $2,%ebx + xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp @@ -234,10 +249,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp - andl %eax,%edi movl 52(%esp),%edx - xorl %ecx,%edi + andl %eax,%edi rorl $2,%eax + xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp @@ -246,10 +261,10 @@ sha1_block_data_order: roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp - andl %esi,%edx movl 56(%esp),%ecx - xorl %ebx,%edx + andl %esi,%edx rorl $2,%esi + xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp @@ -258,1162 +273,1099 @@ sha1_block_data_order: roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp - andl %edi,%ecx movl 60(%esp),%ebx - xorl %eax,%ecx + andl %edi,%ecx rorl $2,%edi + xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp + movl (%esp),%ebx addl %ebp,%ecx - movl (%esp),%ebx movl %edi,%ebp xorl 8(%esp),%ebx xorl %esi,%ebp xorl 32(%esp),%ebx andl %edx,%ebp - rorl $2,%edx xorl 52(%esp),%ebx roll $1,%ebx xorl %esi,%ebp + addl %ebp,%eax + movl %ecx,%ebp + rorl $2,%edx movl %ebx,(%esp) + roll $5,%ebp leal 1518500249(%ebx,%eax,1),%ebx - movl %ecx,%eax - roll $5,%eax + movl 4(%esp),%eax addl %ebp,%ebx - addl %eax,%ebx - movl 4(%esp),%eax movl %edx,%ebp xorl 12(%esp),%eax xorl %edi,%ebp xorl 36(%esp),%eax andl %ecx,%ebp - rorl $2,%ecx xorl 56(%esp),%eax roll $1,%eax xorl %edi,%ebp + addl %ebp,%esi + movl %ebx,%ebp + rorl $2,%ecx movl %eax,4(%esp) + roll $5,%ebp leal 1518500249(%eax,%esi,1),%eax - movl %ebx,%esi - roll $5,%esi + movl 8(%esp),%esi addl %ebp,%eax - addl %esi,%eax - movl 8(%esp),%esi movl %ecx,%ebp xorl 16(%esp),%esi xorl %edx,%ebp xorl 40(%esp),%esi andl %ebx,%ebp - rorl $2,%ebx xorl 
60(%esp),%esi roll $1,%esi xorl %edx,%ebp + addl %ebp,%edi + movl %eax,%ebp + rorl $2,%ebx movl %esi,8(%esp) + roll $5,%ebp leal 1518500249(%esi,%edi,1),%esi - movl %eax,%edi - roll $5,%edi + movl 12(%esp),%edi addl %ebp,%esi - addl %edi,%esi - movl 12(%esp),%edi movl %ebx,%ebp xorl 20(%esp),%edi xorl %ecx,%ebp xorl 44(%esp),%edi andl %eax,%ebp - rorl $2,%eax xorl (%esp),%edi roll $1,%edi xorl %ecx,%ebp + addl %ebp,%edx + movl %esi,%ebp + rorl $2,%eax movl %edi,12(%esp) + roll $5,%ebp leal 1518500249(%edi,%edx,1),%edi - movl %esi,%edx - roll $5,%edx + movl 16(%esp),%edx addl %ebp,%edi - addl %edx,%edi movl %esi,%ebp - movl 16(%esp),%edx - rorl $2,%esi xorl 24(%esp),%edx xorl %eax,%ebp xorl 48(%esp),%edx xorl %ebx,%ebp xorl 4(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,16(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 1859775393(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 1859775393(%edx,%ecx,1),%edx + movl 20(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 20(%esp),%ecx - rorl $2,%edi xorl 28(%esp),%ecx xorl %esi,%ebp xorl 52(%esp),%ecx xorl %eax,%ebp xorl 8(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,20(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 1859775393(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 1859775393(%ecx,%ebx,1),%ecx + movl 24(%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl 24(%esp),%ebx - rorl $2,%edx xorl 32(%esp),%ebx xorl %edi,%ebp xorl 56(%esp),%ebx xorl %esi,%ebp xorl 12(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,24(%esp) - movl %ecx,%eax - roll $5,%eax - leal 1859775393(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 1859775393(%ebx,%eax,1),%ebx + movl 28(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 28(%esp),%eax - rorl $2,%ecx xorl 36(%esp),%eax xorl %edx,%ebp xorl 60(%esp),%eax xorl %edi,%ebp xorl 16(%esp),%eax roll $1,%eax - addl %esi,%ebp + addl %ebp,%esi + rorl $2,%ecx + movl %ebx,%ebp + roll $5,%ebp movl %eax,28(%esp) - movl %ebx,%esi - roll $5,%esi - leal 1859775393(%eax,%ebp,1),%eax - addl %esi,%eax + leal 1859775393(%eax,%esi,1),%eax + movl 32(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 32(%esp),%esi - rorl $2,%ebx xorl 40(%esp),%esi xorl %ecx,%ebp xorl (%esp),%esi xorl %edx,%ebp xorl 20(%esp),%esi roll $1,%esi - addl %edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,32(%esp) - movl %eax,%edi - roll $5,%edi - leal 1859775393(%esi,%ebp,1),%esi - addl %edi,%esi + leal 1859775393(%esi,%edi,1),%esi + movl 36(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 36(%esp),%edi - rorl $2,%eax xorl 44(%esp),%edi xorl %ebx,%ebp xorl 4(%esp),%edi xorl %ecx,%ebp xorl 24(%esp),%edi roll $1,%edi - addl %edx,%ebp + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp movl %edi,36(%esp) - movl %esi,%edx - roll $5,%edx - leal 1859775393(%edi,%ebp,1),%edi - addl %edx,%edi + leal 1859775393(%edi,%edx,1),%edi + movl 40(%esp),%edx + addl %ebp,%edi movl %esi,%ebp - movl 40(%esp),%edx - rorl $2,%esi xorl 48(%esp),%edx xorl %eax,%ebp xorl 8(%esp),%edx xorl %ebx,%ebp xorl 28(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,40(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 1859775393(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 1859775393(%edx,%ecx,1),%edx + movl 44(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 44(%esp),%ecx - rorl $2,%edi xorl 52(%esp),%ecx xorl 
%esi,%ebp xorl 12(%esp),%ecx xorl %eax,%ebp xorl 32(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,44(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 1859775393(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 1859775393(%ecx,%ebx,1),%ecx + movl 48(%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl 48(%esp),%ebx - rorl $2,%edx xorl 56(%esp),%ebx xorl %edi,%ebp xorl 16(%esp),%ebx xorl %esi,%ebp xorl 36(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,48(%esp) - movl %ecx,%eax - roll $5,%eax - leal 1859775393(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 1859775393(%ebx,%eax,1),%ebx + movl 52(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 52(%esp),%eax - rorl $2,%ecx xorl 60(%esp),%eax xorl %edx,%ebp xorl 20(%esp),%eax xorl %edi,%ebp xorl 40(%esp),%eax roll $1,%eax - addl %esi,%ebp + addl %ebp,%esi + rorl $2,%ecx + movl %ebx,%ebp + roll $5,%ebp movl %eax,52(%esp) - movl %ebx,%esi - roll $5,%esi - leal 1859775393(%eax,%ebp,1),%eax - addl %esi,%eax + leal 1859775393(%eax,%esi,1),%eax + movl 56(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 56(%esp),%esi - rorl $2,%ebx xorl (%esp),%esi xorl %ecx,%ebp xorl 24(%esp),%esi xorl %edx,%ebp xorl 44(%esp),%esi roll $1,%esi - addl %edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,56(%esp) - movl %eax,%edi - roll $5,%edi - leal 1859775393(%esi,%ebp,1),%esi - addl %edi,%esi + leal 1859775393(%esi,%edi,1),%esi + movl 60(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 60(%esp),%edi - rorl $2,%eax xorl 4(%esp),%edi xorl %ebx,%ebp xorl 28(%esp),%edi xorl %ecx,%ebp xorl 48(%esp),%edi roll $1,%edi - addl %edx,%ebp + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp movl %edi,60(%esp) - movl %esi,%edx - roll $5,%edx - leal 1859775393(%edi,%ebp,1),%edi - addl %edx,%edi + leal 1859775393(%edi,%edx,1),%edi + movl (%esp),%edx + addl %ebp,%edi movl %esi,%ebp - movl (%esp),%edx - rorl $2,%esi xorl 8(%esp),%edx xorl %eax,%ebp xorl 32(%esp),%edx xorl %ebx,%ebp xorl 52(%esp),%edx roll $1,%edx - addl %ecx,%ebp - movl %edx,(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 1859775393(%edx,%ebp,1),%edx - addl %ecx,%edx - + addl %ebp,%ecx + rorl $2,%esi movl %edi,%ebp + roll $5,%ebp + movl %edx,(%esp) + leal 1859775393(%edx,%ecx,1),%edx movl 4(%esp),%ecx - rorl $2,%edi + addl %ebp,%edx + + movl %edi,%ebp xorl 12(%esp),%ecx xorl %esi,%ebp xorl 36(%esp),%ecx xorl %eax,%ebp xorl 56(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,4(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 1859775393(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 1859775393(%ecx,%ebx,1),%ecx + movl 8(%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl 8(%esp),%ebx - rorl $2,%edx xorl 16(%esp),%ebx xorl %edi,%ebp xorl 40(%esp),%ebx xorl %esi,%ebp xorl 60(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,8(%esp) - movl %ecx,%eax - roll $5,%eax - leal 1859775393(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 1859775393(%ebx,%eax,1),%ebx + movl 12(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 12(%esp),%eax - rorl $2,%ecx xorl 20(%esp),%eax xorl %edx,%ebp xorl 44(%esp),%eax xorl %edi,%ebp xorl (%esp),%eax roll $1,%eax - addl %esi,%ebp + addl %ebp,%esi + rorl $2,%ecx + movl %ebx,%ebp + roll $5,%ebp movl %eax,12(%esp) - movl %ebx,%esi - roll $5,%esi - leal 1859775393(%eax,%ebp,1),%eax - addl %esi,%eax + leal 
1859775393(%eax,%esi,1),%eax + movl 16(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 16(%esp),%esi - rorl $2,%ebx xorl 24(%esp),%esi xorl %ecx,%ebp xorl 48(%esp),%esi xorl %edx,%ebp xorl 4(%esp),%esi roll $1,%esi - addl %edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,16(%esp) - movl %eax,%edi - roll $5,%edi - leal 1859775393(%esi,%ebp,1),%esi - addl %edi,%esi + leal 1859775393(%esi,%edi,1),%esi + movl 20(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 20(%esp),%edi - rorl $2,%eax xorl 28(%esp),%edi xorl %ebx,%ebp xorl 52(%esp),%edi xorl %ecx,%ebp xorl 8(%esp),%edi roll $1,%edi - addl %edx,%ebp + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp movl %edi,20(%esp) - movl %esi,%edx - roll $5,%edx - leal 1859775393(%edi,%ebp,1),%edi - addl %edx,%edi + leal 1859775393(%edi,%edx,1),%edi + movl 24(%esp),%edx + addl %ebp,%edi movl %esi,%ebp - movl 24(%esp),%edx - rorl $2,%esi xorl 32(%esp),%edx xorl %eax,%ebp xorl 56(%esp),%edx xorl %ebx,%ebp xorl 12(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,24(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 1859775393(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 1859775393(%edx,%ecx,1),%edx + movl 28(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 28(%esp),%ecx - rorl $2,%edi xorl 36(%esp),%ecx xorl %esi,%ebp xorl 60(%esp),%ecx xorl %eax,%ebp xorl 16(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,28(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 1859775393(%ecx,%ebp,1),%ecx - addl %ebx,%ecx - + leal 1859775393(%ecx,%ebx,1),%ecx movl 32(%esp),%ebx - movl 40(%esp),%ebp - xorl %ebp,%ebx - movl (%esp),%ebp - xorl %ebp,%ebx - movl 20(%esp),%ebp - xorl %ebp,%ebx - movl %edx,%ebp + addl %ebp,%ecx + + movl %edi,%ebp + xorl 40(%esp),%ebx + xorl %esi,%ebp + xorl (%esp),%ebx + andl %edx,%ebp + xorl 20(%esp),%ebx roll $1,%ebx - orl %edi,%ebp - movl %ebx,32(%esp) - andl %esi,%ebp - leal 2400959708(%ebx,%eax,1),%ebx - movl %edx,%eax + addl %eax,%ebp rorl $2,%edx - andl %edi,%eax - orl %eax,%ebp movl %ecx,%eax roll $5,%eax - addl %ebp,%ebx + movl %ebx,32(%esp) + leal 2400959708(%ebx,%ebp,1),%ebx + movl %edi,%ebp addl %eax,%ebx - + andl %esi,%ebp movl 36(%esp),%eax - movl 44(%esp),%ebp - xorl %ebp,%eax - movl 4(%esp),%ebp - xorl %ebp,%eax - movl 24(%esp),%ebp - xorl %ebp,%eax - movl %ecx,%ebp + addl %ebp,%ebx + + movl %edx,%ebp + xorl 44(%esp),%eax + xorl %edi,%ebp + xorl 4(%esp),%eax + andl %ecx,%ebp + xorl 24(%esp),%eax roll $1,%eax - orl %edx,%ebp - movl %eax,36(%esp) - andl %edi,%ebp - leal 2400959708(%eax,%esi,1),%eax - movl %ecx,%esi + addl %esi,%ebp rorl $2,%ecx - andl %edx,%esi - orl %esi,%ebp movl %ebx,%esi roll $5,%esi - addl %ebp,%eax + movl %eax,36(%esp) + leal 2400959708(%eax,%ebp,1),%eax + movl %edx,%ebp addl %esi,%eax - + andl %edi,%ebp movl 40(%esp),%esi - movl 48(%esp),%ebp - xorl %ebp,%esi - movl 8(%esp),%ebp - xorl %ebp,%esi - movl 28(%esp),%ebp - xorl %ebp,%esi - movl %ebx,%ebp + addl %ebp,%eax + + movl %ecx,%ebp + xorl 48(%esp),%esi + xorl %edx,%ebp + xorl 8(%esp),%esi + andl %ebx,%ebp + xorl 28(%esp),%esi roll $1,%esi - orl %ecx,%ebp - movl %esi,40(%esp) - andl %edx,%ebp - leal 2400959708(%esi,%edi,1),%esi - movl %ebx,%edi + addl %edi,%ebp rorl $2,%ebx - andl %ecx,%edi - orl %edi,%ebp movl %eax,%edi roll $5,%edi - addl %ebp,%esi + movl %esi,40(%esp) + leal 2400959708(%esi,%ebp,1),%esi + movl %ecx,%ebp addl %edi,%esi - + andl %edx,%ebp movl 44(%esp),%edi - movl 
52(%esp),%ebp - xorl %ebp,%edi - movl 12(%esp),%ebp - xorl %ebp,%edi - movl 32(%esp),%ebp - xorl %ebp,%edi - movl %eax,%ebp + addl %ebp,%esi + + movl %ebx,%ebp + xorl 52(%esp),%edi + xorl %ecx,%ebp + xorl 12(%esp),%edi + andl %eax,%ebp + xorl 32(%esp),%edi roll $1,%edi - orl %ebx,%ebp - movl %edi,44(%esp) - andl %ecx,%ebp - leal 2400959708(%edi,%edx,1),%edi - movl %eax,%edx + addl %edx,%ebp rorl $2,%eax - andl %ebx,%edx - orl %edx,%ebp movl %esi,%edx roll $5,%edx - addl %ebp,%edi + movl %edi,44(%esp) + leal 2400959708(%edi,%ebp,1),%edi + movl %ebx,%ebp addl %edx,%edi - + andl %ecx,%ebp movl 48(%esp),%edx - movl 56(%esp),%ebp - xorl %ebp,%edx - movl 16(%esp),%ebp - xorl %ebp,%edx - movl 36(%esp),%ebp - xorl %ebp,%edx - movl %esi,%ebp + addl %ebp,%edi + + movl %eax,%ebp + xorl 56(%esp),%edx + xorl %ebx,%ebp + xorl 16(%esp),%edx + andl %esi,%ebp + xorl 36(%esp),%edx roll $1,%edx - orl %eax,%ebp - movl %edx,48(%esp) - andl %ebx,%ebp - leal 2400959708(%edx,%ecx,1),%edx - movl %esi,%ecx + addl %ecx,%ebp rorl $2,%esi - andl %eax,%ecx - orl %ecx,%ebp movl %edi,%ecx roll $5,%ecx - addl %ebp,%edx + movl %edx,48(%esp) + leal 2400959708(%edx,%ebp,1),%edx + movl %eax,%ebp addl %ecx,%edx - + andl %ebx,%ebp movl 52(%esp),%ecx - movl 60(%esp),%ebp - xorl %ebp,%ecx - movl 20(%esp),%ebp - xorl %ebp,%ecx - movl 40(%esp),%ebp - xorl %ebp,%ecx - movl %edi,%ebp + addl %ebp,%edx + + movl %esi,%ebp + xorl 60(%esp),%ecx + xorl %eax,%ebp + xorl 20(%esp),%ecx + andl %edi,%ebp + xorl 40(%esp),%ecx roll $1,%ecx - orl %esi,%ebp - movl %ecx,52(%esp) - andl %eax,%ebp - leal 2400959708(%ecx,%ebx,1),%ecx - movl %edi,%ebx + addl %ebx,%ebp rorl $2,%edi - andl %esi,%ebx - orl %ebx,%ebp movl %edx,%ebx roll $5,%ebx - addl %ebp,%ecx + movl %ecx,52(%esp) + leal 2400959708(%ecx,%ebp,1),%ecx + movl %esi,%ebp addl %ebx,%ecx - + andl %eax,%ebp movl 56(%esp),%ebx - movl (%esp),%ebp - xorl %ebp,%ebx - movl 24(%esp),%ebp - xorl %ebp,%ebx - movl 44(%esp),%ebp - xorl %ebp,%ebx - movl %edx,%ebp + addl %ebp,%ecx + + movl %edi,%ebp + xorl (%esp),%ebx + xorl %esi,%ebp + xorl 24(%esp),%ebx + andl %edx,%ebp + xorl 44(%esp),%ebx roll $1,%ebx - orl %edi,%ebp - movl %ebx,56(%esp) - andl %esi,%ebp - leal 2400959708(%ebx,%eax,1),%ebx - movl %edx,%eax + addl %eax,%ebp rorl $2,%edx - andl %edi,%eax - orl %eax,%ebp movl %ecx,%eax roll $5,%eax - addl %ebp,%ebx + movl %ebx,56(%esp) + leal 2400959708(%ebx,%ebp,1),%ebx + movl %edi,%ebp addl %eax,%ebx - + andl %esi,%ebp movl 60(%esp),%eax - movl 4(%esp),%ebp - xorl %ebp,%eax - movl 28(%esp),%ebp - xorl %ebp,%eax - movl 48(%esp),%ebp - xorl %ebp,%eax - movl %ecx,%ebp + addl %ebp,%ebx + + movl %edx,%ebp + xorl 4(%esp),%eax + xorl %edi,%ebp + xorl 28(%esp),%eax + andl %ecx,%ebp + xorl 48(%esp),%eax roll $1,%eax - orl %edx,%ebp - movl %eax,60(%esp) - andl %edi,%ebp - leal 2400959708(%eax,%esi,1),%eax - movl %ecx,%esi + addl %esi,%ebp rorl $2,%ecx - andl %edx,%esi - orl %esi,%ebp movl %ebx,%esi roll $5,%esi - addl %ebp,%eax + movl %eax,60(%esp) + leal 2400959708(%eax,%ebp,1),%eax + movl %edx,%ebp addl %esi,%eax - + andl %edi,%ebp movl (%esp),%esi - movl 8(%esp),%ebp - xorl %ebp,%esi - movl 32(%esp),%ebp - xorl %ebp,%esi - movl 52(%esp),%ebp - xorl %ebp,%esi - movl %ebx,%ebp + addl %ebp,%eax + + movl %ecx,%ebp + xorl 8(%esp),%esi + xorl %edx,%ebp + xorl 32(%esp),%esi + andl %ebx,%ebp + xorl 52(%esp),%esi roll $1,%esi - orl %ecx,%ebp - movl %esi,(%esp) - andl %edx,%ebp - leal 2400959708(%esi,%edi,1),%esi - movl %ebx,%edi + addl %edi,%ebp rorl $2,%ebx - andl %ecx,%edi - orl %edi,%ebp movl %eax,%edi roll $5,%edi - 
addl %ebp,%esi + movl %esi,(%esp) + leal 2400959708(%esi,%ebp,1),%esi + movl %ecx,%ebp addl %edi,%esi - + andl %edx,%ebp movl 4(%esp),%edi - movl 12(%esp),%ebp - xorl %ebp,%edi - movl 36(%esp),%ebp - xorl %ebp,%edi - movl 56(%esp),%ebp - xorl %ebp,%edi - movl %eax,%ebp + addl %ebp,%esi + + movl %ebx,%ebp + xorl 12(%esp),%edi + xorl %ecx,%ebp + xorl 36(%esp),%edi + andl %eax,%ebp + xorl 56(%esp),%edi roll $1,%edi - orl %ebx,%ebp - movl %edi,4(%esp) - andl %ecx,%ebp - leal 2400959708(%edi,%edx,1),%edi - movl %eax,%edx + addl %edx,%ebp rorl $2,%eax - andl %ebx,%edx - orl %edx,%ebp movl %esi,%edx roll $5,%edx - addl %ebp,%edi + movl %edi,4(%esp) + leal 2400959708(%edi,%ebp,1),%edi + movl %ebx,%ebp addl %edx,%edi - + andl %ecx,%ebp movl 8(%esp),%edx - movl 16(%esp),%ebp - xorl %ebp,%edx - movl 40(%esp),%ebp - xorl %ebp,%edx - movl 60(%esp),%ebp - xorl %ebp,%edx - movl %esi,%ebp + addl %ebp,%edi + + movl %eax,%ebp + xorl 16(%esp),%edx + xorl %ebx,%ebp + xorl 40(%esp),%edx + andl %esi,%ebp + xorl 60(%esp),%edx roll $1,%edx - orl %eax,%ebp - movl %edx,8(%esp) - andl %ebx,%ebp - leal 2400959708(%edx,%ecx,1),%edx - movl %esi,%ecx + addl %ecx,%ebp rorl $2,%esi - andl %eax,%ecx - orl %ecx,%ebp movl %edi,%ecx roll $5,%ecx - addl %ebp,%edx + movl %edx,8(%esp) + leal 2400959708(%edx,%ebp,1),%edx + movl %eax,%ebp addl %ecx,%edx - + andl %ebx,%ebp movl 12(%esp),%ecx - movl 20(%esp),%ebp - xorl %ebp,%ecx - movl 44(%esp),%ebp - xorl %ebp,%ecx - movl (%esp),%ebp - xorl %ebp,%ecx - movl %edi,%ebp + addl %ebp,%edx + + movl %esi,%ebp + xorl 20(%esp),%ecx + xorl %eax,%ebp + xorl 44(%esp),%ecx + andl %edi,%ebp + xorl (%esp),%ecx roll $1,%ecx - orl %esi,%ebp - movl %ecx,12(%esp) - andl %eax,%ebp - leal 2400959708(%ecx,%ebx,1),%ecx - movl %edi,%ebx + addl %ebx,%ebp rorl $2,%edi - andl %esi,%ebx - orl %ebx,%ebp movl %edx,%ebx roll $5,%ebx - addl %ebp,%ecx + movl %ecx,12(%esp) + leal 2400959708(%ecx,%ebp,1),%ecx + movl %esi,%ebp addl %ebx,%ecx - + andl %eax,%ebp movl 16(%esp),%ebx - movl 24(%esp),%ebp - xorl %ebp,%ebx - movl 48(%esp),%ebp - xorl %ebp,%ebx - movl 4(%esp),%ebp - xorl %ebp,%ebx - movl %edx,%ebp + addl %ebp,%ecx + + movl %edi,%ebp + xorl 24(%esp),%ebx + xorl %esi,%ebp + xorl 48(%esp),%ebx + andl %edx,%ebp + xorl 4(%esp),%ebx roll $1,%ebx - orl %edi,%ebp - movl %ebx,16(%esp) - andl %esi,%ebp - leal 2400959708(%ebx,%eax,1),%ebx - movl %edx,%eax + addl %eax,%ebp rorl $2,%edx - andl %edi,%eax - orl %eax,%ebp movl %ecx,%eax roll $5,%eax - addl %ebp,%ebx + movl %ebx,16(%esp) + leal 2400959708(%ebx,%ebp,1),%ebx + movl %edi,%ebp addl %eax,%ebx - + andl %esi,%ebp movl 20(%esp),%eax - movl 28(%esp),%ebp - xorl %ebp,%eax - movl 52(%esp),%ebp - xorl %ebp,%eax - movl 8(%esp),%ebp - xorl %ebp,%eax - movl %ecx,%ebp + addl %ebp,%ebx + + movl %edx,%ebp + xorl 28(%esp),%eax + xorl %edi,%ebp + xorl 52(%esp),%eax + andl %ecx,%ebp + xorl 8(%esp),%eax roll $1,%eax - orl %edx,%ebp - movl %eax,20(%esp) - andl %edi,%ebp - leal 2400959708(%eax,%esi,1),%eax - movl %ecx,%esi + addl %esi,%ebp rorl $2,%ecx - andl %edx,%esi - orl %esi,%ebp movl %ebx,%esi roll $5,%esi - addl %ebp,%eax + movl %eax,20(%esp) + leal 2400959708(%eax,%ebp,1),%eax + movl %edx,%ebp addl %esi,%eax - + andl %edi,%ebp movl 24(%esp),%esi - movl 32(%esp),%ebp - xorl %ebp,%esi - movl 56(%esp),%ebp - xorl %ebp,%esi - movl 12(%esp),%ebp - xorl %ebp,%esi - movl %ebx,%ebp + addl %ebp,%eax + + movl %ecx,%ebp + xorl 32(%esp),%esi + xorl %edx,%ebp + xorl 56(%esp),%esi + andl %ebx,%ebp + xorl 12(%esp),%esi roll $1,%esi - orl %ecx,%ebp - movl %esi,24(%esp) - andl %edx,%ebp - 
leal 2400959708(%esi,%edi,1),%esi - movl %ebx,%edi + addl %edi,%ebp rorl $2,%ebx - andl %ecx,%edi - orl %edi,%ebp movl %eax,%edi roll $5,%edi - addl %ebp,%esi + movl %esi,24(%esp) + leal 2400959708(%esi,%ebp,1),%esi + movl %ecx,%ebp addl %edi,%esi - + andl %edx,%ebp movl 28(%esp),%edi - movl 36(%esp),%ebp - xorl %ebp,%edi - movl 60(%esp),%ebp - xorl %ebp,%edi - movl 16(%esp),%ebp - xorl %ebp,%edi - movl %eax,%ebp + addl %ebp,%esi + + movl %ebx,%ebp + xorl 36(%esp),%edi + xorl %ecx,%ebp + xorl 60(%esp),%edi + andl %eax,%ebp + xorl 16(%esp),%edi roll $1,%edi - orl %ebx,%ebp - movl %edi,28(%esp) - andl %ecx,%ebp - leal 2400959708(%edi,%edx,1),%edi - movl %eax,%edx + addl %edx,%ebp rorl $2,%eax - andl %ebx,%edx - orl %edx,%ebp movl %esi,%edx roll $5,%edx - addl %ebp,%edi + movl %edi,28(%esp) + leal 2400959708(%edi,%ebp,1),%edi + movl %ebx,%ebp addl %edx,%edi - + andl %ecx,%ebp movl 32(%esp),%edx - movl 40(%esp),%ebp - xorl %ebp,%edx - movl (%esp),%ebp - xorl %ebp,%edx - movl 20(%esp),%ebp - xorl %ebp,%edx - movl %esi,%ebp + addl %ebp,%edi + + movl %eax,%ebp + xorl 40(%esp),%edx + xorl %ebx,%ebp + xorl (%esp),%edx + andl %esi,%ebp + xorl 20(%esp),%edx roll $1,%edx - orl %eax,%ebp - movl %edx,32(%esp) - andl %ebx,%ebp - leal 2400959708(%edx,%ecx,1),%edx - movl %esi,%ecx + addl %ecx,%ebp rorl $2,%esi - andl %eax,%ecx - orl %ecx,%ebp movl %edi,%ecx roll $5,%ecx - addl %ebp,%edx + movl %edx,32(%esp) + leal 2400959708(%edx,%ebp,1),%edx + movl %eax,%ebp addl %ecx,%edx - + andl %ebx,%ebp movl 36(%esp),%ecx - movl 44(%esp),%ebp - xorl %ebp,%ecx - movl 4(%esp),%ebp - xorl %ebp,%ecx - movl 24(%esp),%ebp - xorl %ebp,%ecx - movl %edi,%ebp + addl %ebp,%edx + + movl %esi,%ebp + xorl 44(%esp),%ecx + xorl %eax,%ebp + xorl 4(%esp),%ecx + andl %edi,%ebp + xorl 24(%esp),%ecx roll $1,%ecx - orl %esi,%ebp - movl %ecx,36(%esp) - andl %eax,%ebp - leal 2400959708(%ecx,%ebx,1),%ecx - movl %edi,%ebx + addl %ebx,%ebp rorl $2,%edi - andl %esi,%ebx - orl %ebx,%ebp movl %edx,%ebx roll $5,%ebx - addl %ebp,%ecx + movl %ecx,36(%esp) + leal 2400959708(%ecx,%ebp,1),%ecx + movl %esi,%ebp addl %ebx,%ecx - + andl %eax,%ebp movl 40(%esp),%ebx - movl 48(%esp),%ebp - xorl %ebp,%ebx - movl 8(%esp),%ebp - xorl %ebp,%ebx - movl 28(%esp),%ebp - xorl %ebp,%ebx - movl %edx,%ebp + addl %ebp,%ecx + + movl %edi,%ebp + xorl 48(%esp),%ebx + xorl %esi,%ebp + xorl 8(%esp),%ebx + andl %edx,%ebp + xorl 28(%esp),%ebx roll $1,%ebx - orl %edi,%ebp - movl %ebx,40(%esp) - andl %esi,%ebp - leal 2400959708(%ebx,%eax,1),%ebx - movl %edx,%eax + addl %eax,%ebp rorl $2,%edx - andl %edi,%eax - orl %eax,%ebp movl %ecx,%eax roll $5,%eax - addl %ebp,%ebx + movl %ebx,40(%esp) + leal 2400959708(%ebx,%ebp,1),%ebx + movl %edi,%ebp addl %eax,%ebx - + andl %esi,%ebp movl 44(%esp),%eax - movl 52(%esp),%ebp - xorl %ebp,%eax - movl 12(%esp),%ebp - xorl %ebp,%eax - movl 32(%esp),%ebp - xorl %ebp,%eax - movl %ecx,%ebp + addl %ebp,%ebx + + movl %edx,%ebp + xorl 52(%esp),%eax + xorl %edi,%ebp + xorl 12(%esp),%eax + andl %ecx,%ebp + xorl 32(%esp),%eax roll $1,%eax - orl %edx,%ebp - movl %eax,44(%esp) - andl %edi,%ebp - leal 2400959708(%eax,%esi,1),%eax - movl %ecx,%esi + addl %esi,%ebp rorl $2,%ecx - andl %edx,%esi - orl %esi,%ebp movl %ebx,%esi roll $5,%esi - addl %ebp,%eax + movl %eax,44(%esp) + leal 2400959708(%eax,%ebp,1),%eax + movl %edx,%ebp addl %esi,%eax + andl %edi,%ebp + movl 48(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 48(%esp),%esi - rorl $2,%ebx xorl 56(%esp),%esi xorl %ecx,%ebp xorl 16(%esp),%esi xorl %edx,%ebp xorl 36(%esp),%esi roll $1,%esi - addl 
%edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,48(%esp) - movl %eax,%edi - roll $5,%edi - leal 3395469782(%esi,%ebp,1),%esi - addl %edi,%esi + leal 3395469782(%esi,%edi,1),%esi + movl 52(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 52(%esp),%edi - rorl $2,%eax xorl 60(%esp),%edi xorl %ebx,%ebp xorl 20(%esp),%edi xorl %ecx,%ebp xorl 40(%esp),%edi roll $1,%edi - addl %edx,%ebp + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp movl %edi,52(%esp) - movl %esi,%edx - roll $5,%edx - leal 3395469782(%edi,%ebp,1),%edi - addl %edx,%edi + leal 3395469782(%edi,%edx,1),%edi + movl 56(%esp),%edx + addl %ebp,%edi movl %esi,%ebp - movl 56(%esp),%edx - rorl $2,%esi xorl (%esp),%edx xorl %eax,%ebp xorl 24(%esp),%edx xorl %ebx,%ebp xorl 44(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,56(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 3395469782(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 3395469782(%edx,%ecx,1),%edx + movl 60(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 60(%esp),%ecx - rorl $2,%edi xorl 4(%esp),%ecx xorl %esi,%ebp xorl 28(%esp),%ecx xorl %eax,%ebp xorl 48(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,60(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 3395469782(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 3395469782(%ecx,%ebx,1),%ecx + movl (%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl (%esp),%ebx - rorl $2,%edx xorl 8(%esp),%ebx xorl %edi,%ebp xorl 32(%esp),%ebx xorl %esi,%ebp xorl 52(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,(%esp) - movl %ecx,%eax - roll $5,%eax - leal 3395469782(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 3395469782(%ebx,%eax,1),%ebx + movl 4(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 4(%esp),%eax - rorl $2,%ecx xorl 12(%esp),%eax xorl %edx,%ebp xorl 36(%esp),%eax xorl %edi,%ebp xorl 56(%esp),%eax roll $1,%eax - addl %esi,%ebp + addl %ebp,%esi + rorl $2,%ecx + movl %ebx,%ebp + roll $5,%ebp movl %eax,4(%esp) - movl %ebx,%esi - roll $5,%esi - leal 3395469782(%eax,%ebp,1),%eax - addl %esi,%eax + leal 3395469782(%eax,%esi,1),%eax + movl 8(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 8(%esp),%esi - rorl $2,%ebx xorl 16(%esp),%esi xorl %ecx,%ebp xorl 40(%esp),%esi xorl %edx,%ebp xorl 60(%esp),%esi roll $1,%esi - addl %edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,8(%esp) - movl %eax,%edi - roll $5,%edi - leal 3395469782(%esi,%ebp,1),%esi - addl %edi,%esi + leal 3395469782(%esi,%edi,1),%esi + movl 12(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 12(%esp),%edi - rorl $2,%eax xorl 20(%esp),%edi xorl %ebx,%ebp xorl 44(%esp),%edi xorl %ecx,%ebp xorl (%esp),%edi roll $1,%edi - addl %edx,%ebp - movl %edi,12(%esp) - movl %esi,%edx - roll $5,%edx - leal 3395469782(%edi,%ebp,1),%edi - addl %edx,%edi - + addl %ebp,%edx + rorl $2,%eax movl %esi,%ebp + roll $5,%ebp + movl %edi,12(%esp) + leal 3395469782(%edi,%edx,1),%edi movl 16(%esp),%edx - rorl $2,%esi + addl %ebp,%edi + + movl %esi,%ebp xorl 24(%esp),%edx xorl %eax,%ebp xorl 48(%esp),%edx xorl %ebx,%ebp xorl 4(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,16(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 3395469782(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 3395469782(%edx,%ecx,1),%edx + movl 20(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 20(%esp),%ecx - 
rorl $2,%edi xorl 28(%esp),%ecx xorl %esi,%ebp xorl 52(%esp),%ecx xorl %eax,%ebp xorl 8(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,20(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 3395469782(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 3395469782(%ecx,%ebx,1),%ecx + movl 24(%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl 24(%esp),%ebx - rorl $2,%edx xorl 32(%esp),%ebx xorl %edi,%ebp xorl 56(%esp),%ebx xorl %esi,%ebp xorl 12(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,24(%esp) - movl %ecx,%eax - roll $5,%eax - leal 3395469782(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 3395469782(%ebx,%eax,1),%ebx + movl 28(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 28(%esp),%eax - rorl $2,%ecx xorl 36(%esp),%eax xorl %edx,%ebp xorl 60(%esp),%eax xorl %edi,%ebp xorl 16(%esp),%eax roll $1,%eax - addl %esi,%ebp + addl %ebp,%esi + rorl $2,%ecx + movl %ebx,%ebp + roll $5,%ebp movl %eax,28(%esp) - movl %ebx,%esi - roll $5,%esi - leal 3395469782(%eax,%ebp,1),%eax - addl %esi,%eax + leal 3395469782(%eax,%esi,1),%eax + movl 32(%esp),%esi + addl %ebp,%eax movl %ebx,%ebp - movl 32(%esp),%esi - rorl $2,%ebx xorl 40(%esp),%esi xorl %ecx,%ebp xorl (%esp),%esi xorl %edx,%ebp xorl 20(%esp),%esi roll $1,%esi - addl %edi,%ebp + addl %ebp,%edi + rorl $2,%ebx + movl %eax,%ebp + roll $5,%ebp movl %esi,32(%esp) - movl %eax,%edi - roll $5,%edi - leal 3395469782(%esi,%ebp,1),%esi - addl %edi,%esi + leal 3395469782(%esi,%edi,1),%esi + movl 36(%esp),%edi + addl %ebp,%esi movl %eax,%ebp - movl 36(%esp),%edi - rorl $2,%eax xorl 44(%esp),%edi xorl %ebx,%ebp xorl 4(%esp),%edi xorl %ecx,%ebp xorl 24(%esp),%edi roll $1,%edi - addl %edx,%ebp + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp movl %edi,36(%esp) - movl %esi,%edx - roll $5,%edx - leal 3395469782(%edi,%ebp,1),%edi - addl %edx,%edi + leal 3395469782(%edi,%edx,1),%edi + movl 40(%esp),%edx + addl %ebp,%edi movl %esi,%ebp - movl 40(%esp),%edx - rorl $2,%esi xorl 48(%esp),%edx xorl %eax,%ebp xorl 8(%esp),%edx xorl %ebx,%ebp xorl 28(%esp),%edx roll $1,%edx - addl %ecx,%ebp + addl %ebp,%ecx + rorl $2,%esi + movl %edi,%ebp + roll $5,%ebp movl %edx,40(%esp) - movl %edi,%ecx - roll $5,%ecx - leal 3395469782(%edx,%ebp,1),%edx - addl %ecx,%edx + leal 3395469782(%edx,%ecx,1),%edx + movl 44(%esp),%ecx + addl %ebp,%edx movl %edi,%ebp - movl 44(%esp),%ecx - rorl $2,%edi xorl 52(%esp),%ecx xorl %esi,%ebp xorl 12(%esp),%ecx xorl %eax,%ebp xorl 32(%esp),%ecx roll $1,%ecx - addl %ebx,%ebp + addl %ebp,%ebx + rorl $2,%edi + movl %edx,%ebp + roll $5,%ebp movl %ecx,44(%esp) - movl %edx,%ebx - roll $5,%ebx - leal 3395469782(%ecx,%ebp,1),%ecx - addl %ebx,%ecx + leal 3395469782(%ecx,%ebx,1),%ecx + movl 48(%esp),%ebx + addl %ebp,%ecx movl %edx,%ebp - movl 48(%esp),%ebx - rorl $2,%edx xorl 56(%esp),%ebx xorl %edi,%ebp xorl 16(%esp),%ebx xorl %esi,%ebp xorl 36(%esp),%ebx roll $1,%ebx - addl %eax,%ebp + addl %ebp,%eax + rorl $2,%edx + movl %ecx,%ebp + roll $5,%ebp movl %ebx,48(%esp) - movl %ecx,%eax - roll $5,%eax - leal 3395469782(%ebx,%ebp,1),%ebx - addl %eax,%ebx + leal 3395469782(%ebx,%eax,1),%ebx + movl 52(%esp),%eax + addl %ebp,%ebx movl %ecx,%ebp - movl 52(%esp),%eax - rorl $2,%ecx xorl 60(%esp),%eax xorl %edx,%ebp xorl 20(%esp),%eax xorl %edi,%ebp xorl 40(%esp),%eax roll $1,%eax - addl %esi,%ebp - movl %eax,52(%esp) - movl %ebx,%esi - roll $5,%esi - leal 3395469782(%eax,%ebp,1),%eax - addl %esi,%eax - + addl %ebp,%esi + rorl $2,%ecx movl 
%ebx,%ebp + roll $5,%ebp + leal 3395469782(%eax,%esi,1),%eax movl 56(%esp),%esi - rorl $2,%ebx + addl %ebp,%eax + + movl %ebx,%ebp xorl (%esp),%esi xorl %ecx,%ebp xorl 24(%esp),%esi xorl %edx,%ebp xorl 44(%esp),%esi roll $1,%esi - addl %edi,%ebp - movl %esi,56(%esp) - movl %eax,%edi - roll $5,%edi - leal 3395469782(%esi,%ebp,1),%esi - addl %edi,%esi - + addl %ebp,%edi + rorl $2,%ebx movl %eax,%ebp + roll $5,%ebp + leal 3395469782(%esi,%edi,1),%esi movl 60(%esp),%edi - rorl $2,%eax + addl %ebp,%esi + + movl %eax,%ebp xorl 4(%esp),%edi xorl %ebx,%ebp xorl 28(%esp),%edi xorl %ecx,%ebp xorl 48(%esp),%edi roll $1,%edi - addl %edx,%ebp - movl %edi,60(%esp) - movl %esi,%edx - roll $5,%edx - leal 3395469782(%edi,%ebp,1),%edi - addl %edx,%edi - movl 84(%esp),%ebp - movl 88(%esp),%edx + addl %ebp,%edx + rorl $2,%eax + movl %esi,%ebp + roll $5,%ebp + leal 3395469782(%edi,%edx,1),%edi + addl %ebp,%edi + movl 96(%esp),%ebp + movl 100(%esp),%edx addl (%ebp),%edi addl 4(%ebp),%esi addl 8(%ebp),%eax @@ -1422,21 +1374,1265 @@ sha1_block_data_order: movl %edi,(%ebp) addl $64,%edx movl %esi,4(%ebp) - cmpl 92(%esp),%edx + cmpl 104(%esp),%edx movl %eax,8(%ebp) movl %ecx,%edi movl %ebx,12(%ebp) movl %edx,%esi movl %ecx,16(%ebp) - jb .L000loop - addl $64,%esp + jb .L002loop + addl $76,%esp popl %edi popl %esi popl %ebx popl %ebp ret .size sha1_block_data_order,.-.L_sha1_block_data_order_begin +.type _sha1_block_data_order_ssse3,@function +.align 16 +_sha1_block_data_order_ssse3: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + call .L003pic_point +.L003pic_point: + popl %ebp + leal .LK_XX_XX-.L003pic_point(%ebp),%ebp +.Lssse3_shortcut: + movdqa (%ebp),%xmm7 + movdqa 16(%ebp),%xmm0 + movdqa 32(%ebp),%xmm1 + movdqa 48(%ebp),%xmm2 + movdqa 64(%ebp),%xmm6 + movl 20(%esp),%edi + movl 24(%esp),%ebp + movl 28(%esp),%edx + movl %esp,%esi + subl $208,%esp + andl $-64,%esp + movdqa %xmm0,112(%esp) + movdqa %xmm1,128(%esp) + movdqa %xmm2,144(%esp) + shll $6,%edx + movdqa %xmm7,160(%esp) + addl %ebp,%edx + movdqa %xmm6,176(%esp) + addl $64,%ebp + movl %edi,192(%esp) + movl %ebp,196(%esp) + movl %edx,200(%esp) + movl %esi,204(%esp) + movl (%edi),%eax + movl 4(%edi),%ebx + movl 8(%edi),%ecx + movl 12(%edi),%edx + movl 16(%edi),%edi + movl %ebx,%esi + movdqu -64(%ebp),%xmm0 + movdqu -48(%ebp),%xmm1 + movdqu -32(%ebp),%xmm2 + movdqu -16(%ebp),%xmm3 +.byte 102,15,56,0,198 +.byte 102,15,56,0,206 +.byte 102,15,56,0,214 + movdqa %xmm7,96(%esp) +.byte 102,15,56,0,222 + paddd %xmm7,%xmm0 + paddd %xmm7,%xmm1 + paddd %xmm7,%xmm2 + movdqa %xmm0,(%esp) + psubd %xmm7,%xmm0 + movdqa %xmm1,16(%esp) + psubd %xmm7,%xmm1 + movdqa %xmm2,32(%esp) + psubd %xmm7,%xmm2 + movdqa %xmm1,%xmm4 + jmp .L004loop +.align 16 +.L004loop: + addl (%esp),%edi + xorl %edx,%ecx +.byte 102,15,58,15,224,8 + movdqa %xmm3,%xmm6 + movl %eax,%ebp + roll $5,%eax + paddd %xmm3,%xmm7 + movdqa %xmm0,64(%esp) + andl %ecx,%esi + xorl %edx,%ecx + psrldq $4,%xmm6 + xorl %edx,%esi + addl %eax,%edi + pxor %xmm0,%xmm4 + rorl $2,%ebx + addl %esi,%edi + pxor %xmm2,%xmm6 + addl 4(%esp),%edx + xorl %ecx,%ebx + movl %edi,%esi + roll $5,%edi + pxor %xmm6,%xmm4 + andl %ebx,%ebp + xorl %ecx,%ebx + movdqa %xmm7,48(%esp) + xorl %ecx,%ebp + addl %edi,%edx + movdqa %xmm4,%xmm0 + movdqa %xmm4,%xmm6 + rorl $7,%eax + addl %ebp,%edx + addl 8(%esp),%ecx + xorl %ebx,%eax + pslldq $12,%xmm0 + paddd %xmm4,%xmm4 + movl %edx,%ebp + roll $5,%edx + andl %eax,%esi + xorl %ebx,%eax + psrld $31,%xmm6 + xorl %ebx,%esi + addl %edx,%ecx + movdqa %xmm0,%xmm7 + rorl $7,%edi + addl %esi,%ecx + psrld 
$30,%xmm0 + por %xmm6,%xmm4 + addl 12(%esp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + pslld $2,%xmm7 + pxor %xmm0,%xmm4 + andl %edi,%ebp + xorl %eax,%edi + movdqa 96(%esp),%xmm0 + xorl %eax,%ebp + addl %ecx,%ebx + pxor %xmm7,%xmm4 + movdqa %xmm2,%xmm5 + rorl $7,%edx + addl %ebp,%ebx + addl 16(%esp),%eax + xorl %edi,%edx +.byte 102,15,58,15,233,8 + movdqa %xmm4,%xmm7 + movl %ebx,%ebp + roll $5,%ebx + paddd %xmm4,%xmm0 + movdqa %xmm1,80(%esp) + andl %edx,%esi + xorl %edi,%edx + psrldq $4,%xmm7 + xorl %edi,%esi + addl %ebx,%eax + pxor %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm3,%xmm7 + addl 20(%esp),%edi + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pxor %xmm7,%xmm5 + andl %ecx,%ebp + xorl %edx,%ecx + movdqa %xmm0,(%esp) + xorl %edx,%ebp + addl %eax,%edi + movdqa %xmm5,%xmm1 + movdqa %xmm5,%xmm7 + rorl $7,%ebx + addl %ebp,%edi + addl 24(%esp),%edx + xorl %ecx,%ebx + pslldq $12,%xmm1 + paddd %xmm5,%xmm5 + movl %edi,%ebp + roll $5,%edi + andl %ebx,%esi + xorl %ecx,%ebx + psrld $31,%xmm7 + xorl %ecx,%esi + addl %edi,%edx + movdqa %xmm1,%xmm0 + rorl $7,%eax + addl %esi,%edx + psrld $30,%xmm1 + por %xmm7,%xmm5 + addl 28(%esp),%ecx + xorl %ebx,%eax + movl %edx,%esi + roll $5,%edx + pslld $2,%xmm0 + pxor %xmm1,%xmm5 + andl %eax,%ebp + xorl %ebx,%eax + movdqa 112(%esp),%xmm1 + xorl %ebx,%ebp + addl %edx,%ecx + pxor %xmm0,%xmm5 + movdqa %xmm3,%xmm6 + rorl $7,%edi + addl %ebp,%ecx + addl 32(%esp),%ebx + xorl %eax,%edi +.byte 102,15,58,15,242,8 + movdqa %xmm5,%xmm0 + movl %ecx,%ebp + roll $5,%ecx + paddd %xmm5,%xmm1 + movdqa %xmm2,96(%esp) + andl %edi,%esi + xorl %eax,%edi + psrldq $4,%xmm0 + xorl %eax,%esi + addl %ecx,%ebx + pxor %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm4,%xmm0 + addl 36(%esp),%eax + xorl %edi,%edx + movl %ebx,%esi + roll $5,%ebx + pxor %xmm0,%xmm6 + andl %edx,%ebp + xorl %edi,%edx + movdqa %xmm1,16(%esp) + xorl %edi,%ebp + addl %ebx,%eax + movdqa %xmm6,%xmm2 + movdqa %xmm6,%xmm0 + rorl $7,%ecx + addl %ebp,%eax + addl 40(%esp),%edi + xorl %edx,%ecx + pslldq $12,%xmm2 + paddd %xmm6,%xmm6 + movl %eax,%ebp + roll $5,%eax + andl %ecx,%esi + xorl %edx,%ecx + psrld $31,%xmm0 + xorl %edx,%esi + addl %eax,%edi + movdqa %xmm2,%xmm1 + rorl $7,%ebx + addl %esi,%edi + psrld $30,%xmm2 + por %xmm0,%xmm6 + addl 44(%esp),%edx + xorl %ecx,%ebx + movdqa 64(%esp),%xmm0 + movl %edi,%esi + roll $5,%edi + pslld $2,%xmm1 + pxor %xmm2,%xmm6 + andl %ebx,%ebp + xorl %ecx,%ebx + movdqa 112(%esp),%xmm2 + xorl %ecx,%ebp + addl %edi,%edx + pxor %xmm1,%xmm6 + movdqa %xmm4,%xmm7 + rorl $7,%eax + addl %ebp,%edx + addl 48(%esp),%ecx + xorl %ebx,%eax +.byte 102,15,58,15,251,8 + movdqa %xmm6,%xmm1 + movl %edx,%ebp + roll $5,%edx + paddd %xmm6,%xmm2 + movdqa %xmm3,64(%esp) + andl %eax,%esi + xorl %ebx,%eax + psrldq $4,%xmm1 + xorl %ebx,%esi + addl %edx,%ecx + pxor %xmm3,%xmm7 + rorl $7,%edi + addl %esi,%ecx + pxor %xmm5,%xmm1 + addl 52(%esp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + pxor %xmm1,%xmm7 + andl %edi,%ebp + xorl %eax,%edi + movdqa %xmm2,32(%esp) + xorl %eax,%ebp + addl %ecx,%ebx + movdqa %xmm7,%xmm3 + movdqa %xmm7,%xmm1 + rorl $7,%edx + addl %ebp,%ebx + addl 56(%esp),%eax + xorl %edi,%edx + pslldq $12,%xmm3 + paddd %xmm7,%xmm7 + movl %ebx,%ebp + roll $5,%ebx + andl %edx,%esi + xorl %edi,%edx + psrld $31,%xmm1 + xorl %edi,%esi + addl %ebx,%eax + movdqa %xmm3,%xmm2 + rorl $7,%ecx + addl %esi,%eax + psrld $30,%xmm3 + por %xmm1,%xmm7 + addl 60(%esp),%edi + xorl %edx,%ecx + movdqa 80(%esp),%xmm1 + movl %eax,%esi + roll $5,%eax + pslld $2,%xmm2 + pxor 
%xmm3,%xmm7 + andl %ecx,%ebp + xorl %edx,%ecx + movdqa 112(%esp),%xmm3 + xorl %edx,%ebp + addl %eax,%edi + pxor %xmm2,%xmm7 + rorl $7,%ebx + addl %ebp,%edi + movdqa %xmm7,%xmm2 + addl (%esp),%edx + pxor %xmm4,%xmm0 +.byte 102,15,58,15,214,8 + xorl %ecx,%ebx + movl %edi,%ebp + roll $5,%edi + pxor %xmm1,%xmm0 + movdqa %xmm4,80(%esp) + andl %ebx,%esi + xorl %ecx,%ebx + movdqa %xmm3,%xmm4 + paddd %xmm7,%xmm3 + xorl %ecx,%esi + addl %edi,%edx + pxor %xmm2,%xmm0 + rorl $7,%eax + addl %esi,%edx + addl 4(%esp),%ecx + xorl %ebx,%eax + movdqa %xmm0,%xmm2 + movdqa %xmm3,48(%esp) + movl %edx,%esi + roll $5,%edx + andl %eax,%ebp + xorl %ebx,%eax + pslld $2,%xmm0 + xorl %ebx,%ebp + addl %edx,%ecx + psrld $30,%xmm2 + rorl $7,%edi + addl %ebp,%ecx + addl 8(%esp),%ebx + xorl %eax,%edi + movl %ecx,%ebp + roll $5,%ecx + por %xmm2,%xmm0 + andl %edi,%esi + xorl %eax,%edi + movdqa 96(%esp),%xmm2 + xorl %eax,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 12(%esp),%eax + movdqa %xmm0,%xmm3 + xorl %edi,%edx + movl %ebx,%esi + roll $5,%ebx + andl %edx,%ebp + xorl %edi,%edx + xorl %edi,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + addl 16(%esp),%edi + pxor %xmm5,%xmm1 +.byte 102,15,58,15,223,8 + xorl %edx,%esi + movl %eax,%ebp + roll $5,%eax + pxor %xmm2,%xmm1 + movdqa %xmm5,96(%esp) + xorl %ecx,%esi + addl %eax,%edi + movdqa %xmm4,%xmm5 + paddd %xmm0,%xmm4 + rorl $7,%ebx + addl %esi,%edi + pxor %xmm3,%xmm1 + addl 20(%esp),%edx + xorl %ecx,%ebp + movl %edi,%esi + roll $5,%edi + movdqa %xmm1,%xmm3 + movdqa %xmm4,(%esp) + xorl %ebx,%ebp + addl %edi,%edx + rorl $7,%eax + addl %ebp,%edx + pslld $2,%xmm1 + addl 24(%esp),%ecx + xorl %ebx,%esi + psrld $30,%xmm3 + movl %edx,%ebp + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%edi + addl %esi,%ecx + por %xmm3,%xmm1 + addl 28(%esp),%ebx + xorl %eax,%ebp + movdqa 64(%esp),%xmm3 + movl %ecx,%esi + roll $5,%ecx + xorl %edi,%ebp + addl %ecx,%ebx + rorl $7,%edx + movdqa %xmm1,%xmm4 + addl %ebp,%ebx + addl 32(%esp),%eax + pxor %xmm6,%xmm2 +.byte 102,15,58,15,224,8 + xorl %edi,%esi + movl %ebx,%ebp + roll $5,%ebx + pxor %xmm3,%xmm2 + movdqa %xmm6,64(%esp) + xorl %edx,%esi + addl %ebx,%eax + movdqa 128(%esp),%xmm6 + paddd %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm4,%xmm2 + addl 36(%esp),%edi + xorl %edx,%ebp + movl %eax,%esi + roll $5,%eax + movdqa %xmm2,%xmm4 + movdqa %xmm5,16(%esp) + xorl %ecx,%ebp + addl %eax,%edi + rorl $7,%ebx + addl %ebp,%edi + pslld $2,%xmm2 + addl 40(%esp),%edx + xorl %ecx,%esi + psrld $30,%xmm4 + movl %edi,%ebp + roll $5,%edi + xorl %ebx,%esi + addl %edi,%edx + rorl $7,%eax + addl %esi,%edx + por %xmm4,%xmm2 + addl 44(%esp),%ecx + xorl %ebx,%ebp + movdqa 80(%esp),%xmm4 + movl %edx,%esi + roll $5,%edx + xorl %eax,%ebp + addl %edx,%ecx + rorl $7,%edi + movdqa %xmm2,%xmm5 + addl %ebp,%ecx + addl 48(%esp),%ebx + pxor %xmm7,%xmm3 +.byte 102,15,58,15,233,8 + xorl %eax,%esi + movl %ecx,%ebp + roll $5,%ecx + pxor %xmm4,%xmm3 + movdqa %xmm7,80(%esp) + xorl %edi,%esi + addl %ecx,%ebx + movdqa %xmm6,%xmm7 + paddd %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm5,%xmm3 + addl 52(%esp),%eax + xorl %edi,%ebp + movl %ebx,%esi + roll $5,%ebx + movdqa %xmm3,%xmm5 + movdqa %xmm6,32(%esp) + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + pslld $2,%xmm3 + addl 56(%esp),%edi + xorl %edx,%esi + psrld $30,%xmm5 + movl %eax,%ebp + roll $5,%eax + xorl %ecx,%esi + addl %eax,%edi + rorl $7,%ebx + addl %esi,%edi + por %xmm5,%xmm3 + addl 60(%esp),%edx + xorl %ecx,%ebp + movdqa 96(%esp),%xmm5 + movl %edi,%esi + 
roll $5,%edi + xorl %ebx,%ebp + addl %edi,%edx + rorl $7,%eax + movdqa %xmm3,%xmm6 + addl %ebp,%edx + addl (%esp),%ecx + pxor %xmm0,%xmm4 +.byte 102,15,58,15,242,8 + xorl %ebx,%esi + movl %edx,%ebp + roll $5,%edx + pxor %xmm5,%xmm4 + movdqa %xmm0,96(%esp) + xorl %eax,%esi + addl %edx,%ecx + movdqa %xmm7,%xmm0 + paddd %xmm3,%xmm7 + rorl $7,%edi + addl %esi,%ecx + pxor %xmm6,%xmm4 + addl 4(%esp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + movdqa %xmm4,%xmm6 + movdqa %xmm7,48(%esp) + xorl %edi,%ebp + addl %ecx,%ebx + rorl $7,%edx + addl %ebp,%ebx + pslld $2,%xmm4 + addl 8(%esp),%eax + xorl %edi,%esi + psrld $30,%xmm6 + movl %ebx,%ebp + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + por %xmm6,%xmm4 + addl 12(%esp),%edi + xorl %edx,%ebp + movdqa 64(%esp),%xmm6 + movl %eax,%esi + roll $5,%eax + xorl %ecx,%ebp + addl %eax,%edi + rorl $7,%ebx + movdqa %xmm4,%xmm7 + addl %ebp,%edi + addl 16(%esp),%edx + pxor %xmm1,%xmm5 +.byte 102,15,58,15,251,8 + xorl %ecx,%esi + movl %edi,%ebp + roll $5,%edi + pxor %xmm6,%xmm5 + movdqa %xmm1,64(%esp) + xorl %ebx,%esi + addl %edi,%edx + movdqa %xmm0,%xmm1 + paddd %xmm4,%xmm0 + rorl $7,%eax + addl %esi,%edx + pxor %xmm7,%xmm5 + addl 20(%esp),%ecx + xorl %ebx,%ebp + movl %edx,%esi + roll $5,%edx + movdqa %xmm5,%xmm7 + movdqa %xmm0,(%esp) + xorl %eax,%ebp + addl %edx,%ecx + rorl $7,%edi + addl %ebp,%ecx + pslld $2,%xmm5 + addl 24(%esp),%ebx + xorl %eax,%esi + psrld $30,%xmm7 + movl %ecx,%ebp + roll $5,%ecx + xorl %edi,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + por %xmm7,%xmm5 + addl 28(%esp),%eax + xorl %edi,%ebp + movdqa 80(%esp),%xmm7 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + movdqa %xmm5,%xmm0 + addl %ebp,%eax + movl %ecx,%ebp + pxor %xmm2,%xmm6 +.byte 102,15,58,15,196,8 + xorl %edx,%ecx + addl 32(%esp),%edi + andl %edx,%ebp + pxor %xmm7,%xmm6 + movdqa %xmm2,80(%esp) + andl %ecx,%esi + rorl $7,%ebx + movdqa %xmm1,%xmm2 + paddd %xmm5,%xmm1 + addl %ebp,%edi + movl %eax,%ebp + pxor %xmm0,%xmm6 + roll $5,%eax + addl %esi,%edi + xorl %edx,%ecx + addl %eax,%edi + movdqa %xmm6,%xmm0 + movdqa %xmm1,16(%esp) + movl %ebx,%esi + xorl %ecx,%ebx + addl 36(%esp),%edx + andl %ecx,%esi + pslld $2,%xmm6 + andl %ebx,%ebp + rorl $7,%eax + psrld $30,%xmm0 + addl %esi,%edx + movl %edi,%esi + roll $5,%edi + addl %ebp,%edx + xorl %ecx,%ebx + addl %edi,%edx + por %xmm0,%xmm6 + movl %eax,%ebp + xorl %ebx,%eax + movdqa 96(%esp),%xmm0 + addl 40(%esp),%ecx + andl %ebx,%ebp + andl %eax,%esi + rorl $7,%edi + addl %ebp,%ecx + movdqa %xmm6,%xmm1 + movl %edx,%ebp + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %edi,%esi + xorl %eax,%edi + addl 44(%esp),%ebx + andl %eax,%esi + andl %edi,%ebp + rorl $7,%edx + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %ebp,%ebx + xorl %eax,%edi + addl %ecx,%ebx + movl %edx,%ebp + pxor %xmm3,%xmm7 +.byte 102,15,58,15,205,8 + xorl %edi,%edx + addl 48(%esp),%eax + andl %edi,%ebp + pxor %xmm0,%xmm7 + movdqa %xmm3,96(%esp) + andl %edx,%esi + rorl $7,%ecx + movdqa 144(%esp),%xmm3 + paddd %xmm6,%xmm2 + addl %ebp,%eax + movl %ebx,%ebp + pxor %xmm1,%xmm7 + roll $5,%ebx + addl %esi,%eax + xorl %edi,%edx + addl %ebx,%eax + movdqa %xmm7,%xmm1 + movdqa %xmm2,32(%esp) + movl %ecx,%esi + xorl %edx,%ecx + addl 52(%esp),%edi + andl %edx,%esi + pslld $2,%xmm7 + andl %ecx,%ebp + rorl $7,%ebx + psrld $30,%xmm1 + addl %esi,%edi + movl %eax,%esi + roll $5,%eax + addl %ebp,%edi + xorl %edx,%ecx + addl %eax,%edi + por %xmm1,%xmm7 + movl %ebx,%ebp + xorl 
%ecx,%ebx + movdqa 64(%esp),%xmm1 + addl 56(%esp),%edx + andl %ecx,%ebp + andl %ebx,%esi + rorl $7,%eax + addl %ebp,%edx + movdqa %xmm7,%xmm2 + movl %edi,%ebp + roll $5,%edi + addl %esi,%edx + xorl %ecx,%ebx + addl %edi,%edx + movl %eax,%esi + xorl %ebx,%eax + addl 60(%esp),%ecx + andl %ebx,%esi + andl %eax,%ebp + rorl $7,%edi + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %ebp,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %edi,%ebp + pxor %xmm4,%xmm0 +.byte 102,15,58,15,214,8 + xorl %eax,%edi + addl (%esp),%ebx + andl %eax,%ebp + pxor %xmm1,%xmm0 + movdqa %xmm4,64(%esp) + andl %edi,%esi + rorl $7,%edx + movdqa %xmm3,%xmm4 + paddd %xmm7,%xmm3 + addl %ebp,%ebx + movl %ecx,%ebp + pxor %xmm2,%xmm0 + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%edi + addl %ecx,%ebx + movdqa %xmm0,%xmm2 + movdqa %xmm3,48(%esp) + movl %edx,%esi + xorl %edi,%edx + addl 4(%esp),%eax + andl %edi,%esi + pslld $2,%xmm0 + andl %edx,%ebp + rorl $7,%ecx + psrld $30,%xmm2 + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %ebp,%eax + xorl %edi,%edx + addl %ebx,%eax + por %xmm2,%xmm0 + movl %ecx,%ebp + xorl %edx,%ecx + movdqa 80(%esp),%xmm2 + addl 8(%esp),%edi + andl %edx,%ebp + andl %ecx,%esi + rorl $7,%ebx + addl %ebp,%edi + movdqa %xmm0,%xmm3 + movl %eax,%ebp + roll $5,%eax + addl %esi,%edi + xorl %edx,%ecx + addl %eax,%edi + movl %ebx,%esi + xorl %ecx,%ebx + addl 12(%esp),%edx + andl %ecx,%esi + andl %ebx,%ebp + rorl $7,%eax + addl %esi,%edx + movl %edi,%esi + roll $5,%edi + addl %ebp,%edx + xorl %ecx,%ebx + addl %edi,%edx + movl %eax,%ebp + pxor %xmm5,%xmm1 +.byte 102,15,58,15,223,8 + xorl %ebx,%eax + addl 16(%esp),%ecx + andl %ebx,%ebp + pxor %xmm2,%xmm1 + movdqa %xmm5,80(%esp) + andl %eax,%esi + rorl $7,%edi + movdqa %xmm4,%xmm5 + paddd %xmm0,%xmm4 + addl %ebp,%ecx + movl %edx,%ebp + pxor %xmm3,%xmm1 + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movdqa %xmm1,%xmm3 + movdqa %xmm4,(%esp) + movl %edi,%esi + xorl %eax,%edi + addl 20(%esp),%ebx + andl %eax,%esi + pslld $2,%xmm1 + andl %edi,%ebp + rorl $7,%edx + psrld $30,%xmm3 + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %ebp,%ebx + xorl %eax,%edi + addl %ecx,%ebx + por %xmm3,%xmm1 + movl %edx,%ebp + xorl %edi,%edx + movdqa 96(%esp),%xmm3 + addl 24(%esp),%eax + andl %edi,%ebp + andl %edx,%esi + rorl $7,%ecx + addl %ebp,%eax + movdqa %xmm1,%xmm4 + movl %ebx,%ebp + roll $5,%ebx + addl %esi,%eax + xorl %edi,%edx + addl %ebx,%eax + movl %ecx,%esi + xorl %edx,%ecx + addl 28(%esp),%edi + andl %edx,%esi + andl %ecx,%ebp + rorl $7,%ebx + addl %esi,%edi + movl %eax,%esi + roll $5,%eax + addl %ebp,%edi + xorl %edx,%ecx + addl %eax,%edi + movl %ebx,%ebp + pxor %xmm6,%xmm2 +.byte 102,15,58,15,224,8 + xorl %ecx,%ebx + addl 32(%esp),%edx + andl %ecx,%ebp + pxor %xmm3,%xmm2 + movdqa %xmm6,96(%esp) + andl %ebx,%esi + rorl $7,%eax + movdqa %xmm5,%xmm6 + paddd %xmm1,%xmm5 + addl %ebp,%edx + movl %edi,%ebp + pxor %xmm4,%xmm2 + roll $5,%edi + addl %esi,%edx + xorl %ecx,%ebx + addl %edi,%edx + movdqa %xmm2,%xmm4 + movdqa %xmm5,16(%esp) + movl %eax,%esi + xorl %ebx,%eax + addl 36(%esp),%ecx + andl %ebx,%esi + pslld $2,%xmm2 + andl %eax,%ebp + rorl $7,%edi + psrld $30,%xmm4 + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %ebp,%ecx + xorl %ebx,%eax + addl %edx,%ecx + por %xmm4,%xmm2 + movl %edi,%ebp + xorl %eax,%edi + movdqa 64(%esp),%xmm4 + addl 40(%esp),%ebx + andl %eax,%ebp + andl %edi,%esi + rorl $7,%edx + addl %ebp,%ebx + movdqa %xmm2,%xmm5 + movl %ecx,%ebp + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%edi + addl %ecx,%ebx + movl 
%edx,%esi + xorl %edi,%edx + addl 44(%esp),%eax + andl %edi,%esi + andl %edx,%ebp + rorl $7,%ecx + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %ebp,%eax + xorl %edi,%edx + addl %ebx,%eax + addl 48(%esp),%edi + pxor %xmm7,%xmm3 +.byte 102,15,58,15,233,8 + xorl %edx,%esi + movl %eax,%ebp + roll $5,%eax + pxor %xmm4,%xmm3 + movdqa %xmm7,64(%esp) + xorl %ecx,%esi + addl %eax,%edi + movdqa %xmm6,%xmm7 + paddd %xmm2,%xmm6 + rorl $7,%ebx + addl %esi,%edi + pxor %xmm5,%xmm3 + addl 52(%esp),%edx + xorl %ecx,%ebp + movl %edi,%esi + roll $5,%edi + movdqa %xmm3,%xmm5 + movdqa %xmm6,32(%esp) + xorl %ebx,%ebp + addl %edi,%edx + rorl $7,%eax + addl %ebp,%edx + pslld $2,%xmm3 + addl 56(%esp),%ecx + xorl %ebx,%esi + psrld $30,%xmm5 + movl %edx,%ebp + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%edi + addl %esi,%ecx + por %xmm5,%xmm3 + addl 60(%esp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + xorl %edi,%ebp + addl %ecx,%ebx + rorl $7,%edx + addl %ebp,%ebx + addl (%esp),%eax + paddd %xmm3,%xmm7 + xorl %edi,%esi + movl %ebx,%ebp + roll $5,%ebx + xorl %edx,%esi + movdqa %xmm7,48(%esp) + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 4(%esp),%edi + xorl %edx,%ebp + movl %eax,%esi + roll $5,%eax + xorl %ecx,%ebp + addl %eax,%edi + rorl $7,%ebx + addl %ebp,%edi + addl 8(%esp),%edx + xorl %ecx,%esi + movl %edi,%ebp + roll $5,%edi + xorl %ebx,%esi + addl %edi,%edx + rorl $7,%eax + addl %esi,%edx + addl 12(%esp),%ecx + xorl %ebx,%ebp + movl %edx,%esi + roll $5,%edx + xorl %eax,%ebp + addl %edx,%ecx + rorl $7,%edi + addl %ebp,%ecx + movl 196(%esp),%ebp + cmpl 200(%esp),%ebp + je .L005done + movdqa 160(%esp),%xmm7 + movdqa 176(%esp),%xmm6 + movdqu (%ebp),%xmm0 + movdqu 16(%ebp),%xmm1 + movdqu 32(%ebp),%xmm2 + movdqu 48(%ebp),%xmm3 + addl $64,%ebp +.byte 102,15,56,0,198 + movl %ebp,196(%esp) + movdqa %xmm7,96(%esp) + addl 16(%esp),%ebx + xorl %eax,%esi +.byte 102,15,56,0,206 + movl %ecx,%ebp + roll $5,%ecx + paddd %xmm7,%xmm0 + xorl %edi,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + movdqa %xmm0,(%esp) + addl 20(%esp),%eax + xorl %edi,%ebp + psubd %xmm7,%xmm0 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + addl 24(%esp),%edi + xorl %edx,%esi + movl %eax,%ebp + roll $5,%eax + xorl %ecx,%esi + addl %eax,%edi + rorl $7,%ebx + addl %esi,%edi + addl 28(%esp),%edx + xorl %ecx,%ebp + movl %edi,%esi + roll $5,%edi + xorl %ebx,%ebp + addl %edi,%edx + rorl $7,%eax + addl %ebp,%edx + addl 32(%esp),%ecx + xorl %ebx,%esi +.byte 102,15,56,0,214 + movl %edx,%ebp + roll $5,%edx + paddd %xmm7,%xmm1 + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%edi + addl %esi,%ecx + movdqa %xmm1,16(%esp) + addl 36(%esp),%ebx + xorl %eax,%ebp + psubd %xmm7,%xmm1 + movl %ecx,%esi + roll $5,%ecx + xorl %edi,%ebp + addl %ecx,%ebx + rorl $7,%edx + addl %ebp,%ebx + addl 40(%esp),%eax + xorl %edi,%esi + movl %ebx,%ebp + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%esp),%edi + xorl %edx,%ebp + movl %eax,%esi + roll $5,%eax + xorl %ecx,%ebp + addl %eax,%edi + rorl $7,%ebx + addl %ebp,%edi + addl 48(%esp),%edx + xorl %ecx,%esi +.byte 102,15,56,0,222 + movl %edi,%ebp + roll $5,%edi + paddd %xmm7,%xmm2 + xorl %ebx,%esi + addl %edi,%edx + rorl $7,%eax + addl %esi,%edx + movdqa %xmm2,32(%esp) + addl 52(%esp),%ecx + xorl %ebx,%ebp + psubd %xmm7,%xmm2 + movl %edx,%esi + roll $5,%edx + xorl %eax,%ebp + addl %edx,%ecx + rorl $7,%edi + addl %ebp,%ecx + addl 56(%esp),%ebx + xorl %eax,%esi + movl %ecx,%ebp + roll $5,%ecx + xorl 
%edi,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%esp),%eax + xorl %edi,%ebp + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + movl 192(%esp),%ebp + addl (%ebp),%eax + addl 4(%ebp),%esi + addl 8(%ebp),%ecx + movl %eax,(%ebp) + addl 12(%ebp),%edx + movl %esi,4(%ebp) + addl 16(%ebp),%edi + movl %ecx,8(%ebp) + movl %esi,%ebx + movl %edx,12(%ebp) + movl %edi,16(%ebp) + movdqa %xmm1,%xmm4 + jmp .L004loop +.align 16 +.L005done: + addl 16(%esp),%ebx + xorl %eax,%esi + movl %ecx,%ebp + roll $5,%ecx + xorl %edi,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 20(%esp),%eax + xorl %edi,%ebp + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + addl 24(%esp),%edi + xorl %edx,%esi + movl %eax,%ebp + roll $5,%eax + xorl %ecx,%esi + addl %eax,%edi + rorl $7,%ebx + addl %esi,%edi + addl 28(%esp),%edx + xorl %ecx,%ebp + movl %edi,%esi + roll $5,%edi + xorl %ebx,%ebp + addl %edi,%edx + rorl $7,%eax + addl %ebp,%edx + addl 32(%esp),%ecx + xorl %ebx,%esi + movl %edx,%ebp + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%edi + addl %esi,%ecx + addl 36(%esp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + xorl %edi,%ebp + addl %ecx,%ebx + rorl $7,%edx + addl %ebp,%ebx + addl 40(%esp),%eax + xorl %edi,%esi + movl %ebx,%ebp + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%esp),%edi + xorl %edx,%ebp + movl %eax,%esi + roll $5,%eax + xorl %ecx,%ebp + addl %eax,%edi + rorl $7,%ebx + addl %ebp,%edi + addl 48(%esp),%edx + xorl %ecx,%esi + movl %edi,%ebp + roll $5,%edi + xorl %ebx,%esi + addl %edi,%edx + rorl $7,%eax + addl %esi,%edx + addl 52(%esp),%ecx + xorl %ebx,%ebp + movl %edx,%esi + roll $5,%edx + xorl %eax,%ebp + addl %edx,%ecx + rorl $7,%edi + addl %ebp,%ecx + addl 56(%esp),%ebx + xorl %eax,%esi + movl %ecx,%ebp + roll $5,%ecx + xorl %edi,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%esp),%eax + xorl %edi,%ebp + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%ebp + addl %ebx,%eax + rorl $7,%ecx + addl %ebp,%eax + movl 192(%esp),%ebp + addl (%ebp),%eax + movl 204(%esp),%esp + addl 4(%ebp),%esi + addl 8(%ebp),%ecx + movl %eax,(%ebp) + addl 12(%ebp),%edx + movl %esi,4(%ebp) + addl 16(%ebp),%edi + movl %ecx,8(%ebp) + movl %edx,12(%ebp) + movl %edi,16(%ebp) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size _sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3 +.align 64 +.LK_XX_XX: +.long 1518500249,1518500249,1518500249,1518500249 +.long 1859775393,1859775393,1859775393,1859775393 +.long 2400959708,2400959708,2400959708,2400959708 +.long 3395469782,3395469782,3395469782,3395469782 +.long 66051,67438087,134810123,202182159 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115 .byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82 .byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112 .byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.comm OPENSSL_ia32cap_P,8,4 diff --git a/secure/lib/libcrypto/asm/sha1-x86_64.s b/secure/lib/libcrypto/asm/sha1-x86_64.s index fe6fda385a..3922e203b7 100644 --- a/secure/lib/libcrypto/asm/sha1-x86_64.s +++ b/secure/lib/libcrypto/asm/sha1-x86_64.s @@ -1,11 +1,22 @@ .text + + .globl sha1_block_data_order .type sha1_block_data_order,@function .align 16 sha1_block_data_order: + movl OPENSSL_ia32cap_P+0(%rip),%r9d + movl OPENSSL_ia32cap_P+4(%rip),%r8d + testl $512,%r8d + jz .Lialu + jmp _ssse3_shortcut + +.align 16 +.Lialu: pushq %rbx pushq 
%rbp pushq %r12 + pushq %r13 movq %rsp,%r11 movq %rdi,%r8 subq $72,%rsp @@ -15,1268 +26,2461 @@ sha1_block_data_order: movq %r11,64(%rsp) .Lprologue: - movl 0(%r8),%edx - movl 4(%r8),%esi - movl 8(%r8),%edi - movl 12(%r8),%ebp - movl 16(%r8),%r11d -.align 4 + movl 0(%r8),%esi + movl 4(%r8),%edi + movl 8(%r8),%r11d + movl 12(%r8),%r12d + movl 16(%r8),%r13d + jmp .Lloop + +.align 16 .Lloop: - movl 0(%r9),%eax - bswapl %eax - movl %eax,0(%rsp) - leal 1518500249(%rax,%r11,1),%r12d - movl %edi,%ebx - movl 4(%r9),%eax - movl %edx,%r11d - xorl %ebp,%ebx - bswapl %eax - roll $5,%r11d - andl %esi,%ebx - movl %eax,4(%rsp) - addl %r11d,%r12d - xorl %ebp,%ebx + movl 0(%r9),%edx + bswapl %edx + movl %edx,0(%rsp) + movl %r11d,%eax + movl 4(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,4(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 8(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,8(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax roll $30,%esi - addl %ebx,%r12d - leal 1518500249(%rax,%rbp,1),%r11d - movl %esi,%ebx - movl 8(%r9),%eax - movl %r12d,%ebp - xorl %edi,%ebx - bswapl %eax - roll $5,%ebp - andl %edx,%ebx - movl %eax,8(%rsp) - addl %ebp,%r11d - xorl %edi,%ebx - roll $30,%edx - addl %ebx,%r11d - leal 1518500249(%rax,%rdi,1),%ebp - movl %edx,%ebx - movl 12(%r9),%eax - movl %r11d,%edi - xorl %esi,%ebx - bswapl %eax - roll $5,%edi - andl %r12d,%ebx - movl %eax,12(%rsp) - addl %edi,%ebp - xorl %esi,%ebx + addl %eax,%r12d + movl %esi,%eax + movl 12(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,12(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 16(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,16(%rsp) + addl %ecx,%edi + xorl %esi,%eax roll $30,%r12d - addl %ebx,%ebp - leal 1518500249(%rax,%rsi,1),%edi - movl %r12d,%ebx - movl 16(%r9),%eax - movl %ebp,%esi - xorl %edx,%ebx - bswapl %eax - roll $5,%esi - andl %r11d,%ebx - movl %eax,16(%rsp) - addl %esi,%edi - xorl %edx,%ebx + addl %eax,%edi + movl %r12d,%eax + movl 20(%r9),%ebp + movl %edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,20(%rsp) + addl %ecx,%esi + xorl %r13d,%eax roll $30,%r11d - addl %ebx,%edi - leal 1518500249(%rax,%rdx,1),%esi + addl %eax,%esi + movl %r11d,%eax + movl 24(%r9),%edx + movl %esi,%ecx + xorl %r12d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r13,1),%r13d + andl %edi,%eax + movl %edx,24(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 28(%r9),%ebp + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r12,1),%r12d + andl %esi,%eax + movl %ebp,28(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 32(%r9),%edx + movl %r12d,%ecx + xorl %edi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r11,1),%r11d + andl %r13d,%eax + movl %edx,32(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 36(%r9),%ebp + movl %r11d,%ecx + xorl %esi,%eax + bswapl %ebp + roll $5,%ecx + leal 
1518500249(%rdx,%rdi,1),%edi + andl %r12d,%eax + movl %ebp,36(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 40(%r9),%edx + movl %edi,%ecx + xorl %r13d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rsi,1),%esi + andl %r11d,%eax + movl %edx,40(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl %r11d,%eax + movl 44(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,44(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 48(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,48(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 52(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,52(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 56(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,56(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 60(%r9),%ebp + movl %edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,60(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl 0(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + xorl 32(%rsp),%edx + andl %edi,%eax + leal 1518500249(%rbp,%r13,1),%r13d + xorl 52(%rsp),%edx + xorl %r12d,%eax + roll $1,%edx + addl %ecx,%r13d + roll $30,%edi + movl %edx,0(%rsp) + addl %eax,%r13d + movl 4(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + xorl 36(%rsp),%ebp + andl %esi,%eax + leal 1518500249(%rdx,%r12,1),%r12d + xorl 56(%rsp),%ebp + xorl %r11d,%eax + roll $1,%ebp + addl %ecx,%r12d + roll $30,%esi + movl %ebp,4(%rsp) + addl %eax,%r12d + movl 8(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + xorl 40(%rsp),%edx + andl %r13d,%eax + leal 1518500249(%rbp,%r11,1),%r11d + xorl 60(%rsp),%edx + xorl %edi,%eax + roll $1,%edx + addl %ecx,%r11d + roll $30,%r13d + movl %edx,8(%rsp) + addl %eax,%r11d + movl 12(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + xorl 44(%rsp),%ebp + andl %r12d,%eax + leal 1518500249(%rdx,%rdi,1),%edi + xorl 0(%rsp),%ebp + xorl %esi,%eax + roll $1,%ebp + addl %ecx,%edi + roll $30,%r12d + movl %ebp,12(%rsp) + addl %eax,%edi + movl 16(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + xorl 48(%rsp),%edx + andl %r11d,%eax + leal 1518500249(%rbp,%rsi,1),%esi + xorl 4(%rsp),%edx + xorl %r13d,%eax + roll $1,%edx + addl %ecx,%esi + roll $30,%r11d + movl %edx,16(%rsp) + addl %eax,%esi + movl 20(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 28(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r13,1),%r13d + xorl 52(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 8(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 32(%rsp),%edx + xorl %esi,%eax + 
roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 56(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 12(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 36(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + xorl 60(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 16(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 40(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rdi,1),%edi + xorl 0(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 20(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 44(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 4(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 24(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 48(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 8(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 28(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 52(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 12(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 32(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 56(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 16(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 36(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 60(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 20(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 40(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 0(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 24(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 44(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r13,1),%r13d + xorl 28(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 48(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 32(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 52(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + xorl 36(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 56(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 
1859775393(%rbp,%rdi,1),%edi + xorl 40(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 60(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 44(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 0(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 48(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 4(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 52(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 8(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 56(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 12(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 60(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 16(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 0(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 20(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r11d,%eax movl %r11d,%ebx - movl 20(%r9),%eax - movl %edi,%edx - xorl %r12d,%ebx - bswapl %eax - roll $5,%edx - andl %ebp,%ebx - movl %eax,20(%rsp) - addl %edx,%esi + xorl 44(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp xorl %r12d,%ebx - roll $30,%ebp - addl %ebx,%esi - leal 1518500249(%rax,%r12,1),%edx - movl %ebp,%ebx - movl 24(%r9),%eax - movl %esi,%r12d - xorl %r11d,%ebx - bswapl %eax - roll $5,%r12d + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r13d andl %edi,%ebx - movl %eax,24(%rsp) - addl %r12d,%edx - xorl %r11d,%ebx + roll $1,%ebp + addl %ebx,%r13d roll $30,%edi - addl %ebx,%edx - leal 1518500249(%rax,%r11,1),%r12d + movl %ebp,36(%rsp) + addl %ecx,%r13d + movl 40(%rsp),%edx + movl %edi,%eax movl %edi,%ebx - movl 28(%r9),%eax - movl %edx,%r11d - xorl %ebp,%ebx - bswapl %eax - roll $5,%r11d + xorl 48(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 28(%rsp),%edx + addl %eax,%r12d andl %esi,%ebx - movl %eax,28(%rsp) - addl %r11d,%r12d - xorl %ebp,%ebx - roll $30,%esi + roll $1,%edx addl %ebx,%r12d - leal 1518500249(%rax,%rbp,1),%r11d + roll $30,%esi + movl %edx,40(%rsp) + addl %ecx,%r12d + movl 44(%rsp),%ebp + movl %esi,%eax movl %esi,%ebx - movl 32(%r9),%eax - movl %r12d,%ebp - xorl %edi,%ebx - bswapl %eax - roll $5,%ebp - andl %edx,%ebx - movl %eax,32(%rsp) - addl %ebp,%r11d + xorl 52(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp xorl %edi,%ebx - roll $30,%edx + leal 
-1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp addl %ebx,%r11d - leal 1518500249(%rax,%rdi,1),%ebp - movl %edx,%ebx - movl 36(%r9),%eax - movl %r11d,%edi + roll $30,%r13d + movl %ebp,44(%rsp) + addl %ecx,%r11d + movl 48(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 56(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx xorl %esi,%ebx - bswapl %eax - roll $5,%edi + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%edi andl %r12d,%ebx - movl %eax,36(%rsp) - addl %edi,%ebp - xorl %esi,%ebx + roll $1,%edx + addl %ebx,%edi roll $30,%r12d - addl %ebx,%ebp - leal 1518500249(%rax,%rsi,1),%edi + movl %edx,48(%rsp) + addl %ecx,%edi + movl 52(%rsp),%ebp + movl %r12d,%eax movl %r12d,%ebx - movl 40(%r9),%eax - movl %ebp,%esi - xorl %edx,%ebx - bswapl %eax - roll $5,%esi + xorl 60(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 40(%rsp),%ebp + addl %eax,%esi andl %r11d,%ebx - movl %eax,40(%rsp) - addl %esi,%edi - xorl %edx,%ebx + roll $1,%ebp + addl %ebx,%esi roll $30,%r11d - addl %ebx,%edi - leal 1518500249(%rax,%rdx,1),%esi + movl %ebp,52(%rsp) + addl %ecx,%esi + movl 56(%rsp),%edx + movl %r11d,%eax movl %r11d,%ebx - movl 44(%r9),%eax - movl %edi,%edx - xorl %r12d,%ebx - bswapl %eax - roll $5,%edx - andl %ebp,%ebx - movl %eax,44(%rsp) - addl %edx,%esi + xorl 0(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx xorl %r12d,%ebx - roll $30,%ebp - addl %ebx,%esi - leal 1518500249(%rax,%r12,1),%edx - movl %ebp,%ebx - movl 48(%r9),%eax - movl %esi,%r12d - xorl %r11d,%ebx - bswapl %eax - roll $5,%r12d + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 44(%rsp),%edx + addl %eax,%r13d andl %edi,%ebx - movl %eax,48(%rsp) - addl %r12d,%edx - xorl %r11d,%ebx + roll $1,%edx + addl %ebx,%r13d roll $30,%edi - addl %ebx,%edx - leal 1518500249(%rax,%r11,1),%r12d + movl %edx,56(%rsp) + addl %ecx,%r13d + movl 60(%rsp),%ebp + movl %edi,%eax movl %edi,%ebx - movl 52(%r9),%eax - movl %edx,%r11d - xorl %ebp,%ebx - bswapl %eax - roll $5,%r11d + xorl 4(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 48(%rsp),%ebp + addl %eax,%r12d andl %esi,%ebx - movl %eax,52(%rsp) - addl %r11d,%r12d - xorl %ebp,%ebx - roll $30,%esi + roll $1,%ebp addl %ebx,%r12d - leal 1518500249(%rax,%rbp,1),%r11d + roll $30,%esi + movl %ebp,60(%rsp) + addl %ecx,%r12d + movl 0(%rsp),%edx + movl %esi,%eax movl %esi,%ebx - movl 56(%r9),%eax - movl %r12d,%ebp - xorl %edi,%ebx - bswapl %eax - roll $5,%ebp - andl %edx,%ebx - movl %eax,56(%rsp) - addl %ebp,%r11d + xorl 8(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx xorl %edi,%ebx - roll $30,%edx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 52(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx addl %ebx,%r11d - leal 1518500249(%rax,%rdi,1),%ebp - movl %edx,%ebx - movl 60(%r9),%eax - movl %r11d,%edi + roll $30,%r13d + movl %edx,0(%rsp) + addl %ecx,%r11d + movl 4(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 12(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp xorl %esi,%ebx - bswapl %eax - roll $5,%edi + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 56(%rsp),%ebp + addl %eax,%edi andl %r12d,%ebx - movl %eax,60(%rsp) - addl %edi,%ebp - xorl %esi,%ebx + roll $1,%ebp + addl %ebx,%edi roll 
$30,%r12d - addl %ebx,%ebp - leal 1518500249(%rax,%rsi,1),%edi - movl 0(%rsp),%eax + movl %ebp,4(%rsp) + addl %ecx,%edi + movl 8(%rsp),%edx + movl %r12d,%eax movl %r12d,%ebx - movl %ebp,%esi - xorl 8(%rsp),%eax - xorl %edx,%ebx - roll $5,%esi - xorl 32(%rsp),%eax + xorl 16(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 60(%rsp),%edx + addl %eax,%esi andl %r11d,%ebx - addl %esi,%edi - xorl 52(%rsp),%eax - xorl %edx,%ebx + roll $1,%edx + addl %ebx,%esi roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,0(%rsp) - leal 1518500249(%rax,%rdx,1),%esi - movl 4(%rsp),%eax + movl %edx,8(%rsp) + addl %ecx,%esi + movl 12(%rsp),%ebp + movl %r11d,%eax movl %r11d,%ebx - movl %edi,%edx - xorl 12(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edx - xorl 36(%rsp),%eax - andl %ebp,%ebx - addl %edx,%esi - xorl 56(%rsp),%eax + xorl 20(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 44(%rsp),%ebp xorl %r12d,%ebx - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - movl %eax,4(%rsp) - leal 1518500249(%rax,%r12,1),%edx - movl 8(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 16(%rsp),%eax - xorl %r11d,%ebx - roll $5,%r12d - xorl 40(%rsp),%eax + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 0(%rsp),%ebp + addl %eax,%r13d andl %edi,%ebx - addl %r12d,%edx - xorl 60(%rsp),%eax - xorl %r11d,%ebx + roll $1,%ebp + addl %ebx,%r13d roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,8(%rsp) - leal 1518500249(%rax,%r11,1),%r12d - movl 12(%rsp),%eax + movl %ebp,12(%rsp) + addl %ecx,%r13d + movl 16(%rsp),%edx + movl %edi,%eax movl %edi,%ebx - movl %edx,%r11d - xorl 20(%rsp),%eax - xorl %ebp,%ebx - roll $5,%r11d - xorl 44(%rsp),%eax + xorl 24(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 48(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 4(%rsp),%edx + addl %eax,%r12d andl %esi,%ebx - addl %r11d,%r12d - xorl 0(%rsp),%eax - xorl %ebp,%ebx - roll $30,%esi + roll $1,%edx addl %ebx,%r12d - roll $1,%eax - movl %eax,12(%rsp) - leal 1518500249(%rax,%rbp,1),%r11d - movl 16(%rsp),%eax + roll $30,%esi + movl %edx,16(%rsp) + addl %ecx,%r12d + movl 20(%rsp),%ebp + movl %esi,%eax movl %esi,%ebx - movl %r12d,%ebp - xorl 24(%rsp),%eax - xorl %edi,%ebx - roll $5,%ebp - xorl 48(%rsp),%eax - andl %edx,%ebx - addl %ebp,%r11d - xorl 4(%rsp),%eax + xorl 28(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 52(%rsp),%ebp xorl %edi,%ebx - roll $30,%edx + leal -1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 8(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp addl %ebx,%r11d - roll $1,%eax - movl %eax,16(%rsp) - leal 1859775393(%rax,%rdi,1),%ebp - movl 20(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 28(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 52(%rsp),%eax + roll $30,%r13d + movl %ebp,20(%rsp) + addl %ecx,%r11d + movl 24(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 32(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 56(%rsp),%edx xorl %esi,%ebx - addl %edi,%ebp - xorl 8(%rsp),%eax + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 12(%rsp),%edx + addl %eax,%edi + andl %r12d,%ebx + roll $1,%edx + addl %ebx,%edi roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,20(%rsp) - leal 1859775393(%rax,%rsi,1),%edi - movl 24(%rsp),%eax + movl %edx,24(%rsp) + addl %ecx,%edi + movl 28(%rsp),%ebp + movl %r12d,%eax movl %r12d,%ebx - movl %ebp,%esi - xorl 32(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 56(%rsp),%eax - xorl 
%edx,%ebx - addl %esi,%edi - xorl 12(%rsp),%eax + xorl 36(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 60(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 16(%rsp),%ebp + addl %eax,%esi + andl %r11d,%ebx + roll $1,%ebp + addl %ebx,%esi roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,24(%rsp) - leal 1859775393(%rax,%rdx,1),%esi - movl 28(%rsp),%eax + movl %ebp,28(%rsp) + addl %ecx,%esi + movl 32(%rsp),%edx + movl %r11d,%eax movl %r11d,%ebx - movl %edi,%edx - xorl 36(%rsp),%eax - xorl %ebp,%ebx - roll $5,%edx - xorl 60(%rsp),%eax + xorl 40(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 0(%rsp),%edx xorl %r12d,%ebx - addl %edx,%esi - xorl 16(%rsp),%eax - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - movl %eax,28(%rsp) - leal 1859775393(%rax,%r12,1),%edx - movl 32(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 40(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 0(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 20(%rsp),%eax + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 20(%rsp),%edx + addl %eax,%r13d + andl %edi,%ebx + roll $1,%edx + addl %ebx,%r13d roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,32(%rsp) - leal 1859775393(%rax,%r11,1),%r12d - movl 36(%rsp),%eax + movl %edx,32(%rsp) + addl %ecx,%r13d + movl 36(%rsp),%ebp + movl %edi,%eax movl %edi,%ebx - movl %edx,%r11d - xorl 44(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 4(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 24(%rsp),%eax - roll $30,%esi + xorl 44(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 4(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r12d + andl %esi,%ebx + roll $1,%ebp addl %ebx,%r12d - roll $1,%eax - movl %eax,36(%rsp) - leal 1859775393(%rax,%rbp,1),%r11d - movl 40(%rsp),%eax + roll $30,%esi + movl %ebp,36(%rsp) + addl %ecx,%r12d + movl 40(%rsp),%edx + movl %esi,%eax movl %esi,%ebx - movl %r12d,%ebp - xorl 48(%rsp),%eax - xorl %edx,%ebx - roll $5,%ebp - xorl 8(%rsp),%eax + xorl 48(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 8(%rsp),%edx xorl %edi,%ebx - addl %ebp,%r11d - xorl 28(%rsp),%eax - roll $30,%edx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 28(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx addl %ebx,%r11d - roll $1,%eax - movl %eax,40(%rsp) - leal 1859775393(%rax,%rdi,1),%ebp - movl 44(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 52(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 12(%rsp),%eax + roll $30,%r13d + movl %edx,40(%rsp) + addl %ecx,%r11d + movl 44(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 52(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 12(%rsp),%ebp xorl %esi,%ebx - addl %edi,%ebp - xorl 32(%rsp),%eax + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%edi + andl %r12d,%ebx + roll $1,%ebp + addl %ebx,%edi roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,44(%rsp) - leal 1859775393(%rax,%rsi,1),%edi - movl 48(%rsp),%eax + movl %ebp,44(%rsp) + addl %ecx,%edi + movl 48(%rsp),%edx + movl %r12d,%eax movl %r12d,%ebx - movl %ebp,%esi - xorl 56(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 16(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 36(%rsp),%eax - roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,48(%rsp) - leal 1859775393(%rax,%rdx,1),%esi - movl 52(%rsp),%eax - movl %r11d,%ebx - movl %edi,%edx - xorl 60(%rsp),%eax - xorl %ebp,%ebx - roll $5,%edx - xorl 20(%rsp),%eax - xorl 
%r12d,%ebx - addl %edx,%esi - xorl 40(%rsp),%eax - roll $30,%ebp + xorl 56(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 16(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%esi + andl %r11d,%ebx + roll $1,%edx addl %ebx,%esi - roll $1,%eax - movl %eax,52(%rsp) - leal 1859775393(%rax,%r12,1),%edx - movl 56(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 0(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 24(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 44(%rsp),%eax + roll $30,%r11d + movl %edx,48(%rsp) + addl %ecx,%esi + movl 52(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 60(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 20(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 40(%rsp),%ebp roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,56(%rsp) - leal 1859775393(%rax,%r11,1),%r12d - movl 60(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - xorl 4(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 28(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 48(%rsp),%eax + addl %eax,%r13d + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 0(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 24(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 44(%rsp),%edx roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - movl %eax,60(%rsp) - leal 1859775393(%rax,%rbp,1),%r11d - movl 0(%rsp),%eax - movl %esi,%ebx - movl %r12d,%ebp - xorl 8(%rsp),%eax - xorl %edx,%ebx - roll $5,%ebp - xorl 32(%rsp),%eax - xorl %edi,%ebx - addl %ebp,%r11d - xorl 52(%rsp),%eax - roll $30,%edx - addl %ebx,%r11d - roll $1,%eax - movl %eax,0(%rsp) - leal 1859775393(%rax,%rdi,1),%ebp - movl 4(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 12(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 36(%rsp),%eax - xorl %esi,%ebx - addl %edi,%ebp - xorl 56(%rsp),%eax + addl %eax,%r12d + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 4(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 28(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 48(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 32(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 52(%rsp),%edx roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,4(%rsp) - leal 1859775393(%rax,%rsi,1),%edi - movl 8(%rsp),%eax - movl %r12d,%ebx - movl %ebp,%esi - xorl 16(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 40(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 60(%rsp),%eax + addl %eax,%edi + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 36(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 56(%rsp),%ebp roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,8(%rsp) - leal 1859775393(%rax,%rdx,1),%esi - movl 12(%rsp),%eax - movl %r11d,%ebx - movl %edi,%edx - xorl 20(%rsp),%eax - xorl %ebp,%ebx - roll $5,%edx - xorl 44(%rsp),%eax - xorl %r12d,%ebx - addl %edx,%esi - xorl 0(%rsp),%eax - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - movl %eax,12(%rsp) - leal 1859775393(%rax,%r12,1),%edx - movl 16(%rsp),%eax - movl %ebp,%ebx - movl 
%esi,%r12d - xorl 24(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 48(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 4(%rsp),%eax + addl %eax,%esi + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 40(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 60(%rsp),%edx roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,16(%rsp) - leal 1859775393(%rax,%r11,1),%r12d - movl 20(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - xorl 28(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 52(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 8(%rsp),%eax + addl %eax,%r13d + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 44(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 0(%rsp),%ebp roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - movl %eax,20(%rsp) - leal 1859775393(%rax,%rbp,1),%r11d - movl 24(%rsp),%eax - movl %esi,%ebx - movl %r12d,%ebp - xorl 32(%rsp),%eax - xorl %edx,%ebx - roll $5,%ebp - xorl 56(%rsp),%eax - xorl %edi,%ebx - addl %ebp,%r11d - xorl 12(%rsp),%eax - roll $30,%edx - addl %ebx,%r11d - roll $1,%eax - movl %eax,24(%rsp) - leal 1859775393(%rax,%rdi,1),%ebp - movl 28(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 36(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 60(%rsp),%eax - xorl %esi,%ebx - addl %edi,%ebp - xorl 16(%rsp),%eax + addl %eax,%r12d + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rbp,%r11,1),%r11d + xorl 48(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 4(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 28(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 52(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 8(%rsp),%ebp roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,28(%rsp) - leal 1859775393(%rax,%rsi,1),%edi - movl 32(%rsp),%eax - movl %r12d,%ebx - movl %ebp,%esi - xorl 40(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 0(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 20(%rsp),%eax - roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,32(%rsp) - leal -1894007588(%rax,%rdx,1),%esi - movl 36(%rsp),%eax - movl %ebp,%ebx - movl %ebp,%ecx - xorl 44(%rsp),%eax - movl %edi,%edx - andl %r11d,%ebx - xorl 4(%rsp),%eax - orl %r11d,%ecx - roll $5,%edx - xorl 24(%rsp),%eax - andl %r12d,%ecx - addl %edx,%esi - roll $1,%eax - orl %ecx,%ebx - roll $30,%ebp - movl %eax,36(%rsp) - addl %ebx,%esi - leal -1894007588(%rax,%r12,1),%edx - movl 40(%rsp),%eax - movl %edi,%ebx + addl %eax,%edi + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %r12d,%eax movl %edi,%ecx - xorl 48(%rsp),%eax - movl %esi,%r12d - andl %ebp,%ebx - xorl 8(%rsp),%eax - orl %ebp,%ecx - roll $5,%r12d - xorl 28(%rsp),%eax - andl %r11d,%ecx - addl %r12d,%edx - roll $1,%eax - orl %ecx,%ebx - roll $30,%edi - movl %eax,40(%rsp) - addl %ebx,%edx - leal -1894007588(%rax,%r11,1),%r12d - movl 44(%rsp),%eax - movl %esi,%ebx + xorl 32(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rsi,1),%esi + xorl 56(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 12(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll 
$1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r11d,%eax movl %esi,%ecx - xorl 52(%rsp),%eax - movl %edx,%r11d - andl %edi,%ebx - xorl 12(%rsp),%eax - orl %edi,%ecx - roll $5,%r11d - xorl 32(%rsp),%eax - andl %ebp,%ecx - addl %r11d,%r12d - roll $1,%eax - orl %ecx,%ebx + xorl 36(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 60(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 16(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 40(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 0(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 20(%rsp),%edx roll $30,%esi - movl %eax,44(%rsp) - addl %ebx,%r12d - leal -1894007588(%rax,%rbp,1),%r11d - movl 48(%rsp),%eax - movl %edx,%ebx - movl %edx,%ecx - xorl 56(%rsp),%eax - movl %r12d,%ebp - andl %esi,%ebx - xorl 16(%rsp),%eax - orl %esi,%ecx - roll $5,%ebp - xorl 36(%rsp),%eax - andl %edi,%ecx - addl %ebp,%r11d - roll $1,%eax - orl %ecx,%ebx - roll $30,%edx - movl %eax,48(%rsp) - addl %ebx,%r11d - leal -1894007588(%rax,%rdi,1),%ebp - movl 52(%rsp),%eax - movl %r12d,%ebx + addl %eax,%r12d + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %esi,%eax movl %r12d,%ecx - xorl 60(%rsp),%eax - movl %r11d,%edi - andl %edx,%ebx - xorl 20(%rsp),%eax - orl %edx,%ecx - roll $5,%edi - xorl 40(%rsp),%eax - andl %esi,%ecx - addl %edi,%ebp - roll $1,%eax - orl %ecx,%ebx - roll $30,%r12d - movl %eax,52(%rsp) - addl %ebx,%ebp - leal -1894007588(%rax,%rsi,1),%edi - movl 56(%rsp),%eax - movl %r11d,%ebx + xorl 44(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 4(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 24(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r13d,%eax movl %r11d,%ecx - xorl 0(%rsp),%eax - movl %ebp,%esi - andl %r12d,%ebx - xorl 24(%rsp),%eax - orl %r12d,%ecx - roll $5,%esi - xorl 44(%rsp),%eax - andl %edx,%ecx - addl %esi,%edi - roll $1,%eax - orl %ecx,%ebx - roll $30,%r11d - movl %eax,56(%rsp) - addl %ebx,%edi - leal -1894007588(%rax,%rdx,1),%esi - movl 60(%rsp),%eax - movl %ebp,%ebx - movl %ebp,%ecx - xorl 4(%rsp),%eax - movl %edi,%edx - andl %r11d,%ebx - xorl 28(%rsp),%eax - orl %r11d,%ecx - roll $5,%edx - xorl 48(%rsp),%eax - andl %r12d,%ecx - addl %edx,%esi - roll $1,%eax - orl %ecx,%ebx - roll $30,%ebp - movl %eax,60(%rsp) - addl %ebx,%esi - leal -1894007588(%rax,%r12,1),%edx - movl 0(%rsp),%eax - movl %edi,%ebx + xorl 48(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 8(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 28(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %r12d,%eax movl %edi,%ecx - xorl 8(%rsp),%eax - movl %esi,%r12d - andl %ebp,%ebx - xorl 32(%rsp),%eax - orl %ebp,%ecx - roll $5,%r12d - xorl 52(%rsp),%eax - andl %r11d,%ecx - addl %r12d,%edx - roll $1,%eax - orl %ecx,%ebx - roll $30,%edi - movl %eax,0(%rsp) - addl %ebx,%edx - leal -1894007588(%rax,%r11,1),%r12d - movl 4(%rsp),%eax - movl %esi,%ebx + xorl 52(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 12(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 32(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %r11d,%eax movl %esi,%ecx - xorl 12(%rsp),%eax - movl %edx,%r11d - andl 
%edi,%ebx - xorl 36(%rsp),%eax - orl %edi,%ecx - roll $5,%r11d - xorl 56(%rsp),%eax - andl %ebp,%ecx - addl %r11d,%r12d - roll $1,%eax - orl %ecx,%ebx + xorl 56(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 16(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 36(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 60(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 20(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 40(%rsp),%ebp roll $30,%esi - movl %eax,4(%rsp) - addl %ebx,%r12d - leal -1894007588(%rax,%rbp,1),%r11d - movl 8(%rsp),%eax - movl %edx,%ebx - movl %edx,%ecx - xorl 16(%rsp),%eax - movl %r12d,%ebp - andl %esi,%ebx - xorl 40(%rsp),%eax - orl %esi,%ecx - roll $5,%ebp - xorl 60(%rsp),%eax - andl %edi,%ecx - addl %ebp,%r11d - roll $1,%eax - orl %ecx,%ebx - roll $30,%edx - movl %eax,8(%rsp) - addl %ebx,%r11d - leal -1894007588(%rax,%rdi,1),%ebp - movl 12(%rsp),%eax - movl %r12d,%ebx + addl %eax,%r12d + roll $1,%ebp + movl 56(%rsp),%edx + movl %esi,%eax movl %r12d,%ecx - xorl 20(%rsp),%eax - movl %r11d,%edi - andl %edx,%ebx - xorl 44(%rsp),%eax - orl %edx,%ecx - roll $5,%edi - xorl 0(%rsp),%eax - andl %esi,%ecx - addl %edi,%ebp - roll $1,%eax - orl %ecx,%ebx - roll $30,%r12d - movl %eax,12(%rsp) - addl %ebx,%ebp - leal -1894007588(%rax,%rsi,1),%edi - movl 16(%rsp),%eax - movl %r11d,%ebx + xorl 0(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rbp,%r11,1),%r11d + xorl 24(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 44(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl 60(%rsp),%ebp + movl %r13d,%eax movl %r11d,%ecx - xorl 24(%rsp),%eax - movl %ebp,%esi - andl %r12d,%ebx - xorl 48(%rsp),%eax - orl %r12d,%ecx - roll $5,%esi - xorl 4(%rsp),%eax - andl %edx,%ecx - addl %esi,%edi - roll $1,%eax - orl %ecx,%ebx + xorl 4(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 28(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 48(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl %r11d,%eax + leal -899497514(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl %r13d,%eax + addl %ecx,%esi roll $30,%r11d - movl %eax,16(%rsp) - addl %ebx,%edi - leal -1894007588(%rax,%rdx,1),%esi - movl 20(%rsp),%eax - movl %ebp,%ebx - movl %ebp,%ecx - xorl 28(%rsp),%eax - movl %edi,%edx - andl %r11d,%ebx - xorl 52(%rsp),%eax - orl %r11d,%ecx + addl %eax,%esi + addl 0(%r8),%esi + addl 4(%r8),%edi + addl 8(%r8),%r11d + addl 12(%r8),%r12d + addl 16(%r8),%r13d + movl %esi,0(%r8) + movl %edi,4(%r8) + movl %r11d,8(%r8) + movl %r12d,12(%r8) + movl %r13d,16(%r8) + + subq $1,%r10 + leaq 64(%r9),%r9 + jnz .Lloop + + movq 64(%rsp),%rsi + movq (%rsi),%r13 + movq 8(%rsi),%r12 + movq 16(%rsi),%rbp + movq 24(%rsi),%rbx + leaq 32(%rsi),%rsp +.Lepilogue: + .byte 0xf3,0xc3 +.size sha1_block_data_order,.-sha1_block_data_order +.type sha1_block_data_order_ssse3,@function +.align 16 +sha1_block_data_order_ssse3: +_ssse3_shortcut: + pushq %rbx + pushq %rbp + pushq %r12 + leaq -64(%rsp),%rsp + movq %rdi,%r8 + movq %rsi,%r9 + movq %rdx,%r10 + + shlq $6,%r10 + addq %r9,%r10 + leaq K_XX_XX(%rip),%r11 + + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + movl %ebx,%esi + movl 16(%r8),%ebp + + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 
+.byte 102,15,56,0,198 + addq $64,%r9 +.byte 102,15,56,0,206 +.byte 102,15,56,0,214 +.byte 102,15,56,0,222 + paddd %xmm9,%xmm0 + paddd %xmm9,%xmm1 + paddd %xmm9,%xmm2 + movdqa %xmm0,0(%rsp) + psubd %xmm9,%xmm0 + movdqa %xmm1,16(%rsp) + psubd %xmm9,%xmm1 + movdqa %xmm2,32(%rsp) + psubd %xmm9,%xmm2 + jmp .Loop_ssse3 +.align 16 +.Loop_ssse3: + movdqa %xmm1,%xmm4 + addl 0(%rsp),%ebp + xorl %edx,%ecx + movdqa %xmm3,%xmm8 +.byte 102,15,58,15,224,8 + movl %eax,%edi + roll $5,%eax + paddd %xmm3,%xmm9 + andl %ecx,%esi + xorl %edx,%ecx + psrldq $4,%xmm8 + xorl %edx,%esi + addl %eax,%ebp + pxor %xmm0,%xmm4 + rorl $2,%ebx + addl %esi,%ebp + pxor %xmm2,%xmm8 + addl 4(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pxor %xmm8,%xmm4 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm9,48(%rsp) + xorl %ecx,%edi + addl %ebp,%edx + movdqa %xmm4,%xmm10 + movdqa %xmm4,%xmm8 + rorl $7,%eax + addl %edi,%edx + addl 8(%rsp),%ecx + xorl %ebx,%eax + pslldq $12,%xmm10 + paddd %xmm4,%xmm4 + movl %edx,%edi roll $5,%edx - xorl 8(%rsp),%eax - andl %r12d,%ecx - addl %edx,%esi - roll $1,%eax - orl %ecx,%ebx - roll $30,%ebp - movl %eax,20(%rsp) - addl %ebx,%esi - leal -1894007588(%rax,%r12,1),%edx - movl 24(%rsp),%eax - movl %edi,%ebx - movl %edi,%ecx - xorl 32(%rsp),%eax - movl %esi,%r12d - andl %ebp,%ebx - xorl 56(%rsp),%eax - orl %ebp,%ecx - roll $5,%r12d - xorl 12(%rsp),%eax - andl %r11d,%ecx - addl %r12d,%edx - roll $1,%eax - orl %ecx,%ebx - roll $30,%edi - movl %eax,24(%rsp) - addl %ebx,%edx - leal -1894007588(%rax,%r11,1),%r12d - movl 28(%rsp),%eax - movl %esi,%ebx - movl %esi,%ecx - xorl 36(%rsp),%eax - movl %edx,%r11d - andl %edi,%ebx - xorl 60(%rsp),%eax - orl %edi,%ecx - roll $5,%r11d - xorl 16(%rsp),%eax - andl %ebp,%ecx - addl %r11d,%r12d - roll $1,%eax - orl %ecx,%ebx - roll $30,%esi - movl %eax,28(%rsp) - addl %ebx,%r12d - leal -1894007588(%rax,%rbp,1),%r11d - movl 32(%rsp),%eax - movl %edx,%ebx - movl %edx,%ecx - xorl 40(%rsp),%eax - movl %r12d,%ebp - andl %esi,%ebx - xorl 0(%rsp),%eax - orl %esi,%ecx + andl %eax,%esi + xorl %ebx,%eax + psrld $31,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + movdqa %xmm10,%xmm9 + rorl $7,%ebp + addl %esi,%ecx + psrld $30,%xmm10 + por %xmm8,%xmm4 + addl 12(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pslld $2,%xmm9 + pxor %xmm10,%xmm4 + andl %ebp,%edi + xorl %eax,%ebp + movdqa 0(%r11),%xmm10 + xorl %eax,%edi + addl %ecx,%ebx + pxor %xmm9,%xmm4 + rorl $7,%edx + addl %edi,%ebx + movdqa %xmm2,%xmm5 + addl 16(%rsp),%eax + xorl %ebp,%edx + movdqa %xmm4,%xmm9 +.byte 102,15,58,15,233,8 + movl %ebx,%edi + roll $5,%ebx + paddd %xmm4,%xmm10 + andl %edx,%esi + xorl %ebp,%edx + psrldq $4,%xmm9 + xorl %ebp,%esi + addl %ebx,%eax + pxor %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm3,%xmm9 + addl 20(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pxor %xmm9,%xmm5 + andl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm10,0(%rsp) + xorl %edx,%edi + addl %eax,%ebp + movdqa %xmm5,%xmm8 + movdqa %xmm5,%xmm9 + rorl $7,%ebx + addl %edi,%ebp + addl 24(%rsp),%edx + xorl %ecx,%ebx + pslldq $12,%xmm8 + paddd %xmm5,%xmm5 + movl %ebp,%edi roll $5,%ebp - xorl 20(%rsp),%eax - andl %edi,%ecx - addl %ebp,%r11d - roll $1,%eax - orl %ecx,%ebx - roll $30,%edx - movl %eax,32(%rsp) - addl %ebx,%r11d - leal -1894007588(%rax,%rdi,1),%ebp - movl 36(%rsp),%eax - movl %r12d,%ebx - movl %r12d,%ecx - xorl 44(%rsp),%eax - movl %r11d,%edi - andl %edx,%ebx - xorl 4(%rsp),%eax - orl %edx,%ecx - roll $5,%edi - xorl 24(%rsp),%eax - andl %esi,%ecx + andl %ebx,%esi + xorl %ecx,%ebx + 
psrld $31,%xmm9 + xorl %ecx,%esi + addl %ebp,%edx + movdqa %xmm8,%xmm10 + rorl $7,%eax + addl %esi,%edx + psrld $30,%xmm8 + por %xmm9,%xmm5 + addl 28(%rsp),%ecx + xorl %ebx,%eax + movl %edx,%esi + roll $5,%edx + pslld $2,%xmm10 + pxor %xmm8,%xmm5 + andl %eax,%edi + xorl %ebx,%eax + movdqa 16(%r11),%xmm8 + xorl %ebx,%edi + addl %edx,%ecx + pxor %xmm10,%xmm5 + rorl $7,%ebp + addl %edi,%ecx + movdqa %xmm3,%xmm6 + addl 32(%rsp),%ebx + xorl %eax,%ebp + movdqa %xmm5,%xmm10 +.byte 102,15,58,15,242,8 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm5,%xmm8 + andl %ebp,%esi + xorl %eax,%ebp + psrldq $4,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + pxor %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm4,%xmm10 + addl 36(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + pxor %xmm10,%xmm6 + andl %edx,%edi + xorl %ebp,%edx + movdqa %xmm8,16(%rsp) + xorl %ebp,%edi + addl %ebx,%eax + movdqa %xmm6,%xmm9 + movdqa %xmm6,%xmm10 + rorl $7,%ecx + addl %edi,%eax + addl 40(%rsp),%ebp + xorl %edx,%ecx + pslldq $12,%xmm9 + paddd %xmm6,%xmm6 + movl %eax,%edi + roll $5,%eax + andl %ecx,%esi + xorl %edx,%ecx + psrld $31,%xmm10 + xorl %edx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + psrld $30,%xmm9 + por %xmm10,%xmm6 + addl 44(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pslld $2,%xmm8 + pxor %xmm9,%xmm6 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa 16(%r11),%xmm9 + xorl %ecx,%edi + addl %ebp,%edx + pxor %xmm8,%xmm6 + rorl $7,%eax + addl %edi,%edx + movdqa %xmm4,%xmm7 + addl 48(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm6,%xmm8 +.byte 102,15,58,15,251,8 + movl %edx,%edi + roll $5,%edx + paddd %xmm6,%xmm9 + andl %eax,%esi + xorl %ebx,%eax + psrldq $4,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + pxor %xmm3,%xmm7 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm5,%xmm8 + addl 52(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pxor %xmm8,%xmm7 + andl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm9,32(%rsp) + xorl %eax,%edi + addl %ecx,%ebx + movdqa %xmm7,%xmm10 + movdqa %xmm7,%xmm8 + rorl $7,%edx + addl %edi,%ebx + addl 56(%rsp),%eax + xorl %ebp,%edx + pslldq $12,%xmm10 + paddd %xmm7,%xmm7 + movl %ebx,%edi + roll $5,%ebx + andl %edx,%esi + xorl %ebp,%edx + psrld $31,%xmm8 + xorl %ebp,%esi + addl %ebx,%eax + movdqa %xmm10,%xmm9 + rorl $7,%ecx + addl %esi,%eax + psrld $30,%xmm10 + por %xmm8,%xmm7 + addl 60(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pslld $2,%xmm9 + pxor %xmm10,%xmm7 + andl %ecx,%edi + xorl %edx,%ecx + movdqa 16(%r11),%xmm10 + xorl %edx,%edi + addl %eax,%ebp + pxor %xmm9,%xmm7 + rorl $7,%ebx addl %edi,%ebp - roll $1,%eax - orl %ecx,%ebx - roll $30,%r12d - movl %eax,36(%rsp) - addl %ebx,%ebp - leal -1894007588(%rax,%rsi,1),%edi - movl 40(%rsp),%eax - movl %r11d,%ebx - movl %r11d,%ecx - xorl 48(%rsp),%eax + movdqa %xmm7,%xmm9 + addl 0(%rsp),%edx + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,206,8 + xorl %ecx,%ebx + movl %ebp,%edi + roll $5,%ebp + pxor %xmm1,%xmm0 + andl %ebx,%esi + xorl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm7,%xmm10 + xorl %ecx,%esi + addl %ebp,%edx + pxor %xmm9,%xmm0 + rorl $7,%eax + addl %esi,%edx + addl 4(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm0,%xmm9 + movdqa %xmm10,48(%rsp) + movl %edx,%esi + roll $5,%edx + andl %eax,%edi + xorl %ebx,%eax + pslld $2,%xmm0 + xorl %ebx,%edi + addl %edx,%ecx + psrld $30,%xmm9 + rorl $7,%ebp + addl %edi,%ecx + addl 8(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%edi + roll $5,%ecx + por %xmm9,%xmm0 + andl %ebp,%esi + xorl %eax,%ebp + movdqa %xmm0,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + 
rorl $7,%edx + addl %esi,%ebx + addl 12(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + andl %edx,%edi + xorl %ebp,%edx + xorl %ebp,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 16(%rsp),%ebp + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,215,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm2,%xmm1 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm8,%xmm9 + paddd %xmm0,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm10,%xmm1 + addl 20(%rsp),%edx + xorl %ecx,%edi movl %ebp,%esi - andl %r12d,%ebx - xorl 8(%rsp),%eax - orl %r12d,%ecx - roll $5,%esi - xorl 28(%rsp),%eax - andl %edx,%ecx - addl %esi,%edi - roll $1,%eax - orl %ecx,%ebx - roll $30,%r11d - movl %eax,40(%rsp) - addl %ebx,%edi - leal -1894007588(%rax,%rdx,1),%esi - movl 44(%rsp),%eax - movl %ebp,%ebx - movl %ebp,%ecx - xorl 52(%rsp),%eax - movl %edi,%edx - andl %r11d,%ebx - xorl 12(%rsp),%eax - orl %r11d,%ecx + roll $5,%ebp + movdqa %xmm1,%xmm10 + movdqa %xmm8,0(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm1 + addl 24(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm10 + movl %edx,%edi roll $5,%edx - xorl 32(%rsp),%eax - andl %r12d,%ecx - addl %edx,%esi - roll $1,%eax - orl %ecx,%ebx - roll $30,%ebp - movl %eax,44(%rsp) - addl %ebx,%esi - leal -1894007588(%rax,%r12,1),%edx - movl 48(%rsp),%eax - movl %edi,%ebx - movl %edi,%ecx - xorl 56(%rsp),%eax - movl %esi,%r12d - andl %ebp,%ebx - xorl 16(%rsp),%eax - orl %ebp,%ecx - roll $5,%r12d - xorl 36(%rsp),%eax - andl %r11d,%ecx - addl %r12d,%edx - roll $1,%eax - orl %ecx,%ebx - roll $30,%edi - movl %eax,48(%rsp) - addl %ebx,%edx - leal -899497514(%rax,%r11,1),%r12d - movl 52(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - xorl 60(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 20(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 40(%rsp),%eax - roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - movl %eax,52(%rsp) - leal -899497514(%rax,%rbp,1),%r11d - movl 56(%rsp),%eax - movl %esi,%ebx - movl %r12d,%ebp - xorl 0(%rsp),%eax - xorl %edx,%ebx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm10,%xmm1 + addl 28(%rsp),%ebx + xorl %eax,%edi + movdqa %xmm1,%xmm8 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 32(%rsp),%eax + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,192,8 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + pxor %xmm3,%xmm2 + xorl %edx,%esi + addl %ebx,%eax + movdqa 32(%r11),%xmm10 + paddd %xmm1,%xmm9 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm8,%xmm2 + addl 36(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + movdqa %xmm2,%xmm8 + movdqa %xmm9,16(%rsp) + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + pslld $2,%xmm2 + addl 40(%rsp),%edx + xorl %ecx,%esi + psrld $30,%xmm8 + movl %ebp,%edi roll $5,%ebp - xorl 24(%rsp),%eax - xorl %edi,%ebx - addl %ebp,%r11d - xorl 44(%rsp),%eax - roll $30,%edx - addl %ebx,%r11d - roll $1,%eax - movl %eax,56(%rsp) - leal -899497514(%rax,%rdi,1),%ebp - movl 60(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 4(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 28(%rsp),%eax - xorl %esi,%ebx + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + por %xmm8,%xmm2 + addl 44(%rsp),%ecx + xorl %ebx,%edi + movdqa %xmm2,%xmm9 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 48(%rsp),%ebx + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,201,8 + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + pxor %xmm4,%xmm3 
+ xorl %ebp,%esi + addl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm2,%xmm10 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm9,%xmm3 + addl 52(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + movdqa %xmm3,%xmm9 + movdqa %xmm10,32(%rsp) + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + pslld $2,%xmm3 + addl 56(%rsp),%ebp + xorl %edx,%esi + psrld $30,%xmm9 + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + por %xmm9,%xmm3 + addl 60(%rsp),%edx + xorl %ecx,%edi + movdqa %xmm3,%xmm10 + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 0(%rsp),%ecx + pxor %xmm0,%xmm4 +.byte 102,68,15,58,15,210,8 + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + pxor %xmm5,%xmm4 + xorl %eax,%esi + addl %edx,%ecx + movdqa %xmm8,%xmm9 + paddd %xmm3,%xmm8 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm10,%xmm4 + addl 4(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + movdqa %xmm4,%xmm10 + movdqa %xmm8,48(%rsp) + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + pslld $2,%xmm4 + addl 8(%rsp),%eax + xorl %ebp,%esi + psrld $30,%xmm10 + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + por %xmm10,%xmm4 + addl 12(%rsp),%ebp + xorl %edx,%edi + movdqa %xmm4,%xmm8 + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx addl %edi,%ebp - xorl 48(%rsp),%eax - roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,60(%rsp) - leal -899497514(%rax,%rsi,1),%edi - movl 0(%rsp),%eax - movl %r12d,%ebx + addl 16(%rsp),%edx + pxor %xmm1,%xmm5 +.byte 102,68,15,58,15,195,8 + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + pxor %xmm6,%xmm5 + xorl %ebx,%esi + addl %ebp,%edx + movdqa %xmm9,%xmm10 + paddd %xmm4,%xmm9 + rorl $7,%eax + addl %esi,%edx + pxor %xmm8,%xmm5 + addl 20(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + movdqa %xmm5,%xmm8 + movdqa %xmm9,0(%rsp) + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + pslld $2,%xmm5 + addl 24(%rsp),%ebx + xorl %eax,%esi + psrld $30,%xmm8 + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + por %xmm8,%xmm5 + addl 28(%rsp),%eax + xorl %ebp,%edi + movdqa %xmm5,%xmm9 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movl %ecx,%edi + pxor %xmm2,%xmm6 +.byte 102,68,15,58,15,204,8 + xorl %edx,%ecx + addl 32(%rsp),%ebp + andl %edx,%edi + pxor %xmm7,%xmm6 + andl %ecx,%esi + rorl $7,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm5,%xmm10 + addl %edi,%ebp + movl %eax,%edi + pxor %xmm9,%xmm6 + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movdqa %xmm6,%xmm9 + movdqa %xmm10,16(%rsp) + movl %ebx,%esi + xorl %ecx,%ebx + addl 36(%rsp),%edx + andl %ecx,%esi + pslld $2,%xmm6 + andl %ebx,%edi + rorl $7,%eax + psrld $30,%xmm9 + addl %esi,%edx movl %ebp,%esi - xorl 8(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 32(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 52(%rsp),%eax - roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,0(%rsp) - leal -899497514(%rax,%rdx,1),%esi - movl 4(%rsp),%eax - movl %r11d,%ebx - movl %edi,%edx - xorl 12(%rsp),%eax - xorl %ebp,%ebx + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + por %xmm9,%xmm6 + movl %eax,%edi + xorl %ebx,%eax + movdqa %xmm6,%xmm10 + addl 40(%rsp),%ecx + andl %ebx,%edi + andl %eax,%esi + rorl $7,%ebp + addl %edi,%ecx + movl %edx,%edi roll $5,%edx - xorl 
36(%rsp),%eax - xorl %r12d,%ebx - addl %edx,%esi - xorl 56(%rsp),%eax - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - movl %eax,4(%rsp) - leal -899497514(%rax,%r12,1),%edx - movl 8(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 16(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 40(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 60(%rsp),%eax - roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,8(%rsp) - leal -899497514(%rax,%r11,1),%r12d - movl 12(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - xorl 20(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 44(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 0(%rsp),%eax - roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - movl %eax,12(%rsp) - leal -899497514(%rax,%rbp,1),%r11d - movl 16(%rsp),%eax - movl %esi,%ebx - movl %r12d,%ebp - xorl 24(%rsp),%eax - xorl %edx,%ebx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%esi + xorl %eax,%ebp + addl 44(%rsp),%ebx + andl %eax,%esi + andl %ebp,%edi + rorl $7,%edx + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%edi + pxor %xmm3,%xmm7 +.byte 102,68,15,58,15,213,8 + xorl %ebp,%edx + addl 48(%rsp),%eax + andl %ebp,%edi + pxor %xmm0,%xmm7 + andl %edx,%esi + rorl $7,%ecx + movdqa 48(%r11),%xmm9 + paddd %xmm6,%xmm8 + addl %edi,%eax + movl %ebx,%edi + pxor %xmm10,%xmm7 + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movdqa %xmm7,%xmm10 + movdqa %xmm8,32(%rsp) + movl %ecx,%esi + xorl %edx,%ecx + addl 52(%rsp),%ebp + andl %edx,%esi + pslld $2,%xmm7 + andl %ecx,%edi + rorl $7,%ebx + psrld $30,%xmm10 + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + por %xmm10,%xmm7 + movl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm7,%xmm8 + addl 56(%rsp),%edx + andl %ecx,%edi + andl %ebx,%esi + rorl $7,%eax + addl %edi,%edx + movl %ebp,%edi roll $5,%ebp - xorl 48(%rsp),%eax - xorl %edi,%ebx - addl %ebp,%r11d - xorl 4(%rsp),%eax - roll $30,%edx - addl %ebx,%r11d - roll $1,%eax - movl %eax,16(%rsp) - leal -899497514(%rax,%rdi,1),%ebp - movl 20(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 28(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 52(%rsp),%eax - xorl %esi,%ebx + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%esi + xorl %ebx,%eax + addl 60(%rsp),%ecx + andl %ebx,%esi + andl %eax,%edi + rorl $7,%ebp + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%edi + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,198,8 + xorl %eax,%ebp + addl 0(%rsp),%ebx + andl %eax,%edi + pxor %xmm1,%xmm0 + andl %ebp,%esi + rorl $7,%edx + movdqa %xmm9,%xmm10 + paddd %xmm7,%xmm9 + addl %edi,%ebx + movl %ecx,%edi + pxor %xmm8,%xmm0 + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movdqa %xmm0,%xmm8 + movdqa %xmm9,48(%rsp) + movl %edx,%esi + xorl %ebp,%edx + addl 4(%rsp),%eax + andl %ebp,%esi + pslld $2,%xmm0 + andl %edx,%edi + rorl $7,%ecx + psrld $30,%xmm8 + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + por %xmm8,%xmm0 + movl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm0,%xmm9 + addl 8(%rsp),%ebp + andl %edx,%edi + andl %ecx,%esi + rorl $7,%ebx addl %edi,%ebp - xorl 8(%rsp),%eax - roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,20(%rsp) - leal -899497514(%rax,%rsi,1),%edi - movl 24(%rsp),%eax - movl %r12d,%ebx + movl %eax,%edi + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%esi + xorl 
%ecx,%ebx + addl 12(%rsp),%edx + andl %ecx,%esi + andl %ebx,%edi + rorl $7,%eax + addl %esi,%edx movl %ebp,%esi - xorl 32(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 56(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 12(%rsp),%eax - roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,24(%rsp) - leal -899497514(%rax,%rdx,1),%esi - movl 28(%rsp),%eax - movl %r11d,%ebx - movl %edi,%edx - xorl 36(%rsp),%eax - xorl %ebp,%ebx + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%edi + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,207,8 + xorl %ebx,%eax + addl 16(%rsp),%ecx + andl %ebx,%edi + pxor %xmm2,%xmm1 + andl %eax,%esi + rorl $7,%ebp + movdqa %xmm10,%xmm8 + paddd %xmm0,%xmm10 + addl %edi,%ecx + movl %edx,%edi + pxor %xmm9,%xmm1 roll $5,%edx - xorl 60(%rsp),%eax - xorl %r12d,%ebx - addl %edx,%esi - xorl 16(%rsp),%eax - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - movl %eax,28(%rsp) - leal -899497514(%rax,%r12,1),%edx - movl 32(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 40(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 0(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 20(%rsp),%eax - roll $30,%edi - addl %ebx,%edx - roll $1,%eax - movl %eax,32(%rsp) - leal -899497514(%rax,%r11,1),%r12d - movl 36(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - xorl 44(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 4(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 24(%rsp),%eax - roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - movl %eax,36(%rsp) - leal -899497514(%rax,%rbp,1),%r11d - movl 40(%rsp),%eax - movl %esi,%ebx - movl %r12d,%ebp - xorl 48(%rsp),%eax - xorl %edx,%ebx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movdqa %xmm1,%xmm9 + movdqa %xmm10,0(%rsp) + movl %ebp,%esi + xorl %eax,%ebp + addl 20(%rsp),%ebx + andl %eax,%esi + pslld $2,%xmm1 + andl %ebp,%edi + rorl $7,%edx + psrld $30,%xmm9 + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + por %xmm9,%xmm1 + movl %edx,%edi + xorl %ebp,%edx + movdqa %xmm1,%xmm10 + addl 24(%rsp),%eax + andl %ebp,%edi + andl %edx,%esi + rorl $7,%ecx + addl %edi,%eax + movl %ebx,%edi + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movl %ecx,%esi + xorl %edx,%ecx + addl 28(%rsp),%ebp + andl %edx,%esi + andl %ecx,%edi + rorl $7,%ebx + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%edi + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,208,8 + xorl %ecx,%ebx + addl 32(%rsp),%edx + andl %ecx,%edi + pxor %xmm3,%xmm2 + andl %ebx,%esi + rorl $7,%eax + movdqa %xmm8,%xmm9 + paddd %xmm1,%xmm8 + addl %edi,%edx + movl %ebp,%edi + pxor %xmm10,%xmm2 roll $5,%ebp - xorl 8(%rsp),%eax - xorl %edi,%ebx - addl %ebp,%r11d - xorl 28(%rsp),%eax - roll $30,%edx - addl %ebx,%r11d - roll $1,%eax - movl %eax,40(%rsp) - leal -899497514(%rax,%rdi,1),%ebp - movl 44(%rsp),%eax - movl %edx,%ebx - movl %r11d,%edi - xorl 52(%rsp),%eax - xorl %r12d,%ebx - roll $5,%edi - xorl 12(%rsp),%eax - xorl %esi,%ebx + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movdqa %xmm2,%xmm10 + movdqa %xmm8,16(%rsp) + movl %eax,%esi + xorl %ebx,%eax + addl 36(%rsp),%ecx + andl %ebx,%esi + pslld $2,%xmm2 + andl %eax,%edi + rorl $7,%ebp + psrld $30,%xmm10 + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + por %xmm10,%xmm2 + movl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm2,%xmm8 + addl 40(%rsp),%ebx + andl %eax,%edi + andl %ebp,%esi + rorl $7,%edx + addl %edi,%ebx + movl 
%ecx,%edi + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%esi + xorl %ebp,%edx + addl 44(%rsp),%eax + andl %ebp,%esi + andl %edx,%edi + rorl $7,%ecx + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + addl 48(%rsp),%ebp + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,193,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm4,%xmm3 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm10 + paddd %xmm2,%xmm9 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm8,%xmm3 + addl 52(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm3,%xmm8 + movdqa %xmm9,32(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm3 + addl 56(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm8 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm8,%xmm3 + addl 60(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 0(%rsp),%eax + paddd %xmm3,%xmm10 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + movdqa %xmm10,48(%rsp) + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 4(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx addl %edi,%ebp - xorl 32(%rsp),%eax - roll $30,%r12d - addl %ebx,%ebp - roll $1,%eax - movl %eax,44(%rsp) - leal -899497514(%rax,%rsi,1),%edi - movl 48(%rsp),%eax - movl %r12d,%ebx + addl 8(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 12(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + cmpq %r10,%r9 + je .Ldone_ssse3 + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r9 + addl 16(%rsp),%ebx + xorl %eax,%esi +.byte 102,15,56,0,206 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm9,%xmm0 + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + movdqa %xmm0,0(%rsp) + addl 20(%rsp),%eax + xorl %ebp,%edi + psubd %xmm9,%xmm0 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi movl %ebp,%esi - xorl 56(%rsp),%eax - xorl %r11d,%ebx - roll $5,%esi - xorl 16(%rsp),%eax - xorl %edx,%ebx - addl %esi,%edi - xorl 36(%rsp),%eax - roll $30,%r11d - addl %ebx,%edi - roll $1,%eax - movl %eax,48(%rsp) - leal -899497514(%rax,%rdx,1),%esi - movl 52(%rsp),%eax - movl %r11d,%ebx - movl %edi,%edx - xorl 60(%rsp),%eax - xorl %ebp,%ebx + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi +.byte 102,15,56,0,214 + movl %edx,%edi roll $5,%edx - xorl 20(%rsp),%eax - xorl %r12d,%ebx - addl %edx,%esi - xorl 40(%rsp),%eax - roll $30,%ebp - addl %ebx,%esi - roll $1,%eax - leal -899497514(%rax,%r12,1),%edx - movl 56(%rsp),%eax - movl %ebp,%ebx - movl %esi,%r12d - xorl 0(%rsp),%eax - xorl %edi,%ebx - roll $5,%r12d - xorl 24(%rsp),%eax - xorl %r11d,%ebx - addl %r12d,%edx - xorl 44(%rsp),%eax - roll $30,%edi - addl %ebx,%edx - roll $1,%eax - leal -899497514(%rax,%r11,1),%r12d - movl 60(%rsp),%eax - movl %edi,%ebx - movl %edx,%r11d - 
xorl 4(%rsp),%eax - xorl %esi,%ebx - roll $5,%r11d - xorl 28(%rsp),%eax - xorl %ebp,%ebx - addl %r11d,%r12d - xorl 48(%rsp),%eax - roll $30,%esi - addl %ebx,%r12d - roll $1,%eax - leal -899497514(%rax,%rbp,1),%r11d - movl %esi,%ebx - movl %r12d,%ebp - xorl %edx,%ebx + paddd %xmm9,%xmm1 + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + movdqa %xmm1,16(%rsp) + addl 36(%rsp),%ebx + xorl %eax,%edi + psubd %xmm9,%xmm1 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi +.byte 102,15,56,0,222 + movl %ebp,%edi roll $5,%ebp - xorl %edi,%ebx - addl %ebp,%r11d - roll $30,%edx - addl %ebx,%r11d - addl 0(%r8),%r11d - addl 4(%r8),%r12d - addl 8(%r8),%edx - addl 12(%r8),%esi - addl 16(%r8),%edi - movl %r11d,0(%r8) - movl %r12d,4(%r8) - movl %edx,8(%r8) - movl %esi,12(%r8) - movl %edi,16(%r8) - - xchgl %r11d,%edx - xchgl %r12d,%esi - xchgl %r11d,%edi - xchgl %r12d,%ebp + paddd %xmm9,%xmm2 + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + movdqa %xmm2,32(%rsp) + addl 52(%rsp),%ecx + xorl %ebx,%edi + psubd %xmm9,%xmm2 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + addl 12(%r8),%edx + movl %eax,0(%r8) + addl 16(%r8),%ebp + movl %esi,4(%r8) + movl %esi,%ebx + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + jmp .Loop_ssse3 - leaq 64(%r9),%r9 - subq $1,%r10 - jnz .Lloop - movq 64(%rsp),%rsi - movq (%rsi),%r12 +.align 16 +.Ldone_ssse3: + addl 16(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 20(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + addl 36(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 52(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + 
xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + movl %eax,0(%r8) + addl 12(%r8),%edx + movl %esi,4(%r8) + addl 16(%r8),%ebp + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + leaq 64(%rsp),%rsi + movq 0(%rsi),%r12 movq 8(%rsi),%rbp movq 16(%rsi),%rbx leaq 24(%rsi),%rsp -.Lepilogue: +.Lepilogue_ssse3: .byte 0xf3,0xc3 -.size sha1_block_data_order,.-sha1_block_data_order +.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3 +.align 64 +K_XX_XX: +.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 +.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 +.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc +.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 +.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 -.align 16 +.align 64 diff --git a/secure/lib/libcrypto/asm/sha256-586.s b/secure/lib/libcrypto/asm/sha256-586.s index 973e50d198..77a89514f1 100644 --- a/secure/lib/libcrypto/asm/sha256-586.s +++ b/secure/lib/libcrypto/asm/sha256-586.s @@ -96,31 +96,30 @@ sha256_block_data_order: .L00300_15: movl 92(%esp),%ebx movl %edx,%ecx - rorl $6,%ecx - movl %edx,%edi - rorl $11,%edi + rorl $14,%ecx movl 20(%esp),%esi - xorl %edi,%ecx - rorl $14,%edi - xorl %edi,%ecx + xorl %edx,%ecx + rorl $5,%ecx + xorl %edx,%ecx + rorl $6,%ecx movl 24(%esp),%edi addl %ecx,%ebx - movl %edx,16(%esp) xorl %edi,%esi + movl %edx,16(%esp) movl %eax,%ecx andl %edx,%esi movl 12(%esp),%edx xorl %edi,%esi movl %eax,%edi addl %esi,%ebx - rorl $2,%ecx + rorl $9,%ecx addl 28(%esp),%ebx - rorl $13,%edi + xorl %eax,%ecx + rorl $11,%ecx movl 4(%esp),%esi - xorl %edi,%ecx - rorl $9,%edi + xorl %eax,%ecx + rorl $2,%ecx addl %ebx,%edx - xorl %edi,%ecx movl 8(%esp),%edi addl %ecx,%ebx movl %eax,(%esp) @@ -142,48 +141,46 @@ sha256_block_data_order: .L00416_63: movl %ebx,%esi movl 100(%esp),%ecx - shrl $3,%ebx - rorl $7,%esi - xorl %esi,%ebx rorl $11,%esi movl %ecx,%edi + xorl %ebx,%esi + rorl $7,%esi + shrl $3,%ebx + rorl $2,%edi xorl %esi,%ebx - shrl $10,%ecx - movl 156(%esp),%esi + xorl %ecx,%edi rorl $17,%edi - xorl %edi,%ecx - rorl $2,%edi - addl %esi,%ebx + shrl $10,%ecx + addl 156(%esp),%ebx xorl %ecx,%edi - addl %edi,%ebx - movl %edx,%ecx addl 120(%esp),%ebx - rorl $6,%ecx - movl %edx,%edi - rorl $11,%edi + movl %edx,%ecx + addl %edi,%ebx + rorl $14,%ecx movl 20(%esp),%esi - xorl %edi,%ecx - rorl $14,%edi + xorl %edx,%ecx + rorl $5,%ecx movl %ebx,92(%esp) - xorl %edi,%ecx + xorl %edx,%ecx + rorl $6,%ecx movl 24(%esp),%edi addl %ecx,%ebx - movl %edx,16(%esp) xorl %edi,%esi + movl %edx,16(%esp) movl %eax,%ecx andl %edx,%esi movl 12(%esp),%edx xorl %edi,%esi movl %eax,%edi addl %esi,%ebx - rorl $2,%ecx + rorl $9,%ecx addl 28(%esp),%ebx - rorl $13,%edi + xorl %eax,%ecx + rorl $11,%ecx movl 4(%esp),%esi - xorl %edi,%ecx - rorl $9,%edi + xorl %eax,%ecx + rorl $2,%ecx addl %ebx,%edx - xorl %edi,%ecx movl 8(%esp),%edi addl %ecx,%ebx movl %eax,(%esp) diff --git a/secure/lib/libcrypto/asm/sha256-x86_64.s b/secure/lib/libcrypto/asm/sha256-x86_64.s index eed3b0080f..db5b898f0f 100644 --- a/secure/lib/libcrypto/asm/sha256-x86_64.s +++ b/secure/lib/libcrypto/asm/sha256-x86_64.s @@ -37,1880 
+37,1688 @@ sha256_block_data_order: .Lloop: xorq %rdi,%rdi movl 0(%rsi),%r12d - bswapl %r12d movl %r8d,%r13d - movl %r8d,%r14d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d movl %r9d,%r15d + movl %r12d,0(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r8d,%r15d - movl %r12d,0(%rsp) + movl %ebx,%r11d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - addl %r11d,%r12d - - movl %eax,%r11d - addl %r13d,%r12d + xorl %ecx,%r11d + xorl %eax,%r14d addl %r15d,%r12d - movl %eax,%r13d - movl %eax,%r14d + movl %ebx,%r15d - rorl $2,%r11d - rorl $13,%r13d - movl %eax,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d - xorl %r13d,%r11d - rorl $9,%r13d - orl %ecx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d - xorl %r13d,%r11d - andl %ecx,%r15d addl %r12d,%edx - - andl %ebx,%r14d addl %r12d,%r11d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r11d + movl 4(%rsi),%r12d - bswapl %r12d movl %edx,%r13d - movl %edx,%r14d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d movl %r8d,%r15d + movl %r12d,4(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %edx,%r15d - movl %r12d,4(%rsp) + movl %eax,%r10d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - addl %r10d,%r12d - - movl %r11d,%r10d - addl %r13d,%r12d + xorl %ebx,%r10d + xorl %r11d,%r14d addl %r15d,%r12d - movl %r11d,%r13d - movl %r11d,%r14d + movl %eax,%r15d - rorl $2,%r10d - rorl $13,%r13d - movl %r11d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d - xorl %r13d,%r10d - rorl $9,%r13d - orl %ebx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d - xorl %r13d,%r10d - andl %ebx,%r15d addl %r12d,%ecx - - andl %eax,%r14d addl %r12d,%r10d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r10d + movl 8(%rsi),%r12d - bswapl %r12d movl %ecx,%r13d - movl %ecx,%r14d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d movl %edx,%r15d + movl %r12d,8(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ecx,%r15d - movl %r12d,8(%rsp) + movl %r11d,%r9d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - addl %r9d,%r12d - - movl %r10d,%r9d - addl %r13d,%r12d + xorl %eax,%r9d + xorl %r10d,%r14d addl %r15d,%r12d - movl %r10d,%r13d - movl %r10d,%r14d + movl %r11d,%r15d - rorl $2,%r9d - rorl $13,%r13d - movl %r10d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d - xorl %r13d,%r9d - rorl $9,%r13d - orl %eax,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d - xorl %r13d,%r9d - andl %eax,%r15d addl %r12d,%ebx - - andl %r11d,%r14d addl %r12d,%r9d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r9d + movl 12(%rsi),%r12d - bswapl %r12d movl %ebx,%r13d - movl %ebx,%r14d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d movl %ecx,%r15d + movl %r12d,12(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ebx,%r15d - movl 
%r12d,12(%rsp) + movl %r10d,%r8d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - addl %r8d,%r12d - - movl %r9d,%r8d - addl %r13d,%r12d + xorl %r11d,%r8d + xorl %r9d,%r14d addl %r15d,%r12d - movl %r9d,%r13d - movl %r9d,%r14d + movl %r10d,%r15d - rorl $2,%r8d - rorl $13,%r13d - movl %r9d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d - xorl %r13d,%r8d - rorl $9,%r13d - orl %r11d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d - xorl %r13d,%r8d - andl %r11d,%r15d addl %r12d,%eax - - andl %r10d,%r14d addl %r12d,%r8d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r8d + movl 16(%rsi),%r12d - bswapl %r12d movl %eax,%r13d - movl %eax,%r14d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d movl %ebx,%r15d + movl %r12d,16(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %eax,%r15d - movl %r12d,16(%rsp) + movl %r9d,%edx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - addl %edx,%r12d - - movl %r8d,%edx - addl %r13d,%r12d + xorl %r10d,%edx + xorl %r8d,%r14d addl %r15d,%r12d - movl %r8d,%r13d - movl %r8d,%r14d + movl %r9d,%r15d - rorl $2,%edx - rorl $13,%r13d - movl %r8d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d - xorl %r13d,%edx - rorl $9,%r13d - orl %r10d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx - xorl %r13d,%edx - andl %r10d,%r15d addl %r12d,%r11d - - andl %r9d,%r14d addl %r12d,%edx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%edx + movl 20(%rsi),%r12d - bswapl %r12d movl %r11d,%r13d - movl %r11d,%r14d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d movl %eax,%r15d + movl %r12d,20(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r11d,%r15d - movl %r12d,20(%rsp) + movl %r8d,%ecx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - addl %ecx,%r12d - - movl %edx,%ecx - addl %r13d,%r12d + xorl %r9d,%ecx + xorl %edx,%r14d addl %r15d,%r12d - movl %edx,%r13d - movl %edx,%r14d + movl %r8d,%r15d - rorl $2,%ecx - rorl $13,%r13d - movl %edx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d - xorl %r13d,%ecx - rorl $9,%r13d - orl %r9d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx - xorl %r13d,%ecx - andl %r9d,%r15d addl %r12d,%r10d - - andl %r8d,%r14d addl %r12d,%ecx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ecx + movl 24(%rsi),%r12d - bswapl %r12d movl %r10d,%r13d - movl %r10d,%r14d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d movl %r11d,%r15d + movl %r12d,24(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r10d,%r15d - movl %r12d,24(%rsp) + movl %edx,%ebx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - addl %ebx,%r12d - - movl %ecx,%ebx - addl %r13d,%r12d + xorl %r8d,%ebx + xorl %ecx,%r14d addl %r15d,%r12d - movl %ecx,%r13d - movl %ecx,%r14d + movl %edx,%r15d - rorl $2,%ebx - rorl $13,%r13d - movl %ecx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d - xorl %r13d,%ebx - rorl $9,%r13d - orl %r8d,%r14d + rorl $2,%r14d + 
addl %r13d,%r12d + addl %r15d,%ebx - xorl %r13d,%ebx - andl %r8d,%r15d addl %r12d,%r9d - - andl %edx,%r14d addl %r12d,%ebx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ebx + movl 28(%rsi),%r12d - bswapl %r12d movl %r9d,%r13d - movl %r9d,%r14d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d movl %r10d,%r15d + movl %r12d,28(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r9d,%r15d - movl %r12d,28(%rsp) + movl %ecx,%eax - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - addl %eax,%r12d - - movl %ebx,%eax - addl %r13d,%r12d + xorl %edx,%eax + xorl %ebx,%r14d addl %r15d,%r12d - movl %ebx,%r13d - movl %ebx,%r14d + movl %ecx,%r15d - rorl $2,%eax - rorl $13,%r13d - movl %ebx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d - xorl %r13d,%eax - rorl $9,%r13d - orl %edx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax - xorl %r13d,%eax - andl %edx,%r15d addl %r12d,%r8d - - andl %ecx,%r14d addl %r12d,%eax - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%eax + movl 32(%rsi),%r12d - bswapl %r12d movl %r8d,%r13d - movl %r8d,%r14d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d movl %r9d,%r15d + movl %r12d,32(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r8d,%r15d - movl %r12d,32(%rsp) + movl %ebx,%r11d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - addl %r11d,%r12d - - movl %eax,%r11d - addl %r13d,%r12d + xorl %ecx,%r11d + xorl %eax,%r14d addl %r15d,%r12d - movl %eax,%r13d - movl %eax,%r14d + movl %ebx,%r15d - rorl $2,%r11d - rorl $13,%r13d - movl %eax,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d - xorl %r13d,%r11d - rorl $9,%r13d - orl %ecx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d - xorl %r13d,%r11d - andl %ecx,%r15d addl %r12d,%edx - - andl %ebx,%r14d addl %r12d,%r11d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r11d + movl 36(%rsi),%r12d - bswapl %r12d movl %edx,%r13d - movl %edx,%r14d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d movl %r8d,%r15d - - rorl $6,%r13d - rorl $11,%r14d - xorl %r9d,%r15d - - xorl %r14d,%r13d - rorl $14,%r14d - andl %edx,%r15d movl %r12d,36(%rsp) - xorl %r14d,%r13d + rorl $9,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - addl %r10d,%r12d - - movl %r11d,%r10d - addl %r13d,%r12d - addl %r15d,%r12d - movl %r11d,%r13d - movl %r11d,%r14d + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d - rorl $2,%r10d - rorl $13,%r13d - movl %r11d,%r15d addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d - xorl %r13d,%r10d - rorl $9,%r13d - orl %ebx,%r14d + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d - xorl %r13d,%r10d + rorl $6,%r13d + andl %r11d,%r10d andl %ebx,%r15d - addl %r12d,%ecx - andl %eax,%r14d - addl %r12d,%r10d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d - orl %r15d,%r14d + addl %r12d,%ecx + addl %r12d,%r10d leaq 1(%rdi),%rdi - addl %r14d,%r10d + movl 40(%rsi),%r12d - bswapl %r12d movl %ecx,%r13d - movl %ecx,%r14d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d movl %edx,%r15d + movl %r12d,40(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d 
- xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ecx,%r15d - movl %r12d,40(%rsp) + movl %r11d,%r9d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - addl %r9d,%r12d - - movl %r10d,%r9d - addl %r13d,%r12d + xorl %eax,%r9d + xorl %r10d,%r14d addl %r15d,%r12d - movl %r10d,%r13d - movl %r10d,%r14d + movl %r11d,%r15d - rorl $2,%r9d - rorl $13,%r13d - movl %r10d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d - xorl %r13d,%r9d - rorl $9,%r13d - orl %eax,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d - xorl %r13d,%r9d - andl %eax,%r15d addl %r12d,%ebx - - andl %r11d,%r14d addl %r12d,%r9d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r9d + movl 44(%rsi),%r12d - bswapl %r12d movl %ebx,%r13d - movl %ebx,%r14d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d movl %ecx,%r15d + movl %r12d,44(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ebx,%r15d - movl %r12d,44(%rsp) + movl %r10d,%r8d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - addl %r8d,%r12d - - movl %r9d,%r8d - addl %r13d,%r12d + xorl %r11d,%r8d + xorl %r9d,%r14d addl %r15d,%r12d - movl %r9d,%r13d - movl %r9d,%r14d + movl %r10d,%r15d - rorl $2,%r8d - rorl $13,%r13d - movl %r9d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d - xorl %r13d,%r8d - rorl $9,%r13d - orl %r11d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d - xorl %r13d,%r8d - andl %r11d,%r15d addl %r12d,%eax - - andl %r10d,%r14d addl %r12d,%r8d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r8d + movl 48(%rsi),%r12d - bswapl %r12d movl %eax,%r13d - movl %eax,%r14d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d movl %ebx,%r15d + movl %r12d,48(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %eax,%r15d - movl %r12d,48(%rsp) + movl %r9d,%edx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - addl %edx,%r12d - - movl %r8d,%edx - addl %r13d,%r12d + xorl %r10d,%edx + xorl %r8d,%r14d addl %r15d,%r12d - movl %r8d,%r13d - movl %r8d,%r14d + movl %r9d,%r15d - rorl $2,%edx - rorl $13,%r13d - movl %r8d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d - xorl %r13d,%edx - rorl $9,%r13d - orl %r10d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx - xorl %r13d,%edx - andl %r10d,%r15d addl %r12d,%r11d - - andl %r9d,%r14d addl %r12d,%edx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%edx + movl 52(%rsi),%r12d - bswapl %r12d movl %r11d,%r13d - movl %r11d,%r14d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d movl %eax,%r15d + movl %r12d,52(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r11d,%r15d - movl %r12d,52(%rsp) + movl %r8d,%ecx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - addl %ecx,%r12d - - movl %edx,%ecx - addl %r13d,%r12d + xorl %r9d,%ecx + xorl %edx,%r14d addl %r15d,%r12d - movl %edx,%r13d - movl %edx,%r14d + movl %r8d,%r15d - rorl $2,%ecx - rorl $13,%r13d - movl %edx,%r15d - addl 
(%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d - xorl %r13d,%ecx - rorl $9,%r13d - orl %r9d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx - xorl %r13d,%ecx - andl %r9d,%r15d addl %r12d,%r10d - - andl %r8d,%r14d addl %r12d,%ecx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ecx + movl 56(%rsi),%r12d - bswapl %r12d movl %r10d,%r13d - movl %r10d,%r14d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d movl %r11d,%r15d + movl %r12d,56(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r10d,%r15d - movl %r12d,56(%rsp) + movl %edx,%ebx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - addl %ebx,%r12d - - movl %ecx,%ebx - addl %r13d,%r12d + xorl %r8d,%ebx + xorl %ecx,%r14d addl %r15d,%r12d - movl %ecx,%r13d - movl %ecx,%r14d + movl %edx,%r15d - rorl $2,%ebx - rorl $13,%r13d - movl %ecx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d - xorl %r13d,%ebx - rorl $9,%r13d - orl %r8d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx - xorl %r13d,%ebx - andl %r8d,%r15d addl %r12d,%r9d - - andl %edx,%r14d addl %r12d,%ebx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ebx + movl 60(%rsi),%r12d - bswapl %r12d movl %r9d,%r13d - movl %r9d,%r14d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d movl %r10d,%r15d + movl %r12d,60(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r9d,%r15d - movl %r12d,60(%rsp) + movl %ecx,%eax - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - addl %eax,%r12d - - movl %ebx,%eax - addl %r13d,%r12d + xorl %edx,%eax + xorl %ebx,%r14d addl %r15d,%r12d - movl %ebx,%r13d - movl %ebx,%r14d + movl %ecx,%r15d - rorl $2,%eax - rorl $13,%r13d - movl %ebx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d - xorl %r13d,%eax - rorl $9,%r13d - orl %edx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax - xorl %r13d,%eax - andl %edx,%r15d addl %r12d,%r8d - - andl %ecx,%r14d addl %r12d,%eax - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%eax + jmp .Lrounds_16_xx .align 16 .Lrounds_16_xx: movl 4(%rsp),%r13d - movl 56(%rsp),%r12d - - movl %r13d,%r15d + movl 56(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 36(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 36(%rsp),%r12d + xorl %r15d,%r14d addl 0(%rsp),%r12d movl %r8d,%r13d - movl %r8d,%r14d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d movl %r9d,%r15d + movl %r12d,0(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r8d,%r15d - movl %r12d,0(%rsp) + movl %ebx,%r11d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - addl %r11d,%r12d - - movl %eax,%r11d - addl %r13d,%r12d + xorl %ecx,%r11d + xorl %eax,%r14d addl %r15d,%r12d - movl 
%eax,%r13d - movl %eax,%r14d + movl %ebx,%r15d - rorl $2,%r11d - rorl $13,%r13d - movl %eax,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d - xorl %r13d,%r11d - rorl $9,%r13d - orl %ecx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d - xorl %r13d,%r11d - andl %ecx,%r15d addl %r12d,%edx - - andl %ebx,%r14d addl %r12d,%r11d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r11d - movl 8(%rsp),%r13d - movl 60(%rsp),%r12d - movl %r13d,%r15d + movl 8(%rsp),%r13d + movl 60(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 40(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 40(%rsp),%r12d + xorl %r15d,%r14d addl 4(%rsp),%r12d movl %edx,%r13d - movl %edx,%r14d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d movl %r8d,%r15d + movl %r12d,4(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %edx,%r15d - movl %r12d,4(%rsp) + movl %eax,%r10d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - addl %r10d,%r12d - - movl %r11d,%r10d - addl %r13d,%r12d + xorl %ebx,%r10d + xorl %r11d,%r14d addl %r15d,%r12d - movl %r11d,%r13d - movl %r11d,%r14d + movl %eax,%r15d - rorl $2,%r10d - rorl $13,%r13d - movl %r11d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d - xorl %r13d,%r10d - rorl $9,%r13d - orl %ebx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d - xorl %r13d,%r10d - andl %ebx,%r15d addl %r12d,%ecx - - andl %eax,%r14d addl %r12d,%r10d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r10d - movl 12(%rsp),%r13d - movl 0(%rsp),%r12d - movl %r13d,%r15d + movl 12(%rsp),%r13d + movl 0(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 44(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 44(%rsp),%r12d + xorl %r15d,%r14d addl 8(%rsp),%r12d movl %ecx,%r13d - movl %ecx,%r14d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d movl %edx,%r15d + movl %r12d,8(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ecx,%r15d - movl %r12d,8(%rsp) + movl %r11d,%r9d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - addl %r9d,%r12d - - movl %r10d,%r9d - addl %r13d,%r12d + xorl %eax,%r9d + xorl %r10d,%r14d addl %r15d,%r12d - movl %r10d,%r13d - movl %r10d,%r14d + movl %r11d,%r15d - rorl $2,%r9d - rorl $13,%r13d - movl %r10d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d - xorl %r13d,%r9d - rorl $9,%r13d - orl %eax,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d - xorl %r13d,%r9d - andl %eax,%r15d addl %r12d,%ebx - - andl %r11d,%r14d addl %r12d,%r9d - - orl 
%r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r9d - movl 16(%rsp),%r13d - movl 4(%rsp),%r12d - movl %r13d,%r15d + movl 16(%rsp),%r13d + movl 4(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 48(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 48(%rsp),%r12d + xorl %r15d,%r14d addl 12(%rsp),%r12d movl %ebx,%r13d - movl %ebx,%r14d + addl %r14d,%r12d + movl %r9d,%r14d + rorl $14,%r13d movl %ecx,%r15d + movl %r12d,12(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ebx,%r15d - movl %r12d,12(%rsp) + movl %r10d,%r8d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - addl %r8d,%r12d - - movl %r9d,%r8d - addl %r13d,%r12d + xorl %r11d,%r8d + xorl %r9d,%r14d addl %r15d,%r12d - movl %r9d,%r13d - movl %r9d,%r14d + movl %r10d,%r15d - rorl $2,%r8d - rorl $13,%r13d - movl %r9d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d - xorl %r13d,%r8d - rorl $9,%r13d - orl %r11d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d - xorl %r13d,%r8d - andl %r11d,%r15d addl %r12d,%eax - - andl %r10d,%r14d addl %r12d,%r8d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r8d - movl 20(%rsp),%r13d - movl 8(%rsp),%r12d - movl %r13d,%r15d + movl 20(%rsp),%r13d + movl 8(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 52(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 52(%rsp),%r12d + xorl %r15d,%r14d addl 16(%rsp),%r12d movl %eax,%r13d - movl %eax,%r14d + addl %r14d,%r12d + movl %r8d,%r14d + rorl $14,%r13d movl %ebx,%r15d + movl %r12d,16(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %eax,%r15d - movl %r12d,16(%rsp) + movl %r9d,%edx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - addl %edx,%r12d - - movl %r8d,%edx - addl %r13d,%r12d + xorl %r10d,%edx + xorl %r8d,%r14d addl %r15d,%r12d - movl %r8d,%r13d - movl %r8d,%r14d + movl %r9d,%r15d - rorl $2,%edx - rorl $13,%r13d - movl %r8d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d - xorl %r13d,%edx - rorl $9,%r13d - orl %r10d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx - xorl %r13d,%edx - andl %r10d,%r15d addl %r12d,%r11d - - andl %r9d,%r14d addl %r12d,%edx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%edx - movl 24(%rsp),%r13d - movl 12(%rsp),%r12d - movl %r13d,%r15d + movl 24(%rsp),%r13d + movl 12(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl 
$2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 56(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 56(%rsp),%r12d + xorl %r15d,%r14d addl 20(%rsp),%r12d movl %r11d,%r13d - movl %r11d,%r14d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d movl %eax,%r15d + movl %r12d,20(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r11d,%r15d - movl %r12d,20(%rsp) + movl %r8d,%ecx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - addl %ecx,%r12d - - movl %edx,%ecx - addl %r13d,%r12d + xorl %r9d,%ecx + xorl %edx,%r14d addl %r15d,%r12d - movl %edx,%r13d - movl %edx,%r14d + movl %r8d,%r15d - rorl $2,%ecx - rorl $13,%r13d - movl %edx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d - xorl %r13d,%ecx - rorl $9,%r13d - orl %r9d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx - xorl %r13d,%ecx - andl %r9d,%r15d addl %r12d,%r10d - - andl %r8d,%r14d addl %r12d,%ecx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ecx - movl 28(%rsp),%r13d - movl 16(%rsp),%r12d - movl %r13d,%r15d + movl 28(%rsp),%r13d + movl 16(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 60(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 60(%rsp),%r12d + xorl %r15d,%r14d addl 24(%rsp),%r12d movl %r10d,%r13d - movl %r10d,%r14d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d movl %r11d,%r15d + movl %r12d,24(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r10d,%r15d - movl %r12d,24(%rsp) + movl %edx,%ebx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - addl %ebx,%r12d - - movl %ecx,%ebx - addl %r13d,%r12d + xorl %r8d,%ebx + xorl %ecx,%r14d addl %r15d,%r12d - movl %ecx,%r13d - movl %ecx,%r14d + movl %edx,%r15d - rorl $2,%ebx - rorl $13,%r13d - movl %ecx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d - xorl %r13d,%ebx - rorl $9,%r13d - orl %r8d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx - xorl %r13d,%ebx - andl %r8d,%r15d addl %r12d,%r9d - - andl %edx,%r14d addl %r12d,%ebx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ebx - movl 32(%rsp),%r13d - movl 20(%rsp),%r12d - movl %r13d,%r15d + movl 32(%rsp),%r13d + movl 20(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 0(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 0(%rsp),%r12d + xorl %r15d,%r14d addl 28(%rsp),%r12d movl %r9d,%r13d - movl %r9d,%r14d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d movl %r10d,%r15d + movl %r12d,28(%rsp) - rorl $6,%r13d - rorl $11,%r14d + 
rorl $9,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r9d,%r15d - movl %r12d,28(%rsp) + movl %ecx,%eax - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - addl %eax,%r12d - - movl %ebx,%eax - addl %r13d,%r12d + xorl %edx,%eax + xorl %ebx,%r14d addl %r15d,%r12d - movl %ebx,%r13d - movl %ebx,%r14d + movl %ecx,%r15d - rorl $2,%eax - rorl $13,%r13d - movl %ebx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d - xorl %r13d,%eax - rorl $9,%r13d - orl %edx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax - xorl %r13d,%eax - andl %edx,%r15d addl %r12d,%r8d - - andl %ecx,%r14d addl %r12d,%eax - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%eax - movl 36(%rsp),%r13d - movl 24(%rsp),%r12d - movl %r13d,%r15d + movl 36(%rsp),%r13d + movl 24(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 4(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 4(%rsp),%r12d + xorl %r15d,%r14d addl 32(%rsp),%r12d movl %r8d,%r13d - movl %r8d,%r14d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d movl %r9d,%r15d + movl %r12d,32(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r8d,%r15d - movl %r12d,32(%rsp) + movl %ebx,%r11d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r8d,%r13d xorl %r10d,%r15d - addl %r11d,%r12d - - movl %eax,%r11d - addl %r13d,%r12d + xorl %ecx,%r11d + xorl %eax,%r14d addl %r15d,%r12d - movl %eax,%r13d - movl %eax,%r14d - - rorl $2,%r11d - rorl $13,%r13d - movl %eax,%r15d - addl (%rbp,%rdi,4),%r12d - - xorl %r13d,%r11d - rorl $9,%r13d - orl %ecx,%r14d + movl %ebx,%r15d - xorl %r13d,%r11d + rorl $6,%r13d + andl %eax,%r11d andl %ecx,%r15d - addl %r12d,%edx - andl %ebx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx addl %r12d,%r11d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r11d - movl 40(%rsp),%r13d - movl 28(%rsp),%r12d - movl %r13d,%r15d + movl 40(%rsp),%r13d + movl 28(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 8(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 8(%rsp),%r12d + xorl %r15d,%r14d addl 36(%rsp),%r12d movl %edx,%r13d - movl %edx,%r14d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d movl %r8d,%r15d + movl %r12d,36(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %edx,%r15d - movl %r12d,36(%rsp) + movl %eax,%r10d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %edx,%r13d xorl %r9d,%r15d - addl %r10d,%r12d - - movl %r11d,%r10d - addl %r13d,%r12d + xorl %ebx,%r10d + xorl %r11d,%r14d 
addl %r15d,%r12d - movl %r11d,%r13d - movl %r11d,%r14d + movl %eax,%r15d - rorl $2,%r10d - rorl $13,%r13d - movl %r11d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d - xorl %r13d,%r10d - rorl $9,%r13d - orl %ebx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d - xorl %r13d,%r10d - andl %ebx,%r15d addl %r12d,%ecx - - andl %eax,%r14d addl %r12d,%r10d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r10d - movl 44(%rsp),%r13d - movl 32(%rsp),%r12d - movl %r13d,%r15d + movl 44(%rsp),%r13d + movl 32(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 12(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 12(%rsp),%r12d + xorl %r15d,%r14d addl 40(%rsp),%r12d movl %ecx,%r13d - movl %ecx,%r14d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d movl %edx,%r15d + movl %r12d,40(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ecx,%r15d - movl %r12d,40(%rsp) + movl %r11d,%r9d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ecx,%r13d xorl %r8d,%r15d - addl %r9d,%r12d - - movl %r10d,%r9d - addl %r13d,%r12d + xorl %eax,%r9d + xorl %r10d,%r14d addl %r15d,%r12d - movl %r10d,%r13d - movl %r10d,%r14d + movl %r11d,%r15d - rorl $2,%r9d - rorl $13,%r13d - movl %r10d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d - xorl %r13d,%r9d - rorl $9,%r13d - orl %eax,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d - xorl %r13d,%r9d - andl %eax,%r15d addl %r12d,%ebx - - andl %r11d,%r14d addl %r12d,%r9d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r9d - movl 48(%rsp),%r13d - movl 36(%rsp),%r12d - movl %r13d,%r15d + movl 48(%rsp),%r13d + movl 36(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 16(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 16(%rsp),%r12d + xorl %r15d,%r14d addl 44(%rsp),%r12d movl %ebx,%r13d - movl %ebx,%r14d + addl %r14d,%r12d + movl %r9d,%r14d + rorl $14,%r13d movl %ecx,%r15d + movl %r12d,44(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %ebx,%r15d - movl %r12d,44(%rsp) + movl %r10d,%r8d - xorl %r14d,%r13d + rorl $11,%r14d + xorl %ebx,%r13d xorl %edx,%r15d - addl %r8d,%r12d - - movl %r9d,%r8d - addl %r13d,%r12d + xorl %r11d,%r8d + xorl %r9d,%r14d addl %r15d,%r12d - movl %r9d,%r13d - movl %r9d,%r14d + movl %r10d,%r15d - rorl $2,%r8d - rorl $13,%r13d - movl %r9d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d - xorl %r13d,%r8d - rorl $9,%r13d - orl %r11d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d - xorl %r13d,%r8d - andl %r11d,%r15d addl %r12d,%eax - - andl %r10d,%r14d 
addl %r12d,%r8d - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%r8d - movl 52(%rsp),%r13d - movl 40(%rsp),%r12d - movl %r13d,%r15d + movl 52(%rsp),%r13d + movl 40(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 20(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 20(%rsp),%r12d + xorl %r15d,%r14d addl 48(%rsp),%r12d movl %eax,%r13d - movl %eax,%r14d + addl %r14d,%r12d + movl %r8d,%r14d + rorl $14,%r13d movl %ebx,%r15d + movl %r12d,48(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d andl %eax,%r15d - movl %r12d,48(%rsp) + movl %r9d,%edx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %eax,%r13d xorl %ecx,%r15d - addl %edx,%r12d - - movl %r8d,%edx - addl %r13d,%r12d + xorl %r10d,%edx + xorl %r8d,%r14d addl %r15d,%r12d - movl %r8d,%r13d - movl %r8d,%r14d + movl %r9d,%r15d - rorl $2,%edx - rorl $13,%r13d - movl %r8d,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d - xorl %r13d,%edx - rorl $9,%r13d - orl %r10d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx - xorl %r13d,%edx - andl %r10d,%r15d addl %r12d,%r11d - - andl %r9d,%r14d addl %r12d,%edx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%edx - movl 56(%rsp),%r13d - movl 44(%rsp),%r12d - movl %r13d,%r15d + movl 56(%rsp),%r13d + movl 44(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 24(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 24(%rsp),%r12d + xorl %r15d,%r14d addl 52(%rsp),%r12d movl %r11d,%r13d - movl %r11d,%r14d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d movl %eax,%r15d + movl %r12d,52(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r11d,%r15d - movl %r12d,52(%rsp) + movl %r8d,%ecx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r11d,%r13d xorl %ebx,%r15d - addl %ecx,%r12d - - movl %edx,%ecx - addl %r13d,%r12d + xorl %r9d,%ecx + xorl %edx,%r14d addl %r15d,%r12d - movl %edx,%r13d - movl %edx,%r14d + movl %r8d,%r15d - rorl $2,%ecx - rorl $13,%r13d - movl %edx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d - xorl %r13d,%ecx - rorl $9,%r13d - orl %r9d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx - xorl %r13d,%ecx - andl %r9d,%r15d addl %r12d,%r10d - - andl %r8d,%r14d addl %r12d,%ecx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ecx - movl 60(%rsp),%r13d - movl 48(%rsp),%r12d - movl %r13d,%r15d + movl 60(%rsp),%r13d + movl 48(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d 
- - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 28(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 28(%rsp),%r12d + xorl %r15d,%r14d addl 56(%rsp),%r12d movl %r10d,%r13d - movl %r10d,%r14d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d movl %r11d,%r15d + movl %r12d,56(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r10d,%r15d - movl %r12d,56(%rsp) + movl %edx,%ebx - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r10d,%r13d xorl %eax,%r15d - addl %ebx,%r12d - - movl %ecx,%ebx - addl %r13d,%r12d + xorl %r8d,%ebx + xorl %ecx,%r14d addl %r15d,%r12d - movl %ecx,%r13d - movl %ecx,%r14d + movl %edx,%r15d - rorl $2,%ebx - rorl $13,%r13d - movl %ecx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d - xorl %r13d,%ebx - rorl $9,%r13d - orl %r8d,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx - xorl %r13d,%ebx - andl %r8d,%r15d addl %r12d,%r9d - - andl %edx,%r14d addl %r12d,%ebx - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%ebx - movl 0(%rsp),%r13d - movl 52(%rsp),%r12d - movl %r13d,%r15d + movl 0(%rsp),%r13d + movl 52(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + rorl $11,%r12d + xorl %r13d,%r12d shrl $3,%r13d - rorl $7,%r15d - - xorl %r15d,%r13d - rorl $11,%r15d - - xorl %r15d,%r13d - movl %r12d,%r14d - shrl $10,%r12d - rorl $17,%r14d - - xorl %r14d,%r12d - rorl $2,%r14d + rorl $7,%r12d + xorl %r12d,%r13d + movl 32(%rsp),%r12d - xorl %r14d,%r12d + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + rorl $17,%r15d addl %r13d,%r12d - - addl 32(%rsp),%r12d + xorl %r15d,%r14d addl 60(%rsp),%r12d movl %r9d,%r13d - movl %r9d,%r14d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d movl %r10d,%r15d + movl %r12d,60(%rsp) - rorl $6,%r13d - rorl $11,%r14d + rorl $9,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - xorl %r14d,%r13d - rorl $14,%r14d + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d andl %r9d,%r15d - movl %r12d,60(%rsp) + movl %ecx,%eax - xorl %r14d,%r13d + rorl $11,%r14d + xorl %r9d,%r13d xorl %r11d,%r15d - addl %eax,%r12d - - movl %ebx,%eax - addl %r13d,%r12d + xorl %edx,%eax + xorl %ebx,%r14d addl %r15d,%r12d - movl %ebx,%r13d - movl %ebx,%r14d + movl %ecx,%r15d - rorl $2,%eax - rorl $13,%r13d - movl %ebx,%r15d - addl (%rbp,%rdi,4),%r12d + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d - xorl %r13d,%eax - rorl $9,%r13d - orl %edx,%r14d + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax - xorl %r13d,%eax - andl %edx,%r15d addl %r12d,%r8d - - andl %ecx,%r14d addl %r12d,%eax - - orl %r15d,%r14d leaq 1(%rdi),%rdi - addl %r14d,%eax + cmpq $64,%rdi jb .Lrounds_16_xx diff --git a/secure/lib/libcrypto/asm/sha512-x86_64.s b/secure/lib/libcrypto/asm/sha512-x86_64.s index 284003b17b..2d3294e02d 100644 --- a/secure/lib/libcrypto/asm/sha512-x86_64.s +++ b/secure/lib/libcrypto/asm/sha512-x86_64.s @@ -37,1880 +37,1688 @@ sha512_block_data_order: .Lloop: xorq %rdi,%rdi movq 0(%rsi),%r12 - bswapq %r12 movq %r8,%r13 - movq %r8,%r14 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 movq %r9,%r15 + movq %r12,0(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r8,%r13 xorq %r10,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r8,%r15 - movq %r12,0(%rsp) + movq %rbx,%r11 - xorq 
%r14,%r13 + rorq $6,%r14 + xorq %r8,%r13 xorq %r10,%r15 - addq %r11,%r12 - - movq %rax,%r11 - addq %r13,%r12 + xorq %rcx,%r11 + xorq %rax,%r14 addq %r15,%r12 - movq %rax,%r13 - movq %rax,%r14 + movq %rbx,%r15 - rorq $28,%r11 - rorq $34,%r13 - movq %rax,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 - xorq %r13,%r11 - rorq $5,%r13 - orq %rcx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 - xorq %r13,%r11 - andq %rcx,%r15 addq %r12,%rdx - - andq %rbx,%r14 addq %r12,%r11 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r11 + movq 8(%rsi),%r12 - bswapq %r12 movq %rdx,%r13 - movq %rdx,%r14 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 movq %r8,%r15 + movq %r12,8(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rdx,%r15 - movq %r12,8(%rsp) + movq %rax,%r10 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - addq %r10,%r12 - - movq %r11,%r10 - addq %r13,%r12 + xorq %rbx,%r10 + xorq %r11,%r14 addq %r15,%r12 - movq %r11,%r13 - movq %r11,%r14 + movq %rax,%r15 - rorq $28,%r10 - rorq $34,%r13 - movq %r11,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 - xorq %r13,%r10 - rorq $5,%r13 - orq %rbx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 - xorq %r13,%r10 - andq %rbx,%r15 addq %r12,%rcx - - andq %rax,%r14 addq %r12,%r10 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r10 + movq 16(%rsi),%r12 - bswapq %r12 movq %rcx,%r13 - movq %rcx,%r14 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 movq %rdx,%r15 + movq %r12,16(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rcx,%r15 - movq %r12,16(%rsp) + movq %r11,%r9 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - addq %r9,%r12 - - movq %r10,%r9 - addq %r13,%r12 + xorq %rax,%r9 + xorq %r10,%r14 addq %r15,%r12 - movq %r10,%r13 - movq %r10,%r14 + movq %r11,%r15 - rorq $28,%r9 - rorq $34,%r13 - movq %r10,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 - xorq %r13,%r9 - rorq $5,%r13 - orq %rax,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 - xorq %r13,%r9 - andq %rax,%r15 addq %r12,%rbx - - andq %r11,%r14 addq %r12,%r9 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r9 + movq 24(%rsi),%r12 - bswapq %r12 movq %rbx,%r13 - movq %rbx,%r14 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 movq %rcx,%r15 + movq %r12,24(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rbx,%r15 - movq %r12,24(%rsp) + movq %r10,%r8 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - addq %r8,%r12 - - movq %r9,%r8 - addq %r13,%r12 + xorq %r11,%r8 + xorq %r9,%r14 addq %r15,%r12 - movq %r9,%r13 - movq %r9,%r14 + movq %r10,%r15 - rorq $28,%r8 - rorq $34,%r13 - movq %r9,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 - xorq %r13,%r8 - rorq $5,%r13 - orq %r11,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 - xorq %r13,%r8 - andq %r11,%r15 addq %r12,%rax - - andq %r10,%r14 addq %r12,%r8 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r8 + movq 32(%rsi),%r12 - bswapq %r12 movq %rax,%r13 - movq %rax,%r14 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 movq 
%rbx,%r15 + movq %r12,32(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rax,%r15 - movq %r12,32(%rsp) + movq %r9,%rdx - xorq %r14,%r13 + rorq $6,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - addq %rdx,%r12 - - movq %r8,%rdx - addq %r13,%r12 + xorq %r10,%rdx + xorq %r8,%r14 addq %r15,%r12 - movq %r8,%r13 - movq %r8,%r14 + movq %r9,%r15 - rorq $28,%rdx - rorq $34,%r13 - movq %r8,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 - xorq %r13,%rdx - rorq $5,%r13 - orq %r10,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx - xorq %r13,%rdx - andq %r10,%r15 addq %r12,%r11 - - andq %r9,%r14 addq %r12,%rdx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rdx + movq 40(%rsi),%r12 - bswapq %r12 movq %r11,%r13 - movq %r11,%r14 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 movq %rax,%r15 + movq %r12,40(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r11,%r15 - movq %r12,40(%rsp) + movq %r8,%rcx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - addq %rcx,%r12 - - movq %rdx,%rcx - addq %r13,%r12 + xorq %r9,%rcx + xorq %rdx,%r14 addq %r15,%r12 - movq %rdx,%r13 - movq %rdx,%r14 + movq %r8,%r15 - rorq $28,%rcx - rorq $34,%r13 - movq %rdx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 - xorq %r13,%rcx - rorq $5,%r13 - orq %r9,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx - xorq %r13,%rcx - andq %r9,%r15 addq %r12,%r10 - - andq %r8,%r14 addq %r12,%rcx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rcx + movq 48(%rsi),%r12 - bswapq %r12 movq %r10,%r13 - movq %r10,%r14 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 movq %r11,%r15 + movq %r12,48(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r10,%r13 xorq %rax,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r10,%r15 - movq %r12,48(%rsp) + movq %rdx,%rbx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r10,%r13 xorq %rax,%r15 - addq %rbx,%r12 - - movq %rcx,%rbx - addq %r13,%r12 + xorq %r8,%rbx + xorq %rcx,%r14 addq %r15,%r12 - movq %rcx,%r13 - movq %rcx,%r14 + movq %rdx,%r15 - rorq $28,%rbx - rorq $34,%r13 - movq %rcx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 - xorq %r13,%rbx - rorq $5,%r13 - orq %r8,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx - xorq %r13,%rbx - andq %r8,%r15 addq %r12,%r9 - - andq %rdx,%r14 addq %r12,%rbx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rbx + movq 56(%rsi),%r12 - bswapq %r12 movq %r9,%r13 - movq %r9,%r14 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 movq %r10,%r15 + movq %r12,56(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r9,%r13 xorq %r11,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r9,%r15 - movq %r12,56(%rsp) + movq %rcx,%rax - xorq %r14,%r13 + rorq $6,%r14 + xorq %r9,%r13 xorq %r11,%r15 - addq %rax,%r12 - - movq %rbx,%rax - addq %r13,%r12 + xorq %rdx,%rax + xorq %rbx,%r14 addq %r15,%r12 - movq %rbx,%r13 - movq %rbx,%r14 + movq %rcx,%r15 - rorq $28,%rax - rorq $34,%r13 - movq %rbx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 - xorq %r13,%rax - rorq $5,%r13 - orq %rdx,%r14 + rorq 
$28,%r14 + addq %r13,%r12 + addq %r15,%rax - xorq %r13,%rax - andq %rdx,%r15 addq %r12,%r8 - - andq %rcx,%r14 addq %r12,%rax - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rax + movq 64(%rsi),%r12 - bswapq %r12 movq %r8,%r13 - movq %r8,%r14 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 movq %r9,%r15 + movq %r12,64(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r8,%r13 xorq %r10,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r8,%r15 - movq %r12,64(%rsp) + movq %rbx,%r11 - xorq %r14,%r13 + rorq $6,%r14 + xorq %r8,%r13 xorq %r10,%r15 - addq %r11,%r12 - - movq %rax,%r11 - addq %r13,%r12 + xorq %rcx,%r11 + xorq %rax,%r14 addq %r15,%r12 - movq %rax,%r13 - movq %rax,%r14 + movq %rbx,%r15 - rorq $28,%r11 - rorq $34,%r13 - movq %rax,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 - xorq %r13,%r11 - rorq $5,%r13 - orq %rcx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 - xorq %r13,%r11 - andq %rcx,%r15 addq %r12,%rdx - - andq %rbx,%r14 addq %r12,%r11 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r11 + movq 72(%rsi),%r12 - bswapq %r12 movq %rdx,%r13 - movq %rdx,%r14 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 movq %r8,%r15 + movq %r12,72(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rdx,%r15 - movq %r12,72(%rsp) + movq %rax,%r10 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - addq %r10,%r12 - - movq %r11,%r10 - addq %r13,%r12 + xorq %rbx,%r10 + xorq %r11,%r14 addq %r15,%r12 - movq %r11,%r13 - movq %r11,%r14 + movq %rax,%r15 - rorq $28,%r10 - rorq $34,%r13 - movq %r11,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 - xorq %r13,%r10 - rorq $5,%r13 - orq %rbx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 - xorq %r13,%r10 - andq %rbx,%r15 addq %r12,%rcx - - andq %rax,%r14 addq %r12,%r10 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r10 + movq 80(%rsi),%r12 - bswapq %r12 movq %rcx,%r13 - movq %rcx,%r14 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 movq %rdx,%r15 + movq %r12,80(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rcx,%r15 - movq %r12,80(%rsp) + movq %r11,%r9 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - addq %r9,%r12 - - movq %r10,%r9 - addq %r13,%r12 + xorq %rax,%r9 + xorq %r10,%r14 addq %r15,%r12 - movq %r10,%r13 - movq %r10,%r14 + movq %r11,%r15 - rorq $28,%r9 - rorq $34,%r13 - movq %r10,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 - xorq %r13,%r9 - rorq $5,%r13 - orq %rax,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 - xorq %r13,%r9 - andq %rax,%r15 addq %r12,%rbx - - andq %r11,%r14 addq %r12,%r9 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r9 + movq 88(%rsi),%r12 - bswapq %r12 movq %rbx,%r13 - movq %rbx,%r14 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 movq %rcx,%r15 + movq %r12,88(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rbx,%r15 - movq %r12,88(%rsp) + movq %r10,%r8 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - addq %r8,%r12 - - movq %r9,%r8 - addq 
%r13,%r12 + xorq %r11,%r8 + xorq %r9,%r14 addq %r15,%r12 - movq %r9,%r13 - movq %r9,%r14 + movq %r10,%r15 - rorq $28,%r8 - rorq $34,%r13 - movq %r9,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 - xorq %r13,%r8 - rorq $5,%r13 - orq %r11,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 - xorq %r13,%r8 - andq %r11,%r15 addq %r12,%rax - - andq %r10,%r14 addq %r12,%r8 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r8 + movq 96(%rsi),%r12 - bswapq %r12 movq %rax,%r13 - movq %rax,%r14 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 movq %rbx,%r15 + movq %r12,96(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rax,%r15 - movq %r12,96(%rsp) + movq %r9,%rdx - xorq %r14,%r13 + rorq $6,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - addq %rdx,%r12 - - movq %r8,%rdx - addq %r13,%r12 + xorq %r10,%rdx + xorq %r8,%r14 addq %r15,%r12 - movq %r8,%r13 - movq %r8,%r14 + movq %r9,%r15 - rorq $28,%rdx - rorq $34,%r13 - movq %r8,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 - xorq %r13,%rdx - rorq $5,%r13 - orq %r10,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx - xorq %r13,%rdx - andq %r10,%r15 addq %r12,%r11 - - andq %r9,%r14 addq %r12,%rdx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rdx + movq 104(%rsi),%r12 - bswapq %r12 movq %r11,%r13 - movq %r11,%r14 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 movq %rax,%r15 + movq %r12,104(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r11,%r15 - movq %r12,104(%rsp) + movq %r8,%rcx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - addq %rcx,%r12 - - movq %rdx,%rcx - addq %r13,%r12 + xorq %r9,%rcx + xorq %rdx,%r14 addq %r15,%r12 - movq %rdx,%r13 - movq %rdx,%r14 + movq %r8,%r15 - rorq $28,%rcx - rorq $34,%r13 - movq %rdx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 - xorq %r13,%rcx - rorq $5,%r13 - orq %r9,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx - xorq %r13,%rcx - andq %r9,%r15 addq %r12,%r10 - - andq %r8,%r14 addq %r12,%rcx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rcx + movq 112(%rsi),%r12 - bswapq %r12 movq %r10,%r13 - movq %r10,%r14 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 movq %r11,%r15 + movq %r12,112(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r10,%r13 xorq %rax,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r10,%r15 - movq %r12,112(%rsp) + movq %rdx,%rbx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r10,%r13 xorq %rax,%r15 - addq %rbx,%r12 - - movq %rcx,%rbx - addq %r13,%r12 + xorq %r8,%rbx + xorq %rcx,%r14 addq %r15,%r12 - movq %rcx,%r13 - movq %rcx,%r14 + movq %rdx,%r15 - rorq $28,%rbx - rorq $34,%r13 - movq %rcx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 - xorq %r13,%rbx - rorq $5,%r13 - orq %r8,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx - xorq %r13,%rbx - andq %r8,%r15 addq %r12,%r9 - - andq %rdx,%r14 addq %r12,%rbx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rbx + movq 120(%rsi),%r12 - bswapq %r12 movq %r9,%r13 - movq %r9,%r14 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 movq %r10,%r15 + movq %r12,120(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq 
%r9,%r13 xorq %r11,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r9,%r15 - movq %r12,120(%rsp) + movq %rcx,%rax - xorq %r14,%r13 + rorq $6,%r14 + xorq %r9,%r13 xorq %r11,%r15 - addq %rax,%r12 - - movq %rbx,%rax - addq %r13,%r12 + xorq %rdx,%rax + xorq %rbx,%r14 addq %r15,%r12 - movq %rbx,%r13 - movq %rbx,%r14 + movq %rcx,%r15 - rorq $28,%rax - rorq $34,%r13 - movq %rbx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 - xorq %r13,%rax - rorq $5,%r13 - orq %rdx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax - xorq %r13,%rax - andq %rdx,%r15 addq %r12,%r8 - - andq %rcx,%r14 addq %r12,%rax - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rax + jmp .Lrounds_16_xx .align 16 .Lrounds_16_xx: movq 8(%rsp),%r13 - movq 112(%rsp),%r12 - - movq %r13,%r15 + movq 112(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 72(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 72(%rsp),%r12 + xorq %r15,%r14 addq 0(%rsp),%r12 movq %r8,%r13 - movq %r8,%r14 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 movq %r9,%r15 + movq %r12,0(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r8,%r13 xorq %r10,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r8,%r15 - movq %r12,0(%rsp) + movq %rbx,%r11 - xorq %r14,%r13 + rorq $6,%r14 + xorq %r8,%r13 xorq %r10,%r15 - addq %r11,%r12 - - movq %rax,%r11 - addq %r13,%r12 + xorq %rcx,%r11 + xorq %rax,%r14 addq %r15,%r12 - movq %rax,%r13 - movq %rax,%r14 + movq %rbx,%r15 - rorq $28,%r11 - rorq $34,%r13 - movq %rax,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 - xorq %r13,%r11 - rorq $5,%r13 - orq %rcx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 - xorq %r13,%r11 - andq %rcx,%r15 addq %r12,%rdx - - andq %rbx,%r14 addq %r12,%r11 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r11 - movq 16(%rsp),%r13 - movq 120(%rsp),%r12 - movq %r13,%r15 + movq 16(%rsp),%r13 + movq 120(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 80(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 80(%rsp),%r12 + xorq %r15,%r14 addq 8(%rsp),%r12 movq %rdx,%r13 - movq %rdx,%r14 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 movq %r8,%r15 + movq %r12,8(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rdx,%r15 - movq %r12,8(%rsp) + movq %rax,%r10 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - addq %r10,%r12 - - movq %r11,%r10 - addq %r13,%r12 + xorq %rbx,%r10 + xorq %r11,%r14 addq %r15,%r12 - movq %r11,%r13 - movq %r11,%r14 + movq %rax,%r15 - rorq $28,%r10 - rorq $34,%r13 - movq %r11,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 - xorq %r13,%r10 - rorq $5,%r13 - orq %rbx,%r14 + rorq $28,%r14 
+ addq %r13,%r12 + addq %r15,%r10 - xorq %r13,%r10 - andq %rbx,%r15 addq %r12,%rcx - - andq %rax,%r14 addq %r12,%r10 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r10 - movq 24(%rsp),%r13 - movq 0(%rsp),%r12 - movq %r13,%r15 + movq 24(%rsp),%r13 + movq 0(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 88(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 88(%rsp),%r12 + xorq %r15,%r14 addq 16(%rsp),%r12 movq %rcx,%r13 - movq %rcx,%r14 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 movq %rdx,%r15 + movq %r12,16(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rcx,%r15 - movq %r12,16(%rsp) + movq %r11,%r9 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - addq %r9,%r12 - - movq %r10,%r9 - addq %r13,%r12 + xorq %rax,%r9 + xorq %r10,%r14 addq %r15,%r12 - movq %r10,%r13 - movq %r10,%r14 + movq %r11,%r15 - rorq $28,%r9 - rorq $34,%r13 - movq %r10,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 - xorq %r13,%r9 - rorq $5,%r13 - orq %rax,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 - xorq %r13,%r9 - andq %rax,%r15 addq %r12,%rbx - - andq %r11,%r14 addq %r12,%r9 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r9 - movq 32(%rsp),%r13 - movq 8(%rsp),%r12 - movq %r13,%r15 + movq 32(%rsp),%r13 + movq 8(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 96(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 96(%rsp),%r12 + xorq %r15,%r14 addq 24(%rsp),%r12 movq %rbx,%r13 - movq %rbx,%r14 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 movq %rcx,%r15 + movq %r12,24(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rbx,%r15 - movq %r12,24(%rsp) + movq %r10,%r8 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - addq %r8,%r12 - - movq %r9,%r8 - addq %r13,%r12 + xorq %r11,%r8 + xorq %r9,%r14 addq %r15,%r12 - movq %r9,%r13 - movq %r9,%r14 + movq %r10,%r15 - rorq $28,%r8 - rorq $34,%r13 - movq %r9,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 - xorq %r13,%r8 - rorq $5,%r13 - orq %r11,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 - xorq %r13,%r8 - andq %r11,%r15 addq %r12,%rax - - andq %r10,%r14 addq %r12,%r8 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r8 - movq 40(%rsp),%r13 - movq 16(%rsp),%r12 - movq %r13,%r15 + movq 40(%rsp),%r13 + movq 16(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 104(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq 
%r13,%r12 - - addq 104(%rsp),%r12 + xorq %r15,%r14 addq 32(%rsp),%r12 movq %rax,%r13 - movq %rax,%r14 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 movq %rbx,%r15 + movq %r12,32(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rax,%r15 - movq %r12,32(%rsp) + movq %r9,%rdx - xorq %r14,%r13 + rorq $6,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - addq %rdx,%r12 - - movq %r8,%rdx - addq %r13,%r12 + xorq %r10,%rdx + xorq %r8,%r14 addq %r15,%r12 - movq %r8,%r13 - movq %r8,%r14 + movq %r9,%r15 - rorq $28,%rdx - rorq $34,%r13 - movq %r8,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 - xorq %r13,%rdx - rorq $5,%r13 - orq %r10,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx - xorq %r13,%rdx - andq %r10,%r15 addq %r12,%r11 - - andq %r9,%r14 addq %r12,%rdx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rdx - movq 48(%rsp),%r13 - movq 24(%rsp),%r12 - movq %r13,%r15 + movq 48(%rsp),%r13 + movq 24(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 112(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 112(%rsp),%r12 + xorq %r15,%r14 addq 40(%rsp),%r12 movq %r11,%r13 - movq %r11,%r14 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 movq %rax,%r15 + movq %r12,40(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r11,%r15 - movq %r12,40(%rsp) + movq %r8,%rcx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - addq %rcx,%r12 - - movq %rdx,%rcx - addq %r13,%r12 + xorq %r9,%rcx + xorq %rdx,%r14 addq %r15,%r12 - movq %rdx,%r13 - movq %rdx,%r14 + movq %r8,%r15 - rorq $28,%rcx - rorq $34,%r13 - movq %rdx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 - xorq %r13,%rcx - rorq $5,%r13 - orq %r9,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx - xorq %r13,%rcx - andq %r9,%r15 addq %r12,%r10 - - andq %r8,%r14 addq %r12,%rcx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rcx - movq 56(%rsp),%r13 - movq 32(%rsp),%r12 - movq %r13,%r15 + movq 56(%rsp),%r13 + movq 32(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 120(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 120(%rsp),%r12 + xorq %r15,%r14 addq 48(%rsp),%r12 movq %r10,%r13 - movq %r10,%r14 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 movq %r11,%r15 + movq %r12,48(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r10,%r13 xorq %rax,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r10,%r15 - movq %r12,48(%rsp) + movq %rdx,%rbx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r10,%r13 xorq %rax,%r15 - addq %rbx,%r12 - - movq %rcx,%rbx - addq %r13,%r12 + xorq %r8,%rbx + xorq %rcx,%r14 addq %r15,%r12 - movq %rcx,%r13 - movq 
%rcx,%r14 + movq %rdx,%r15 - rorq $28,%rbx - rorq $34,%r13 - movq %rcx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 - xorq %r13,%rbx - rorq $5,%r13 - orq %r8,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx - xorq %r13,%rbx - andq %r8,%r15 addq %r12,%r9 - - andq %rdx,%r14 addq %r12,%rbx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rbx - movq 64(%rsp),%r13 - movq 40(%rsp),%r12 - movq %r13,%r15 + movq 64(%rsp),%r13 + movq 40(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 0(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 0(%rsp),%r12 + xorq %r15,%r14 addq 56(%rsp),%r12 movq %r9,%r13 - movq %r9,%r14 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 movq %r10,%r15 + movq %r12,56(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r9,%r13 xorq %r11,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r9,%r15 - movq %r12,56(%rsp) + movq %rcx,%rax - xorq %r14,%r13 + rorq $6,%r14 + xorq %r9,%r13 xorq %r11,%r15 - addq %rax,%r12 - - movq %rbx,%rax - addq %r13,%r12 + xorq %rdx,%rax + xorq %rbx,%r14 addq %r15,%r12 - movq %rbx,%r13 - movq %rbx,%r14 + movq %rcx,%r15 - rorq $28,%rax - rorq $34,%r13 - movq %rbx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 - xorq %r13,%rax - rorq $5,%r13 - orq %rdx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax - xorq %r13,%rax - andq %rdx,%r15 addq %r12,%r8 - - andq %rcx,%r14 addq %r12,%rax - - orq %r15,%r14 leaq 1(%rdi),%rdi + addq %r14,%rax - addq %r14,%rax - movq 72(%rsp),%r13 - movq 48(%rsp),%r12 - - movq %r13,%r15 + movq 72(%rsp),%r13 + movq 48(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 8(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 8(%rsp),%r12 + xorq %r15,%r14 addq 64(%rsp),%r12 movq %r8,%r13 - movq %r8,%r14 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 movq %r9,%r15 + movq %r12,64(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r8,%r13 xorq %r10,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r8,%r15 - movq %r12,64(%rsp) + movq %rbx,%r11 - xorq %r14,%r13 + rorq $6,%r14 + xorq %r8,%r13 xorq %r10,%r15 - addq %r11,%r12 - - movq %rax,%r11 - addq %r13,%r12 + xorq %rcx,%r11 + xorq %rax,%r14 addq %r15,%r12 - movq %rax,%r13 - movq %rax,%r14 + movq %rbx,%r15 - rorq $28,%r11 - rorq $34,%r13 - movq %rax,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 - xorq %r13,%r11 - rorq $5,%r13 - orq %rcx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 - xorq %r13,%r11 - andq %rcx,%r15 addq %r12,%rdx - - andq %rbx,%r14 addq %r12,%r11 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r11 - movq 80(%rsp),%r13 - movq 56(%rsp),%r12 - movq %r13,%r15 + movq 80(%rsp),%r13 + movq 56(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - 
rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 16(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 16(%rsp),%r12 + xorq %r15,%r14 addq 72(%rsp),%r12 movq %rdx,%r13 - movq %rdx,%r14 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 movq %r8,%r15 + movq %r12,72(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rdx,%r15 - movq %r12,72(%rsp) + movq %rax,%r10 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rdx,%r13 xorq %r9,%r15 - addq %r10,%r12 - - movq %r11,%r10 - addq %r13,%r12 + xorq %rbx,%r10 + xorq %r11,%r14 addq %r15,%r12 - movq %r11,%r13 - movq %r11,%r14 + movq %rax,%r15 - rorq $28,%r10 - rorq $34,%r13 - movq %r11,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 - xorq %r13,%r10 - rorq $5,%r13 - orq %rbx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 - xorq %r13,%r10 - andq %rbx,%r15 addq %r12,%rcx - - andq %rax,%r14 addq %r12,%r10 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r10 - movq 88(%rsp),%r13 - movq 64(%rsp),%r12 - movq %r13,%r15 + movq 88(%rsp),%r13 + movq 64(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 24(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 24(%rsp),%r12 + xorq %r15,%r14 addq 80(%rsp),%r12 movq %rcx,%r13 - movq %rcx,%r14 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 movq %rdx,%r15 + movq %r12,80(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rcx,%r15 - movq %r12,80(%rsp) + movq %r11,%r9 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rcx,%r13 xorq %r8,%r15 - addq %r9,%r12 - - movq %r10,%r9 - addq %r13,%r12 + xorq %rax,%r9 + xorq %r10,%r14 addq %r15,%r12 - movq %r10,%r13 - movq %r10,%r14 + movq %r11,%r15 - rorq $28,%r9 - rorq $34,%r13 - movq %r10,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 - xorq %r13,%r9 - rorq $5,%r13 - orq %rax,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 - xorq %r13,%r9 - andq %rax,%r15 addq %r12,%rbx - - andq %r11,%r14 addq %r12,%r9 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r9 - movq 96(%rsp),%r13 - movq 72(%rsp),%r12 - movq %r13,%r15 + movq 96(%rsp),%r13 + movq 72(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 32(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 32(%rsp),%r12 + xorq %r15,%r14 addq 88(%rsp),%r12 movq %rbx,%r13 - movq %rbx,%r14 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 movq %rcx,%r15 + movq %r12,88(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq 
(%rbp,%rdi,8),%r12 andq %rbx,%r15 - movq %r12,88(%rsp) + movq %r10,%r8 - xorq %r14,%r13 + rorq $6,%r14 + xorq %rbx,%r13 xorq %rdx,%r15 - addq %r8,%r12 - - movq %r9,%r8 - addq %r13,%r12 + xorq %r11,%r8 + xorq %r9,%r14 addq %r15,%r12 - movq %r9,%r13 - movq %r9,%r14 + movq %r10,%r15 - rorq $28,%r8 - rorq $34,%r13 - movq %r9,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 - xorq %r13,%r8 - rorq $5,%r13 - orq %r11,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 - xorq %r13,%r8 - andq %r11,%r15 addq %r12,%rax - - andq %r10,%r14 addq %r12,%r8 - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%r8 - movq 104(%rsp),%r13 - movq 80(%rsp),%r12 - movq %r13,%r15 + movq 104(%rsp),%r13 + movq 80(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 40(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 40(%rsp),%r12 + xorq %r15,%r14 addq 96(%rsp),%r12 movq %rax,%r13 - movq %rax,%r14 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 movq %rbx,%r15 + movq %r12,96(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 andq %rax,%r15 - movq %r12,96(%rsp) + movq %r9,%rdx - xorq %r14,%r13 + rorq $6,%r14 + xorq %rax,%r13 xorq %rcx,%r15 - addq %rdx,%r12 - - movq %r8,%rdx - addq %r13,%r12 + xorq %r10,%rdx + xorq %r8,%r14 addq %r15,%r12 - movq %r8,%r13 - movq %r8,%r14 + movq %r9,%r15 - rorq $28,%rdx - rorq $34,%r13 - movq %r8,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 - xorq %r13,%rdx - rorq $5,%r13 - orq %r10,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx - xorq %r13,%rdx - andq %r10,%r15 addq %r12,%r11 - - andq %r9,%r14 addq %r12,%rdx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rdx - movq 112(%rsp),%r13 - movq 88(%rsp),%r12 - movq %r13,%r15 + movq 112(%rsp),%r13 + movq 88(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 48(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 48(%rsp),%r12 + xorq %r15,%r14 addq 104(%rsp),%r12 movq %r11,%r13 - movq %r11,%r14 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 movq %rax,%r15 + movq %r12,104(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r11,%r15 - movq %r12,104(%rsp) + movq %r8,%rcx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r11,%r13 xorq %rbx,%r15 - addq %rcx,%r12 - - movq %rdx,%rcx - addq %r13,%r12 + xorq %r9,%rcx + xorq %rdx,%r14 addq %r15,%r12 - movq %rdx,%r13 - movq %rdx,%r14 + movq %r8,%r15 - rorq $28,%rcx - rorq $34,%r13 - movq %rdx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 - xorq %r13,%rcx - rorq $5,%r13 - orq %r9,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx - xorq %r13,%rcx - andq %r9,%r15 addq %r12,%r10 - - andq %r8,%r14 addq %r12,%rcx - - orq %r15,%r14 leaq 1(%rdi),%rdi - 
addq %r14,%rcx - movq 120(%rsp),%r13 - movq 96(%rsp),%r12 - movq %r13,%r15 + movq 120(%rsp),%r13 + movq 96(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 56(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 56(%rsp),%r12 + xorq %r15,%r14 addq 112(%rsp),%r12 movq %r10,%r13 - movq %r10,%r14 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 movq %r11,%r15 + movq %r12,112(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r10,%r13 xorq %rax,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r10,%r15 - movq %r12,112(%rsp) + movq %rdx,%rbx - xorq %r14,%r13 + rorq $6,%r14 + xorq %r10,%r13 xorq %rax,%r15 - addq %rbx,%r12 - - movq %rcx,%rbx - addq %r13,%r12 + xorq %r8,%rbx + xorq %rcx,%r14 addq %r15,%r12 - movq %rcx,%r13 - movq %rcx,%r14 + movq %rdx,%r15 - rorq $28,%rbx - rorq $34,%r13 - movq %rcx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 - xorq %r13,%rbx - rorq $5,%r13 - orq %r8,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx - xorq %r13,%rbx - andq %r8,%r15 addq %r12,%r9 - - andq %rdx,%r14 addq %r12,%rbx - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rbx - movq 0(%rsp),%r13 - movq 104(%rsp),%r12 - movq %r13,%r15 + movq 0(%rsp),%r13 + movq 104(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + rorq $7,%r12 + xorq %r13,%r12 shrq $7,%r13 - rorq $1,%r15 - - xorq %r15,%r13 - rorq $7,%r15 - - xorq %r15,%r13 - movq %r12,%r14 - shrq $6,%r12 - rorq $19,%r14 - - xorq %r14,%r12 - rorq $42,%r14 + rorq $1,%r12 + xorq %r12,%r13 + movq 64(%rsp),%r12 - xorq %r14,%r12 + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + rorq $19,%r15 addq %r13,%r12 - - addq 64(%rsp),%r12 + xorq %r15,%r14 addq 120(%rsp),%r12 movq %r9,%r13 - movq %r9,%r14 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 movq %r10,%r15 + movq %r12,120(%rsp) - rorq $14,%r13 - rorq $18,%r14 + rorq $5,%r14 + xorq %r9,%r13 xorq %r11,%r15 - xorq %r14,%r13 - rorq $23,%r14 + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 andq %r9,%r15 - movq %r12,120(%rsp) + movq %rcx,%rax - xorq %r14,%r13 + rorq $6,%r14 + xorq %r9,%r13 xorq %r11,%r15 - addq %rax,%r12 - - movq %rbx,%rax - addq %r13,%r12 + xorq %rdx,%rax + xorq %rbx,%r14 addq %r15,%r12 - movq %rbx,%r13 - movq %rbx,%r14 + movq %rcx,%r15 - rorq $28,%rax - rorq $34,%r13 - movq %rbx,%r15 - addq (%rbp,%rdi,8),%r12 + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 - xorq %r13,%rax - rorq $5,%r13 - orq %rdx,%r14 + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax - xorq %r13,%rax - andq %rdx,%r15 addq %r12,%r8 - - andq %rcx,%r14 addq %r12,%rax - - orq %r15,%r14 leaq 1(%rdi),%rdi - addq %r14,%rax + cmpq $80,%rdi jb .Lrounds_16_xx diff --git a/secure/lib/libcrypto/asm/vpaes-x86.s b/secure/lib/libcrypto/asm/vpaes-x86.s new file mode 100644 index 0000000000..f0feead19a --- /dev/null +++ b/secure/lib/libcrypto/asm/vpaes-x86.s @@ -0,0 +1,659 @@ +.file "vpaes-x86.s" +.text +.align 64 +.L_vpaes_consts: +.long 218628480,235210255,168496130,67568393 +.long 252381056,17041926,33884169,51187212 +.long 252645135,252645135,252645135,252645135 +.long 1512730624,3266504856,1377990664,3401244816 +.long 830229760,1275146365,2969422977,3447763452 +.long 3411033600,2979783055,338359620,2782886510 
+.long 4209124096,907596821,221174255,1006095553 +.long 191964160,3799684038,3164090317,1589111125 +.long 182528256,1777043520,2877432650,3265356744 +.long 1874708224,3503451415,3305285752,363511674 +.long 1606117888,3487855781,1093350906,2384367825 +.long 197121,67569157,134941193,202313229 +.long 67569157,134941193,202313229,197121 +.long 134941193,202313229,197121,67569157 +.long 202313229,197121,67569157,134941193 +.long 33619971,100992007,168364043,235736079 +.long 235736079,33619971,100992007,168364043 +.long 168364043,235736079,33619971,100992007 +.long 100992007,168364043,235736079,33619971 +.long 50462976,117835012,185207048,252579084 +.long 252314880,51251460,117574920,184942860 +.long 184682752,252054788,50987272,118359308 +.long 118099200,185467140,251790600,50727180 +.long 2946363062,528716217,1300004225,1881839624 +.long 1532713819,1532713819,1532713819,1532713819 +.long 3602276352,4288629033,3737020424,4153884961 +.long 1354558464,32357713,2958822624,3775749553 +.long 1201988352,132424512,1572796698,503232858 +.long 2213177600,1597421020,4103937655,675398315 +.long 2749646592,4273543773,1511898873,121693092 +.long 3040248576,1103263732,2871565598,1608280554 +.long 2236667136,2588920351,482954393,64377734 +.long 3069987328,291237287,2117370568,3650299247 +.long 533321216,3573750986,2572112006,1401264716 +.long 1339849704,2721158661,548607111,3445553514 +.long 2128193280,3054596040,2183486460,1257083700 +.long 655635200,1165381986,3923443150,2344132524 +.long 190078720,256924420,290342170,357187870 +.long 1610966272,2263057382,4103205268,309794674 +.long 2592527872,2233205587,1335446729,3402964816 +.long 3973531904,3225098121,3002836325,1918774430 +.long 3870401024,2102906079,2284471353,4117666579 +.long 617007872,1021508343,366931923,691083277 +.long 2528395776,3491914898,2968704004,1613121270 +.long 3445188352,3247741094,844474987,4093578302 +.long 651481088,1190302358,1689581232,574775300 +.long 4289380608,206939853,2555985458,2489840491 +.long 2130264064,327674451,3566485037,3349835193 +.long 2470714624,316102159,3636825756,3393945945 +.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105 +.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83 +.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117 +.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105 +.byte 118,101,114,115,105,116,121,41,0 +.align 64 +.type _vpaes_preheat,@function +.align 16 +_vpaes_preheat: + addl (%esp),%ebp + movdqa -48(%ebp),%xmm7 + movdqa -16(%ebp),%xmm6 + ret +.size _vpaes_preheat,.-_vpaes_preheat +.type _vpaes_encrypt_core,@function +.align 16 +_vpaes_encrypt_core: + movl $16,%ecx + movl 240(%edx),%eax + movdqa %xmm6,%xmm1 + movdqa (%ebp),%xmm2 + pandn %xmm0,%xmm1 + movdqu (%edx),%xmm5 + psrld $4,%xmm1 + pand %xmm6,%xmm0 +.byte 102,15,56,0,208 + movdqa 16(%ebp),%xmm0 +.byte 102,15,56,0,193 + pxor %xmm5,%xmm2 + pxor %xmm2,%xmm0 + addl $16,%edx + leal 192(%ebp),%ebx + jmp .L000enc_entry +.align 16 +.L001enc_loop: + movdqa 32(%ebp),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm5,%xmm4 + movdqa 48(%ebp),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + movdqa 64(%ebp),%xmm5 +.byte 102,15,56,0,234 + movdqa -64(%ebx,%ecx,1),%xmm1 + movdqa 80(%ebp),%xmm2 +.byte 102,15,56,0,211 + pxor %xmm5,%xmm2 + movdqa (%ebx,%ecx,1),%xmm4 + movdqa %xmm0,%xmm3 +.byte 102,15,56,0,193 + addl $16,%edx + pxor %xmm2,%xmm0 +.byte 102,15,56,0,220 + addl $16,%ecx + pxor %xmm0,%xmm3 +.byte 102,15,56,0,193 + andl $48,%ecx + pxor %xmm3,%xmm0 + subl $1,%eax +.L000enc_entry: + movdqa %xmm6,%xmm1 + pandn 
%xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm6,%xmm0 + movdqa -32(%ebp),%xmm5 +.byte 102,15,56,0,232 + pxor %xmm1,%xmm0 + movdqa %xmm7,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm5,%xmm3 + movdqa %xmm7,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm5,%xmm4 + movdqa %xmm7,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm7,%xmm3 + movdqu (%edx),%xmm5 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + jnz .L001enc_loop + movdqa 96(%ebp),%xmm4 + movdqa 112(%ebp),%xmm0 +.byte 102,15,56,0,226 + pxor %xmm5,%xmm4 +.byte 102,15,56,0,195 + movdqa 64(%ebx,%ecx,1),%xmm1 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,193 + ret +.size _vpaes_encrypt_core,.-_vpaes_encrypt_core +.type _vpaes_decrypt_core,@function +.align 16 +_vpaes_decrypt_core: + movl 240(%edx),%eax + leal 608(%ebp),%ebx + movdqa %xmm6,%xmm1 + movdqa -64(%ebx),%xmm2 + pandn %xmm0,%xmm1 + movl %eax,%ecx + psrld $4,%xmm1 + movdqu (%edx),%xmm5 + shll $4,%ecx + pand %xmm6,%xmm0 +.byte 102,15,56,0,208 + movdqa -48(%ebx),%xmm0 + xorl $48,%ecx +.byte 102,15,56,0,193 + andl $48,%ecx + pxor %xmm5,%xmm2 + movdqa 176(%ebp),%xmm5 + pxor %xmm2,%xmm0 + addl $16,%edx + leal -352(%ebx,%ecx,1),%ecx + jmp .L002dec_entry +.align 16 +.L003dec_loop: + movdqa -32(%ebx),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa -16(%ebx),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + addl $16,%edx +.byte 102,15,56,0,197 + movdqa (%ebx),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 16(%ebx),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + subl $1,%eax +.byte 102,15,56,0,197 + movdqa 32(%ebx),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 48(%ebx),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,197 + movdqa 64(%ebx),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 80(%ebx),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 +.byte 102,15,58,15,237,12 +.L002dec_entry: + movdqa %xmm6,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm6,%xmm0 + movdqa -32(%ebp),%xmm2 +.byte 102,15,56,0,208 + pxor %xmm1,%xmm0 + movdqa %xmm7,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + movdqa %xmm7,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm2,%xmm4 + movdqa %xmm7,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm7,%xmm3 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + movdqu (%edx),%xmm0 + jnz .L003dec_loop + movdqa 96(%ebx),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 112(%ebx),%xmm0 + movdqa (%ecx),%xmm2 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,194 + ret +.size _vpaes_decrypt_core,.-_vpaes_decrypt_core +.type _vpaes_schedule_core,@function +.align 16 +_vpaes_schedule_core: + addl (%esp),%ebp + movdqu (%esi),%xmm0 + movdqa 320(%ebp),%xmm2 + movdqa %xmm0,%xmm3 + leal (%ebp),%ebx + movdqa %xmm2,4(%esp) + call _vpaes_schedule_transform + movdqa %xmm0,%xmm7 + testl %edi,%edi + jnz .L004schedule_am_decrypting + movdqu %xmm0,(%edx) + jmp .L005schedule_go +.L004schedule_am_decrypting: + movdqa 256(%ebp,%ecx,1),%xmm1 +.byte 102,15,56,0,217 + movdqu %xmm3,(%edx) + xorl $48,%ecx +.L005schedule_go: + cmpl $192,%eax + ja .L006schedule_256 + je .L007schedule_192 +.L008schedule_128: + movl $10,%eax +.L009loop_schedule_128: + call _vpaes_schedule_round + decl %eax + jz .L010schedule_mangle_last + call _vpaes_schedule_mangle + jmp .L009loop_schedule_128 +.align 16 +.L007schedule_192: + movdqu 8(%esi),%xmm0 + call _vpaes_schedule_transform + movdqa %xmm0,%xmm6 + pxor %xmm4,%xmm4 + movhlps %xmm4,%xmm6 + movl $4,%eax +.L011loop_schedule_192: + call _vpaes_schedule_round +.byte 102,15,58,15,198,8 + call _vpaes_schedule_mangle + call 
_vpaes_schedule_192_smear + call _vpaes_schedule_mangle + call _vpaes_schedule_round + decl %eax + jz .L010schedule_mangle_last + call _vpaes_schedule_mangle + call _vpaes_schedule_192_smear + jmp .L011loop_schedule_192 +.align 16 +.L006schedule_256: + movdqu 16(%esi),%xmm0 + call _vpaes_schedule_transform + movl $7,%eax +.L012loop_schedule_256: + call _vpaes_schedule_mangle + movdqa %xmm0,%xmm6 + call _vpaes_schedule_round + decl %eax + jz .L010schedule_mangle_last + call _vpaes_schedule_mangle + pshufd $255,%xmm0,%xmm0 + movdqa %xmm7,20(%esp) + movdqa %xmm6,%xmm7 + call .L_vpaes_schedule_low_round + movdqa 20(%esp),%xmm7 + jmp .L012loop_schedule_256 +.align 16 +.L010schedule_mangle_last: + leal 384(%ebp),%ebx + testl %edi,%edi + jnz .L013schedule_mangle_last_dec + movdqa 256(%ebp,%ecx,1),%xmm1 +.byte 102,15,56,0,193 + leal 352(%ebp),%ebx + addl $32,%edx +.L013schedule_mangle_last_dec: + addl $-16,%edx + pxor 336(%ebp),%xmm0 + call _vpaes_schedule_transform + movdqu %xmm0,(%edx) + pxor %xmm0,%xmm0 + pxor %xmm1,%xmm1 + pxor %xmm2,%xmm2 + pxor %xmm3,%xmm3 + pxor %xmm4,%xmm4 + pxor %xmm5,%xmm5 + pxor %xmm6,%xmm6 + pxor %xmm7,%xmm7 + ret +.size _vpaes_schedule_core,.-_vpaes_schedule_core +.type _vpaes_schedule_192_smear,@function +.align 16 +_vpaes_schedule_192_smear: + pshufd $128,%xmm6,%xmm0 + pxor %xmm0,%xmm6 + pshufd $254,%xmm7,%xmm0 + pxor %xmm0,%xmm6 + movdqa %xmm6,%xmm0 + pxor %xmm1,%xmm1 + movhlps %xmm1,%xmm6 + ret +.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear +.type _vpaes_schedule_round,@function +.align 16 +_vpaes_schedule_round: + movdqa 8(%esp),%xmm2 + pxor %xmm1,%xmm1 +.byte 102,15,58,15,202,15 +.byte 102,15,58,15,210,15 + pxor %xmm1,%xmm7 + pshufd $255,%xmm0,%xmm0 +.byte 102,15,58,15,192,1 + movdqa %xmm2,8(%esp) +.L_vpaes_schedule_low_round: + movdqa %xmm7,%xmm1 + pslldq $4,%xmm7 + pxor %xmm1,%xmm7 + movdqa %xmm7,%xmm1 + pslldq $8,%xmm7 + pxor %xmm1,%xmm7 + pxor 336(%ebp),%xmm7 + movdqa -16(%ebp),%xmm4 + movdqa -48(%ebp),%xmm5 + movdqa %xmm4,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm4,%xmm0 + movdqa -32(%ebp),%xmm2 +.byte 102,15,56,0,208 + pxor %xmm1,%xmm0 + movdqa %xmm5,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + movdqa %xmm5,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm2,%xmm4 + movdqa %xmm5,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm5,%xmm3 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + movdqa 32(%ebp),%xmm4 +.byte 102,15,56,0,226 + movdqa 48(%ebp),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + pxor %xmm7,%xmm0 + movdqa %xmm0,%xmm7 + ret +.size _vpaes_schedule_round,.-_vpaes_schedule_round +.type _vpaes_schedule_transform,@function +.align 16 +_vpaes_schedule_transform: + movdqa -16(%ebp),%xmm2 + movdqa %xmm2,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm2,%xmm0 + movdqa (%ebx),%xmm2 +.byte 102,15,56,0,208 + movdqa 16(%ebx),%xmm0 +.byte 102,15,56,0,193 + pxor %xmm2,%xmm0 + ret +.size _vpaes_schedule_transform,.-_vpaes_schedule_transform +.type _vpaes_schedule_mangle,@function +.align 16 +_vpaes_schedule_mangle: + movdqa %xmm0,%xmm4 + movdqa 128(%ebp),%xmm5 + testl %edi,%edi + jnz .L014schedule_mangle_dec + addl $16,%edx + pxor 336(%ebp),%xmm4 +.byte 102,15,56,0,229 + movdqa %xmm4,%xmm3 +.byte 102,15,56,0,229 + pxor %xmm4,%xmm3 +.byte 102,15,56,0,229 + pxor %xmm4,%xmm3 + jmp .L015schedule_mangle_both +.align 16 +.L014schedule_mangle_dec: + movdqa -16(%ebp),%xmm2 + leal 416(%ebp),%esi + movdqa %xmm2,%xmm1 + pandn %xmm4,%xmm1 + psrld $4,%xmm1 + pand %xmm2,%xmm4 + movdqa (%esi),%xmm2 +.byte 102,15,56,0,212 + movdqa 
16(%esi),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + movdqa 32(%esi),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 48(%esi),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + movdqa 64(%esi),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 80(%esi),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + movdqa 96(%esi),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 112(%esi),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + addl $-16,%edx +.L015schedule_mangle_both: + movdqa 256(%ebp,%ecx,1),%xmm1 +.byte 102,15,56,0,217 + addl $-16,%ecx + andl $48,%ecx + movdqu %xmm3,(%edx) + ret +.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle +.globl vpaes_set_encrypt_key +.type vpaes_set_encrypt_key,@function +.align 16 +vpaes_set_encrypt_key: +.L_vpaes_set_encrypt_key_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + leal -56(%esp),%ebx + movl 24(%esp),%eax + andl $-16,%ebx + movl 28(%esp),%edx + xchgl %esp,%ebx + movl %ebx,48(%esp) + movl %eax,%ebx + shrl $5,%ebx + addl $5,%ebx + movl %ebx,240(%edx) + movl $48,%ecx + movl $0,%edi + leal .L_vpaes_consts+0x30-.L016pic_point,%ebp + call _vpaes_schedule_core +.L016pic_point: + movl 48(%esp),%esp + xorl %eax,%eax + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin +.globl vpaes_set_decrypt_key +.type vpaes_set_decrypt_key,@function +.align 16 +vpaes_set_decrypt_key: +.L_vpaes_set_decrypt_key_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + leal -56(%esp),%ebx + movl 24(%esp),%eax + andl $-16,%ebx + movl 28(%esp),%edx + xchgl %esp,%ebx + movl %ebx,48(%esp) + movl %eax,%ebx + shrl $5,%ebx + addl $5,%ebx + movl %ebx,240(%edx) + shll $4,%ebx + leal 16(%edx,%ebx,1),%edx + movl $1,%edi + movl %eax,%ecx + shrl $1,%ecx + andl $32,%ecx + xorl $32,%ecx + leal .L_vpaes_consts+0x30-.L017pic_point,%ebp + call _vpaes_schedule_core +.L017pic_point: + movl 48(%esp),%esp + xorl %eax,%eax + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin +.globl vpaes_encrypt +.type vpaes_encrypt,@function +.align 16 +vpaes_encrypt: +.L_vpaes_encrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + leal .L_vpaes_consts+0x30-.L018pic_point,%ebp + call _vpaes_preheat +.L018pic_point: + movl 20(%esp),%esi + leal -56(%esp),%ebx + movl 24(%esp),%edi + andl $-16,%ebx + movl 28(%esp),%edx + xchgl %esp,%ebx + movl %ebx,48(%esp) + movdqu (%esi),%xmm0 + call _vpaes_encrypt_core + movdqu %xmm0,(%edi) + movl 48(%esp),%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size vpaes_encrypt,.-.L_vpaes_encrypt_begin +.globl vpaes_decrypt +.type vpaes_decrypt,@function +.align 16 +vpaes_decrypt: +.L_vpaes_decrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + leal .L_vpaes_consts+0x30-.L019pic_point,%ebp + call _vpaes_preheat +.L019pic_point: + movl 20(%esp),%esi + leal -56(%esp),%ebx + movl 24(%esp),%edi + andl $-16,%ebx + movl 28(%esp),%edx + xchgl %esp,%ebx + movl %ebx,48(%esp) + movdqu (%esi),%xmm0 + call _vpaes_decrypt_core + movdqu %xmm0,(%edi) + movl 48(%esp),%esp + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size vpaes_decrypt,.-.L_vpaes_decrypt_begin +.globl vpaes_cbc_encrypt +.type vpaes_cbc_encrypt,@function +.align 16 +vpaes_cbc_encrypt: +.L_vpaes_cbc_encrypt_begin: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 20(%esp),%esi + movl 24(%esp),%edi + 
movl 28(%esp),%eax + movl 32(%esp),%edx + leal -56(%esp),%ebx + movl 36(%esp),%ebp + andl $-16,%ebx + movl 40(%esp),%ecx + xchgl %esp,%ebx + movdqu (%ebp),%xmm1 + subl %esi,%edi + movl %ebx,48(%esp) + movl %edi,(%esp) + subl $16,%eax + movl %edx,4(%esp) + movl %ebp,8(%esp) + movl %eax,%edi + leal .L_vpaes_consts+0x30-.L020pic_point,%ebp + call _vpaes_preheat +.L020pic_point: + cmpl $0,%ecx + je .L021cbc_dec_loop + jmp .L022cbc_enc_loop +.align 16 +.L022cbc_enc_loop: + movdqu (%esi),%xmm0 + pxor %xmm1,%xmm0 + call _vpaes_encrypt_core + movl (%esp),%ebx + movl 4(%esp),%edx + movdqa %xmm0,%xmm1 + movdqu %xmm0,(%ebx,%esi,1) + leal 16(%esi),%esi + subl $16,%edi + jnc .L022cbc_enc_loop + jmp .L023cbc_done +.align 16 +.L021cbc_dec_loop: + movdqu (%esi),%xmm0 + movdqa %xmm1,16(%esp) + movdqa %xmm0,32(%esp) + call _vpaes_decrypt_core + movl (%esp),%ebx + movl 4(%esp),%edx + pxor 16(%esp),%xmm0 + movdqa 32(%esp),%xmm1 + movdqu %xmm0,(%ebx,%esi,1) + leal 16(%esi),%esi + subl $16,%edi + jnc .L021cbc_dec_loop +.L023cbc_done: + movl 8(%esp),%ebx + movl 48(%esp),%esp + movdqu %xmm1,(%ebx) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.size vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin diff --git a/secure/lib/libcrypto/asm/vpaes-x86_64.s b/secure/lib/libcrypto/asm/vpaes-x86_64.s new file mode 100644 index 0000000000..70284376ee --- /dev/null +++ b/secure/lib/libcrypto/asm/vpaes-x86_64.s @@ -0,0 +1,826 @@ +.text + + + + + + + + + + + + + + + + +.type _vpaes_encrypt_core,@function +.align 16 +_vpaes_encrypt_core: + movq %rdx,%r9 + movq $16,%r11 + movl 240(%rdx),%eax + movdqa %xmm9,%xmm1 + movdqa .Lk_ipt(%rip),%xmm2 + pandn %xmm0,%xmm1 + movdqu (%r9),%xmm5 + psrld $4,%xmm1 + pand %xmm9,%xmm0 +.byte 102,15,56,0,208 + movdqa .Lk_ipt+16(%rip),%xmm0 +.byte 102,15,56,0,193 + pxor %xmm5,%xmm2 + pxor %xmm2,%xmm0 + addq $16,%r9 + leaq .Lk_mc_backward(%rip),%r10 + jmp .Lenc_entry + +.align 16 +.Lenc_loop: + + movdqa %xmm13,%xmm4 +.byte 102,15,56,0,226 + pxor %xmm5,%xmm4 + movdqa %xmm12,%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + movdqa %xmm15,%xmm5 +.byte 102,15,56,0,234 + movdqa -64(%r11,%r10,1),%xmm1 + movdqa %xmm14,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm5,%xmm2 + movdqa (%r11,%r10,1),%xmm4 + movdqa %xmm0,%xmm3 +.byte 102,15,56,0,193 + addq $16,%r9 + pxor %xmm2,%xmm0 +.byte 102,15,56,0,220 + addq $16,%r11 + pxor %xmm0,%xmm3 +.byte 102,15,56,0,193 + andq $48,%r11 + pxor %xmm3,%xmm0 + subq $1,%rax + +.Lenc_entry: + + movdqa %xmm9,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm9,%xmm0 + movdqa %xmm11,%xmm5 +.byte 102,15,56,0,232 + pxor %xmm1,%xmm0 + movdqa %xmm10,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm5,%xmm3 + movdqa %xmm10,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm5,%xmm4 + movdqa %xmm10,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm10,%xmm3 + movdqu (%r9),%xmm5 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + jnz .Lenc_loop + + + movdqa -96(%r10),%xmm4 + movdqa -80(%r10),%xmm0 +.byte 102,15,56,0,226 + pxor %xmm5,%xmm4 +.byte 102,15,56,0,195 + movdqa 64(%r11,%r10,1),%xmm1 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,193 + .byte 0xf3,0xc3 +.size _vpaes_encrypt_core,.-_vpaes_encrypt_core + + + + + + +.type _vpaes_decrypt_core,@function +.align 16 +_vpaes_decrypt_core: + movq %rdx,%r9 + movl 240(%rdx),%eax + movdqa %xmm9,%xmm1 + movdqa .Lk_dipt(%rip),%xmm2 + pandn %xmm0,%xmm1 + movq %rax,%r11 + psrld $4,%xmm1 + movdqu (%r9),%xmm5 + shlq $4,%r11 + pand %xmm9,%xmm0 +.byte 102,15,56,0,208 + movdqa .Lk_dipt+16(%rip),%xmm0 + xorq $48,%r11 + leaq .Lk_dsbd(%rip),%r10 +.byte 102,15,56,0,193 + andq 
$48,%r11 + pxor %xmm5,%xmm2 + movdqa .Lk_mc_forward+48(%rip),%xmm5 + pxor %xmm2,%xmm0 + addq $16,%r9 + addq %r10,%r11 + jmp .Ldec_entry + +.align 16 +.Ldec_loop: + + + + movdqa -32(%r10),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa -16(%r10),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + addq $16,%r9 + +.byte 102,15,56,0,197 + movdqa 0(%r10),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 16(%r10),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + subq $1,%rax + +.byte 102,15,56,0,197 + movdqa 32(%r10),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 48(%r10),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + +.byte 102,15,56,0,197 + movdqa 64(%r10),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 80(%r10),%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + +.byte 102,15,58,15,237,12 + +.Ldec_entry: + + movdqa %xmm9,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm9,%xmm0 + movdqa %xmm11,%xmm2 +.byte 102,15,56,0,208 + pxor %xmm1,%xmm0 + movdqa %xmm10,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + movdqa %xmm10,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm2,%xmm4 + movdqa %xmm10,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm10,%xmm3 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + movdqu (%r9),%xmm0 + jnz .Ldec_loop + + + movdqa 96(%r10),%xmm4 +.byte 102,15,56,0,226 + pxor %xmm0,%xmm4 + movdqa 112(%r10),%xmm0 + movdqa .Lk_sr-.Lk_dsbd(%r11),%xmm2 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 +.byte 102,15,56,0,194 + .byte 0xf3,0xc3 +.size _vpaes_decrypt_core,.-_vpaes_decrypt_core + + + + + + +.type _vpaes_schedule_core,@function +.align 16 +_vpaes_schedule_core: + + + + + + call _vpaes_preheat + movdqa .Lk_rcon(%rip),%xmm8 + movdqu (%rdi),%xmm0 + + + movdqa %xmm0,%xmm3 + leaq .Lk_ipt(%rip),%r11 + call _vpaes_schedule_transform + movdqa %xmm0,%xmm7 + + leaq .Lk_sr(%rip),%r10 + testq %rcx,%rcx + jnz .Lschedule_am_decrypting + + + movdqu %xmm0,(%rdx) + jmp .Lschedule_go + +.Lschedule_am_decrypting: + + movdqa (%r8,%r10,1),%xmm1 +.byte 102,15,56,0,217 + movdqu %xmm3,(%rdx) + xorq $48,%r8 + +.Lschedule_go: + cmpl $192,%esi + ja .Lschedule_256 + je .Lschedule_192 + + + + + + + + + + +.Lschedule_128: + movl $10,%esi + +.Loop_schedule_128: + call _vpaes_schedule_round + decq %rsi + jz .Lschedule_mangle_last + call _vpaes_schedule_mangle + jmp .Loop_schedule_128 + + + + + + + + + + + + + + + + +.align 16 +.Lschedule_192: + movdqu 8(%rdi),%xmm0 + call _vpaes_schedule_transform + movdqa %xmm0,%xmm6 + pxor %xmm4,%xmm4 + movhlps %xmm4,%xmm6 + movl $4,%esi + +.Loop_schedule_192: + call _vpaes_schedule_round +.byte 102,15,58,15,198,8 + call _vpaes_schedule_mangle + call _vpaes_schedule_192_smear + call _vpaes_schedule_mangle + call _vpaes_schedule_round + decq %rsi + jz .Lschedule_mangle_last + call _vpaes_schedule_mangle + call _vpaes_schedule_192_smear + jmp .Loop_schedule_192 + + + + + + + + + + + +.align 16 +.Lschedule_256: + movdqu 16(%rdi),%xmm0 + call _vpaes_schedule_transform + movl $7,%esi + +.Loop_schedule_256: + call _vpaes_schedule_mangle + movdqa %xmm0,%xmm6 + + + call _vpaes_schedule_round + decq %rsi + jz .Lschedule_mangle_last + call _vpaes_schedule_mangle + + + pshufd $255,%xmm0,%xmm0 + movdqa %xmm7,%xmm5 + movdqa %xmm6,%xmm7 + call _vpaes_schedule_low_round + movdqa %xmm5,%xmm7 + + jmp .Loop_schedule_256 + + + + + + + + + + + + +.align 16 +.Lschedule_mangle_last: + + leaq .Lk_deskew(%rip),%r11 + testq %rcx,%rcx + jnz .Lschedule_mangle_last_dec + + + movdqa (%r8,%r10,1),%xmm1 +.byte 102,15,56,0,193 + leaq .Lk_opt(%rip),%r11 + addq $32,%rdx + 
+.Lschedule_mangle_last_dec: + addq $-16,%rdx + pxor .Lk_s63(%rip),%xmm0 + call _vpaes_schedule_transform + movdqu %xmm0,(%rdx) + + + pxor %xmm0,%xmm0 + pxor %xmm1,%xmm1 + pxor %xmm2,%xmm2 + pxor %xmm3,%xmm3 + pxor %xmm4,%xmm4 + pxor %xmm5,%xmm5 + pxor %xmm6,%xmm6 + pxor %xmm7,%xmm7 + .byte 0xf3,0xc3 +.size _vpaes_schedule_core,.-_vpaes_schedule_core + + + + + + + + + + + + + + + +.type _vpaes_schedule_192_smear,@function +.align 16 +_vpaes_schedule_192_smear: + pshufd $128,%xmm6,%xmm0 + pxor %xmm0,%xmm6 + pshufd $254,%xmm7,%xmm0 + pxor %xmm0,%xmm6 + movdqa %xmm6,%xmm0 + pxor %xmm1,%xmm1 + movhlps %xmm1,%xmm6 + .byte 0xf3,0xc3 +.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear + + + + + + + + + + + + + + + + + + + +.type _vpaes_schedule_round,@function +.align 16 +_vpaes_schedule_round: + + pxor %xmm1,%xmm1 +.byte 102,65,15,58,15,200,15 +.byte 102,69,15,58,15,192,15 + pxor %xmm1,%xmm7 + + + pshufd $255,%xmm0,%xmm0 +.byte 102,15,58,15,192,1 + + + + +_vpaes_schedule_low_round: + + movdqa %xmm7,%xmm1 + pslldq $4,%xmm7 + pxor %xmm1,%xmm7 + movdqa %xmm7,%xmm1 + pslldq $8,%xmm7 + pxor %xmm1,%xmm7 + pxor .Lk_s63(%rip),%xmm7 + + + movdqa %xmm9,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm9,%xmm0 + movdqa %xmm11,%xmm2 +.byte 102,15,56,0,208 + pxor %xmm1,%xmm0 + movdqa %xmm10,%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + movdqa %xmm10,%xmm4 +.byte 102,15,56,0,224 + pxor %xmm2,%xmm4 + movdqa %xmm10,%xmm2 +.byte 102,15,56,0,211 + pxor %xmm0,%xmm2 + movdqa %xmm10,%xmm3 +.byte 102,15,56,0,220 + pxor %xmm1,%xmm3 + movdqa %xmm13,%xmm4 +.byte 102,15,56,0,226 + movdqa %xmm12,%xmm0 +.byte 102,15,56,0,195 + pxor %xmm4,%xmm0 + + + pxor %xmm7,%xmm0 + movdqa %xmm0,%xmm7 + .byte 0xf3,0xc3 +.size _vpaes_schedule_round,.-_vpaes_schedule_round + + + + + + + + + + +.type _vpaes_schedule_transform,@function +.align 16 +_vpaes_schedule_transform: + movdqa %xmm9,%xmm1 + pandn %xmm0,%xmm1 + psrld $4,%xmm1 + pand %xmm9,%xmm0 + movdqa (%r11),%xmm2 +.byte 102,15,56,0,208 + movdqa 16(%r11),%xmm0 +.byte 102,15,56,0,193 + pxor %xmm2,%xmm0 + .byte 0xf3,0xc3 +.size _vpaes_schedule_transform,.-_vpaes_schedule_transform + + + + + + + + + + + + + + + + + + + + + + + + +.type _vpaes_schedule_mangle,@function +.align 16 +_vpaes_schedule_mangle: + movdqa %xmm0,%xmm4 + movdqa .Lk_mc_forward(%rip),%xmm5 + testq %rcx,%rcx + jnz .Lschedule_mangle_dec + + + addq $16,%rdx + pxor .Lk_s63(%rip),%xmm4 +.byte 102,15,56,0,229 + movdqa %xmm4,%xmm3 +.byte 102,15,56,0,229 + pxor %xmm4,%xmm3 +.byte 102,15,56,0,229 + pxor %xmm4,%xmm3 + + jmp .Lschedule_mangle_both +.align 16 +.Lschedule_mangle_dec: + + leaq .Lk_dksd(%rip),%r11 + movdqa %xmm9,%xmm1 + pandn %xmm4,%xmm1 + psrld $4,%xmm1 + pand %xmm9,%xmm4 + + movdqa 0(%r11),%xmm2 +.byte 102,15,56,0,212 + movdqa 16(%r11),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + + movdqa 32(%r11),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 48(%r11),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + + movdqa 64(%r11),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 80(%r11),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 +.byte 102,15,56,0,221 + + movdqa 96(%r11),%xmm2 +.byte 102,15,56,0,212 + pxor %xmm3,%xmm2 + movdqa 112(%r11),%xmm3 +.byte 102,15,56,0,217 + pxor %xmm2,%xmm3 + + addq $-16,%rdx + +.Lschedule_mangle_both: + movdqa (%r8,%r10,1),%xmm1 +.byte 102,15,56,0,217 + addq $-16,%r8 + andq $48,%r8 + movdqu %xmm3,(%rdx) + .byte 0xf3,0xc3 +.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle + + + + +.globl 
vpaes_set_encrypt_key +.type vpaes_set_encrypt_key,@function +.align 16 +vpaes_set_encrypt_key: + movl %esi,%eax + shrl $5,%eax + addl $5,%eax + movl %eax,240(%rdx) + + movl $0,%ecx + movl $48,%r8d + call _vpaes_schedule_core + xorl %eax,%eax + .byte 0xf3,0xc3 +.size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key + +.globl vpaes_set_decrypt_key +.type vpaes_set_decrypt_key,@function +.align 16 +vpaes_set_decrypt_key: + movl %esi,%eax + shrl $5,%eax + addl $5,%eax + movl %eax,240(%rdx) + shll $4,%eax + leaq 16(%rdx,%rax,1),%rdx + + movl $1,%ecx + movl %esi,%r8d + shrl $1,%r8d + andl $32,%r8d + xorl $32,%r8d + call _vpaes_schedule_core + xorl %eax,%eax + .byte 0xf3,0xc3 +.size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key + +.globl vpaes_encrypt +.type vpaes_encrypt,@function +.align 16 +vpaes_encrypt: + movdqu (%rdi),%xmm0 + call _vpaes_preheat + call _vpaes_encrypt_core + movdqu %xmm0,(%rsi) + .byte 0xf3,0xc3 +.size vpaes_encrypt,.-vpaes_encrypt + +.globl vpaes_decrypt +.type vpaes_decrypt,@function +.align 16 +vpaes_decrypt: + movdqu (%rdi),%xmm0 + call _vpaes_preheat + call _vpaes_decrypt_core + movdqu %xmm0,(%rsi) + .byte 0xf3,0xc3 +.size vpaes_decrypt,.-vpaes_decrypt +.globl vpaes_cbc_encrypt +.type vpaes_cbc_encrypt,@function +.align 16 +vpaes_cbc_encrypt: + xchgq %rcx,%rdx + movdqu (%r8),%xmm6 + subq %rdi,%rsi + subq $16,%rcx + call _vpaes_preheat + cmpl $0,%r9d + je .Lcbc_dec_loop + jmp .Lcbc_enc_loop +.align 16 +.Lcbc_enc_loop: + movdqu (%rdi),%xmm0 + pxor %xmm6,%xmm0 + call _vpaes_encrypt_core + movdqa %xmm0,%xmm6 + movdqu %xmm0,(%rsi,%rdi,1) + leaq 16(%rdi),%rdi + subq $16,%rcx + jnc .Lcbc_enc_loop + jmp .Lcbc_done +.align 16 +.Lcbc_dec_loop: + movdqu (%rdi),%xmm0 + movdqa %xmm0,%xmm7 + call _vpaes_decrypt_core + pxor %xmm6,%xmm0 + movdqa %xmm7,%xmm6 + movdqu %xmm0,(%rsi,%rdi,1) + leaq 16(%rdi),%rdi + subq $16,%rcx + jnc .Lcbc_dec_loop +.Lcbc_done: + movdqu %xmm6,(%r8) + .byte 0xf3,0xc3 +.size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt + + + + + + +.type _vpaes_preheat,@function +.align 16 +_vpaes_preheat: + leaq .Lk_s0F(%rip),%r10 + movdqa -32(%r10),%xmm10 + movdqa -16(%r10),%xmm11 + movdqa 0(%r10),%xmm9 + movdqa 48(%r10),%xmm13 + movdqa 64(%r10),%xmm12 + movdqa 80(%r10),%xmm15 + movdqa 96(%r10),%xmm14 + .byte 0xf3,0xc3 +.size _vpaes_preheat,.-_vpaes_preheat + + + + + +.type _vpaes_consts,@object +.align 64 +_vpaes_consts: +.Lk_inv: +.quad 0x0E05060F0D080180, 0x040703090A0B0C02 +.quad 0x01040A060F0B0780, 0x030D0E0C02050809 + +.Lk_s0F: +.quad 0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F0F0F0F0F + +.Lk_ipt: +.quad 0xC2B2E8985A2A7000, 0xCABAE09052227808 +.quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81 + +.Lk_sb1: +.quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544 +.quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF +.Lk_sb2: +.quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD +.quad 0x69EB88400AE12900, 0xC2A163C8AB82234A +.Lk_sbo: +.quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878 +.quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA + +.Lk_mc_forward: +.quad 0x0407060500030201, 0x0C0F0E0D080B0A09 +.quad 0x080B0A0904070605, 0x000302010C0F0E0D +.quad 0x0C0F0E0D080B0A09, 0x0407060500030201 +.quad 0x000302010C0F0E0D, 0x080B0A0904070605 + +.Lk_mc_backward: +.quad 0x0605040702010003, 0x0E0D0C0F0A09080B +.quad 0x020100030E0D0C0F, 0x0A09080B06050407 +.quad 0x0E0D0C0F0A09080B, 0x0605040702010003 +.quad 0x0A09080B06050407, 0x020100030E0D0C0F + +.Lk_sr: +.quad 0x0706050403020100, 0x0F0E0D0C0B0A0908 +.quad 0x030E09040F0A0500, 0x0B06010C07020D08 +.quad 0x0F060D040B020900, 0x070E050C030A0108 +.quad 0x0B0E0104070A0D00, 0x0306090C0F020508 + 
+.Lk_rcon: +.quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81 + +.Lk_s63: +.quad 0x5B5B5B5B5B5B5B5B, 0x5B5B5B5B5B5B5B5B + +.Lk_opt: +.quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808 +.quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0 + +.Lk_deskew: +.quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A +.quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77 + + + + + +.Lk_dksd: +.quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9 +.quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E +.Lk_dksb: +.quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99 +.quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8 +.Lk_dkse: +.quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086 +.quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487 +.Lk_dks9: +.quad 0xB6116FC87ED9A700, 0x4AED933482255BFC +.quad 0x4576516227143300, 0x8BB89FACE9DAFDCE + + + + + +.Lk_dipt: +.quad 0x0F505B040B545F00, 0x154A411E114E451A +.quad 0x86E383E660056500, 0x12771772F491F194 + +.Lk_dsb9: +.quad 0x851C03539A86D600, 0xCAD51F504F994CC9 +.quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565 +.Lk_dsbd: +.quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439 +.quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3 +.Lk_dsbb: +.quad 0xD022649296B44200, 0x602646F6B0F2D404 +.quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B +.Lk_dsbe: +.quad 0x46F2929626D4D000, 0x2242600464B4F6B0 +.quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 +.Lk_dsbo: +.quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D +.quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C +.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 +.align 64 +.size _vpaes_consts,.-_vpaes_consts diff --git a/secure/lib/libcrypto/asm/x86-gf2m.s b/secure/lib/libcrypto/asm/x86-gf2m.s new file mode 100644 index 0000000000..9f2d0dafeb --- /dev/null +++ b/secure/lib/libcrypto/asm/x86-gf2m.s @@ -0,0 +1,343 @@ +.file "../../../../crypto/openssl/crypto/bn/asm/x86-gf2m.s" +.text +.type _mul_1x1_mmx,@function +.align 16 +_mul_1x1_mmx: + subl $36,%esp + movl %eax,%ecx + leal (%eax,%eax,1),%edx + andl $1073741823,%ecx + leal (%edx,%edx,1),%ebp + movl $0,(%esp) + andl $2147483647,%edx + movd %eax,%mm2 + movd %ebx,%mm3 + movl %ecx,4(%esp) + xorl %edx,%ecx + pxor %mm5,%mm5 + pxor %mm4,%mm4 + movl %edx,8(%esp) + xorl %ebp,%edx + movl %ecx,12(%esp) + pcmpgtd %mm2,%mm5 + paddd %mm2,%mm2 + xorl %edx,%ecx + movl %ebp,16(%esp) + xorl %edx,%ebp + pand %mm3,%mm5 + pcmpgtd %mm2,%mm4 + movl %ecx,20(%esp) + xorl %ecx,%ebp + psllq $31,%mm5 + pand %mm3,%mm4 + movl %edx,24(%esp) + movl $7,%esi + movl %ebp,28(%esp) + movl %esi,%ebp + andl %ebx,%esi + shrl $3,%ebx + movl %ebp,%edi + psllq $30,%mm4 + andl %ebx,%edi + shrl $3,%ebx + movd (%esp,%esi,4),%mm0 + movl %ebp,%esi + andl %ebx,%esi + shrl $3,%ebx + movd (%esp,%edi,4),%mm2 + movl %ebp,%edi + psllq $3,%mm2 + andl %ebx,%edi + shrl $3,%ebx + pxor %mm2,%mm0 + movd (%esp,%esi,4),%mm1 + movl %ebp,%esi + psllq $6,%mm1 + andl %ebx,%esi + shrl $3,%ebx + pxor %mm1,%mm0 + movd (%esp,%edi,4),%mm2 + movl %ebp,%edi + psllq $9,%mm2 + andl %ebx,%edi + shrl $3,%ebx + pxor %mm2,%mm0 + movd (%esp,%esi,4),%mm1 + movl %ebp,%esi + psllq $12,%mm1 + andl %ebx,%esi + shrl $3,%ebx + pxor %mm1,%mm0 + movd (%esp,%edi,4),%mm2 + movl %ebp,%edi + psllq $15,%mm2 + andl %ebx,%edi + shrl $3,%ebx + pxor %mm2,%mm0 + movd (%esp,%esi,4),%mm1 + movl %ebp,%esi + psllq $18,%mm1 + andl %ebx,%esi + shrl $3,%ebx + pxor %mm1,%mm0 + movd (%esp,%edi,4),%mm2 + movl %ebp,%edi + psllq $21,%mm2 + andl %ebx,%edi + shrl $3,%ebx + pxor %mm2,%mm0 + 
movd (%esp,%esi,4),%mm1 + movl %ebp,%esi + psllq $24,%mm1 + andl %ebx,%esi + shrl $3,%ebx + pxor %mm1,%mm0 + movd (%esp,%edi,4),%mm2 + pxor %mm4,%mm0 + psllq $27,%mm2 + pxor %mm2,%mm0 + movd (%esp,%esi,4),%mm1 + pxor %mm5,%mm0 + psllq $30,%mm1 + addl $36,%esp + pxor %mm1,%mm0 + ret +.size _mul_1x1_mmx,.-_mul_1x1_mmx +.type _mul_1x1_ialu,@function +.align 16 +_mul_1x1_ialu: + subl $36,%esp + movl %eax,%ecx + leal (%eax,%eax,1),%edx + leal (,%eax,4),%ebp + andl $1073741823,%ecx + leal (%eax,%eax,1),%edi + sarl $31,%eax + movl $0,(%esp) + andl $2147483647,%edx + movl %ecx,4(%esp) + xorl %edx,%ecx + movl %edx,8(%esp) + xorl %ebp,%edx + movl %ecx,12(%esp) + xorl %edx,%ecx + movl %ebp,16(%esp) + xorl %edx,%ebp + movl %ecx,20(%esp) + xorl %ecx,%ebp + sarl $31,%edi + andl %ebx,%eax + movl %edx,24(%esp) + andl %ebx,%edi + movl %ebp,28(%esp) + movl %eax,%edx + shll $31,%eax + movl %edi,%ecx + shrl $1,%edx + movl $7,%esi + shll $30,%edi + andl %ebx,%esi + shrl $2,%ecx + xorl %edi,%eax + shrl $3,%ebx + movl $7,%edi + andl %ebx,%edi + shrl $3,%ebx + xorl %ecx,%edx + xorl (%esp,%esi,4),%eax + movl $7,%esi + andl %ebx,%esi + shrl $3,%ebx + movl (%esp,%edi,4),%ebp + movl $7,%edi + movl %ebp,%ecx + shll $3,%ebp + andl %ebx,%edi + shrl $29,%ecx + xorl %ebp,%eax + shrl $3,%ebx + xorl %ecx,%edx + movl (%esp,%esi,4),%ecx + movl $7,%esi + movl %ecx,%ebp + shll $6,%ecx + andl %ebx,%esi + shrl $26,%ebp + xorl %ecx,%eax + shrl $3,%ebx + xorl %ebp,%edx + movl (%esp,%edi,4),%ebp + movl $7,%edi + movl %ebp,%ecx + shll $9,%ebp + andl %ebx,%edi + shrl $23,%ecx + xorl %ebp,%eax + shrl $3,%ebx + xorl %ecx,%edx + movl (%esp,%esi,4),%ecx + movl $7,%esi + movl %ecx,%ebp + shll $12,%ecx + andl %ebx,%esi + shrl $20,%ebp + xorl %ecx,%eax + shrl $3,%ebx + xorl %ebp,%edx + movl (%esp,%edi,4),%ebp + movl $7,%edi + movl %ebp,%ecx + shll $15,%ebp + andl %ebx,%edi + shrl $17,%ecx + xorl %ebp,%eax + shrl $3,%ebx + xorl %ecx,%edx + movl (%esp,%esi,4),%ecx + movl $7,%esi + movl %ecx,%ebp + shll $18,%ecx + andl %ebx,%esi + shrl $14,%ebp + xorl %ecx,%eax + shrl $3,%ebx + xorl %ebp,%edx + movl (%esp,%edi,4),%ebp + movl $7,%edi + movl %ebp,%ecx + shll $21,%ebp + andl %ebx,%edi + shrl $11,%ecx + xorl %ebp,%eax + shrl $3,%ebx + xorl %ecx,%edx + movl (%esp,%esi,4),%ecx + movl $7,%esi + movl %ecx,%ebp + shll $24,%ecx + andl %ebx,%esi + shrl $8,%ebp + xorl %ecx,%eax + shrl $3,%ebx + xorl %ebp,%edx + movl (%esp,%edi,4),%ebp + movl %ebp,%ecx + shll $27,%ebp + movl (%esp,%esi,4),%edi + shrl $5,%ecx + movl %edi,%esi + xorl %ebp,%eax + shll $30,%edi + xorl %ecx,%edx + shrl $2,%esi + xorl %edi,%eax + xorl %esi,%edx + addl $36,%esp + ret +.size _mul_1x1_ialu,.-_mul_1x1_ialu +.globl bn_GF2m_mul_2x2 +.type bn_GF2m_mul_2x2,@function +.align 16 +bn_GF2m_mul_2x2: +.L_bn_GF2m_mul_2x2_begin: + leal OPENSSL_ia32cap_P,%edx + movl (%edx),%eax + movl 4(%edx),%edx + testl $8388608,%eax + jz .L000ialu + testl $16777216,%eax + jz .L001mmx + testl $2,%edx + jz .L001mmx + movups 8(%esp),%xmm0 + shufps $177,%xmm0,%xmm0 +.byte 102,15,58,68,192,1 + movl 4(%esp),%eax + movups %xmm0,(%eax) + ret +.align 16 +.L001mmx: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 24(%esp),%eax + movl 32(%esp),%ebx + call _mul_1x1_mmx + movq %mm0,%mm7 + movl 28(%esp),%eax + movl 36(%esp),%ebx + call _mul_1x1_mmx + movq %mm0,%mm6 + movl 24(%esp),%eax + movl 32(%esp),%ebx + xorl 28(%esp),%eax + xorl 36(%esp),%ebx + call _mul_1x1_mmx + pxor %mm7,%mm0 + movl 20(%esp),%eax + pxor %mm6,%mm0 + movq %mm0,%mm2 + psllq $32,%mm0 + popl %edi + psrlq $32,%mm2 + popl %esi + pxor %mm6,%mm0 + 
popl %ebx + pxor %mm7,%mm2 + movq %mm0,(%eax) + popl %ebp + movq %mm2,8(%eax) + emms + ret +.align 16 +.L000ialu: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + subl $20,%esp + movl 44(%esp),%eax + movl 52(%esp),%ebx + call _mul_1x1_ialu + movl %eax,8(%esp) + movl %edx,12(%esp) + movl 48(%esp),%eax + movl 56(%esp),%ebx + call _mul_1x1_ialu + movl %eax,(%esp) + movl %edx,4(%esp) + movl 44(%esp),%eax + movl 52(%esp),%ebx + xorl 48(%esp),%eax + xorl 56(%esp),%ebx + call _mul_1x1_ialu + movl 40(%esp),%ebp + movl (%esp),%ebx + movl 4(%esp),%ecx + movl 8(%esp),%edi + movl 12(%esp),%esi + xorl %edx,%eax + xorl %ecx,%edx + xorl %ebx,%eax + movl %ebx,(%ebp) + xorl %edi,%edx + movl %esi,12(%ebp) + xorl %esi,%eax + addl $20,%esp + xorl %esi,%edx + popl %edi + xorl %edx,%eax + popl %esi + movl %edx,8(%ebp) + popl %ebx + movl %eax,4(%ebp) + popl %ebp + ret +.size bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin +.byte 71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105 +.byte 99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32 +.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97 +.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103 +.byte 62,0 +.comm OPENSSL_ia32cap_P,8,4 diff --git a/secure/lib/libcrypto/asm/x86_64-gf2m.s b/secure/lib/libcrypto/asm/x86_64-gf2m.s new file mode 100644 index 0000000000..ccd2ed701c --- /dev/null +++ b/secure/lib/libcrypto/asm/x86_64-gf2m.s @@ -0,0 +1,291 @@ +.text + +.type _mul_1x1,@function +.align 16 +_mul_1x1: + subq $128+8,%rsp + movq $-1,%r9 + leaq (%rax,%rax,1),%rsi + shrq $3,%r9 + leaq (,%rax,4),%rdi + andq %rax,%r9 + leaq (,%rax,8),%r12 + sarq $63,%rax + leaq (%r9,%r9,1),%r10 + sarq $63,%rsi + leaq (,%r9,4),%r11 + andq %rbp,%rax + sarq $63,%rdi + movq %rax,%rdx + shlq $63,%rax + andq %rbp,%rsi + shrq $1,%rdx + movq %rsi,%rcx + shlq $62,%rsi + andq %rbp,%rdi + shrq $2,%rcx + xorq %rsi,%rax + movq %rdi,%rbx + shlq $61,%rdi + xorq %rcx,%rdx + shrq $3,%rbx + xorq %rdi,%rax + xorq %rbx,%rdx + + movq %r9,%r13 + movq $0,0(%rsp) + xorq %r10,%r13 + movq %r9,8(%rsp) + movq %r11,%r14 + movq %r10,16(%rsp) + xorq %r12,%r14 + movq %r13,24(%rsp) + + xorq %r11,%r9 + movq %r11,32(%rsp) + xorq %r11,%r10 + movq %r9,40(%rsp) + xorq %r11,%r13 + movq %r10,48(%rsp) + xorq %r14,%r9 + movq %r13,56(%rsp) + xorq %r14,%r10 + + movq %r12,64(%rsp) + xorq %r14,%r13 + movq %r9,72(%rsp) + xorq %r11,%r9 + movq %r10,80(%rsp) + xorq %r11,%r10 + movq %r13,88(%rsp) + + xorq %r11,%r13 + movq %r14,96(%rsp) + movq %r8,%rsi + movq %r9,104(%rsp) + andq %rbp,%rsi + movq %r10,112(%rsp) + shrq $4,%rbp + movq %r13,120(%rsp) + movq %r8,%rdi + andq %rbp,%rdi + shrq $4,%rbp + + movq (%rsp,%rsi,8),%xmm0 + movq %r8,%rsi + andq %rbp,%rsi + shrq $4,%rbp + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $4,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $60,%rbx + xorq %rcx,%rax + pslldq $1,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $12,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $52,%rbx + xorq %rcx,%rax + pslldq $2,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $20,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $44,%rbx + xorq %rcx,%rax + pslldq $3,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + 
movq %rcx,%rbx + shlq $28,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $36,%rbx + xorq %rcx,%rax + pslldq $4,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $36,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $28,%rbx + xorq %rcx,%rax + pslldq $5,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $44,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $20,%rbx + xorq %rcx,%rax + pslldq $6,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %r8,%rdi + movq %rcx,%rbx + shlq $52,%rcx + andq %rbp,%rdi + movq (%rsp,%rsi,8),%xmm1 + shrq $12,%rbx + xorq %rcx,%rax + pslldq $7,%xmm1 + movq %r8,%rsi + shrq $4,%rbp + xorq %rbx,%rdx + andq %rbp,%rsi + shrq $4,%rbp + pxor %xmm1,%xmm0 + movq (%rsp,%rdi,8),%rcx + movq %rcx,%rbx + shlq $60,%rcx +.byte 102,72,15,126,198 + shrq $4,%rbx + xorq %rcx,%rax + psrldq $8,%xmm0 + xorq %rbx,%rdx +.byte 102,72,15,126,199 + xorq %rsi,%rax + xorq %rdi,%rdx + + addq $128+8,%rsp + .byte 0xf3,0xc3 +.Lend_mul_1x1: +.size _mul_1x1,.-_mul_1x1 + +.globl bn_GF2m_mul_2x2 +.type bn_GF2m_mul_2x2,@function +.align 16 +bn_GF2m_mul_2x2: + movq OPENSSL_ia32cap_P(%rip),%rax + btq $33,%rax + jnc .Lvanilla_mul_2x2 + +.byte 102,72,15,110,198 +.byte 102,72,15,110,201 +.byte 102,72,15,110,210 +.byte 102,73,15,110,216 + movdqa %xmm0,%xmm4 + movdqa %xmm1,%xmm5 +.byte 102,15,58,68,193,0 + pxor %xmm2,%xmm4 + pxor %xmm3,%xmm5 +.byte 102,15,58,68,211,0 +.byte 102,15,58,68,229,0 + xorps %xmm0,%xmm4 + xorps %xmm2,%xmm4 + movdqa %xmm4,%xmm5 + pslldq $8,%xmm4 + psrldq $8,%xmm5 + pxor %xmm4,%xmm2 + pxor %xmm5,%xmm0 + movdqu %xmm2,0(%rdi) + movdqu %xmm0,16(%rdi) + .byte 0xf3,0xc3 + +.align 16 +.Lvanilla_mul_2x2: + leaq -136(%rsp),%rsp + movq %r14,80(%rsp) + movq %r13,88(%rsp) + movq %r12,96(%rsp) + movq %rbp,104(%rsp) + movq %rbx,112(%rsp) +.Lbody_mul_2x2: + movq %rdi,32(%rsp) + movq %rsi,40(%rsp) + movq %rdx,48(%rsp) + movq %rcx,56(%rsp) + movq %r8,64(%rsp) + + movq $15,%r8 + movq %rsi,%rax + movq %rcx,%rbp + call _mul_1x1 + movq %rax,16(%rsp) + movq %rdx,24(%rsp) + + movq 48(%rsp),%rax + movq 64(%rsp),%rbp + call _mul_1x1 + movq %rax,0(%rsp) + movq %rdx,8(%rsp) + + movq 40(%rsp),%rax + movq 56(%rsp),%rbp + xorq 48(%rsp),%rax + xorq 64(%rsp),%rbp + call _mul_1x1 + movq 0(%rsp),%rbx + movq 8(%rsp),%rcx + movq 16(%rsp),%rdi + movq 24(%rsp),%rsi + movq 32(%rsp),%rbp + + xorq %rdx,%rax + xorq %rcx,%rdx + xorq %rbx,%rax + movq %rbx,0(%rbp) + xorq %rdi,%rdx + movq %rsi,24(%rbp) + xorq %rsi,%rax + xorq %rsi,%rdx + xorq %rdx,%rax + movq %rdx,16(%rbp) + movq %rax,8(%rbp) + + movq 80(%rsp),%r14 + movq 88(%rsp),%r13 + movq 96(%rsp),%r12 + movq 104(%rsp),%rbp + movq 112(%rsp),%rbx + leaq 136(%rsp),%rsp + .byte 0xf3,0xc3 +.Lend_mul_2x2: +.size bn_GF2m_mul_2x2,.-bn_GF2m_mul_2x2 +.byte 71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 16 diff --git a/secure/lib/libcrypto/asm/x86_64-mont.s b/secure/lib/libcrypto/asm/x86_64-mont.s index b2460bfe29..95e2905287 100644 --- a/secure/lib/libcrypto/asm/x86_64-mont.s +++ b/secure/lib/libcrypto/asm/x86_64-mont.s @@ -4,6 +4,16 @@ .type 
bn_mul_mont,@function .align 16 bn_mul_mont: + testl $3,%r9d + jnz .Lmul_enter + cmpl $8,%r9d + jb .Lmul_enter + cmpq %rsi,%rdx + jne .Lmul4x_enter + jmp .Lsqr4x_enter + +.align 16 +.Lmul_enter: pushq %rbx pushq %rbp pushq %r12 @@ -19,48 +29,63 @@ bn_mul_mont: andq $-1024,%rsp movq %r11,8(%rsp,%r9,8) -.Lprologue: +.Lmul_body: movq %rdx,%r12 - movq (%r8),%r8 + movq (%r12),%rbx + movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 - movq (%r12),%rbx - movq (%rsi),%rax + movq %r8,%rbp mulq %rbx movq %rax,%r10 - movq %rdx,%r11 + movq (%rcx),%rax - imulq %r8,%rax - movq %rax,%rbp + imulq %r10,%rbp + movq %rdx,%r11 - mulq (%rcx) - addq %r10,%rax + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%r13 leaq 1(%r15),%r15 + jmp .L1st_enter + +.align 16 .L1st: + addq %rax,%r13 movq (%rsi,%r15,8),%rax - mulq %rbx - addq %r11,%rax adcq $0,%rdx - movq %rax,%r10 + addq %r11,%r13 + movq %r10,%r11 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + +.L1st_enter: + mulq %rbx + addq %rax,%r11 movq (%rcx,%r15,8),%rax - movq %rdx,%r11 + adcq $0,%rdx + leaq 1(%r15),%r15 + movq %rdx,%r10 mulq %rbp - addq %r13,%rax - leaq 1(%r15),%r15 + cmpq %r9,%r15 + jne .L1st + + addq %rax,%r13 + movq (%rsi),%rax adcq $0,%rdx - addq %r10,%rax + addq %r11,%r13 adcq $0,%rdx - movq %rax,-16(%rsp,%r15,8) - cmpq %r9,%r15 + movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 - jl .L1st + movq %r10,%r11 xorq %rdx,%rdx addq %r11,%r13 @@ -69,50 +94,64 @@ bn_mul_mont: movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 -.align 4 + jmp .Louter +.align 16 .Louter: - xorq %r15,%r15 - movq (%r12,%r14,8),%rbx - movq (%rsi),%rax + xorq %r15,%r15 + movq %r8,%rbp + movq (%rsp),%r10 mulq %rbx - addq (%rsp),%rax + addq %rax,%r10 + movq (%rcx),%rax adcq $0,%rdx - movq %rax,%r10 - movq %rdx,%r11 - imulq %r8,%rax - movq %rax,%rbp + imulq %r10,%rbp + movq %rdx,%r11 - mulq (%rcx,%r15,8) - addq %r10,%rax - movq 8(%rsp),%r10 + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax adcq $0,%rdx + movq 8(%rsp),%r10 movq %rdx,%r13 leaq 1(%r15),%r15 -.align 4 + jmp .Linner_enter + +.align 16 .Linner: + addq %rax,%r13 movq (%rsi,%r15,8),%rax - mulq %rbx - addq %r11,%rax adcq $0,%rdx - addq %rax,%r10 + addq %r10,%r13 + movq (%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + +.Linner_enter: + mulq %rbx + addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx + addq %r11,%r10 movq %rdx,%r11 + adcq $0,%r11 + leaq 1(%r15),%r15 mulq %rbp - addq %r13,%rax - leaq 1(%r15),%r15 - adcq $0,%rdx - addq %r10,%rax + cmpq %r9,%r15 + jne .Linner + + addq %rax,%r13 + movq (%rsi),%rax adcq $0,%rdx + addq %r10,%r13 movq (%rsp,%r15,8),%r10 - cmpq %r9,%r15 - movq %rax,-16(%rsp,%r15,8) + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 - jl .Linner xorq %rdx,%rdx addq %r11,%r13 @@ -126,35 +165,434 @@ bn_mul_mont: cmpq %r9,%r14 jl .Louter - leaq (%rsp),%rsi - leaq -1(%r9),%r15 - - movq (%rsi),%rax xorq %r14,%r14 + movq (%rsp),%rax + leaq (%rsp),%rsi + movq %r9,%r15 jmp .Lsub .align 16 .Lsub: sbbq (%rcx,%r14,8),%rax movq %rax,(%rdi,%r14,8) - decq %r15 movq 8(%rsi,%r14,8),%rax leaq 1(%r14),%r14 - jge .Lsub + decq %r15 + jnz .Lsub sbbq $0,%rax + xorq %r14,%r14 andq %rax,%rsi notq %rax movq %rdi,%rcx andq %rax,%rcx - leaq -1(%r9),%r15 + movq %r9,%r15 orq %rcx,%rsi .align 16 .Lcopy: + movq (%rsi,%r14,8),%rax + movq %r14,(%rsp,%r14,8) + movq %rax,(%rdi,%r14,8) + leaq 1(%r14),%r14 + subq $1,%r15 + jnz .Lcopy + + movq 8(%rsp,%r9,8),%rsi + movq $1,%rax + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + 
movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lmul_epilogue: + .byte 0xf3,0xc3 +.size bn_mul_mont,.-bn_mul_mont +.type bn_mul4x_mont,@function +.align 16 +bn_mul4x_mont: +.Lmul4x_enter: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + movl %r9d,%r9d + leaq 4(%r9),%r10 + movq %rsp,%r11 + negq %r10 + leaq (%rsp,%r10,8),%rsp + andq $-1024,%rsp + + movq %r11,8(%rsp,%r9,8) +.Lmul4x_body: + movq %rdi,16(%rsp,%r9,8) + movq %rdx,%r12 + movq (%r8),%r8 + movq (%r12),%rbx + movq (%rsi),%rax + + xorq %r14,%r14 + xorq %r15,%r15 + + movq %r8,%rbp + mulq %rbx + movq %rax,%r10 + movq (%rcx),%rax + + imulq %r10,%rbp + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq 16(%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + leaq 4(%r15),%r15 + adcq $0,%rdx + movq %rdi,(%rsp) + movq %rdx,%r13 + jmp .L1st4x +.align 16 +.L1st4x: + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + + mulq %rbx + addq %rax,%r10 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq 8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx,%r15,8),%rax + adcq $0,%rdx + leaq 4(%r15),%r15 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq -16(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-32(%rsp,%r15,8) + movq %rdx,%r13 + cmpq %r9,%r15 + jl .L1st4x + + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + + xorq %rdi,%rdi + addq %r10,%r13 + adcq $0,%rdi + movq %r13,-8(%rsp,%r15,8) + movq %rdi,(%rsp,%r15,8) + + leaq 1(%r14),%r14 +.align 4 +.Louter4x: + movq (%r12,%r14,8),%rbx + xorq %r15,%r15 + movq (%rsp),%r10 + movq %r8,%rbp + mulq %rbx + addq %rax,%r10 + movq (%rcx),%rax + adcq $0,%rdx + + imulq %r10,%rbp + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx),%rax + adcq $0,%rdx + addq 8(%rsp),%r11 + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq 16(%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + leaq 4(%r15),%r15 + adcq $0,%rdx + movq %rdi,(%rsp) + movq %rdx,%r13 + jmp .Linner4x +.align 16 +.Linner4x: + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -16(%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 
-8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -8(%rsp,%r15,8),%r11 + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi movq (%rsi,%r15,8),%rax - movq %rax,(%rdi,%r15,8) - movq %r14,(%rsp,%r15,8) + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + + mulq %rbx + addq %rax,%r10 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + addq (%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq 8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq 8(%rsp,%r15,8),%r11 + adcq $0,%rdx + leaq 4(%r15),%r15 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq -16(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-32(%rsp,%r15,8) + movq %rdx,%r13 + cmpq %r9,%r15 + jl .Linner4x + + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -16(%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -8(%rsp,%r15,8),%r11 + adcq $0,%rdx + leaq 1(%r14),%r14 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + + xorq %rdi,%rdi + addq %r10,%r13 + adcq $0,%rdi + addq (%rsp,%r9,8),%r13 + adcq $0,%rdi + movq %r13,-8(%rsp,%r15,8) + movq %rdi,(%rsp,%r15,8) + + cmpq %r9,%r14 + jl .Louter4x + movq 16(%rsp,%r9,8),%rdi + movq 0(%rsp),%rax + pxor %xmm0,%xmm0 + movq 8(%rsp),%rdx + shrq $2,%r9 + leaq (%rsp),%rsi + xorq %r14,%r14 + + subq 0(%rcx),%rax + movq 16(%rsi),%rbx + movq 24(%rsi),%rbp + sbbq 8(%rcx),%rdx + leaq -1(%r9),%r15 + jmp .Lsub4x +.align 16 +.Lsub4x: + movq %rax,0(%rdi,%r14,8) + movq %rdx,8(%rdi,%r14,8) + sbbq 16(%rcx,%r14,8),%rbx + movq 32(%rsi,%r14,8),%rax + movq 40(%rsi,%r14,8),%rdx + sbbq 24(%rcx,%r14,8),%rbp + movq %rbx,16(%rdi,%r14,8) + movq %rbp,24(%rdi,%r14,8) + sbbq 32(%rcx,%r14,8),%rax + movq 48(%rsi,%r14,8),%rbx + movq 56(%rsi,%r14,8),%rbp + sbbq 40(%rcx,%r14,8),%rdx + leaq 4(%r14),%r14 + decq %r15 + jnz .Lsub4x + + movq %rax,0(%rdi,%r14,8) + movq 32(%rsi,%r14,8),%rax + sbbq 16(%rcx,%r14,8),%rbx + movq %rdx,8(%rdi,%r14,8) + sbbq 24(%rcx,%r14,8),%rbp + movq %rbx,16(%rdi,%r14,8) + + sbbq $0,%rax + movq %rbp,24(%rdi,%r14,8) + xorq %r14,%r14 + andq %rax,%rsi + notq %rax + movq %rdi,%rcx + andq %rax,%rcx + leaq -1(%r9),%r15 + orq %rcx,%rsi + + movdqu (%rsi),%xmm1 + movdqa %xmm0,(%rsp) + movdqu %xmm1,(%rdi) + jmp .Lcopy4x +.align 16 +.Lcopy4x: + movdqu 16(%rsi,%r14,1),%xmm2 + movdqu 32(%rsi,%r14,1),%xmm1 + movdqa %xmm0,16(%rsp,%r14,1) + movdqu %xmm2,16(%rdi,%r14,1) + movdqa %xmm0,32(%rsp,%r14,1) + movdqu %xmm1,32(%rdi,%r14,1) + leaq 32(%r14),%r14 decq %r15 - jge .Lcopy + jnz .Lcopy4x + shlq $2,%r9 + movdqu 16(%rsi,%r14,1),%xmm2 + movdqa %xmm0,16(%rsp,%r14,1) + movdqu %xmm2,16(%rdi,%r14,1) movq 8(%rsp,%r9,8),%rsi movq $1,%rax movq (%rsi),%r15 @@ -164,8 +602,773 @@ bn_mul_mont: movq 32(%rsi),%rbp movq 40(%rsi),%rbx leaq 48(%rsi),%rsp -.Lepilogue: +.Lmul4x_epilogue: .byte 0xf3,0xc3 -.size bn_mul_mont,.-bn_mul_mont +.size bn_mul4x_mont,.-bn_mul4x_mont +.type bn_sqr4x_mont,@function +.align 16 +bn_sqr4x_mont: +.Lsqr4x_enter: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + shll $3,%r9d + 
xorq %r10,%r10 + movq %rsp,%r11 + subq %r9,%r10 + movq (%r8),%r8 + leaq -72(%rsp,%r10,2),%rsp + andq $-1024,%rsp + + + + + + + + + + + + movq %rdi,32(%rsp) + movq %rcx,40(%rsp) + movq %r8,48(%rsp) + movq %r11,56(%rsp) +.Lsqr4x_body: + + + + + + + + leaq 32(%r10),%rbp + leaq (%rsi,%r9,1),%rsi + + movq %r9,%rcx + + + movq -32(%rsi,%rbp,1),%r14 + leaq 64(%rsp,%r9,2),%rdi + movq -24(%rsi,%rbp,1),%rax + leaq -32(%rdi,%rbp,1),%rdi + movq -16(%rsi,%rbp,1),%rbx + movq %rax,%r15 + + mulq %r14 + movq %rax,%r10 + movq %rbx,%rax + movq %rdx,%r11 + movq %r10,-24(%rdi,%rbp,1) + + xorq %r10,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,-16(%rdi,%rbp,1) + + leaq -16(%rbp),%rcx + + + movq 8(%rsi,%rcx,1),%rbx + mulq %r15 + movq %rax,%r12 + movq %rbx,%rax + movq %rdx,%r13 + + xorq %r11,%r11 + addq %r12,%r10 + leaq 16(%rcx),%rcx + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-8(%rdi,%rcx,1) + jmp .Lsqr4x_1st + +.align 16 +.Lsqr4x_1st: + movq (%rsi,%rcx,1),%rbx + xorq %r12,%r12 + mulq %r15 + addq %rax,%r13 + movq %rbx,%rax + adcq %rdx,%r12 + + xorq %r10,%r10 + addq %r13,%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,(%rdi,%rcx,1) + + + movq 8(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + + xorq %r11,%r11 + addq %r12,%r10 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,8(%rdi,%rcx,1) + + movq 16(%rsi,%rcx,1),%rbx + xorq %r12,%r12 + mulq %r15 + addq %rax,%r13 + movq %rbx,%rax + adcq %rdx,%r12 + + xorq %r10,%r10 + addq %r13,%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,16(%rdi,%rcx,1) + + + movq 24(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + + xorq %r11,%r11 + addq %r12,%r10 + leaq 32(%rcx),%rcx + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-8(%rdi,%rcx,1) + + cmpq $0,%rcx + jne .Lsqr4x_1st + + xorq %r12,%r12 + addq %r11,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + adcq %rdx,%r12 + + movq %r13,(%rdi) + leaq 16(%rbp),%rbp + movq %r12,8(%rdi) + jmp .Lsqr4x_outer + +.align 16 +.Lsqr4x_outer: + movq -32(%rsi,%rbp,1),%r14 + leaq 64(%rsp,%r9,2),%rdi + movq -24(%rsi,%rbp,1),%rax + leaq -32(%rdi,%rbp,1),%rdi + movq -16(%rsi,%rbp,1),%rbx + movq %rax,%r15 + + movq -24(%rdi,%rbp,1),%r10 + xorq %r11,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-24(%rdi,%rbp,1) + + xorq %r10,%r10 + addq -16(%rdi,%rbp,1),%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,-16(%rdi,%rbp,1) + + leaq -16(%rbp),%rcx + xorq %r12,%r12 + + + movq 8(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + addq 8(%rdi,%rcx,1),%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + + xorq %r11,%r11 + addq %r12,%r10 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,8(%rdi,%rcx,1) + + leaq 16(%rcx),%rcx + jmp .Lsqr4x_inner + +.align 16 +.Lsqr4x_inner: + movq (%rsi,%rcx,1),%rbx + xorq %r12,%r12 + addq (%rdi,%rcx,1),%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq %rbx,%rax + adcq %rdx,%r12 + + xorq %r10,%r10 + addq %r13,%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,(%rdi,%rcx,1) + + movq 8(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + addq 8(%rdi,%rcx,1),%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq 
%rbx,%rax + adcq %rdx,%r13 + + xorq %r11,%r11 + addq %r12,%r10 + leaq 16(%rcx),%rcx + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-8(%rdi,%rcx,1) + + cmpq $0,%rcx + jne .Lsqr4x_inner + + xorq %r12,%r12 + addq %r11,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + adcq %rdx,%r12 + + movq %r13,(%rdi) + movq %r12,8(%rdi) + + addq $16,%rbp + jnz .Lsqr4x_outer + + + movq -32(%rsi),%r14 + leaq 64(%rsp,%r9,2),%rdi + movq -24(%rsi),%rax + leaq -32(%rdi,%rbp,1),%rdi + movq -16(%rsi),%rbx + movq %rax,%r15 + + xorq %r11,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-24(%rdi) + + xorq %r10,%r10 + addq %r13,%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + movq %r11,-16(%rdi) + + movq -8(%rsi),%rbx + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq $0,%rdx + + xorq %r11,%r11 + addq %r12,%r10 + movq %rdx,%r13 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %rbx,%rax + adcq %rdx,%r11 + movq %r10,-8(%rdi) + + xorq %r12,%r12 + addq %r11,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq -16(%rsi),%rax + adcq %rdx,%r12 + + movq %r13,(%rdi) + movq %r12,8(%rdi) + + mulq %rbx + addq $16,%rbp + xorq %r14,%r14 + subq %r9,%rbp + xorq %r15,%r15 + + addq %r12,%rax + adcq $0,%rdx + movq %rax,8(%rdi) + movq %rdx,16(%rdi) + movq %r15,24(%rdi) + + movq -16(%rsi,%rbp,1),%rax + leaq 64(%rsp,%r9,2),%rdi + xorq %r10,%r10 + movq -24(%rdi,%rbp,2),%r11 + + leaq (%r14,%r10,2),%r12 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r13 + shrq $63,%r11 + orq %r10,%r13 + movq -16(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq -8(%rdi,%rbp,2),%r11 + adcq %rax,%r12 + movq -8(%rsi,%rbp,1),%rax + movq %r12,-32(%rdi,%rbp,2) + adcq %rdx,%r13 + + leaq (%r14,%r10,2),%rbx + movq %r13,-24(%rdi,%rbp,2) + sbbq %r15,%r15 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r8 + shrq $63,%r11 + orq %r10,%r8 + movq 0(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq 8(%rdi,%rbp,2),%r11 + adcq %rax,%rbx + movq 0(%rsi,%rbp,1),%rax + movq %rbx,-16(%rdi,%rbp,2) + adcq %rdx,%r8 + leaq 16(%rbp),%rbp + movq %r8,-40(%rdi,%rbp,2) + sbbq %r15,%r15 + jmp .Lsqr4x_shift_n_add + +.align 16 +.Lsqr4x_shift_n_add: + leaq (%r14,%r10,2),%r12 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r13 + shrq $63,%r11 + orq %r10,%r13 + movq -16(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq -8(%rdi,%rbp,2),%r11 + adcq %rax,%r12 + movq -8(%rsi,%rbp,1),%rax + movq %r12,-32(%rdi,%rbp,2) + adcq %rdx,%r13 + + leaq (%r14,%r10,2),%rbx + movq %r13,-24(%rdi,%rbp,2) + sbbq %r15,%r15 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r8 + shrq $63,%r11 + orq %r10,%r8 + movq 0(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq 8(%rdi,%rbp,2),%r11 + adcq %rax,%rbx + movq 0(%rsi,%rbp,1),%rax + movq %rbx,-16(%rdi,%rbp,2) + adcq %rdx,%r8 + + leaq (%r14,%r10,2),%r12 + movq %r8,-8(%rdi,%rbp,2) + sbbq %r15,%r15 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r13 + shrq $63,%r11 + orq %r10,%r13 + movq 16(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq 24(%rdi,%rbp,2),%r11 + adcq %rax,%r12 + movq 8(%rsi,%rbp,1),%rax + movq %r12,0(%rdi,%rbp,2) + adcq %rdx,%r13 + + leaq (%r14,%r10,2),%rbx + movq %r13,8(%rdi,%rbp,2) + sbbq %r15,%r15 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r8 + shrq $63,%r11 + orq %r10,%r8 + movq 32(%rdi,%rbp,2),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq 40(%rdi,%rbp,2),%r11 + adcq %rax,%rbx + movq 16(%rsi,%rbp,1),%rax + movq %rbx,16(%rdi,%rbp,2) + adcq %rdx,%r8 + movq %r8,24(%rdi,%rbp,2) + sbbq %r15,%r15 + addq $32,%rbp + 
jnz .Lsqr4x_shift_n_add + + leaq (%r14,%r10,2),%r12 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r13 + shrq $63,%r11 + orq %r10,%r13 + movq -16(%rdi),%r10 + movq %r11,%r14 + mulq %rax + negq %r15 + movq -8(%rdi),%r11 + adcq %rax,%r12 + movq -8(%rsi),%rax + movq %r12,-32(%rdi) + adcq %rdx,%r13 + + leaq (%r14,%r10,2),%rbx + movq %r13,-24(%rdi) + sbbq %r15,%r15 + shrq $63,%r10 + leaq (%rcx,%r11,2),%r8 + shrq $63,%r11 + orq %r10,%r8 + mulq %rax + negq %r15 + adcq %rax,%rbx + adcq %rdx,%r8 + movq %rbx,-16(%rdi) + movq %r8,-8(%rdi) + movq 40(%rsp),%rsi + movq 48(%rsp),%r8 + xorq %rcx,%rcx + movq %r9,0(%rsp) + subq %r9,%rcx + movq 64(%rsp),%r10 + movq %r8,%r14 + leaq 64(%rsp,%r9,2),%rax + leaq 64(%rsp,%r9,1),%rdi + movq %rax,8(%rsp) + leaq (%rsi,%r9,1),%rsi + xorq %rbp,%rbp + + movq 0(%rsi,%rcx,1),%rax + movq 8(%rsi,%rcx,1),%r9 + imulq %r10,%r14 + movq %rax,%rbx + jmp .Lsqr4x_mont_outer + +.align 16 +.Lsqr4x_mont_outer: + xorq %r11,%r11 + mulq %r14 + addq %rax,%r10 + movq %r9,%rax + adcq %rdx,%r11 + movq %r8,%r15 + + xorq %r10,%r10 + addq 8(%rdi,%rcx,1),%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + + imulq %r11,%r15 + + movq 16(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + addq %r11,%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + movq %r12,8(%rdi,%rcx,1) + + xorq %r11,%r11 + addq 16(%rdi,%rcx,1),%r10 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %r9,%rax + adcq %rdx,%r11 + + movq 24(%rsi,%rcx,1),%r9 + xorq %r12,%r12 + addq %r10,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq %r9,%rax + adcq %rdx,%r12 + movq %r13,16(%rdi,%rcx,1) + + xorq %r10,%r10 + addq 24(%rdi,%rcx,1),%r11 + leaq 32(%rcx),%rcx + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + jmp .Lsqr4x_mont_inner + +.align 16 +.Lsqr4x_mont_inner: + movq (%rsi,%rcx,1),%rbx + xorq %r13,%r13 + addq %r11,%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + movq %r12,-8(%rdi,%rcx,1) + + xorq %r11,%r11 + addq (%rdi,%rcx,1),%r10 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %r9,%rax + adcq %rdx,%r11 + + movq 8(%rsi,%rcx,1),%r9 + xorq %r12,%r12 + addq %r10,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq %r9,%rax + adcq %rdx,%r12 + movq %r13,(%rdi,%rcx,1) + + xorq %r10,%r10 + addq 8(%rdi,%rcx,1),%r11 + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + + + movq 16(%rsi,%rcx,1),%rbx + xorq %r13,%r13 + addq %r11,%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq %rbx,%rax + adcq %rdx,%r13 + movq %r12,8(%rdi,%rcx,1) + + xorq %r11,%r11 + addq 16(%rdi,%rcx,1),%r10 + adcq $0,%r11 + mulq %r14 + addq %rax,%r10 + movq %r9,%rax + adcq %rdx,%r11 + + movq 24(%rsi,%rcx,1),%r9 + xorq %r12,%r12 + addq %r10,%r13 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq %r9,%rax + adcq %rdx,%r12 + movq %r13,16(%rdi,%rcx,1) + + xorq %r10,%r10 + addq 24(%rdi,%rcx,1),%r11 + leaq 32(%rcx),%rcx + adcq $0,%r10 + mulq %r14 + addq %rax,%r11 + movq %rbx,%rax + adcq %rdx,%r10 + cmpq $0,%rcx + jne .Lsqr4x_mont_inner + + subq 0(%rsp),%rcx + movq %r8,%r14 + + xorq %r13,%r13 + addq %r11,%r12 + adcq $0,%r13 + mulq %r15 + addq %rax,%r12 + movq %r9,%rax + adcq %rdx,%r13 + movq %r12,-8(%rdi) + + xorq %r11,%r11 + addq (%rdi),%r10 + adcq $0,%r11 + movq 0(%rsi,%rcx,1),%rbx + addq %rbp,%r10 + adcq $0,%r11 + + imulq 16(%rdi,%rcx,1),%r14 + xorq %r12,%r12 + movq 8(%rsi,%rcx,1),%r9 + addq %r10,%r13 + movq 16(%rdi,%rcx,1),%r10 + adcq $0,%r12 + mulq %r15 + addq %rax,%r13 + movq %rbx,%rax + adcq %rdx,%r12 + movq %r13,(%rdi) + + 
xorq %rbp,%rbp + addq 8(%rdi),%r12 + adcq %rbp,%rbp + addq %r11,%r12 + leaq 16(%rdi),%rdi + adcq $0,%rbp + movq %r12,-8(%rdi) + cmpq 8(%rsp),%rdi + jb .Lsqr4x_mont_outer + + movq 0(%rsp),%r9 + movq %rbp,(%rdi) + movq 64(%rsp,%r9,1),%rax + leaq 64(%rsp,%r9,1),%rbx + movq 40(%rsp),%rsi + shrq $5,%r9 + movq 8(%rbx),%rdx + xorq %rbp,%rbp + + movq 32(%rsp),%rdi + subq 0(%rsi),%rax + movq 16(%rbx),%r10 + movq 24(%rbx),%r11 + sbbq 8(%rsi),%rdx + leaq -1(%r9),%rcx + jmp .Lsqr4x_sub +.align 16 +.Lsqr4x_sub: + movq %rax,0(%rdi,%rbp,8) + movq %rdx,8(%rdi,%rbp,8) + sbbq 16(%rsi,%rbp,8),%r10 + movq 32(%rbx,%rbp,8),%rax + movq 40(%rbx,%rbp,8),%rdx + sbbq 24(%rsi,%rbp,8),%r11 + movq %r10,16(%rdi,%rbp,8) + movq %r11,24(%rdi,%rbp,8) + sbbq 32(%rsi,%rbp,8),%rax + movq 48(%rbx,%rbp,8),%r10 + movq 56(%rbx,%rbp,8),%r11 + sbbq 40(%rsi,%rbp,8),%rdx + leaq 4(%rbp),%rbp + decq %rcx + jnz .Lsqr4x_sub + + movq %rax,0(%rdi,%rbp,8) + movq 32(%rbx,%rbp,8),%rax + sbbq 16(%rsi,%rbp,8),%r10 + movq %rdx,8(%rdi,%rbp,8) + sbbq 24(%rsi,%rbp,8),%r11 + movq %r10,16(%rdi,%rbp,8) + + sbbq $0,%rax + movq %r11,24(%rdi,%rbp,8) + xorq %rbp,%rbp + andq %rax,%rbx + notq %rax + movq %rdi,%rsi + andq %rax,%rsi + leaq -1(%r9),%rcx + orq %rsi,%rbx + + pxor %xmm0,%xmm0 + leaq 64(%rsp,%r9,8),%rsi + movdqu (%rbx),%xmm1 + leaq (%rsi,%r9,8),%rsi + movdqa %xmm0,64(%rsp) + movdqa %xmm0,(%rsi) + movdqu %xmm1,(%rdi) + jmp .Lsqr4x_copy +.align 16 +.Lsqr4x_copy: + movdqu 16(%rbx,%rbp,1),%xmm2 + movdqu 32(%rbx,%rbp,1),%xmm1 + movdqa %xmm0,80(%rsp,%rbp,1) + movdqa %xmm0,96(%rsp,%rbp,1) + movdqa %xmm0,16(%rsi,%rbp,1) + movdqa %xmm0,32(%rsi,%rbp,1) + movdqu %xmm2,16(%rdi,%rbp,1) + movdqu %xmm1,32(%rdi,%rbp,1) + leaq 32(%rbp),%rbp + decq %rcx + jnz .Lsqr4x_copy + + movdqu 16(%rbx,%rbp,1),%xmm2 + movdqa %xmm0,80(%rsp,%rbp,1) + movdqa %xmm0,16(%rsi,%rbp,1) + movdqu %xmm2,16(%rdi,%rbp,1) + movq 56(%rsp),%rsi + movq $1,%rax + movq 0(%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lsqr4x_epilogue: + .byte 0xf3,0xc3 +.size bn_sqr4x_mont,.-bn_sqr4x_mont .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 16 diff --git a/secure/lib/libcrypto/asm/x86_64-mont5.s b/secure/lib/libcrypto/asm/x86_64-mont5.s new file mode 100644 index 0000000000..49ec6ac67b --- /dev/null +++ b/secure/lib/libcrypto/asm/x86_64-mont5.s @@ -0,0 +1,784 @@ +.text + +.globl bn_mul_mont_gather5 +.type bn_mul_mont_gather5,@function +.align 64 +bn_mul_mont_gather5: + testl $3,%r9d + jnz .Lmul_enter + cmpl $8,%r9d + jb .Lmul_enter + jmp .Lmul4x_enter + +.align 16 +.Lmul_enter: + movl %r9d,%r9d + movl 8(%rsp),%r10d + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%rax + leaq 2(%r9),%r11 + negq %r11 + leaq (%rsp,%r11,8),%rsp + andq $-1024,%rsp + + movq %rax,8(%rsp,%r9,8) +.Lmul_body: + movq %rdx,%r12 + movq %r10,%r11 + shrq $3,%r10 + andq $7,%r11 + notq %r10 + leaq .Lmagic_masks(%rip),%rax + andq $3,%r10 + leaq 96(%r12,%r11,8),%r12 + movq 0(%rax,%r10,8),%xmm4 + movq 8(%rax,%r10,8),%xmm5 + movq 16(%rax,%r10,8),%xmm6 + movq 24(%rax,%r10,8),%xmm7 + + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + por %xmm2,%xmm0 + leaq 
256(%r12),%r12 + por %xmm3,%xmm0 + +.byte 102,72,15,126,195 + + movq (%r8),%r8 + movq (%rsi),%rax + + xorq %r14,%r14 + xorq %r15,%r15 + + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + + movq %r8,%rbp + mulq %rbx + movq %rax,%r10 + movq (%rcx),%rax + + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + + imulq %r10,%rbp + movq %rdx,%r11 + + por %xmm2,%xmm0 + leaq 256(%r12),%r12 + por %xmm3,%xmm0 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq %rdx,%r13 + + leaq 1(%r15),%r15 + jmp .L1st_enter + +.align 16 +.L1st: + addq %rax,%r13 + movq (%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%r13 + movq %r10,%r11 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + +.L1st_enter: + mulq %rbx + addq %rax,%r11 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + leaq 1(%r15),%r15 + movq %rdx,%r10 + + mulq %rbp + cmpq %r9,%r15 + jne .L1st + +.byte 102,72,15,126,195 + + addq %rax,%r13 + movq (%rsi),%rax + adcq $0,%rdx + addq %r11,%r13 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + movq %r10,%r11 + + xorq %rdx,%rdx + addq %r11,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r9,8) + movq %rdx,(%rsp,%r9,8) + + leaq 1(%r14),%r14 + jmp .Louter +.align 16 +.Louter: + xorq %r15,%r15 + movq %r8,%rbp + movq (%rsp),%r10 + + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + + mulq %rbx + addq %rax,%r10 + movq (%rcx),%rax + adcq $0,%rdx + + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + + imulq %r10,%rbp + movq %rdx,%r11 + + por %xmm2,%xmm0 + leaq 256(%r12),%r12 + por %xmm3,%xmm0 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq 8(%rsp),%r10 + movq %rdx,%r13 + + leaq 1(%r15),%r15 + jmp .Linner_enter + +.align 16 +.Linner: + addq %rax,%r13 + movq (%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + movq (%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + +.Linner_enter: + mulq %rbx + addq %rax,%r11 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + addq %r11,%r10 + movq %rdx,%r11 + adcq $0,%r11 + leaq 1(%r15),%r15 + + mulq %rbp + cmpq %r9,%r15 + jne .Linner + +.byte 102,72,15,126,195 + + addq %rax,%r13 + movq (%rsi),%rax + adcq $0,%rdx + addq %r10,%r13 + movq (%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %r13,-16(%rsp,%r15,8) + movq %rdx,%r13 + + xorq %rdx,%rdx + addq %r11,%r13 + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r9,8) + movq %rdx,(%rsp,%r9,8) + + leaq 1(%r14),%r14 + cmpq %r9,%r14 + jl .Louter + + xorq %r14,%r14 + movq (%rsp),%rax + leaq (%rsp),%rsi + movq %r9,%r15 + jmp .Lsub +.align 16 +.Lsub: sbbq (%rcx,%r14,8),%rax + movq %rax,(%rdi,%r14,8) + movq 8(%rsi,%r14,8),%rax + leaq 1(%r14),%r14 + decq %r15 + jnz .Lsub + + sbbq $0,%rax + xorq %r14,%r14 + andq %rax,%rsi + notq %rax + movq %rdi,%rcx + andq %rax,%rcx + movq %r9,%r15 + orq %rcx,%rsi +.align 16 +.Lcopy: + movq (%rsi,%r14,8),%rax + movq %r14,(%rsp,%r14,8) + movq %rax,(%rdi,%r14,8) + leaq 1(%r14),%r14 + subq $1,%r15 + jnz .Lcopy + + movq 8(%rsp,%r9,8),%rsi + movq $1,%rax + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lmul_epilogue: + .byte 0xf3,0xc3 +.size bn_mul_mont_gather5,.-bn_mul_mont_gather5 +.type bn_mul4x_mont_gather5,@function +.align 16 +bn_mul4x_mont_gather5: +.Lmul4x_enter: + movl %r9d,%r9d + movl 8(%rsp),%r10d + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + 
pushq %r15 + movq %rsp,%rax + leaq 4(%r9),%r11 + negq %r11 + leaq (%rsp,%r11,8),%rsp + andq $-1024,%rsp + + movq %rax,8(%rsp,%r9,8) +.Lmul4x_body: + movq %rdi,16(%rsp,%r9,8) + movq %rdx,%r12 + movq %r10,%r11 + shrq $3,%r10 + andq $7,%r11 + notq %r10 + leaq .Lmagic_masks(%rip),%rax + andq $3,%r10 + leaq 96(%r12,%r11,8),%r12 + movq 0(%rax,%r10,8),%xmm4 + movq 8(%rax,%r10,8),%xmm5 + movq 16(%rax,%r10,8),%xmm6 + movq 24(%rax,%r10,8),%xmm7 + + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + por %xmm2,%xmm0 + leaq 256(%r12),%r12 + por %xmm3,%xmm0 + +.byte 102,72,15,126,195 + movq (%r8),%r8 + movq (%rsi),%rax + + xorq %r14,%r14 + xorq %r15,%r15 + + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + + movq %r8,%rbp + mulq %rbx + movq %rax,%r10 + movq (%rcx),%rax + + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + + imulq %r10,%rbp + movq %rdx,%r11 + + por %xmm2,%xmm0 + leaq 256(%r12),%r12 + por %xmm3,%xmm0 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq 16(%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + leaq 4(%r15),%r15 + adcq $0,%rdx + movq %rdi,(%rsp) + movq %rdx,%r13 + jmp .L1st4x +.align 16 +.L1st4x: + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + + mulq %rbx + addq %rax,%r10 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq 8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx,%r15,8),%rax + adcq $0,%rdx + leaq 4(%r15),%r15 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq -16(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-32(%rsp,%r15,8) + movq %rdx,%r13 + cmpq %r9,%r15 + jl .L1st4x + + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%r13 + +.byte 102,72,15,126,195 + + xorq %rdi,%rdi + addq %r10,%r13 + adcq $0,%rdi + movq %r13,-8(%rsp,%r15,8) + movq %rdi,(%rsp,%r15,8) + + leaq 1(%r14),%r14 +.align 4 +.Louter4x: + xorq %r15,%r15 + movq -96(%r12),%xmm0 + movq -32(%r12),%xmm1 + pand %xmm4,%xmm0 + movq 32(%r12),%xmm2 + pand %xmm5,%xmm1 + + movq (%rsp),%r10 + movq %r8,%rbp + mulq %rbx + addq %rax,%r10 + movq (%rcx),%rax + adcq $0,%rdx + + movq 96(%r12),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + + imulq %r10,%rbp + movq %rdx,%r11 + + por %xmm2,%xmm0 + leaq 256(%r12),%r12 + por 
%xmm3,%xmm0 + + mulq %rbp + addq %rax,%r10 + movq 8(%rsi),%rax + adcq $0,%rdx + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx),%rax + adcq $0,%rdx + addq 8(%rsp),%r11 + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq 16(%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + leaq 4(%r15),%r15 + adcq $0,%rdx + movq %rdx,%r13 + jmp .Linner4x +.align 16 +.Linner4x: + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -16(%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %rdi,-32(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -8(%rsp,%r15,8),%r11 + adcq $0,%rdx + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%r13 + + mulq %rbx + addq %rax,%r10 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + addq (%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq 8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %rdi,-16(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq 8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq 8(%rsp,%r15,8),%r11 + adcq $0,%rdx + leaq 4(%r15),%r15 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq -16(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %r13,-40(%rsp,%r15,8) + movq %rdx,%r13 + cmpq %r9,%r15 + jl .Linner4x + + mulq %rbx + addq %rax,%r10 + movq -16(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -16(%rsp,%r15,8),%r10 + adcq $0,%rdx + movq %rdx,%r11 + + mulq %rbp + addq %rax,%r13 + movq -8(%rsi,%r15,8),%rax + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %rdi,-32(%rsp,%r15,8) + movq %rdx,%rdi + + mulq %rbx + addq %rax,%r11 + movq -8(%rcx,%r15,8),%rax + adcq $0,%rdx + addq -8(%rsp,%r15,8),%r11 + adcq $0,%rdx + leaq 1(%r14),%r14 + movq %rdx,%r10 + + mulq %rbp + addq %rax,%rdi + movq (%rsi),%rax + adcq $0,%rdx + addq %r11,%rdi + adcq $0,%rdx + movq %r13,-24(%rsp,%r15,8) + movq %rdx,%r13 + +.byte 102,72,15,126,195 + movq %rdi,-16(%rsp,%r15,8) + + xorq %rdi,%rdi + addq %r10,%r13 + adcq $0,%rdi + addq (%rsp,%r9,8),%r13 + adcq $0,%rdi + movq %r13,-8(%rsp,%r15,8) + movq %rdi,(%rsp,%r15,8) + + cmpq %r9,%r14 + jl .Louter4x + movq 16(%rsp,%r9,8),%rdi + movq 0(%rsp),%rax + pxor %xmm0,%xmm0 + movq 8(%rsp),%rdx + shrq $2,%r9 + leaq (%rsp),%rsi + xorq %r14,%r14 + + subq 0(%rcx),%rax + movq 16(%rsi),%rbx + movq 24(%rsi),%rbp + sbbq 8(%rcx),%rdx + leaq -1(%r9),%r15 + jmp .Lsub4x +.align 16 +.Lsub4x: + movq %rax,0(%rdi,%r14,8) + movq %rdx,8(%rdi,%r14,8) + sbbq 16(%rcx,%r14,8),%rbx + movq 32(%rsi,%r14,8),%rax + movq 40(%rsi,%r14,8),%rdx + sbbq 24(%rcx,%r14,8),%rbp + movq %rbx,16(%rdi,%r14,8) + movq %rbp,24(%rdi,%r14,8) + sbbq 32(%rcx,%r14,8),%rax + movq 48(%rsi,%r14,8),%rbx + movq 56(%rsi,%r14,8),%rbp + sbbq 40(%rcx,%r14,8),%rdx + leaq 4(%r14),%r14 + decq %r15 + jnz .Lsub4x + + movq %rax,0(%rdi,%r14,8) + movq 32(%rsi,%r14,8),%rax + sbbq 16(%rcx,%r14,8),%rbx + movq %rdx,8(%rdi,%r14,8) + sbbq 24(%rcx,%r14,8),%rbp + movq %rbx,16(%rdi,%r14,8) + + sbbq $0,%rax + movq %rbp,24(%rdi,%r14,8) + xorq %r14,%r14 + andq %rax,%rsi + notq %rax + movq %rdi,%rcx + andq %rax,%rcx + leaq -1(%r9),%r15 + orq %rcx,%rsi + + movdqu (%rsi),%xmm1 + movdqa %xmm0,(%rsp) + movdqu %xmm1,(%rdi) + jmp .Lcopy4x +.align 16 +.Lcopy4x: + movdqu 16(%rsi,%r14,1),%xmm2 + movdqu 32(%rsi,%r14,1),%xmm1 + movdqa 
%xmm0,16(%rsp,%r14,1) + movdqu %xmm2,16(%rdi,%r14,1) + movdqa %xmm0,32(%rsp,%r14,1) + movdqu %xmm1,32(%rdi,%r14,1) + leaq 32(%r14),%r14 + decq %r15 + jnz .Lcopy4x + + shlq $2,%r9 + movdqu 16(%rsi,%r14,1),%xmm2 + movdqa %xmm0,16(%rsp,%r14,1) + movdqu %xmm2,16(%rdi,%r14,1) + movq 8(%rsp,%r9,8),%rsi + movq $1,%rax + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lmul4x_epilogue: + .byte 0xf3,0xc3 +.size bn_mul4x_mont_gather5,.-bn_mul4x_mont_gather5 +.globl bn_scatter5 +.type bn_scatter5,@function +.align 16 +bn_scatter5: + cmpq $0,%rsi + jz .Lscatter_epilogue + leaq (%rdx,%rcx,8),%rdx +.Lscatter: + movq (%rdi),%rax + leaq 8(%rdi),%rdi + movq %rax,(%rdx) + leaq 256(%rdx),%rdx + subq $1,%rsi + jnz .Lscatter +.Lscatter_epilogue: + .byte 0xf3,0xc3 +.size bn_scatter5,.-bn_scatter5 + +.globl bn_gather5 +.type bn_gather5,@function +.align 16 +bn_gather5: + movq %rcx,%r11 + shrq $3,%rcx + andq $7,%r11 + notq %rcx + leaq .Lmagic_masks(%rip),%rax + andq $3,%rcx + leaq 96(%rdx,%r11,8),%rdx + movq 0(%rax,%rcx,8),%xmm4 + movq 8(%rax,%rcx,8),%xmm5 + movq 16(%rax,%rcx,8),%xmm6 + movq 24(%rax,%rcx,8),%xmm7 + jmp .Lgather +.align 16 +.Lgather: + movq -96(%rdx),%xmm0 + movq -32(%rdx),%xmm1 + pand %xmm4,%xmm0 + movq 32(%rdx),%xmm2 + pand %xmm5,%xmm1 + movq 96(%rdx),%xmm3 + pand %xmm6,%xmm2 + por %xmm1,%xmm0 + pand %xmm7,%xmm3 + por %xmm2,%xmm0 + leaq 256(%rdx),%rdx + por %xmm3,%xmm0 + + movq %xmm0,(%rdi) + leaq 8(%rdi),%rdi + subq $1,%rsi + jnz .Lgather + .byte 0xf3,0xc3 +.LSEH_end_bn_gather5: +.size bn_gather5,.-bn_gather5 +.align 64 +.Lmagic_masks: +.long 0,0, 0,0, 0,0, -1,-1 +.long 0,0, 0,0, 0,0, 0,0 +.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,119,105,116,104,32,115,99,97,116,116,101,114,47,103,97,116,104,101,114,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 diff --git a/secure/lib/libcrypto/asm/x86_64cpuid.s b/secure/lib/libcrypto/asm/x86_64cpuid.s index c26790e5d3..562b03abd1 100644 --- a/secure/lib/libcrypto/asm/x86_64cpuid.s +++ b/secure/lib/libcrypto/asm/x86_64cpuid.s @@ -1,7 +1,11 @@ +.hidden OPENSSL_cpuid_setup .section .init call OPENSSL_cpuid_setup +.hidden OPENSSL_ia32cap_P +.comm OPENSSL_ia32cap_P,8,4 + .text .globl OPENSSL_atomic_add @@ -64,7 +68,15 @@ OPENSSL_ia32_cpuid: movl $2147483648,%eax cpuid - cmpl $2147483656,%eax + cmpl $2147483649,%eax + jb .Lintel + movl %eax,%r10d + movl $2147483649,%eax + cpuid + orl %ecx,%r9d + andl $2049,%r9d + + cmpl $2147483656,%r10d jb .Lintel movl $2147483656,%eax @@ -75,12 +87,12 @@ OPENSSL_ia32_cpuid: movl $1,%eax cpuid btl $28,%edx - jnc .Ldone + jnc .Lgeneric shrl $16,%ebx cmpb %r10b,%bl - ja .Ldone + ja .Lgeneric andl $4026531839,%edx - jmp .Ldone + jmp .Lgeneric .Lintel: cmpl $4,%r11d @@ -97,30 +109,47 @@ OPENSSL_ia32_cpuid: .Lnocacheinfo: movl $1,%eax cpuid + andl $3220176895,%edx cmpl $0,%r9d jne .Lnotintel - orl $1048576,%edx + orl $1073741824,%edx andb $15,%ah cmpb $15,%ah - je .Lnotintel - orl $1073741824,%edx + jne .Lnotintel + orl $1048576,%edx .Lnotintel: btl $28,%edx - jnc .Ldone + jnc .Lgeneric andl $4026531839,%edx cmpl $0,%r10d - je .Ldone + je .Lgeneric orl $268435456,%edx shrl $16,%ebx cmpb $1,%bl - ja .Ldone + ja .Lgeneric andl $4026531839,%edx +.Lgeneric: + andl $2048,%r9d + andl $4294965247,%ecx + orl %ecx,%r9d + + movl %edx,%r10d + btl $27,%r9d + jnc .Lclear_avx + xorl 
%ecx,%ecx +.byte 0x0f,0x01,0xd0 + andl $6,%eax + cmpl $6,%eax + je .Ldone +.Lclear_avx: + movl $4026525695,%eax + andl %eax,%r9d .Ldone: - shlq $32,%rcx - movl %edx,%eax + shlq $32,%r9 + movl %r10d,%eax movq %r8,%rbx - orq %rcx,%rax + orq %r9,%rax .byte 0xf3,0xc3 .size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid @@ -189,3 +218,17 @@ OPENSSL_wipe_cpu: leaq 8(%rsp),%rax .byte 0xf3,0xc3 .size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu +.globl OPENSSL_ia32_rdrand +.type OPENSSL_ia32_rdrand,@function +.align 16 +OPENSSL_ia32_rdrand: + movl $8,%ecx +.Loop_rdrand: +.byte 72,15,199,240 + jc .Lbreak_rdrand + loop .Loop_rdrand +.Lbreak_rdrand: + cmpq $0,%rax + cmoveq %rcx,%rax + .byte 0xf3,0xc3 +.size OPENSSL_ia32_rdrand,.-OPENSSL_ia32_rdrand diff --git a/secure/lib/libcrypto/asm/x86cpuid.s b/secure/lib/libcrypto/asm/x86cpuid.s index a6ab162599..3601671b24 100644 --- a/secure/lib/libcrypto/asm/x86cpuid.s +++ b/secure/lib/libcrypto/asm/x86cpuid.s @@ -19,9 +19,9 @@ OPENSSL_ia32_cpuid: pushfl popl %eax xorl %eax,%ecx - btl $21,%ecx - jnc .L000done xorl %eax,%eax + btl $21,%ecx + jnc .L000nocpuid .byte 0x0f,0xa2 movl %eax,%edi xorl %eax,%eax @@ -47,7 +47,14 @@ OPENSSL_ia32_cpuid: jnz .L001intel movl $2147483648,%eax .byte 0x0f,0xa2 - cmpl $2147483656,%eax + cmpl $2147483649,%eax + jb .L001intel + movl %eax,%esi + movl $2147483649,%eax + .byte 0x0f,0xa2 + orl %ecx,%ebp + andl $2049,%ebp + cmpl $2147483656,%esi jb .L001intel movl $2147483656,%eax .byte 0x0f,0xa2 @@ -56,46 +63,68 @@ OPENSSL_ia32_cpuid: movl $1,%eax .byte 0x0f,0xa2 btl $28,%edx - jnc .L000done + jnc .L002generic shrl $16,%ebx andl $255,%ebx cmpl %esi,%ebx - ja .L000done + ja .L002generic andl $4026531839,%edx - jmp .L000done + jmp .L002generic .L001intel: cmpl $4,%edi movl $-1,%edi - jb .L002nocacheinfo + jb .L003nocacheinfo movl $4,%eax movl $0,%ecx .byte 0x0f,0xa2 movl %eax,%edi shrl $14,%edi andl $4095,%edi -.L002nocacheinfo: +.L003nocacheinfo: movl $1,%eax .byte 0x0f,0xa2 + andl $3220176895,%edx cmpl $0,%ebp - jne .L003notP4 + jne .L004notintel + orl $1073741824,%edx andb $15,%ah cmpb $15,%ah - jne .L003notP4 + jne .L004notintel orl $1048576,%edx -.L003notP4: +.L004notintel: btl $28,%edx - jnc .L000done + jnc .L002generic andl $4026531839,%edx cmpl $0,%edi - je .L000done + je .L002generic orl $268435456,%edx shrl $16,%ebx cmpb $1,%bl - ja .L000done + ja .L002generic andl $4026531839,%edx -.L000done: - movl %edx,%eax - movl %ecx,%edx +.L002generic: + andl $2048,%ebp + andl $4294965247,%ecx + movl %edx,%esi + orl %ecx,%ebp + btl $27,%ecx + jnc .L005clear_avx + xorl %ecx,%ecx +.byte 15,1,208 + andl $6,%eax + cmpl $6,%eax + je .L006done + cmpl $2,%eax + je .L005clear_avx +.L007clear_xmm: + andl $4261412861,%ebp + andl $4278190079,%esi +.L005clear_avx: + andl $4026525695,%ebp +.L006done: + movl %esi,%eax + movl %ebp,%edx +.L000nocpuid: popl %edi popl %esi popl %ebx @@ -111,9 +140,9 @@ OPENSSL_rdtsc: xorl %edx,%edx leal OPENSSL_ia32cap_P,%ecx btl $4,(%ecx) - jnc .L004notsc + jnc .L008notsc .byte 0x0f,0x31 -.L004notsc: +.L008notsc: ret .size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin .globl OPENSSL_instrument_halt @@ -123,14 +152,14 @@ OPENSSL_instrument_halt: .L_OPENSSL_instrument_halt_begin: leal OPENSSL_ia32cap_P,%ecx btl $4,(%ecx) - jnc .L005nohalt + jnc .L009nohalt .long 2421723150 andl $3,%eax - jnz .L005nohalt + jnz .L009nohalt pushfl popl %eax btl $9,%eax - jnc .L005nohalt + jnc .L009nohalt .byte 0x0f,0x31 pushl %edx pushl %eax @@ -140,7 +169,7 @@ OPENSSL_instrument_halt: sbbl 4(%esp),%edx addl $8,%esp ret -.L005nohalt: +.L009nohalt: xorl %eax,%eax 
xorl %edx,%edx ret @@ -153,21 +182,21 @@ OPENSSL_far_spin: pushfl popl %eax btl $9,%eax - jnc .L006nospin + jnc .L010nospin movl 4(%esp),%eax movl 8(%esp),%ecx .long 2430111262 xorl %eax,%eax movl (%ecx),%edx - jmp .L007spin + jmp .L011spin .align 16 -.L007spin: +.L011spin: incl %eax cmpl (%ecx),%edx - je .L007spin + je .L011spin .long 529567888 ret -.L006nospin: +.L010nospin: xorl %eax,%eax xorl %edx,%edx ret @@ -182,9 +211,10 @@ OPENSSL_wipe_cpu: leal OPENSSL_ia32cap_P,%ecx movl (%ecx),%ecx btl $1,(%ecx) - jnc .L008no_x87 - btl $26,(%ecx) - jnc .L009no_sse2 + jnc .L012no_x87 + andl $83886080,%ecx + cmpl $83886080,%ecx + jne .L013no_sse2 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 @@ -193,9 +223,9 @@ OPENSSL_wipe_cpu: pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 -.L009no_sse2: +.L013no_sse2: .long 4007259865,4007259865,4007259865,4007259865,2430851995 -.L008no_x87: +.L012no_x87: leal 4(%esp),%eax ret .size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin @@ -209,11 +239,11 @@ OPENSSL_atomic_add: pushl %ebx nop movl (%edx),%eax -.L010spin: +.L014spin: leal (%eax,%ecx,1),%ebx nop .long 447811568 - jne .L010spin + jne .L014spin movl %ebx,%eax popl %ebx ret @@ -254,37 +284,49 @@ OPENSSL_cleanse: movl 8(%esp),%ecx xorl %eax,%eax cmpl $7,%ecx - jae .L011lot + jae .L015lot cmpl $0,%ecx - je .L012ret -.L013little: + je .L016ret +.L017little: movb %al,(%edx) subl $1,%ecx leal 1(%edx),%edx - jnz .L013little -.L012ret: + jnz .L017little +.L016ret: ret .align 16 -.L011lot: +.L015lot: testl $3,%edx - jz .L014aligned + jz .L018aligned movb %al,(%edx) leal -1(%ecx),%ecx leal 1(%edx),%edx - jmp .L011lot -.L014aligned: + jmp .L015lot +.L018aligned: movl %eax,(%edx) leal -4(%ecx),%ecx testl $-4,%ecx leal 4(%edx),%edx - jnz .L014aligned + jnz .L018aligned cmpl $0,%ecx - jne .L013little + jne .L017little ret .size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin -.comm OPENSSL_ia32cap_P,4,4 +.globl OPENSSL_ia32_rdrand +.type OPENSSL_ia32_rdrand,@function +.align 16 +OPENSSL_ia32_rdrand: +.L_OPENSSL_ia32_rdrand_begin: + movl $8,%ecx +.L019loop: +.byte 15,199,240 + jc .L020break + loop .L019loop +.L020break: + cmpl $0,%eax + cmovel %ecx,%eax + ret +.size OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin +.comm OPENSSL_ia32cap_P,8,4 .section .init call OPENSSL_cpuid_setup - jmp .Linitalign -.align 16 -.Linitalign: diff --git a/secure/lib/libcrypto/man/ASN1_OBJECT_new.3 b/secure/lib/libcrypto/man/ASN1_OBJECT_new.3 index 11edc41f67..5cfdd1cf40 100644 --- a/secure/lib/libcrypto/man/ASN1_OBJECT_new.3 +++ b/secure/lib/libcrypto/man/ASN1_OBJECT_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1_OBJECT_new 3" -.TH ASN1_OBJECT_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1_OBJECT_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ASN1_STRING_length.3 b/secure/lib/libcrypto/man/ASN1_STRING_length.3 index 7fe53d2df9..1566e74fe4 100644 --- a/secure/lib/libcrypto/man/ASN1_STRING_length.3 +++ b/secure/lib/libcrypto/man/ASN1_STRING_length.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1_STRING_length 3" -.TH ASN1_STRING_length 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1_STRING_length 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ASN1_STRING_new.3 b/secure/lib/libcrypto/man/ASN1_STRING_new.3 index e09f221e61..86da0f452a 100644 --- a/secure/lib/libcrypto/man/ASN1_STRING_new.3 +++ b/secure/lib/libcrypto/man/ASN1_STRING_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1_STRING_new 3" -.TH ASN1_STRING_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1_STRING_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ASN1_STRING_print_ex.3 b/secure/lib/libcrypto/man/ASN1_STRING_print_ex.3 index 43877abed0..90e14a2a51 100644 --- a/secure/lib/libcrypto/man/ASN1_STRING_print_ex.3 +++ b/secure/lib/libcrypto/man/ASN1_STRING_print_ex.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1_STRING_print_ex 3" -.TH ASN1_STRING_print_ex 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1_STRING_print_ex 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ASN1_generate_nconf.3 b/secure/lib/libcrypto/man/ASN1_generate_nconf.3 index 0e37c5f456..6d45c24578 100644 --- a/secure/lib/libcrypto/man/ASN1_generate_nconf.3 +++ b/secure/lib/libcrypto/man/ASN1_generate_nconf.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1_generate_nconf 3" -.TH ASN1_generate_nconf 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1_generate_nconf 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_ctrl.3 b/secure/lib/libcrypto/man/BIO_ctrl.3 index c53f9a4923..70e5926a26 100644 --- a/secure/lib/libcrypto/man/BIO_ctrl.3 +++ b/secure/lib/libcrypto/man/BIO_ctrl.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_ctrl 3" -.TH BIO_ctrl 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_ctrl 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_base64.3 b/secure/lib/libcrypto/man/BIO_f_base64.3 index 1d2477057d..0990cd7ca8 100644 --- a/secure/lib/libcrypto/man/BIO_f_base64.3 +++ b/secure/lib/libcrypto/man/BIO_f_base64.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_base64 3" -.TH BIO_f_base64 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_base64 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_buffer.3 b/secure/lib/libcrypto/man/BIO_f_buffer.3 index 0cb823aeac..e6de46859c 100644 --- a/secure/lib/libcrypto/man/BIO_f_buffer.3 +++ b/secure/lib/libcrypto/man/BIO_f_buffer.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_buffer 3" -.TH BIO_f_buffer 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_buffer 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_cipher.3 b/secure/lib/libcrypto/man/BIO_f_cipher.3 index 1aecd4b685..317367589b 100644 --- a/secure/lib/libcrypto/man/BIO_f_cipher.3 +++ b/secure/lib/libcrypto/man/BIO_f_cipher.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_cipher 3" -.TH BIO_f_cipher 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_cipher 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_md.3 b/secure/lib/libcrypto/man/BIO_f_md.3 index 987b078d2d..5522245db6 100644 --- a/secure/lib/libcrypto/man/BIO_f_md.3 +++ b/secure/lib/libcrypto/man/BIO_f_md.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_md 3" -.TH BIO_f_md 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_md 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_null.3 b/secure/lib/libcrypto/man/BIO_f_null.3 index 0dd20a016d..333e7e1a9b 100644 --- a/secure/lib/libcrypto/man/BIO_f_null.3 +++ b/secure/lib/libcrypto/man/BIO_f_null.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_null 3" -.TH BIO_f_null 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_null 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_f_ssl.3 b/secure/lib/libcrypto/man/BIO_f_ssl.3 index 8e081fa5ed..511fa672ed 100644 --- a/secure/lib/libcrypto/man/BIO_f_ssl.3 +++ b/secure/lib/libcrypto/man/BIO_f_ssl.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_f_ssl 3" -.TH BIO_f_ssl 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_f_ssl 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_find_type.3 b/secure/lib/libcrypto/man/BIO_find_type.3 index 299f284170..849fabd9e3 100644 --- a/secure/lib/libcrypto/man/BIO_find_type.3 +++ b/secure/lib/libcrypto/man/BIO_find_type.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_find_type 3" -.TH BIO_find_type 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_find_type 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_new.3 b/secure/lib/libcrypto/man/BIO_new.3 index d3ad39f884..59f659e2bf 100644 --- a/secure/lib/libcrypto/man/BIO_new.3 +++ b/secure/lib/libcrypto/man/BIO_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_new 3" -.TH BIO_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_new_CMS.3 b/secure/lib/libcrypto/man/BIO_new_CMS.3 index ca285c4fd7..7e89a014fd 100644 --- a/secure/lib/libcrypto/man/BIO_new_CMS.3 +++ b/secure/lib/libcrypto/man/BIO_new_CMS.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_new_CMS 3" -.TH BIO_new_CMS 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_new_CMS 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_push.3 b/secure/lib/libcrypto/man/BIO_push.3 index 88925cd915..cb55ffc681 100644 --- a/secure/lib/libcrypto/man/BIO_push.3 +++ b/secure/lib/libcrypto/man/BIO_push.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_push 3" -.TH BIO_push 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_push 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_read.3 b/secure/lib/libcrypto/man/BIO_read.3 index ccadb8167e..d2d79ca27f 100644 --- a/secure/lib/libcrypto/man/BIO_read.3 +++ b/secure/lib/libcrypto/man/BIO_read.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_read 3" -.TH BIO_read 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_read 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_accept.3 b/secure/lib/libcrypto/man/BIO_s_accept.3 index 1777afb337..d0f2b28cdb 100644 --- a/secure/lib/libcrypto/man/BIO_s_accept.3 +++ b/secure/lib/libcrypto/man/BIO_s_accept.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_accept 3" -.TH BIO_s_accept 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_accept 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_bio.3 b/secure/lib/libcrypto/man/BIO_s_bio.3 index ebd2962099..51a1625d1f 100644 --- a/secure/lib/libcrypto/man/BIO_s_bio.3 +++ b/secure/lib/libcrypto/man/BIO_s_bio.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_bio 3" -.TH BIO_s_bio 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_bio 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_connect.3 b/secure/lib/libcrypto/man/BIO_s_connect.3 index d63772f193..fb65e89a1a 100644 --- a/secure/lib/libcrypto/man/BIO_s_connect.3 +++ b/secure/lib/libcrypto/man/BIO_s_connect.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_connect 3" -.TH BIO_s_connect 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_connect 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_fd.3 b/secure/lib/libcrypto/man/BIO_s_fd.3 index ee317b5d87..83964eb9f3 100644 --- a/secure/lib/libcrypto/man/BIO_s_fd.3 +++ b/secure/lib/libcrypto/man/BIO_s_fd.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_fd 3" -.TH BIO_s_fd 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_fd 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_file.3 b/secure/lib/libcrypto/man/BIO_s_file.3 index 80856199c8..cdbd5478bb 100644 --- a/secure/lib/libcrypto/man/BIO_s_file.3 +++ b/secure/lib/libcrypto/man/BIO_s_file.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_file 3" -.TH BIO_s_file 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_file 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_mem.3 b/secure/lib/libcrypto/man/BIO_s_mem.3 index 92e8ce2794..47bf12310c 100644 --- a/secure/lib/libcrypto/man/BIO_s_mem.3 +++ b/secure/lib/libcrypto/man/BIO_s_mem.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_mem 3" -.TH BIO_s_mem 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_mem 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_null.3 b/secure/lib/libcrypto/man/BIO_s_null.3 index de1c209db3..c91ee4bc66 100644 --- a/secure/lib/libcrypto/man/BIO_s_null.3 +++ b/secure/lib/libcrypto/man/BIO_s_null.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_null 3" -.TH BIO_s_null 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_null 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_s_socket.3 b/secure/lib/libcrypto/man/BIO_s_socket.3 index 5a3f7acd72..2bcf53c365 100644 --- a/secure/lib/libcrypto/man/BIO_s_socket.3 +++ b/secure/lib/libcrypto/man/BIO_s_socket.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_s_socket 3" -.TH BIO_s_socket 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_s_socket 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_set_callback.3 b/secure/lib/libcrypto/man/BIO_set_callback.3 index 4067a05172..97a5ed48df 100644 --- a/secure/lib/libcrypto/man/BIO_set_callback.3 +++ b/secure/lib/libcrypto/man/BIO_set_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_set_callback 3" -.TH BIO_set_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_set_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BIO_should_retry.3 b/secure/lib/libcrypto/man/BIO_should_retry.3 index 7c666b7c8d..2092577bb7 100644 --- a/secure/lib/libcrypto/man/BIO_should_retry.3 +++ b/secure/lib/libcrypto/man/BIO_should_retry.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BIO_should_retry 3" -.TH BIO_should_retry 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BIO_should_retry 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_BLINDING_new.3 b/secure/lib/libcrypto/man/BN_BLINDING_new.3 index 51cfcfad8e..e003169405 100644 --- a/secure/lib/libcrypto/man/BN_BLINDING_new.3 +++ b/secure/lib/libcrypto/man/BN_BLINDING_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_BLINDING_new 3" -.TH BN_BLINDING_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_BLINDING_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_CTX_new.3 b/secure/lib/libcrypto/man/BN_CTX_new.3 index c682cb12cc..7dadc519b1 100644 --- a/secure/lib/libcrypto/man/BN_CTX_new.3 +++ b/secure/lib/libcrypto/man/BN_CTX_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_CTX_new 3" -.TH BN_CTX_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_CTX_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_CTX_start.3 b/secure/lib/libcrypto/man/BN_CTX_start.3 index 1795eb4c3f..6334974cd1 100644 --- a/secure/lib/libcrypto/man/BN_CTX_start.3 +++ b/secure/lib/libcrypto/man/BN_CTX_start.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_CTX_start 3" -.TH BN_CTX_start 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_CTX_start 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_add.3 b/secure/lib/libcrypto/man/BN_add.3 index 5a58882b34..4ea811cfb3 100644 --- a/secure/lib/libcrypto/man/BN_add.3 +++ b/secure/lib/libcrypto/man/BN_add.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_add 3" -.TH BN_add 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_add 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_add_word.3 b/secure/lib/libcrypto/man/BN_add_word.3 index 8101a72f7f..c83816e4f2 100644 --- a/secure/lib/libcrypto/man/BN_add_word.3 +++ b/secure/lib/libcrypto/man/BN_add_word.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_add_word 3" -.TH BN_add_word 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_add_word 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_bn2bin.3 b/secure/lib/libcrypto/man/BN_bn2bin.3 index 5047e62d79..1198d4bbee 100644 --- a/secure/lib/libcrypto/man/BN_bn2bin.3 +++ b/secure/lib/libcrypto/man/BN_bn2bin.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_bn2bin 3" -.TH BN_bn2bin 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_bn2bin 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_cmp.3 b/secure/lib/libcrypto/man/BN_cmp.3 index d820870706..725f6998f5 100644 --- a/secure/lib/libcrypto/man/BN_cmp.3 +++ b/secure/lib/libcrypto/man/BN_cmp.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_cmp 3" -.TH BN_cmp 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_cmp 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_copy.3 b/secure/lib/libcrypto/man/BN_copy.3 index c06da36f18..0741ff7296 100644 --- a/secure/lib/libcrypto/man/BN_copy.3 +++ b/secure/lib/libcrypto/man/BN_copy.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_copy 3" -.TH BN_copy 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_copy 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_generate_prime.3 b/secure/lib/libcrypto/man/BN_generate_prime.3 index d379ff8b0d..33fc87afec 100644 --- a/secure/lib/libcrypto/man/BN_generate_prime.3 +++ b/secure/lib/libcrypto/man/BN_generate_prime.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_generate_prime 3" -.TH BN_generate_prime 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_generate_prime 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_mod_inverse.3 b/secure/lib/libcrypto/man/BN_mod_inverse.3 index 0e2657efb8..e0552faeb5 100644 --- a/secure/lib/libcrypto/man/BN_mod_inverse.3 +++ b/secure/lib/libcrypto/man/BN_mod_inverse.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_mod_inverse 3" -.TH BN_mod_inverse 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_mod_inverse 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_mod_mul_montgomery.3 b/secure/lib/libcrypto/man/BN_mod_mul_montgomery.3 index 0475625d8d..972eaf362f 100644 --- a/secure/lib/libcrypto/man/BN_mod_mul_montgomery.3 +++ b/secure/lib/libcrypto/man/BN_mod_mul_montgomery.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_mod_mul_montgomery 3" -.TH BN_mod_mul_montgomery 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_mod_mul_montgomery 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_mod_mul_reciprocal.3 b/secure/lib/libcrypto/man/BN_mod_mul_reciprocal.3 index d1e7913195..be0858a4c2 100644 --- a/secure/lib/libcrypto/man/BN_mod_mul_reciprocal.3 +++ b/secure/lib/libcrypto/man/BN_mod_mul_reciprocal.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_mod_mul_reciprocal 3" -.TH BN_mod_mul_reciprocal 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_mod_mul_reciprocal 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_new.3 b/secure/lib/libcrypto/man/BN_new.3 index 76cdc90b9b..4bbd7269f7 100644 --- a/secure/lib/libcrypto/man/BN_new.3 +++ b/secure/lib/libcrypto/man/BN_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_new 3" -.TH BN_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_num_bytes.3 b/secure/lib/libcrypto/man/BN_num_bytes.3 index f87669df68..a79fc90b67 100644 --- a/secure/lib/libcrypto/man/BN_num_bytes.3 +++ b/secure/lib/libcrypto/man/BN_num_bytes.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_num_bytes 3" -.TH BN_num_bytes 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_num_bytes 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_rand.3 b/secure/lib/libcrypto/man/BN_rand.3 index f2dbfad120..fc5aaeb3cb 100644 --- a/secure/lib/libcrypto/man/BN_rand.3 +++ b/secure/lib/libcrypto/man/BN_rand.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_rand 3" -.TH BN_rand 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_rand 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/BN_set_bit.3 b/secure/lib/libcrypto/man/BN_set_bit.3 index a8bec4c62c..c8beb91ad5 100644 --- a/secure/lib/libcrypto/man/BN_set_bit.3 +++ b/secure/lib/libcrypto/man/BN_set_bit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_set_bit 3" -.TH BN_set_bit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_set_bit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_swap.3 b/secure/lib/libcrypto/man/BN_swap.3 index c12601e102..cfad6f198d 100644 --- a/secure/lib/libcrypto/man/BN_swap.3 +++ b/secure/lib/libcrypto/man/BN_swap.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_swap 3" -.TH BN_swap 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_swap 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/BN_zero.3 b/secure/lib/libcrypto/man/BN_zero.3 index c0c46fea12..34272dda78 100644 --- a/secure/lib/libcrypto/man/BN_zero.3 +++ b/secure/lib/libcrypto/man/BN_zero.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "BN_zero 3" -.TH BN_zero 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH BN_zero 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_add0_cert.3 b/secure/lib/libcrypto/man/CMS_add0_cert.3 index dc58a0928c..cb83071abc 100644 --- a/secure/lib/libcrypto/man/CMS_add0_cert.3 +++ b/secure/lib/libcrypto/man/CMS_add0_cert.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_add0_cert 3" -.TH CMS_add0_cert 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_add0_cert 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_add1_recipient_cert.3 b/secure/lib/libcrypto/man/CMS_add1_recipient_cert.3 index 313911e2a3..059322ad19 100644 --- a/secure/lib/libcrypto/man/CMS_add1_recipient_cert.3 +++ b/secure/lib/libcrypto/man/CMS_add1_recipient_cert.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_add1_recipient_cert 3" -.TH CMS_add1_recipient_cert 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_add1_recipient_cert 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_compress.3 b/secure/lib/libcrypto/man/CMS_compress.3 index cdb6c83027..8d9dd64fd2 100644 --- a/secure/lib/libcrypto/man/CMS_compress.3 +++ b/secure/lib/libcrypto/man/CMS_compress.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_compress 3" -.TH CMS_compress 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_compress 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_decrypt.3 b/secure/lib/libcrypto/man/CMS_decrypt.3 index b3165b5304..05be1880da 100644 --- a/secure/lib/libcrypto/man/CMS_decrypt.3 +++ b/secure/lib/libcrypto/man/CMS_decrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_decrypt 3" -.TH CMS_decrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_decrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_encrypt.3 b/secure/lib/libcrypto/man/CMS_encrypt.3 index ab2b82a01a..732ecac6e2 100644 --- a/secure/lib/libcrypto/man/CMS_encrypt.3 +++ b/secure/lib/libcrypto/man/CMS_encrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_encrypt 3" -.TH CMS_encrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_encrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_final.3 b/secure/lib/libcrypto/man/CMS_final.3 index 7c51f73c4a..260dc64981 100644 --- a/secure/lib/libcrypto/man/CMS_final.3 +++ b/secure/lib/libcrypto/man/CMS_final.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_final 3" -.TH CMS_final 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_final 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_get0_RecipientInfos.3 b/secure/lib/libcrypto/man/CMS_get0_RecipientInfos.3 index 3eb4738a4e..f935757434 100644 --- a/secure/lib/libcrypto/man/CMS_get0_RecipientInfos.3 +++ b/secure/lib/libcrypto/man/CMS_get0_RecipientInfos.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_get0_RecipientInfos 3" -.TH CMS_get0_RecipientInfos 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_get0_RecipientInfos 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_get0_SignerInfos.3 b/secure/lib/libcrypto/man/CMS_get0_SignerInfos.3 index babb86e698..c7a8fcf8fb 100644 --- a/secure/lib/libcrypto/man/CMS_get0_SignerInfos.3 +++ b/secure/lib/libcrypto/man/CMS_get0_SignerInfos.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_get0_SignerInfos 3" -.TH CMS_get0_SignerInfos 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_get0_SignerInfos 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_get0_type.3 b/secure/lib/libcrypto/man/CMS_get0_type.3 index b32f6f3ae3..587766154b 100644 --- a/secure/lib/libcrypto/man/CMS_get0_type.3 +++ b/secure/lib/libcrypto/man/CMS_get0_type.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_get0_type 3" -.TH CMS_get0_type 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_get0_type 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_get1_ReceiptRequest.3 b/secure/lib/libcrypto/man/CMS_get1_ReceiptRequest.3 index e4089a7934..a1bf6195a2 100644 --- a/secure/lib/libcrypto/man/CMS_get1_ReceiptRequest.3 +++ b/secure/lib/libcrypto/man/CMS_get1_ReceiptRequest.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_get1_ReceiptRequest 3" -.TH CMS_get1_ReceiptRequest 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_get1_ReceiptRequest 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_sign.3 b/secure/lib/libcrypto/man/CMS_sign.3 index d9b37d24f6..036dbcea78 100644 --- a/secure/lib/libcrypto/man/CMS_sign.3 +++ b/secure/lib/libcrypto/man/CMS_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_sign 3" -.TH CMS_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_sign_add1_signer.3 b/secure/lib/libcrypto/man/CMS_sign_add1_signer.3 index 9869463bf9..fe598784d8 100644 --- a/secure/lib/libcrypto/man/CMS_sign_add1_signer.3 +++ b/secure/lib/libcrypto/man/CMS_sign_add1_signer.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_sign_add1_signer 3" -.TH CMS_sign_add1_signer 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_sign_add1_signer 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_sign_receipt.3 b/secure/lib/libcrypto/man/CMS_sign_receipt.3 index 78e1249dc7..c7663d1782 100644 --- a/secure/lib/libcrypto/man/CMS_sign_receipt.3 +++ b/secure/lib/libcrypto/man/CMS_sign_receipt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_sign_receipt 3" -.TH CMS_sign_receipt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_sign_receipt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_uncompress.3 b/secure/lib/libcrypto/man/CMS_uncompress.3 index 418aa598a0..e919cdbd3e 100644 --- a/secure/lib/libcrypto/man/CMS_uncompress.3 +++ b/secure/lib/libcrypto/man/CMS_uncompress.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_uncompress 3" -.TH CMS_uncompress 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_uncompress 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_verify.3 b/secure/lib/libcrypto/man/CMS_verify.3 index 00ac9eaee4..58ec3003f4 100644 --- a/secure/lib/libcrypto/man/CMS_verify.3 +++ b/secure/lib/libcrypto/man/CMS_verify.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_verify 3" -.TH CMS_verify 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_verify 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CMS_verify_receipt.3 b/secure/lib/libcrypto/man/CMS_verify_receipt.3 index f5d16647b0..e51658c59b 100644 --- a/secure/lib/libcrypto/man/CMS_verify_receipt.3 +++ b/secure/lib/libcrypto/man/CMS_verify_receipt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS_verify_receipt 3" -.TH CMS_verify_receipt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS_verify_receipt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CONF_modules_free.3 b/secure/lib/libcrypto/man/CONF_modules_free.3 index f5fe8486bd..5922e49524 100644 --- a/secure/lib/libcrypto/man/CONF_modules_free.3 +++ b/secure/lib/libcrypto/man/CONF_modules_free.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CONF_modules_free 3" -.TH CONF_modules_free 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CONF_modules_free 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/CONF_modules_load_file.3 b/secure/lib/libcrypto/man/CONF_modules_load_file.3 index f58a0f2471..4c6f1c1c31 100644 --- a/secure/lib/libcrypto/man/CONF_modules_load_file.3 +++ b/secure/lib/libcrypto/man/CONF_modules_load_file.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CONF_modules_load_file 3" -.TH CONF_modules_load_file 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CONF_modules_load_file 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/CRYPTO_set_ex_data.3 b/secure/lib/libcrypto/man/CRYPTO_set_ex_data.3 index a65b26c05f..1e424c61e1 100644 --- a/secure/lib/libcrypto/man/CRYPTO_set_ex_data.3 +++ b/secure/lib/libcrypto/man/CRYPTO_set_ex_data.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CRYPTO_set_ex_data 3" -.TH CRYPTO_set_ex_data 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CRYPTO_set_ex_data 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DH_generate_key.3 b/secure/lib/libcrypto/man/DH_generate_key.3 index b0f405291f..0ce40d60a9 100644 --- a/secure/lib/libcrypto/man/DH_generate_key.3 +++ b/secure/lib/libcrypto/man/DH_generate_key.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_generate_key 3" -.TH DH_generate_key 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_generate_key 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DH_generate_parameters.3 b/secure/lib/libcrypto/man/DH_generate_parameters.3 index da82aba5a6..11f0255d91 100644 --- a/secure/lib/libcrypto/man/DH_generate_parameters.3 +++ b/secure/lib/libcrypto/man/DH_generate_parameters.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_generate_parameters 3" -.TH DH_generate_parameters 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_generate_parameters 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DH_get_ex_new_index.3 b/secure/lib/libcrypto/man/DH_get_ex_new_index.3 index cc0755945f..eef777e0a3 100644 --- a/secure/lib/libcrypto/man/DH_get_ex_new_index.3 +++ b/secure/lib/libcrypto/man/DH_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_get_ex_new_index 3" -.TH DH_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DH_new.3 b/secure/lib/libcrypto/man/DH_new.3 index 290c6aceec..bed15151e8 100644 --- a/secure/lib/libcrypto/man/DH_new.3 +++ b/secure/lib/libcrypto/man/DH_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_new 3" -.TH DH_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/DH_set_method.3 b/secure/lib/libcrypto/man/DH_set_method.3 index 5901b0a117..25a3eb324a 100644 --- a/secure/lib/libcrypto/man/DH_set_method.3 +++ b/secure/lib/libcrypto/man/DH_set_method.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_set_method 3" -.TH DH_set_method 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_set_method 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DH_size.3 b/secure/lib/libcrypto/man/DH_size.3 index 6ba15baad3..040d330330 100644 --- a/secure/lib/libcrypto/man/DH_size.3 +++ b/secure/lib/libcrypto/man/DH_size.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DH_size 3" -.TH DH_size 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DH_size 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_SIG_new.3 b/secure/lib/libcrypto/man/DSA_SIG_new.3 index 324f55adc9..d6ba8d3d1b 100644 --- a/secure/lib/libcrypto/man/DSA_SIG_new.3 +++ b/secure/lib/libcrypto/man/DSA_SIG_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_SIG_new 3" -.TH DSA_SIG_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_SIG_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_do_sign.3 b/secure/lib/libcrypto/man/DSA_do_sign.3 index 0702c1303e..012e7e0fbc 100644 --- a/secure/lib/libcrypto/man/DSA_do_sign.3 +++ b/secure/lib/libcrypto/man/DSA_do_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_do_sign 3" -.TH DSA_do_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_do_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_dup_DH.3 b/secure/lib/libcrypto/man/DSA_dup_DH.3 index 769f267132..784f8062b9 100644 --- a/secure/lib/libcrypto/man/DSA_dup_DH.3 +++ b/secure/lib/libcrypto/man/DSA_dup_DH.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_dup_DH 3" -.TH DSA_dup_DH 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_dup_DH 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_generate_key.3 b/secure/lib/libcrypto/man/DSA_generate_key.3 index a099bbc595..ddbe1269fd 100644 --- a/secure/lib/libcrypto/man/DSA_generate_key.3 +++ b/secure/lib/libcrypto/man/DSA_generate_key.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_generate_key 3" -.TH DSA_generate_key 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_generate_key 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_generate_parameters.3 b/secure/lib/libcrypto/man/DSA_generate_parameters.3 index 0cf81431b9..203c172e0f 100644 --- a/secure/lib/libcrypto/man/DSA_generate_parameters.3 +++ b/secure/lib/libcrypto/man/DSA_generate_parameters.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_generate_parameters 3" -.TH DSA_generate_parameters 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_generate_parameters 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_get_ex_new_index.3 b/secure/lib/libcrypto/man/DSA_get_ex_new_index.3 index 51a23b80c5..f8382749d8 100644 --- a/secure/lib/libcrypto/man/DSA_get_ex_new_index.3 +++ b/secure/lib/libcrypto/man/DSA_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_get_ex_new_index 3" -.TH DSA_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_new.3 b/secure/lib/libcrypto/man/DSA_new.3 index 188b2d69b1..b723a4efc1 100644 --- a/secure/lib/libcrypto/man/DSA_new.3 +++ b/secure/lib/libcrypto/man/DSA_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_new 3" -.TH DSA_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_set_method.3 b/secure/lib/libcrypto/man/DSA_set_method.3 index 245eae8d75..4e5ccaed1e 100644 --- a/secure/lib/libcrypto/man/DSA_set_method.3 +++ b/secure/lib/libcrypto/man/DSA_set_method.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_set_method 3" -.TH DSA_set_method 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_set_method 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_sign.3 b/secure/lib/libcrypto/man/DSA_sign.3 index f3d166edf1..8ac91cf414 100644 --- a/secure/lib/libcrypto/man/DSA_sign.3 +++ b/secure/lib/libcrypto/man/DSA_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_sign 3" -.TH DSA_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/DSA_size.3 b/secure/lib/libcrypto/man/DSA_size.3 index 2036b33292..1cadae1276 100644 --- a/secure/lib/libcrypto/man/DSA_size.3 +++ b/secure/lib/libcrypto/man/DSA_size.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA_size 3" -.TH DSA_size 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA_size 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_GET_LIB.3 b/secure/lib/libcrypto/man/ERR_GET_LIB.3 index ffc87e3855..d4f341f130 100644 --- a/secure/lib/libcrypto/man/ERR_GET_LIB.3 +++ b/secure/lib/libcrypto/man/ERR_GET_LIB.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_GET_LIB 3" -.TH ERR_GET_LIB 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_GET_LIB 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_clear_error.3 b/secure/lib/libcrypto/man/ERR_clear_error.3 index b08fdeb398..708d7097db 100644 --- a/secure/lib/libcrypto/man/ERR_clear_error.3 +++ b/secure/lib/libcrypto/man/ERR_clear_error.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_clear_error 3" -.TH ERR_clear_error 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_clear_error 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_error_string.3 b/secure/lib/libcrypto/man/ERR_error_string.3 index 6abd5f5fea..0897d9ed83 100644 --- a/secure/lib/libcrypto/man/ERR_error_string.3 +++ b/secure/lib/libcrypto/man/ERR_error_string.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_error_string 3" -.TH ERR_error_string 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_error_string 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_get_error.3 b/secure/lib/libcrypto/man/ERR_get_error.3 index 48d36ed2b7..3896b25033 100644 --- a/secure/lib/libcrypto/man/ERR_get_error.3 +++ b/secure/lib/libcrypto/man/ERR_get_error.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_get_error 3" -.TH ERR_get_error 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_get_error 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_load_crypto_strings.3 b/secure/lib/libcrypto/man/ERR_load_crypto_strings.3 index fccbf6f9fc..a3dd432c6f 100644 --- a/secure/lib/libcrypto/man/ERR_load_crypto_strings.3 +++ b/secure/lib/libcrypto/man/ERR_load_crypto_strings.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_load_crypto_strings 3" -.TH ERR_load_crypto_strings 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_load_crypto_strings 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_load_strings.3 b/secure/lib/libcrypto/man/ERR_load_strings.3 index 484e770503..87d04ba401 100644 --- a/secure/lib/libcrypto/man/ERR_load_strings.3 +++ b/secure/lib/libcrypto/man/ERR_load_strings.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_load_strings 3" -.TH ERR_load_strings 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_load_strings 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_print_errors.3 b/secure/lib/libcrypto/man/ERR_print_errors.3 index d8ce43070d..e66fd6ace9 100644 --- a/secure/lib/libcrypto/man/ERR_print_errors.3 +++ b/secure/lib/libcrypto/man/ERR_print_errors.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_print_errors 3" -.TH ERR_print_errors 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_print_errors 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_put_error.3 b/secure/lib/libcrypto/man/ERR_put_error.3 index 2426f970ec..8c7badc85d 100644 --- a/secure/lib/libcrypto/man/ERR_put_error.3 +++ b/secure/lib/libcrypto/man/ERR_put_error.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_put_error 3" -.TH ERR_put_error 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_put_error 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_remove_state.3 b/secure/lib/libcrypto/man/ERR_remove_state.3 index 8120d36de5..7552103369 100644 --- a/secure/lib/libcrypto/man/ERR_remove_state.3 +++ b/secure/lib/libcrypto/man/ERR_remove_state.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_remove_state 3" -.TH ERR_remove_state 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_remove_state 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ERR_set_mark.3 b/secure/lib/libcrypto/man/ERR_set_mark.3 index 1d17c10c62..2930589cfb 100644 --- a/secure/lib/libcrypto/man/ERR_set_mark.3 +++ b/secure/lib/libcrypto/man/ERR_set_mark.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERR_set_mark 3" -.TH ERR_set_mark 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERR_set_mark 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_BytesToKey.3 b/secure/lib/libcrypto/man/EVP_BytesToKey.3 index 055809df18..bc4a9e92e6 100644 --- a/secure/lib/libcrypto/man/EVP_BytesToKey.3 +++ b/secure/lib/libcrypto/man/EVP_BytesToKey.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_BytesToKey 3" -.TH EVP_BytesToKey 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_BytesToKey 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_DigestInit.3 b/secure/lib/libcrypto/man/EVP_DigestInit.3 index 51bb214538..5d93f7b17f 100644 --- a/secure/lib/libcrypto/man/EVP_DigestInit.3 +++ b/secure/lib/libcrypto/man/EVP_DigestInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_DigestInit 3" -.TH EVP_DigestInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_DigestInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_DigestSignInit.3 b/secure/lib/libcrypto/man/EVP_DigestSignInit.3 index 0fdb2ecf22..13d881df2d 100644 --- a/secure/lib/libcrypto/man/EVP_DigestSignInit.3 +++ b/secure/lib/libcrypto/man/EVP_DigestSignInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_DigestSignInit 3" -.TH EVP_DigestSignInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_DigestSignInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_DigestVerifyInit.3 b/secure/lib/libcrypto/man/EVP_DigestVerifyInit.3 index 72045ffde8..d57ab12d44 100644 --- a/secure/lib/libcrypto/man/EVP_DigestVerifyInit.3 +++ b/secure/lib/libcrypto/man/EVP_DigestVerifyInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_DigestVerifyInit 3" -.TH EVP_DigestVerifyInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_DigestVerifyInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_EncryptInit.3 b/secure/lib/libcrypto/man/EVP_EncryptInit.3 index d52eddec77..403012dcf4 100644 --- a/secure/lib/libcrypto/man/EVP_EncryptInit.3 +++ b/secure/lib/libcrypto/man/EVP_EncryptInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_EncryptInit 3" -.TH EVP_EncryptInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_EncryptInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_OpenInit.3 b/secure/lib/libcrypto/man/EVP_OpenInit.3 index 8ae8f42e12..86c45154ad 100644 --- a/secure/lib/libcrypto/man/EVP_OpenInit.3 +++ b/secure/lib/libcrypto/man/EVP_OpenInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_OpenInit 3" -.TH EVP_OpenInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_OpenInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_CTX_ctrl.3 b/secure/lib/libcrypto/man/EVP_PKEY_CTX_ctrl.3 index b0af2a350f..c415bbc47d 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_CTX_ctrl.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_CTX_ctrl.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_CTX_ctrl 3" -.TH EVP_PKEY_CTX_ctrl 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_CTX_ctrl 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_CTX_new.3 b/secure/lib/libcrypto/man/EVP_PKEY_CTX_new.3 index 7794b46d1a..94da86230d 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_CTX_new.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_CTX_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_CTX_new 3" -.TH EVP_PKEY_CTX_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_CTX_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_cmp.3 b/secure/lib/libcrypto/man/EVP_PKEY_cmp.3 index 7cd8333bfd..400e100d5a 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_cmp.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_cmp.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_cmp 3" -.TH EVP_PKEY_cmp 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_cmp 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_decrypt.3 b/secure/lib/libcrypto/man/EVP_PKEY_decrypt.3 index e19625e213..7fefc49309 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_decrypt.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_decrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_decrypt 3" -.TH EVP_PKEY_decrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_decrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_derive.3 b/secure/lib/libcrypto/man/EVP_PKEY_derive.3 index eabb094ac4..30b79d8e1b 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_derive.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_derive.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_derive 3" -.TH EVP_PKEY_derive 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_derive 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_encrypt.3 b/secure/lib/libcrypto/man/EVP_PKEY_encrypt.3 index de126879df..8b76e41dab 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_encrypt.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_encrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_encrypt 3" -.TH EVP_PKEY_encrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_encrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_get_default_digest.3 b/secure/lib/libcrypto/man/EVP_PKEY_get_default_digest.3 index 88053c07ee..2b96e661e1 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_get_default_digest.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_get_default_digest.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_get_default_digest 3" -.TH EVP_PKEY_get_default_digest 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_get_default_digest 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_keygen.3 b/secure/lib/libcrypto/man/EVP_PKEY_keygen.3 index f384349568..6d32c70a88 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_keygen.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_keygen.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_keygen 3" -.TH EVP_PKEY_keygen 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_keygen 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_new.3 b/secure/lib/libcrypto/man/EVP_PKEY_new.3 index ef02dbb6b0..90b7b45e1d 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_new.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_new 3" -.TH EVP_PKEY_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_print_private.3 b/secure/lib/libcrypto/man/EVP_PKEY_print_private.3 index a59963478d..095d34e255 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_print_private.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_print_private.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_print_private 3" -.TH EVP_PKEY_print_private 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_print_private 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_set1_RSA.3 b/secure/lib/libcrypto/man/EVP_PKEY_set1_RSA.3 index 456bbca459..29fccdcbe5 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_set1_RSA.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_set1_RSA.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_set1_RSA 3" -.TH EVP_PKEY_set1_RSA 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_set1_RSA 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_sign.3 b/secure/lib/libcrypto/man/EVP_PKEY_sign.3 index fda5209c97..71f6c816f0 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_sign.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_sign 3" -.TH EVP_PKEY_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_verify.3 b/secure/lib/libcrypto/man/EVP_PKEY_verify.3 index e5121fb013..2359338b97 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_verify.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_verify.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_verify 3" -.TH EVP_PKEY_verify 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_verify 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_PKEY_verifyrecover.3 b/secure/lib/libcrypto/man/EVP_PKEY_verifyrecover.3 index ecd5643579..ef752a3955 100644 --- a/secure/lib/libcrypto/man/EVP_PKEY_verifyrecover.3 +++ b/secure/lib/libcrypto/man/EVP_PKEY_verifyrecover.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_PKEY_verifyrecover 3" -.TH EVP_PKEY_verifyrecover 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_PKEY_verifyrecover 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_SealInit.3 b/secure/lib/libcrypto/man/EVP_SealInit.3 index bf5ac5cecc..4c1ac24d9b 100644 --- a/secure/lib/libcrypto/man/EVP_SealInit.3 +++ b/secure/lib/libcrypto/man/EVP_SealInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_SealInit 3" -.TH EVP_SealInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_SealInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_SignInit.3 b/secure/lib/libcrypto/man/EVP_SignInit.3 index 5e38866c78..254402a455 100644 --- a/secure/lib/libcrypto/man/EVP_SignInit.3 +++ b/secure/lib/libcrypto/man/EVP_SignInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_SignInit 3" -.TH EVP_SignInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_SignInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/EVP_VerifyInit.3 b/secure/lib/libcrypto/man/EVP_VerifyInit.3 index 62e9ffb8f2..c01593936e 100644 --- a/secure/lib/libcrypto/man/EVP_VerifyInit.3 +++ b/secure/lib/libcrypto/man/EVP_VerifyInit.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EVP_VerifyInit 3" -.TH EVP_VerifyInit 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EVP_VerifyInit 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OBJ_nid2obj.3 b/secure/lib/libcrypto/man/OBJ_nid2obj.3 index a75eaf4a80..4d6a1c9036 100644 --- a/secure/lib/libcrypto/man/OBJ_nid2obj.3 +++ b/secure/lib/libcrypto/man/OBJ_nid2obj.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OBJ_nid2obj 3" -.TH OBJ_nid2obj 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OBJ_nid2obj 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OPENSSL_Applink.3 b/secure/lib/libcrypto/man/OPENSSL_Applink.3 index 3d70a1ae01..4c60f3ea05 100644 --- a/secure/lib/libcrypto/man/OPENSSL_Applink.3 +++ b/secure/lib/libcrypto/man/OPENSSL_Applink.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OPENSSL_Applink 3" -.TH OPENSSL_Applink 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OPENSSL_Applink 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/OPENSSL_VERSION_NUMBER.3 b/secure/lib/libcrypto/man/OPENSSL_VERSION_NUMBER.3 index 9d45cffbc6..84a17d35ee 100644 --- a/secure/lib/libcrypto/man/OPENSSL_VERSION_NUMBER.3 +++ b/secure/lib/libcrypto/man/OPENSSL_VERSION_NUMBER.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OPENSSL_VERSION_NUMBER 3" -.TH OPENSSL_VERSION_NUMBER 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OPENSSL_VERSION_NUMBER 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OPENSSL_config.3 b/secure/lib/libcrypto/man/OPENSSL_config.3 index f21fa93b8e..c47302d352 100644 --- a/secure/lib/libcrypto/man/OPENSSL_config.3 +++ b/secure/lib/libcrypto/man/OPENSSL_config.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OPENSSL_config 3" -.TH OPENSSL_config 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OPENSSL_config 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OPENSSL_ia32cap.3 b/secure/lib/libcrypto/man/OPENSSL_ia32cap.3 index 3c0cc9fcc7..203f9ce0f5 100644 --- a/secure/lib/libcrypto/man/OPENSSL_ia32cap.3 +++ b/secure/lib/libcrypto/man/OPENSSL_ia32cap.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OPENSSL_ia32cap 3" -.TH OPENSSL_ia32cap 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OPENSSL_ia32cap 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OPENSSL_load_builtin_modules.3 b/secure/lib/libcrypto/man/OPENSSL_load_builtin_modules.3 index 03800d4696..2093434c1f 100644 --- a/secure/lib/libcrypto/man/OPENSSL_load_builtin_modules.3 +++ b/secure/lib/libcrypto/man/OPENSSL_load_builtin_modules.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OPENSSL_load_builtin_modules 3" -.TH OPENSSL_load_builtin_modules 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OPENSSL_load_builtin_modules 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/OpenSSL_add_all_algorithms.3 b/secure/lib/libcrypto/man/OpenSSL_add_all_algorithms.3 index 0132ca662a..470fa7b55f 100644 --- a/secure/lib/libcrypto/man/OpenSSL_add_all_algorithms.3 +++ b/secure/lib/libcrypto/man/OpenSSL_add_all_algorithms.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OpenSSL_add_all_algorithms 3" -.TH OpenSSL_add_all_algorithms 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OpenSSL_add_all_algorithms 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/PEM_write_bio_CMS_stream.3 b/secure/lib/libcrypto/man/PEM_write_bio_CMS_stream.3 index 48e97c50e3..432583c06c 100644 --- a/secure/lib/libcrypto/man/PEM_write_bio_CMS_stream.3 +++ b/secure/lib/libcrypto/man/PEM_write_bio_CMS_stream.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PEM_write_bio_CMS_stream 3" -.TH PEM_write_bio_CMS_stream 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PEM_write_bio_CMS_stream 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PEM_write_bio_PKCS7_stream.3 b/secure/lib/libcrypto/man/PEM_write_bio_PKCS7_stream.3 index 9ac8027d9d..6604e35b2e 100644 --- a/secure/lib/libcrypto/man/PEM_write_bio_PKCS7_stream.3 +++ b/secure/lib/libcrypto/man/PEM_write_bio_PKCS7_stream.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PEM_write_bio_PKCS7_stream 3" -.TH PEM_write_bio_PKCS7_stream 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PEM_write_bio_PKCS7_stream 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS12_create.3 b/secure/lib/libcrypto/man/PKCS12_create.3 index 939c842cab..f811f8396d 100644 --- a/secure/lib/libcrypto/man/PKCS12_create.3 +++ b/secure/lib/libcrypto/man/PKCS12_create.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS12_create 3" -.TH PKCS12_create 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS12_create 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS12_parse.3 b/secure/lib/libcrypto/man/PKCS12_parse.3 index a3ffbed1ed..23b087e3be 100644 --- a/secure/lib/libcrypto/man/PKCS12_parse.3 +++ b/secure/lib/libcrypto/man/PKCS12_parse.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS12_parse 3" -.TH PKCS12_parse 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS12_parse 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS7_decrypt.3 b/secure/lib/libcrypto/man/PKCS7_decrypt.3 index 1bd45fc8d5..7a6c6979d1 100644 --- a/secure/lib/libcrypto/man/PKCS7_decrypt.3 +++ b/secure/lib/libcrypto/man/PKCS7_decrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS7_decrypt 3" -.TH PKCS7_decrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS7_decrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS7_encrypt.3 b/secure/lib/libcrypto/man/PKCS7_encrypt.3 index 15fd8b8797..bc701c02f1 100644 --- a/secure/lib/libcrypto/man/PKCS7_encrypt.3 +++ b/secure/lib/libcrypto/man/PKCS7_encrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS7_encrypt 3" -.TH PKCS7_encrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS7_encrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS7_sign.3 b/secure/lib/libcrypto/man/PKCS7_sign.3 index c10cc07d34..982d5ec67b 100644 --- a/secure/lib/libcrypto/man/PKCS7_sign.3 +++ b/secure/lib/libcrypto/man/PKCS7_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS7_sign 3" -.TH PKCS7_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS7_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS7_sign_add_signer.3 b/secure/lib/libcrypto/man/PKCS7_sign_add_signer.3 index 1d904e006e..f9f58992ec 100644 --- a/secure/lib/libcrypto/man/PKCS7_sign_add_signer.3 +++ b/secure/lib/libcrypto/man/PKCS7_sign_add_signer.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS7_sign_add_signer 3" -.TH PKCS7_sign_add_signer 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS7_sign_add_signer 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/PKCS7_verify.3 b/secure/lib/libcrypto/man/PKCS7_verify.3 index fb68edc552..6e891d46a5 100644 --- a/secure/lib/libcrypto/man/PKCS7_verify.3 +++ b/secure/lib/libcrypto/man/PKCS7_verify.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "PKCS7_verify 3" -.TH PKCS7_verify 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH PKCS7_verify 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_add.3 b/secure/lib/libcrypto/man/RAND_add.3 index 182c53aec7..44ab165532 100644 --- a/secure/lib/libcrypto/man/RAND_add.3 +++ b/secure/lib/libcrypto/man/RAND_add.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_add 3" -.TH RAND_add 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_add 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_bytes.3 b/secure/lib/libcrypto/man/RAND_bytes.3 index bd77673cf2..e94aa65716 100644 --- a/secure/lib/libcrypto/man/RAND_bytes.3 +++ b/secure/lib/libcrypto/man/RAND_bytes.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_bytes 3" -.TH RAND_bytes 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_bytes 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_cleanup.3 b/secure/lib/libcrypto/man/RAND_cleanup.3 index 2f5da64688..b878878e46 100644 --- a/secure/lib/libcrypto/man/RAND_cleanup.3 +++ b/secure/lib/libcrypto/man/RAND_cleanup.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_cleanup 3" -.TH RAND_cleanup 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_cleanup 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_egd.3 b/secure/lib/libcrypto/man/RAND_egd.3 index 5893c5ddce..ef32a6d5d8 100644 --- a/secure/lib/libcrypto/man/RAND_egd.3 +++ b/secure/lib/libcrypto/man/RAND_egd.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_egd 3" -.TH RAND_egd 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_egd 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_load_file.3 b/secure/lib/libcrypto/man/RAND_load_file.3 index db8b0f0a3d..3af02c011e 100644 --- a/secure/lib/libcrypto/man/RAND_load_file.3 +++ b/secure/lib/libcrypto/man/RAND_load_file.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_load_file 3" -.TH RAND_load_file 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_load_file 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RAND_set_rand_method.3 b/secure/lib/libcrypto/man/RAND_set_rand_method.3 index 649402dc05..21e071a4ff 100644 --- a/secure/lib/libcrypto/man/RAND_set_rand_method.3 +++ b/secure/lib/libcrypto/man/RAND_set_rand_method.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RAND_set_rand_method 3" -.TH RAND_set_rand_method 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RAND_set_rand_method 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_blinding_on.3 b/secure/lib/libcrypto/man/RSA_blinding_on.3 index 08d8abf908..70aae73479 100644 --- a/secure/lib/libcrypto/man/RSA_blinding_on.3 +++ b/secure/lib/libcrypto/man/RSA_blinding_on.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_blinding_on 3" -.TH RSA_blinding_on 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_blinding_on 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_check_key.3 b/secure/lib/libcrypto/man/RSA_check_key.3 index 1aac8e75d7..b92e029aad 100644 --- a/secure/lib/libcrypto/man/RSA_check_key.3 +++ b/secure/lib/libcrypto/man/RSA_check_key.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_check_key 3" -.TH RSA_check_key 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_check_key 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_generate_key.3 b/secure/lib/libcrypto/man/RSA_generate_key.3 index c635b5bace..45e1a0bb4a 100644 --- a/secure/lib/libcrypto/man/RSA_generate_key.3 +++ b/secure/lib/libcrypto/man/RSA_generate_key.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_generate_key 3" -.TH RSA_generate_key 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_generate_key 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_get_ex_new_index.3 b/secure/lib/libcrypto/man/RSA_get_ex_new_index.3 index 78f86e8130..043615e2de 100644 --- a/secure/lib/libcrypto/man/RSA_get_ex_new_index.3 +++ b/secure/lib/libcrypto/man/RSA_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_get_ex_new_index 3" -.TH RSA_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_new.3 b/secure/lib/libcrypto/man/RSA_new.3 index 587043246c..203f2df907 100644 --- a/secure/lib/libcrypto/man/RSA_new.3 +++ b/secure/lib/libcrypto/man/RSA_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_new 3" -.TH RSA_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_padding_add_PKCS1_type_1.3 b/secure/lib/libcrypto/man/RSA_padding_add_PKCS1_type_1.3 index e57bca10bc..3ca162472c 100644 --- a/secure/lib/libcrypto/man/RSA_padding_add_PKCS1_type_1.3 +++ b/secure/lib/libcrypto/man/RSA_padding_add_PKCS1_type_1.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_padding_add_PKCS1_type_1 3" -.TH RSA_padding_add_PKCS1_type_1 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_padding_add_PKCS1_type_1 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_print.3 b/secure/lib/libcrypto/man/RSA_print.3 index 3fda8cee1b..6d1c383a21 100644 --- a/secure/lib/libcrypto/man/RSA_print.3 +++ b/secure/lib/libcrypto/man/RSA_print.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_print 3" -.TH RSA_print 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_print 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_private_encrypt.3 b/secure/lib/libcrypto/man/RSA_private_encrypt.3 index ce3244c8bf..df6e1409f4 100644 --- a/secure/lib/libcrypto/man/RSA_private_encrypt.3 +++ b/secure/lib/libcrypto/man/RSA_private_encrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_private_encrypt 3" -.TH RSA_private_encrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_private_encrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_public_encrypt.3 b/secure/lib/libcrypto/man/RSA_public_encrypt.3 index 7dad6c1514..e2d11a7905 100644 --- a/secure/lib/libcrypto/man/RSA_public_encrypt.3 +++ b/secure/lib/libcrypto/man/RSA_public_encrypt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_public_encrypt 3" -.TH RSA_public_encrypt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_public_encrypt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_set_method.3 b/secure/lib/libcrypto/man/RSA_set_method.3 index 6ca3f3af99..0681ef1b6e 100644 --- a/secure/lib/libcrypto/man/RSA_set_method.3 +++ b/secure/lib/libcrypto/man/RSA_set_method.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_set_method 3" -.TH RSA_set_method 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_set_method 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_sign.3 b/secure/lib/libcrypto/man/RSA_sign.3 index dca17d2d6b..0e9a032e9c 100644 --- a/secure/lib/libcrypto/man/RSA_sign.3 +++ b/secure/lib/libcrypto/man/RSA_sign.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_sign 3" -.TH RSA_sign 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_sign 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_sign_ASN1_OCTET_STRING.3 b/secure/lib/libcrypto/man/RSA_sign_ASN1_OCTET_STRING.3 index ce1a6a22fe..0d9b83f6c1 100644 --- a/secure/lib/libcrypto/man/RSA_sign_ASN1_OCTET_STRING.3 +++ b/secure/lib/libcrypto/man/RSA_sign_ASN1_OCTET_STRING.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_sign_ASN1_OCTET_STRING 3" -.TH RSA_sign_ASN1_OCTET_STRING 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_sign_ASN1_OCTET_STRING 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/RSA_size.3 b/secure/lib/libcrypto/man/RSA_size.3 index 2f4bf52fc3..dcaf10bd8a 100644 --- a/secure/lib/libcrypto/man/RSA_size.3 +++ b/secure/lib/libcrypto/man/RSA_size.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "RSA_size 3" -.TH RSA_size 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH RSA_size 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/SMIME_read_CMS.3 b/secure/lib/libcrypto/man/SMIME_read_CMS.3 index 85d7d0ff13..e504758ca4 100644 --- a/secure/lib/libcrypto/man/SMIME_read_CMS.3 +++ b/secure/lib/libcrypto/man/SMIME_read_CMS.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SMIME_read_CMS 3" -.TH SMIME_read_CMS 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SMIME_read_CMS 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/SMIME_read_PKCS7.3 b/secure/lib/libcrypto/man/SMIME_read_PKCS7.3 index d4fdd0c6fd..7cd65ae265 100644 --- a/secure/lib/libcrypto/man/SMIME_read_PKCS7.3 +++ b/secure/lib/libcrypto/man/SMIME_read_PKCS7.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SMIME_read_PKCS7 3" -.TH SMIME_read_PKCS7 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SMIME_read_PKCS7 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/SMIME_write_CMS.3 b/secure/lib/libcrypto/man/SMIME_write_CMS.3 index fb4f39fd0c..c73b9e0432 100644 --- a/secure/lib/libcrypto/man/SMIME_write_CMS.3 +++ b/secure/lib/libcrypto/man/SMIME_write_CMS.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SMIME_write_CMS 3" -.TH SMIME_write_CMS 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SMIME_write_CMS 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/SMIME_write_PKCS7.3 b/secure/lib/libcrypto/man/SMIME_write_PKCS7.3 index 0f13a9efec..8c45f34a02 100644 --- a/secure/lib/libcrypto/man/SMIME_write_PKCS7.3 +++ b/secure/lib/libcrypto/man/SMIME_write_PKCS7.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SMIME_write_PKCS7 3" -.TH SMIME_write_PKCS7 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SMIME_write_PKCS7 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_NAME_ENTRY_get_object.3 b/secure/lib/libcrypto/man/X509_NAME_ENTRY_get_object.3 index 2bfaad100d..5f36b5fe26 100644 --- a/secure/lib/libcrypto/man/X509_NAME_ENTRY_get_object.3 +++ b/secure/lib/libcrypto/man/X509_NAME_ENTRY_get_object.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_NAME_ENTRY_get_object 3" -.TH X509_NAME_ENTRY_get_object 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_NAME_ENTRY_get_object 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_NAME_add_entry_by_txt.3 b/secure/lib/libcrypto/man/X509_NAME_add_entry_by_txt.3 index 210e4bb179..8e49c12350 100644 --- a/secure/lib/libcrypto/man/X509_NAME_add_entry_by_txt.3 +++ b/secure/lib/libcrypto/man/X509_NAME_add_entry_by_txt.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_NAME_add_entry_by_txt 3" -.TH X509_NAME_add_entry_by_txt 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_NAME_add_entry_by_txt 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_NAME_get_index_by_NID.3 b/secure/lib/libcrypto/man/X509_NAME_get_index_by_NID.3 index bcd77e901b..0125ea6e54 100644 --- a/secure/lib/libcrypto/man/X509_NAME_get_index_by_NID.3 +++ b/secure/lib/libcrypto/man/X509_NAME_get_index_by_NID.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_NAME_get_index_by_NID 3" -.TH X509_NAME_get_index_by_NID 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_NAME_get_index_by_NID 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_NAME_print_ex.3 b/secure/lib/libcrypto/man/X509_NAME_print_ex.3 index f9c138461e..bd584f6f32 100644 --- a/secure/lib/libcrypto/man/X509_NAME_print_ex.3 +++ b/secure/lib/libcrypto/man/X509_NAME_print_ex.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_NAME_print_ex 3" -.TH X509_NAME_print_ex 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_NAME_print_ex 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/X509_STORE_CTX_get_error.3 b/secure/lib/libcrypto/man/X509_STORE_CTX_get_error.3 index 62df1c6989..d2b10aab8d 100644 --- a/secure/lib/libcrypto/man/X509_STORE_CTX_get_error.3 +++ b/secure/lib/libcrypto/man/X509_STORE_CTX_get_error.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_STORE_CTX_get_error 3" -.TH X509_STORE_CTX_get_error 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_STORE_CTX_get_error 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_STORE_CTX_get_ex_new_index.3 b/secure/lib/libcrypto/man/X509_STORE_CTX_get_ex_new_index.3 index ba59913d77..29678dc996 100644 --- a/secure/lib/libcrypto/man/X509_STORE_CTX_get_ex_new_index.3 +++ b/secure/lib/libcrypto/man/X509_STORE_CTX_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_STORE_CTX_get_ex_new_index 3" -.TH X509_STORE_CTX_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_STORE_CTX_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_STORE_CTX_new.3 b/secure/lib/libcrypto/man/X509_STORE_CTX_new.3 index 8e70ab5420..251fea2d70 100644 --- a/secure/lib/libcrypto/man/X509_STORE_CTX_new.3 +++ b/secure/lib/libcrypto/man/X509_STORE_CTX_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_STORE_CTX_new 3" -.TH X509_STORE_CTX_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_STORE_CTX_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_STORE_CTX_set_verify_cb.3 b/secure/lib/libcrypto/man/X509_STORE_CTX_set_verify_cb.3 index 26e92914f0..9517f2f080 100644 --- a/secure/lib/libcrypto/man/X509_STORE_CTX_set_verify_cb.3 +++ b/secure/lib/libcrypto/man/X509_STORE_CTX_set_verify_cb.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_STORE_CTX_set_verify_cb 3" -.TH X509_STORE_CTX_set_verify_cb 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_STORE_CTX_set_verify_cb 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_STORE_set_verify_cb_func.3 b/secure/lib/libcrypto/man/X509_STORE_set_verify_cb_func.3 index df6016a131..48d4eaaa84 100644 --- a/secure/lib/libcrypto/man/X509_STORE_set_verify_cb_func.3 +++ b/secure/lib/libcrypto/man/X509_STORE_set_verify_cb_func.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_STORE_set_verify_cb_func 3" -.TH X509_STORE_set_verify_cb_func 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_STORE_set_verify_cb_func 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/X509_VERIFY_PARAM_set_flags.3 b/secure/lib/libcrypto/man/X509_VERIFY_PARAM_set_flags.3 index d35e32a466..8580f89099 100644 --- a/secure/lib/libcrypto/man/X509_VERIFY_PARAM_set_flags.3 +++ b/secure/lib/libcrypto/man/X509_VERIFY_PARAM_set_flags.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_VERIFY_PARAM_set_flags 3" -.TH X509_VERIFY_PARAM_set_flags 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_VERIFY_PARAM_set_flags 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_new.3 b/secure/lib/libcrypto/man/X509_new.3 index 2229578af1..681d09125c 100644 --- a/secure/lib/libcrypto/man/X509_new.3 +++ b/secure/lib/libcrypto/man/X509_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_new 3" -.TH X509_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/X509_verify_cert.3 b/secure/lib/libcrypto/man/X509_verify_cert.3 index 88431cf442..7a67a5d0fa 100644 --- a/secure/lib/libcrypto/man/X509_verify_cert.3 +++ b/secure/lib/libcrypto/man/X509_verify_cert.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "X509_verify_cert 3" -.TH X509_verify_cert 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH X509_verify_cert 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/bio.3 b/secure/lib/libcrypto/man/bio.3 index a363520bea..64cb40db3b 100644 --- a/secure/lib/libcrypto/man/bio.3 +++ b/secure/lib/libcrypto/man/bio.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "bio 3" -.TH bio 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH bio 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/blowfish.3 b/secure/lib/libcrypto/man/blowfish.3 index 0a064d1d6b..bee504c0f9 100644 --- a/secure/lib/libcrypto/man/blowfish.3 +++ b/secure/lib/libcrypto/man/blowfish.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "blowfish 3" -.TH blowfish 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH blowfish 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/bn.3 b/secure/lib/libcrypto/man/bn.3 index e8df65ee94..370718f028 100644 --- a/secure/lib/libcrypto/man/bn.3 +++ b/secure/lib/libcrypto/man/bn.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "bn 3" -.TH bn 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH bn 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/bn_internal.3 b/secure/lib/libcrypto/man/bn_internal.3 index 232f6e8644..33005faf39 100644 --- a/secure/lib/libcrypto/man/bn_internal.3 +++ b/secure/lib/libcrypto/man/bn_internal.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "bn_internal 3" -.TH bn_internal 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH bn_internal 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/buffer.3 b/secure/lib/libcrypto/man/buffer.3 index 9f141acee2..0d9ab6a709 100644 --- a/secure/lib/libcrypto/man/buffer.3 +++ b/secure/lib/libcrypto/man/buffer.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "buffer 3" -.TH buffer 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH buffer 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/crypto.3 b/secure/lib/libcrypto/man/crypto.3 index 905d333ee2..46c815a376 100644 --- a/secure/lib/libcrypto/man/crypto.3 +++ b/secure/lib/libcrypto/man/crypto.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "crypto 3" -.TH crypto 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH crypto 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_ASN1_OBJECT.3 b/secure/lib/libcrypto/man/d2i_ASN1_OBJECT.3 index 6ee199c44b..f44ca3cb4c 100644 --- a/secure/lib/libcrypto/man/d2i_ASN1_OBJECT.3 +++ b/secure/lib/libcrypto/man/d2i_ASN1_OBJECT.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_ASN1_OBJECT 3" -.TH d2i_ASN1_OBJECT 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_ASN1_OBJECT 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_DHparams.3 b/secure/lib/libcrypto/man/d2i_DHparams.3 index 2c5f502406..7bd705118e 100644 --- a/secure/lib/libcrypto/man/d2i_DHparams.3 +++ b/secure/lib/libcrypto/man/d2i_DHparams.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_DHparams 3" -.TH d2i_DHparams 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_DHparams 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_DSAPublicKey.3 b/secure/lib/libcrypto/man/d2i_DSAPublicKey.3 index 95db5e1b09..5499c08135 100644 --- a/secure/lib/libcrypto/man/d2i_DSAPublicKey.3 +++ b/secure/lib/libcrypto/man/d2i_DSAPublicKey.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_DSAPublicKey 3" -.TH d2i_DSAPublicKey 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_DSAPublicKey 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_PKCS8PrivateKey.3 b/secure/lib/libcrypto/man/d2i_PKCS8PrivateKey.3 index 3afcf02bd8..4e07c4a4e7 100644 --- a/secure/lib/libcrypto/man/d2i_PKCS8PrivateKey.3 +++ b/secure/lib/libcrypto/man/d2i_PKCS8PrivateKey.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_PKCS8PrivateKey 3" -.TH d2i_PKCS8PrivateKey 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_PKCS8PrivateKey 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_RSAPublicKey.3 b/secure/lib/libcrypto/man/d2i_RSAPublicKey.3 index 08ba2c4bf9..2a76d48857 100644 --- a/secure/lib/libcrypto/man/d2i_RSAPublicKey.3 +++ b/secure/lib/libcrypto/man/d2i_RSAPublicKey.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_RSAPublicKey 3" -.TH d2i_RSAPublicKey 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_RSAPublicKey 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509.3 b/secure/lib/libcrypto/man/d2i_X509.3 index 729c521e94..67c1c7df72 100644 --- a/secure/lib/libcrypto/man/d2i_X509.3 +++ b/secure/lib/libcrypto/man/d2i_X509.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509 3" -.TH d2i_X509 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509_ALGOR.3 b/secure/lib/libcrypto/man/d2i_X509_ALGOR.3 index 2dceb877f8..88122abc0f 100644 --- a/secure/lib/libcrypto/man/d2i_X509_ALGOR.3 +++ b/secure/lib/libcrypto/man/d2i_X509_ALGOR.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509_ALGOR 3" -.TH d2i_X509_ALGOR 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509_ALGOR 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509_CRL.3 b/secure/lib/libcrypto/man/d2i_X509_CRL.3 index cb683a9c58..bac1f00de4 100644 --- a/secure/lib/libcrypto/man/d2i_X509_CRL.3 +++ b/secure/lib/libcrypto/man/d2i_X509_CRL.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509_CRL 3" -.TH d2i_X509_CRL 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509_CRL 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509_NAME.3 b/secure/lib/libcrypto/man/d2i_X509_NAME.3 index 60445fefce..45607b3152 100644 --- a/secure/lib/libcrypto/man/d2i_X509_NAME.3 +++ b/secure/lib/libcrypto/man/d2i_X509_NAME.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509_NAME 3" -.TH d2i_X509_NAME 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509_NAME 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509_REQ.3 b/secure/lib/libcrypto/man/d2i_X509_REQ.3 index e7e9169b3f..70e4094415 100644 --- a/secure/lib/libcrypto/man/d2i_X509_REQ.3 +++ b/secure/lib/libcrypto/man/d2i_X509_REQ.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509_REQ 3" -.TH d2i_X509_REQ 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509_REQ 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/d2i_X509_SIG.3 b/secure/lib/libcrypto/man/d2i_X509_SIG.3 index 48d0158be2..b0a106d4d8 100644 --- a/secure/lib/libcrypto/man/d2i_X509_SIG.3 +++ b/secure/lib/libcrypto/man/d2i_X509_SIG.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_X509_SIG 3" -.TH d2i_X509_SIG 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_X509_SIG 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/des.3 b/secure/lib/libcrypto/man/des.3 index 86025118eb..38e09e99c0 100644 --- a/secure/lib/libcrypto/man/des.3 +++ b/secure/lib/libcrypto/man/des.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "des 3" -.TH des 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH des 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/des_modes.7 b/secure/lib/libcrypto/man/des_modes.7 index fb79afcdac..dfefdbb030 100644 --- a/secure/lib/libcrypto/man/des_modes.7 +++ b/secure/lib/libcrypto/man/des_modes.7 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DES_MODES 7" -.TH DES_MODES 7 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DES_MODES 7 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/dh.3 b/secure/lib/libcrypto/man/dh.3 index a19f6a9d70..f3aefcb8d2 100644 --- a/secure/lib/libcrypto/man/dh.3 +++ b/secure/lib/libcrypto/man/dh.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "dh 3" -.TH dh 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH dh 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/dsa.3 b/secure/lib/libcrypto/man/dsa.3 index fd2de13a13..b9a3bc0c8a 100644 --- a/secure/lib/libcrypto/man/dsa.3 +++ b/secure/lib/libcrypto/man/dsa.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "dsa 3" -.TH dsa 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH dsa 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/ecdsa.3 b/secure/lib/libcrypto/man/ecdsa.3 index fe2c61161b..379a60cc0c 100644 --- a/secure/lib/libcrypto/man/ecdsa.3 +++ b/secure/lib/libcrypto/man/ecdsa.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ecdsa 3" -.TH ecdsa 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ecdsa 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/engine.3 b/secure/lib/libcrypto/man/engine.3 index c4f8edb7ba..203ce26498 100644 --- a/secure/lib/libcrypto/man/engine.3 +++ b/secure/lib/libcrypto/man/engine.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "engine 3" -.TH engine 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH engine 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/err.3 b/secure/lib/libcrypto/man/err.3 index 852f9a64d6..34cf7152ce 100644 --- a/secure/lib/libcrypto/man/err.3 +++ b/secure/lib/libcrypto/man/err.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "err 3" -.TH err 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH err 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/evp.3 b/secure/lib/libcrypto/man/evp.3 index ab85efb24c..84cc176c91 100644 --- a/secure/lib/libcrypto/man/evp.3 +++ b/secure/lib/libcrypto/man/evp.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "evp 3" -.TH evp 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH evp 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/hmac.3 b/secure/lib/libcrypto/man/hmac.3 index 588ee852ef..59c85787f5 100644 --- a/secure/lib/libcrypto/man/hmac.3 +++ b/secure/lib/libcrypto/man/hmac.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "hmac 3" -.TH hmac 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH hmac 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/i2d_CMS_bio_stream.3 b/secure/lib/libcrypto/man/i2d_CMS_bio_stream.3 index 5456bdae10..b2e2c782aa 100644 --- a/secure/lib/libcrypto/man/i2d_CMS_bio_stream.3 +++ b/secure/lib/libcrypto/man/i2d_CMS_bio_stream.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "i2d_CMS_bio_stream 3" -.TH i2d_CMS_bio_stream 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH i2d_CMS_bio_stream 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/i2d_PKCS7_bio_stream.3 b/secure/lib/libcrypto/man/i2d_PKCS7_bio_stream.3 index d8c65dfbe2..f69d2b87d2 100644 --- a/secure/lib/libcrypto/man/i2d_PKCS7_bio_stream.3 +++ b/secure/lib/libcrypto/man/i2d_PKCS7_bio_stream.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "i2d_PKCS7_bio_stream 3" -.TH i2d_PKCS7_bio_stream 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH i2d_PKCS7_bio_stream 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/lh_stats.3 b/secure/lib/libcrypto/man/lh_stats.3 index 23ca1f4f2c..771a2f775e 100644 --- a/secure/lib/libcrypto/man/lh_stats.3 +++ b/secure/lib/libcrypto/man/lh_stats.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "lh_stats 3" -.TH lh_stats 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH lh_stats 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/lhash.3 b/secure/lib/libcrypto/man/lhash.3 index 7cc56f9e28..744d33b5c0 100644 --- a/secure/lib/libcrypto/man/lhash.3 +++ b/secure/lib/libcrypto/man/lhash.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "lhash 3" -.TH lhash 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH lhash 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/md5.3 b/secure/lib/libcrypto/man/md5.3 index 043b059815..2bcbffedc3 100644 --- a/secure/lib/libcrypto/man/md5.3 +++ b/secure/lib/libcrypto/man/md5.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "md5 3" -.TH md5 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH md5 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/mdc2.3 b/secure/lib/libcrypto/man/mdc2.3 index 5fea84b0e1..a7cc3ea26c 100644 --- a/secure/lib/libcrypto/man/mdc2.3 +++ b/secure/lib/libcrypto/man/mdc2.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "mdc2 3" -.TH mdc2 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH mdc2 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/pem.3 b/secure/lib/libcrypto/man/pem.3 index fcf24aea3b..940aacf733 100644 --- a/secure/lib/libcrypto/man/pem.3 +++ b/secure/lib/libcrypto/man/pem.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "pem 3" -.TH pem 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH pem 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/rand.3 b/secure/lib/libcrypto/man/rand.3 index 80eb864112..3908a857e4 100644 --- a/secure/lib/libcrypto/man/rand.3 +++ b/secure/lib/libcrypto/man/rand.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "rand 3" -.TH rand 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH rand 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/rc4.3 b/secure/lib/libcrypto/man/rc4.3 index 2094ca355b..79803fc76b 100644 --- a/secure/lib/libcrypto/man/rc4.3 +++ b/secure/lib/libcrypto/man/rc4.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "rc4 3" -.TH rc4 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH rc4 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ripemd.3 b/secure/lib/libcrypto/man/ripemd.3 index ffbdb79ad4..0b8609cec5 100644 --- a/secure/lib/libcrypto/man/ripemd.3 +++ b/secure/lib/libcrypto/man/ripemd.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ripemd 3" -.TH ripemd 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ripemd 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/rsa.3 b/secure/lib/libcrypto/man/rsa.3 index baf29389f6..3f9198a24f 100644 --- a/secure/lib/libcrypto/man/rsa.3 +++ b/secure/lib/libcrypto/man/rsa.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "rsa 3" -.TH rsa 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH rsa 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/sha.3 b/secure/lib/libcrypto/man/sha.3 index b90cc44c67..b24caa5c1f 100644 --- a/secure/lib/libcrypto/man/sha.3 +++ b/secure/lib/libcrypto/man/sha.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "sha 3" -.TH sha 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH sha 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/threads.3 b/secure/lib/libcrypto/man/threads.3 index 25abfd809e..c60be4b83a 100644 --- a/secure/lib/libcrypto/man/threads.3 +++ b/secure/lib/libcrypto/man/threads.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "threads 3" -.TH threads 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH threads 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libcrypto/man/ui.3 b/secure/lib/libcrypto/man/ui.3 index 5e5e79c3bd..d65bf1bf3a 100644 --- a/secure/lib/libcrypto/man/ui.3 +++ b/secure/lib/libcrypto/man/ui.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ui 3" -.TH ui 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ui 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/ui_compat.3 b/secure/lib/libcrypto/man/ui_compat.3 index c438a84008..270ccb209e 100644 --- a/secure/lib/libcrypto/man/ui_compat.3 +++ b/secure/lib/libcrypto/man/ui_compat.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ui_compat 3" -.TH ui_compat 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ui_compat 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/man/x509.3 b/secure/lib/libcrypto/man/x509.3 index 5a41252123..2ee3b296fa 100644 --- a/secure/lib/libcrypto/man/x509.3 +++ b/secure/lib/libcrypto/man/x509.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "x509 3" -.TH x509 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH x509 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libcrypto/opensslconf-i386.h b/secure/lib/libcrypto/opensslconf-i386.h index 1f939f17fc..90856fd0c1 100644 --- a/secure/lib/libcrypto/opensslconf-i386.h +++ b/secure/lib/libcrypto/opensslconf-i386.h @@ -5,6 +5,9 @@ #ifndef OPENSSL_DOING_MAKEDEPEND +#ifndef OPENSSL_NO_EC_NISTP_64_GCC_128 +# define OPENSSL_NO_EC_NISTP_64_GCC_128 +#endif #ifndef OPENSSL_NO_GMP # define OPENSSL_NO_GMP #endif @@ -26,6 +29,9 @@ #ifndef OPENSSL_NO_RFC3779 # define OPENSSL_NO_RFC3779 #endif +#ifndef OPENSSL_NO_SCTP +# define OPENSSL_NO_SCTP +#endif #ifndef OPENSSL_NO_STORE # define OPENSSL_NO_STORE #endif @@ -44,6 +50,9 @@ who haven't had the time to do the appropriate changes in their applications. 
*/ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_EC_NISTP_64_GCC_128) && !defined(NO_EC_NISTP_64_GCC_128) +# define NO_EC_NISTP_64_GCC_128 +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif @@ -65,6 +74,9 @@ # if defined(OPENSSL_NO_RFC3779) && !defined(NO_RFC3779) # define NO_RFC3779 # endif +# if defined(OPENSSL_NO_SCTP) && !defined(NO_SCTP) +# define NO_SCTP +# endif # if defined(OPENSSL_NO_STORE) && !defined(NO_STORE) # define NO_STORE # endif diff --git a/secure/lib/libcrypto/opensslconf-x86_64.h b/secure/lib/libcrypto/opensslconf-x86_64.h index 49381cb2ce..b5f90ff20e 100644 --- a/secure/lib/libcrypto/opensslconf-x86_64.h +++ b/secure/lib/libcrypto/opensslconf-x86_64.h @@ -5,6 +5,9 @@ #ifndef OPENSSL_DOING_MAKEDEPEND +#ifndef OPENSSL_NO_EC_NISTP_64_GCC_128 +# define OPENSSL_NO_EC_NISTP_64_GCC_128 +#endif #ifndef OPENSSL_NO_GMP # define OPENSSL_NO_GMP #endif @@ -26,6 +29,9 @@ #ifndef OPENSSL_NO_RFC3779 # define OPENSSL_NO_RFC3779 #endif +#ifndef OPENSSL_NO_SCTP +# define OPENSSL_NO_SCTP +#endif #ifndef OPENSSL_NO_STORE # define OPENSSL_NO_STORE #endif @@ -44,6 +50,9 @@ who haven't had the time to do the appropriate changes in their applications. */ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_EC_NISTP_64_GCC_128) && !defined(NO_EC_NISTP_64_GCC_128) +# define NO_EC_NISTP_64_GCC_128 +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif @@ -65,6 +74,9 @@ # if defined(OPENSSL_NO_RFC3779) && !defined(NO_RFC3779) # define NO_RFC3779 # endif +# if defined(OPENSSL_NO_SCTP) && !defined(NO_SCTP) +# define NO_SCTP +# endif # if defined(OPENSSL_NO_STORE) && !defined(NO_STORE) # define NO_STORE # endif diff --git a/secure/lib/libssl/Makefile b/secure/lib/libssl/Makefile index 1b08d8c19e..fa73a00326 100644 --- a/secure/lib/libssl/Makefile +++ b/secure/lib/libssl/Makefile @@ -12,15 +12,18 @@ NOLINT= true .endif .include "../libcrypto/Makefile.inc" -SRCS+= bio_ssl.c d1_both.c d1_clnt.c d1_enc.c d1_lib.c d1_meth.c d1_pkt.c \ - d1_srvr.c s23_clnt.c s23_lib.c s23_meth.c s23_pkt.c s23_srvr.c \ - s2_clnt.c s2_enc.c s2_lib.c s2_meth.c s2_pkt.c s2_srvr.c \ - s3_both.c s3_clnt.c s3_enc.c s3_lib.c s3_meth.c s3_pkt.c \ - s3_srvr.c ssl_algs.c ssl_asn1.c ssl_cert.c ssl_ciph.c \ - ssl_err.c ssl_err2.c ssl_lib.c ssl_rsa.c ssl_sess.c ssl_stat.c \ - ssl_txt.c t1_clnt.c t1_enc.c t1_lib.c t1_meth.c t1_reneg.c t1_srvr.c - -INCS= dtls1.h kssl.h ssl.h ssl2.h ssl23.h ssl3.h tls1.h +SRCS+= s2_meth.c s2_srvr.c s2_clnt.c s2_lib.c s2_enc.c s2_pkt.c \ + s3_meth.c s3_srvr.c s3_clnt.c s3_lib.c s3_enc.c s3_pkt.c s3_both.c \ + s23_meth.c s23_srvr.c s23_clnt.c s23_lib.c s23_pkt.c \ + t1_meth.c t1_srvr.c t1_clnt.c t1_lib.c t1_enc.c \ + d1_meth.c d1_srvr.c d1_clnt.c d1_lib.c d1_pkt.c \ + d1_both.c d1_enc.c d1_srtp.c \ + ssl_lib.c ssl_err2.c ssl_cert.c ssl_sess.c \ + ssl_ciph.c ssl_stat.c ssl_rsa.c \ + ssl_asn1.c ssl_txt.c ssl_algs.c \ + bio_ssl.c ssl_err.c tls_srp.c t1_reneg.c + +INCS= ssl.h ssl2.h ssl3.h ssl23.h tls1.h dtls1.h kssl.h srtp.h INCSDIR=${INCLUDEDIR}/openssl LDADD= -lcrypto diff --git a/secure/lib/libssl/man/SSL_CIPHER_get_name.3 b/secure/lib/libssl/man/SSL_CIPHER_get_name.3 index 4fba5d13f4..0a271d636f 100644 --- a/secure/lib/libssl/man/SSL_CIPHER_get_name.3 +++ b/secure/lib/libssl/man/SSL_CIPHER_get_name.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CIPHER_get_name 3" -.TH SSL_CIPHER_get_name 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CIPHER_get_name 3 "2012-03-14" 
"1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_COMP_add_compression_method.3 b/secure/lib/libssl/man/SSL_COMP_add_compression_method.3 index 71dba7983d..dca32836d4 100644 --- a/secure/lib/libssl/man/SSL_COMP_add_compression_method.3 +++ b/secure/lib/libssl/man/SSL_COMP_add_compression_method.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_COMP_add_compression_method 3" -.TH SSL_COMP_add_compression_method 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_COMP_add_compression_method 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_add_extra_chain_cert.3 b/secure/lib/libssl/man/SSL_CTX_add_extra_chain_cert.3 index 8b78a6debb..6816e57409 100644 --- a/secure/lib/libssl/man/SSL_CTX_add_extra_chain_cert.3 +++ b/secure/lib/libssl/man/SSL_CTX_add_extra_chain_cert.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_add_extra_chain_cert 3" -.TH SSL_CTX_add_extra_chain_cert 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_add_extra_chain_cert 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_add_session.3 b/secure/lib/libssl/man/SSL_CTX_add_session.3 index 8f8d0530c8..9f93a281ce 100644 --- a/secure/lib/libssl/man/SSL_CTX_add_session.3 +++ b/secure/lib/libssl/man/SSL_CTX_add_session.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_add_session 3" -.TH SSL_CTX_add_session 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_add_session 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_ctrl.3 b/secure/lib/libssl/man/SSL_CTX_ctrl.3 index 92bceb6ba4..cc106d3725 100644 --- a/secure/lib/libssl/man/SSL_CTX_ctrl.3 +++ b/secure/lib/libssl/man/SSL_CTX_ctrl.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_ctrl 3" -.TH SSL_CTX_ctrl 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_ctrl 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_flush_sessions.3 b/secure/lib/libssl/man/SSL_CTX_flush_sessions.3 index 4ae5210eed..27a3827214 100644 --- a/secure/lib/libssl/man/SSL_CTX_flush_sessions.3 +++ b/secure/lib/libssl/man/SSL_CTX_flush_sessions.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_flush_sessions 3" -.TH SSL_CTX_flush_sessions 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_flush_sessions 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_free.3 b/secure/lib/libssl/man/SSL_CTX_free.3 index 1206cc4146..9c5f982b2d 100644 --- a/secure/lib/libssl/man/SSL_CTX_free.3 +++ b/secure/lib/libssl/man/SSL_CTX_free.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_free 3" -.TH SSL_CTX_free 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_free 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_get_ex_new_index.3 b/secure/lib/libssl/man/SSL_CTX_get_ex_new_index.3 index 6697ad8799..8ea60ea253 100644 --- a/secure/lib/libssl/man/SSL_CTX_get_ex_new_index.3 +++ b/secure/lib/libssl/man/SSL_CTX_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_get_ex_new_index 3" -.TH SSL_CTX_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_get_verify_mode.3 b/secure/lib/libssl/man/SSL_CTX_get_verify_mode.3 index 64ab098a34..7c8face96a 100644 --- a/secure/lib/libssl/man/SSL_CTX_get_verify_mode.3 +++ b/secure/lib/libssl/man/SSL_CTX_get_verify_mode.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_get_verify_mode 3" -.TH SSL_CTX_get_verify_mode 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_get_verify_mode 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_load_verify_locations.3 b/secure/lib/libssl/man/SSL_CTX_load_verify_locations.3 index 9dd0ca4106..a8da28f5e8 100644 --- a/secure/lib/libssl/man/SSL_CTX_load_verify_locations.3 +++ b/secure/lib/libssl/man/SSL_CTX_load_verify_locations.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_load_verify_locations 3" -.TH SSL_CTX_load_verify_locations 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_load_verify_locations 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_new.3 b/secure/lib/libssl/man/SSL_CTX_new.3 index 81de6ff429..f4c3216c5c 100644 --- a/secure/lib/libssl/man/SSL_CTX_new.3 +++ b/secure/lib/libssl/man/SSL_CTX_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_new 3" -.TH SSL_CTX_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
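The libssl Makefile hunk above picks up the two source files that are new with 1.0.1, d1_srtp.c (the DTLS-SRTP use_srtp extension, RFC 5764) and tls_srp.c (TLS-SRP), and installs the new srtp.h header. A minimal sketch of how an application might request SRTP key negotiation with the 1.0.1 API; the profile string and error handling here are illustrative, not part of this patch:

    #include <openssl/ssl.h>
    #include <openssl/srtp.h>   /* installed by the INCS change above */

    static SSL_CTX *make_dtls_srtp_ctx(void)
    {
        SSL_CTX *ctx;

        SSL_library_init();
        ctx = SSL_CTX_new(DTLSv1_method());
        if (ctx == NULL)
            return NULL;

        /* Offer the use_srtp extension; this call returns 0 on success. */
        if (SSL_CTX_set_tlsext_use_srtp(ctx,
                "SRTP_AES128_CM_SHA1_80:SRTP_AES128_CM_SHA1_32") != 0) {
            SSL_CTX_free(ctx);
            return NULL;
        }

        /* After the DTLS handshake, SSL_get_selected_srtp_profile() reports
         * which profile the peer accepted. */
        return ctx;
    }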
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_sess_number.3 b/secure/lib/libssl/man/SSL_CTX_sess_number.3 index e9eda59ba7..c567b17a6f 100644 --- a/secure/lib/libssl/man/SSL_CTX_sess_number.3 +++ b/secure/lib/libssl/man/SSL_CTX_sess_number.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_sess_number 3" -.TH SSL_CTX_sess_number 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_sess_number 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_sess_set_cache_size.3 b/secure/lib/libssl/man/SSL_CTX_sess_set_cache_size.3 index 285290ed48..9e796a33e9 100644 --- a/secure/lib/libssl/man/SSL_CTX_sess_set_cache_size.3 +++ b/secure/lib/libssl/man/SSL_CTX_sess_set_cache_size.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_sess_set_cache_size 3" -.TH SSL_CTX_sess_set_cache_size 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_sess_set_cache_size 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_sess_set_get_cb.3 b/secure/lib/libssl/man/SSL_CTX_sess_set_get_cb.3 index 80272826ff..a7a2ba8e55 100644 --- a/secure/lib/libssl/man/SSL_CTX_sess_set_get_cb.3 +++ b/secure/lib/libssl/man/SSL_CTX_sess_set_get_cb.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_sess_set_get_cb 3" -.TH SSL_CTX_sess_set_get_cb 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_sess_set_get_cb 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_sessions.3 b/secure/lib/libssl/man/SSL_CTX_sessions.3 index 1ebb1897e0..a074ced19d 100644 --- a/secure/lib/libssl/man/SSL_CTX_sessions.3 +++ b/secure/lib/libssl/man/SSL_CTX_sessions.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_sessions 3" -.TH SSL_CTX_sessions 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_sessions 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_cert_store.3 b/secure/lib/libssl/man/SSL_CTX_set_cert_store.3 index a7c0d0a4aa..b375a6ceb7 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_cert_store.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_cert_store.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_cert_store 3" -.TH SSL_CTX_set_cert_store 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_cert_store 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_cert_verify_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_cert_verify_callback.3 index ecd05d588a..c9a1965690 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_cert_verify_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_cert_verify_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_cert_verify_callback 3" -.TH SSL_CTX_set_cert_verify_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_cert_verify_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_cipher_list.3 b/secure/lib/libssl/man/SSL_CTX_set_cipher_list.3 index 47178f14f3..0a5e5b5129 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_cipher_list.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_cipher_list.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_cipher_list 3" -.TH SSL_CTX_set_cipher_list 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_cipher_list 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_client_CA_list.3 b/secure/lib/libssl/man/SSL_CTX_set_client_CA_list.3 index 230053398d..21ece7a9fb 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_client_CA_list.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_client_CA_list.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_client_CA_list 3" -.TH SSL_CTX_set_client_CA_list 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_client_CA_list 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_client_cert_cb.3 b/secure/lib/libssl/man/SSL_CTX_set_client_cert_cb.3 index 7a481db22c..51a4f9b54c 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_client_cert_cb.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_client_cert_cb.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_client_cert_cb 3" -.TH SSL_CTX_set_client_cert_cb 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_client_cert_cb 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_default_passwd_cb.3 b/secure/lib/libssl/man/SSL_CTX_set_default_passwd_cb.3 index 8e2031cde7..15bd8a2729 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_default_passwd_cb.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_default_passwd_cb.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_default_passwd_cb 3" -.TH SSL_CTX_set_default_passwd_cb 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_default_passwd_cb 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_generate_session_id.3 b/secure/lib/libssl/man/SSL_CTX_set_generate_session_id.3 index 383b567360..71c6ecabf0 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_generate_session_id.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_generate_session_id.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_generate_session_id 3" -.TH SSL_CTX_set_generate_session_id 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_generate_session_id 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_info_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_info_callback.3 index 06f3c63904..bb45094e92 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_info_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_info_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_info_callback 3" -.TH SSL_CTX_set_info_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_info_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_max_cert_list.3 b/secure/lib/libssl/man/SSL_CTX_set_max_cert_list.3 index a4df6720fb..0a9db1f1f9 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_max_cert_list.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_max_cert_list.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_max_cert_list 3" -.TH SSL_CTX_set_max_cert_list 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_max_cert_list 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_mode.3 b/secure/lib/libssl/man/SSL_CTX_set_mode.3 index 65aea720a1..23b98a5065 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_mode.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_mode.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_mode 3" -.TH SSL_CTX_set_mode 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_mode 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_msg_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_msg_callback.3 index a53bfafde3..e260d77cce 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_msg_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_msg_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_msg_callback 3" -.TH SSL_CTX_set_msg_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_msg_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_options.3 b/secure/lib/libssl/man/SSL_CTX_set_options.3 index 3f1954ce3c..aa3e3e87ff 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_options.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_options.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_options 3" -.TH SSL_CTX_set_options 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_options 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_psk_client_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_psk_client_callback.3 index e7021a7221..7390c912b7 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_psk_client_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_psk_client_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_psk_client_callback 3" -.TH SSL_CTX_set_psk_client_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_psk_client_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_quiet_shutdown.3 b/secure/lib/libssl/man/SSL_CTX_set_quiet_shutdown.3 index fadd5c2700..b5b369e2be 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_quiet_shutdown.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_quiet_shutdown.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_quiet_shutdown 3" -.TH SSL_CTX_set_quiet_shutdown 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_quiet_shutdown 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_session_cache_mode.3 b/secure/lib/libssl/man/SSL_CTX_set_session_cache_mode.3 index eb885be61f..62b05da732 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_session_cache_mode.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_session_cache_mode.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_session_cache_mode 3" -.TH SSL_CTX_set_session_cache_mode 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_session_cache_mode 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_session_id_context.3 b/secure/lib/libssl/man/SSL_CTX_set_session_id_context.3 index 930a9c9d73..9a7c88ca39 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_session_id_context.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_session_id_context.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_session_id_context 3" -.TH SSL_CTX_set_session_id_context 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_session_id_context 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_ssl_version.3 b/secure/lib/libssl/man/SSL_CTX_set_ssl_version.3 index 935b1c1971..9884bd9a6e 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_ssl_version.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_ssl_version.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_ssl_version 3" -.TH SSL_CTX_set_ssl_version 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_ssl_version 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_timeout.3 b/secure/lib/libssl/man/SSL_CTX_set_timeout.3 index 2c2fb7bc46..89c01ad268 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_timeout.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_timeout.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_timeout 3" -.TH SSL_CTX_set_timeout 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_timeout 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_tmp_dh_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_tmp_dh_callback.3 index 7eda14838b..3127a868e3 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_tmp_dh_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_tmp_dh_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_tmp_dh_callback 3" -.TH SSL_CTX_set_tmp_dh_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_tmp_dh_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_tmp_rsa_callback.3 b/secure/lib/libssl/man/SSL_CTX_set_tmp_rsa_callback.3 index b4e8c5c658..2ff86ce646 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_tmp_rsa_callback.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_tmp_rsa_callback.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_tmp_rsa_callback 3" -.TH SSL_CTX_set_tmp_rsa_callback 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_tmp_rsa_callback 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_set_verify.3 b/secure/lib/libssl/man/SSL_CTX_set_verify.3 index 44c2a0f63d..5414cbb56f 100644 --- a/secure/lib/libssl/man/SSL_CTX_set_verify.3 +++ b/secure/lib/libssl/man/SSL_CTX_set_verify.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_set_verify 3" -.TH SSL_CTX_set_verify 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_set_verify 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_use_certificate.3 b/secure/lib/libssl/man/SSL_CTX_use_certificate.3 index 34d18587ac..e2d778cff6 100644 --- a/secure/lib/libssl/man/SSL_CTX_use_certificate.3 +++ b/secure/lib/libssl/man/SSL_CTX_use_certificate.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_use_certificate 3" -.TH SSL_CTX_use_certificate 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_use_certificate 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_CTX_use_psk_identity_hint.3 b/secure/lib/libssl/man/SSL_CTX_use_psk_identity_hint.3 index 55fec908a7..4fe7c54182 100644 --- a/secure/lib/libssl/man/SSL_CTX_use_psk_identity_hint.3 +++ b/secure/lib/libssl/man/SSL_CTX_use_psk_identity_hint.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_CTX_use_psk_identity_hint 3" -.TH SSL_CTX_use_psk_identity_hint 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_CTX_use_psk_identity_hint 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_SESSION_free.3 b/secure/lib/libssl/man/SSL_SESSION_free.3 index 1730bf9fd7..f12186697e 100644 --- a/secure/lib/libssl/man/SSL_SESSION_free.3 +++ b/secure/lib/libssl/man/SSL_SESSION_free.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_SESSION_free 3" -.TH SSL_SESSION_free 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_SESSION_free 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_SESSION_get_ex_new_index.3 b/secure/lib/libssl/man/SSL_SESSION_get_ex_new_index.3 index f1cb0ebcab..c64fad3900 100644 --- a/secure/lib/libssl/man/SSL_SESSION_get_ex_new_index.3 +++ b/secure/lib/libssl/man/SSL_SESSION_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_SESSION_get_ex_new_index 3" -.TH SSL_SESSION_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_SESSION_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_SESSION_get_time.3 b/secure/lib/libssl/man/SSL_SESSION_get_time.3 index 5e153edb21..1707ac5c83 100644 --- a/secure/lib/libssl/man/SSL_SESSION_get_time.3 +++ b/secure/lib/libssl/man/SSL_SESSION_get_time.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_SESSION_get_time 3" -.TH SSL_SESSION_get_time 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_SESSION_get_time 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_accept.3 b/secure/lib/libssl/man/SSL_accept.3 index 8bda5130e4..e13c034487 100644 --- a/secure/lib/libssl/man/SSL_accept.3 +++ b/secure/lib/libssl/man/SSL_accept.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_accept 3" -.TH SSL_accept 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_accept 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_alert_type_string.3 b/secure/lib/libssl/man/SSL_alert_type_string.3 index 4f58f9f77d..0b78aceb3b 100644 --- a/secure/lib/libssl/man/SSL_alert_type_string.3 +++ b/secure/lib/libssl/man/SSL_alert_type_string.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_alert_type_string 3" -.TH SSL_alert_type_string 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_alert_type_string 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l @@ -337,6 +337,11 @@ satisfy a request; the process might receive security parameters (key length, authentication, etc.) at startup and it might be difficult to communicate changes to these parameters after that point. This message is always a warning. +.ie n .IP """\s-1UP\s0""/""unknown \s-1PSK\s0 identity""" 4 +.el .IP "``\s-1UP\s0''/``unknown \s-1PSK\s0 identity''" 4 +.IX Item "UP/unknown PSK identity" +Sent by the server to indicate that it does not recognize a \s-1PSK\s0 +identity or an \s-1SRP\s0 identity. .ie n .IP """\s-1UK\s0""/""unknown""" 4 .el .IP "``\s-1UK\s0''/``unknown''" 4 .IX Item "UK/unknown" diff --git a/secure/lib/libssl/man/SSL_clear.3 b/secure/lib/libssl/man/SSL_clear.3 index 8d7c71554a..239688034a 100644 --- a/secure/lib/libssl/man/SSL_clear.3 +++ b/secure/lib/libssl/man/SSL_clear.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_clear 3" -.TH SSL_clear 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_clear 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_connect.3 b/secure/lib/libssl/man/SSL_connect.3 index aa85ef2a2e..50a55601fb 100644 --- a/secure/lib/libssl/man/SSL_connect.3 +++ b/secure/lib/libssl/man/SSL_connect.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_connect 3" -.TH SSL_connect 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_connect 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_do_handshake.3 b/secure/lib/libssl/man/SSL_do_handshake.3 index cf44e7c9b8..51d3d3a606 100644 --- a/secure/lib/libssl/man/SSL_do_handshake.3 +++ b/secure/lib/libssl/man/SSL_do_handshake.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_do_handshake 3" -.TH SSL_do_handshake 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_do_handshake 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_free.3 b/secure/lib/libssl/man/SSL_free.3 index 59c9c4c2d5..2da1d58e74 100644 --- a/secure/lib/libssl/man/SSL_free.3 +++ b/secure/lib/libssl/man/SSL_free.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_free 3" -.TH SSL_free 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_free 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_SSL_CTX.3 b/secure/lib/libssl/man/SSL_get_SSL_CTX.3 index 92699a4d7a..5fe734de91 100644 --- a/secure/lib/libssl/man/SSL_get_SSL_CTX.3 +++ b/secure/lib/libssl/man/SSL_get_SSL_CTX.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_SSL_CTX 3" -.TH SSL_get_SSL_CTX 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_SSL_CTX 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_ciphers.3 b/secure/lib/libssl/man/SSL_get_ciphers.3 index b506bad650..e41da62f18 100644 --- a/secure/lib/libssl/man/SSL_get_ciphers.3 +++ b/secure/lib/libssl/man/SSL_get_ciphers.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_ciphers 3" -.TH SSL_get_ciphers 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_ciphers 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_client_CA_list.3 b/secure/lib/libssl/man/SSL_get_client_CA_list.3 index 060924c025..10061e2a93 100644 --- a/secure/lib/libssl/man/SSL_get_client_CA_list.3 +++ b/secure/lib/libssl/man/SSL_get_client_CA_list.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_client_CA_list 3" -.TH SSL_get_client_CA_list 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_client_CA_list 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_current_cipher.3 b/secure/lib/libssl/man/SSL_get_current_cipher.3 index 5fb8d291eb..dec407284b 100644 --- a/secure/lib/libssl/man/SSL_get_current_cipher.3 +++ b/secure/lib/libssl/man/SSL_get_current_cipher.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_current_cipher 3" -.TH SSL_get_current_cipher 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_current_cipher 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_default_timeout.3 b/secure/lib/libssl/man/SSL_get_default_timeout.3 index 82bf9b4c6b..5ca7f45a95 100644 --- a/secure/lib/libssl/man/SSL_get_default_timeout.3 +++ b/secure/lib/libssl/man/SSL_get_default_timeout.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_default_timeout 3" -.TH SSL_get_default_timeout 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_default_timeout 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_error.3 b/secure/lib/libssl/man/SSL_get_error.3 index 6a23dbdf5d..2a1e1b3d83 100644 --- a/secure/lib/libssl/man/SSL_get_error.3 +++ b/secure/lib/libssl/man/SSL_get_error.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_error 3" -.TH SSL_get_error 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_error 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_ex_data_X509_STORE_CTX_idx.3 b/secure/lib/libssl/man/SSL_get_ex_data_X509_STORE_CTX_idx.3 index b511174383..5d0599afd8 100644 --- a/secure/lib/libssl/man/SSL_get_ex_data_X509_STORE_CTX_idx.3 +++ b/secure/lib/libssl/man/SSL_get_ex_data_X509_STORE_CTX_idx.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_ex_data_X509_STORE_CTX_idx 3" -.TH SSL_get_ex_data_X509_STORE_CTX_idx 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_ex_data_X509_STORE_CTX_idx 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_ex_new_index.3 b/secure/lib/libssl/man/SSL_get_ex_new_index.3 index 8afa8c7312..d35e22333f 100644 --- a/secure/lib/libssl/man/SSL_get_ex_new_index.3 +++ b/secure/lib/libssl/man/SSL_get_ex_new_index.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_ex_new_index 3" -.TH SSL_get_ex_new_index 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_ex_new_index 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_fd.3 b/secure/lib/libssl/man/SSL_get_fd.3 index d7f46e4328..4de8b9daf3 100644 --- a/secure/lib/libssl/man/SSL_get_fd.3 +++ b/secure/lib/libssl/man/SSL_get_fd.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_fd 3" -.TH SSL_get_fd 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_fd 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_peer_cert_chain.3 b/secure/lib/libssl/man/SSL_get_peer_cert_chain.3 index d702b4a372..c4e67b5100 100644 --- a/secure/lib/libssl/man/SSL_get_peer_cert_chain.3 +++ b/secure/lib/libssl/man/SSL_get_peer_cert_chain.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_peer_cert_chain 3" -.TH SSL_get_peer_cert_chain 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_peer_cert_chain 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_peer_certificate.3 b/secure/lib/libssl/man/SSL_get_peer_certificate.3 index 24343b78f6..fd490cf611 100644 --- a/secure/lib/libssl/man/SSL_get_peer_certificate.3 +++ b/secure/lib/libssl/man/SSL_get_peer_certificate.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_peer_certificate 3" -.TH SSL_get_peer_certificate 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_peer_certificate 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_psk_identity.3 b/secure/lib/libssl/man/SSL_get_psk_identity.3 index f351adc76b..0d7f1cf0ae 100644 --- a/secure/lib/libssl/man/SSL_get_psk_identity.3 +++ b/secure/lib/libssl/man/SSL_get_psk_identity.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_psk_identity 3" -.TH SSL_get_psk_identity 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_psk_identity 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_rbio.3 b/secure/lib/libssl/man/SSL_get_rbio.3 index f42e5429b2..55f1a4c1a9 100644 --- a/secure/lib/libssl/man/SSL_get_rbio.3 +++ b/secure/lib/libssl/man/SSL_get_rbio.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_rbio 3" -.TH SSL_get_rbio 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_rbio 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_session.3 b/secure/lib/libssl/man/SSL_get_session.3 index 56a1123207..a432680e67 100644 --- a/secure/lib/libssl/man/SSL_get_session.3 +++ b/secure/lib/libssl/man/SSL_get_session.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_session 3" -.TH SSL_get_session 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_session 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_verify_result.3 b/secure/lib/libssl/man/SSL_get_verify_result.3 index 184311c474..cd1cbe4abb 100644 --- a/secure/lib/libssl/man/SSL_get_verify_result.3 +++ b/secure/lib/libssl/man/SSL_get_verify_result.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_verify_result 3" -.TH SSL_get_verify_result 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_verify_result 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_get_version.3 b/secure/lib/libssl/man/SSL_get_version.3 index 14a068bfb4..9e1d7df672 100644 --- a/secure/lib/libssl/man/SSL_get_version.3 +++ b/secure/lib/libssl/man/SSL_get_version.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_get_version 3" -.TH SSL_get_version 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_get_version 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_library_init.3 b/secure/lib/libssl/man/SSL_library_init.3 index 2ae48e77f4..700c595c04 100644 --- a/secure/lib/libssl/man/SSL_library_init.3 +++ b/secure/lib/libssl/man/SSL_library_init.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_library_init 3" -.TH SSL_library_init 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_library_init 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_load_client_CA_file.3 b/secure/lib/libssl/man/SSL_load_client_CA_file.3 index c37b3a52e0..b26a80d0a1 100644 --- a/secure/lib/libssl/man/SSL_load_client_CA_file.3 +++ b/secure/lib/libssl/man/SSL_load_client_CA_file.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_load_client_CA_file 3" -.TH SSL_load_client_CA_file 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_load_client_CA_file 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_new.3 b/secure/lib/libssl/man/SSL_new.3 index 6224fb5744..eb496d277f 100644 --- a/secure/lib/libssl/man/SSL_new.3 +++ b/secure/lib/libssl/man/SSL_new.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_new 3" -.TH SSL_new 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_new 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_pending.3 b/secure/lib/libssl/man/SSL_pending.3 index ec4310f120..cbc0d64401 100644 --- a/secure/lib/libssl/man/SSL_pending.3 +++ b/secure/lib/libssl/man/SSL_pending.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_pending 3" -.TH SSL_pending 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_pending 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_read.3 b/secure/lib/libssl/man/SSL_read.3 index 1800a6ab82..01f21c97b0 100644 --- a/secure/lib/libssl/man/SSL_read.3 +++ b/secure/lib/libssl/man/SSL_read.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_read 3" -.TH SSL_read 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_read 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_rstate_string.3 b/secure/lib/libssl/man/SSL_rstate_string.3 index 9ec4f968d3..8c4c52b233 100644 --- a/secure/lib/libssl/man/SSL_rstate_string.3 +++ b/secure/lib/libssl/man/SSL_rstate_string.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_rstate_string 3" -.TH SSL_rstate_string 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_rstate_string 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_session_reused.3 b/secure/lib/libssl/man/SSL_session_reused.3 index dea81e8da3..4a8a663a9e 100644 --- a/secure/lib/libssl/man/SSL_session_reused.3 +++ b/secure/lib/libssl/man/SSL_session_reused.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_session_reused 3" -.TH SSL_session_reused 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_session_reused 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_bio.3 b/secure/lib/libssl/man/SSL_set_bio.3 index e9d4ae1cd9..98bdaf12f0 100644 --- a/secure/lib/libssl/man/SSL_set_bio.3 +++ b/secure/lib/libssl/man/SSL_set_bio.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_bio 3" -.TH SSL_set_bio 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_bio 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_connect_state.3 b/secure/lib/libssl/man/SSL_set_connect_state.3 index 0379be6fd9..379175dbd5 100644 --- a/secure/lib/libssl/man/SSL_set_connect_state.3 +++ b/secure/lib/libssl/man/SSL_set_connect_state.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_connect_state 3" -.TH SSL_set_connect_state 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_connect_state 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_fd.3 b/secure/lib/libssl/man/SSL_set_fd.3 index aa79059adc..6e896b7081 100644 --- a/secure/lib/libssl/man/SSL_set_fd.3 +++ b/secure/lib/libssl/man/SSL_set_fd.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_fd 3" -.TH SSL_set_fd 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_fd 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_session.3 b/secure/lib/libssl/man/SSL_set_session.3 index 1afab462c3..21da75981f 100644 --- a/secure/lib/libssl/man/SSL_set_session.3 +++ b/secure/lib/libssl/man/SSL_set_session.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_session 3" -.TH SSL_set_session 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_session 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_shutdown.3 b/secure/lib/libssl/man/SSL_set_shutdown.3 index 94cbf281ee..562674c93c 100644 --- a/secure/lib/libssl/man/SSL_set_shutdown.3 +++ b/secure/lib/libssl/man/SSL_set_shutdown.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_shutdown 3" -.TH SSL_set_shutdown 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_shutdown 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_set_verify_result.3 b/secure/lib/libssl/man/SSL_set_verify_result.3 index ab07cd164a..daa1d07ba5 100644 --- a/secure/lib/libssl/man/SSL_set_verify_result.3 +++ b/secure/lib/libssl/man/SSL_set_verify_result.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_set_verify_result 3" -.TH SSL_set_verify_result 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_set_verify_result 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_shutdown.3 b/secure/lib/libssl/man/SSL_shutdown.3 index 1a031d09e2..9fa79360ba 100644 --- a/secure/lib/libssl/man/SSL_shutdown.3 +++ b/secure/lib/libssl/man/SSL_shutdown.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_shutdown 3" -.TH SSL_shutdown 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_shutdown 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_state_string.3 b/secure/lib/libssl/man/SSL_state_string.3 index 5b68549f54..ad2ca19a4b 100644 --- a/secure/lib/libssl/man/SSL_state_string.3 +++ b/secure/lib/libssl/man/SSL_state_string.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_state_string 3" -.TH SSL_state_string 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_state_string 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. 
Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_want.3 b/secure/lib/libssl/man/SSL_want.3 index 000304b5ee..40355776ae 100644 --- a/secure/lib/libssl/man/SSL_want.3 +++ b/secure/lib/libssl/man/SSL_want.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_want 3" -.TH SSL_want 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_want 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/SSL_write.3 b/secure/lib/libssl/man/SSL_write.3 index c52017d9c5..17ad599586 100644 --- a/secure/lib/libssl/man/SSL_write.3 +++ b/secure/lib/libssl/man/SSL_write.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "SSL_write 3" -.TH SSL_write 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH SSL_write 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/d2i_SSL_SESSION.3 b/secure/lib/libssl/man/d2i_SSL_SESSION.3 index 11522d74cb..904980a551 100644 --- a/secure/lib/libssl/man/d2i_SSL_SESSION.3 +++ b/secure/lib/libssl/man/d2i_SSL_SESSION.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "d2i_SSL_SESSION 3" -.TH d2i_SSL_SESSION 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH d2i_SSL_SESSION 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/lib/libssl/man/ssl.3 b/secure/lib/libssl/man/ssl.3 index 23ab4afee7..ff754d1440 100644 --- a/secure/lib/libssl/man/ssl.3 +++ b/secure/lib/libssl/man/ssl.3 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ssl 3" -.TH ssl 3 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ssl 3 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/usr.bin/openssl/Makefile b/secure/usr.bin/openssl/Makefile index 04dc458a5a..ddc08dbf1e 100644 --- a/secure/usr.bin/openssl/Makefile +++ b/secure/usr.bin/openssl/Makefile @@ -14,14 +14,14 @@ LDADD= -lssl -lcrypto CFLAGS+= -DMONOLITH -I${.CURDIR} -SRCS+= app_rand.c apps.c asn1pars.c ca.c ciphers.c cms.c crl.c crl2p7.c \ - dgst.c dh.c dhparam.c dsa.c dsaparam.c ec.c ecparam.c enc.c \ - engine.c errstr.c gendh.c gendsa.c genpkey.c \ - genrsa.c nseq.c ocsp.c openssl.c \ - passwd.c pkcs12.c pkcs7.c pkcs8.c pkey.c pkeyparam.c pkeyutl.c \ - prime.c rand.c req.c rsa.c \ - rsautl.c s_cb.c s_client.c s_server.c s_socket.c s_time.c sess_id.c \ - smime.c speed.c spkac.c ts.c verify.c version.c x509.c +SRCS= openssl.c +SRCS+= verify.c asn1pars.c req.c dgst.c dh.c dhparam.c enc.c passwd.c gendh.c errstr.c \ + ca.c pkcs7.c crl2p7.c crl.c \ + rsa.c rsautl.c dsa.c dsaparam.c ec.c ecparam.c \ + x509.c genrsa.c gendsa.c genpkey.c s_server.c s_client.c speed.c \ + s_time.c apps.c s_cb.c s_socket.c app_rand.c version.c sess_id.c \ + ciphers.c nseq.c pkcs12.c pkcs8.c pkey.c pkeyparam.c pkeyutl.c \ + spkac.c smime.c cms.c rand.c engine.c ocsp.c prime.c ts.c srp.c .include diff --git a/secure/usr.bin/openssl/man/CA.pl.1 b/secure/usr.bin/openssl/man/CA.pl.1 index c43c5af44a..6e980f364f 100644 --- a/secure/usr.bin/openssl/man/CA.pl.1 +++ b/secure/usr.bin/openssl/man/CA.pl.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CA.PL 1" -.TH CA.PL 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CA.PL 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/asn1parse.1 b/secure/usr.bin/openssl/man/asn1parse.1 index 77944ff649..24926fd201 100644 --- a/secure/usr.bin/openssl/man/asn1parse.1 +++ b/secure/usr.bin/openssl/man/asn1parse.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ASN1PARSE 1" -.TH ASN1PARSE 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ASN1PARSE 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/ca.1 b/secure/usr.bin/openssl/man/ca.1 index a39c677cde..ddf20272de 100644 --- a/secure/usr.bin/openssl/man/ca.1 +++ b/secure/usr.bin/openssl/man/ca.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CA 1" -.TH CA 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CA 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/ciphers.1 b/secure/usr.bin/openssl/man/ciphers.1 index 423cdf972b..d0e39f8aa8 100644 --- a/secure/usr.bin/openssl/man/ciphers.1 +++ b/secure/usr.bin/openssl/man/ciphers.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CIPHERS 1" -.TH CIPHERS 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CIPHERS 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/usr.bin/openssl/man/cms.1 b/secure/usr.bin/openssl/man/cms.1 index 6ecc77e48a..761891e923 100644 --- a/secure/usr.bin/openssl/man/cms.1 +++ b/secure/usr.bin/openssl/man/cms.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CMS 1" -.TH CMS 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CMS 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/config.5 b/secure/usr.bin/openssl/man/config.5 index 28bf9956e7..97b7b9690a 100644 --- a/secure/usr.bin/openssl/man/config.5 +++ b/secure/usr.bin/openssl/man/config.5 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CONFIG 5" -.TH CONFIG 5 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CONFIG 5 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/crl.1 b/secure/usr.bin/openssl/man/crl.1 index ba4d613242..6da2aed5e0 100644 --- a/secure/usr.bin/openssl/man/crl.1 +++ b/secure/usr.bin/openssl/man/crl.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CRL 1" -.TH CRL 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CRL 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/crl2pkcs7.1 b/secure/usr.bin/openssl/man/crl2pkcs7.1 index 4142e99404..0b3355878c 100644 --- a/secure/usr.bin/openssl/man/crl2pkcs7.1 +++ b/secure/usr.bin/openssl/man/crl2pkcs7.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "CRL2PKCS7 1" -.TH CRL2PKCS7 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH CRL2PKCS7 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/dgst.1 b/secure/usr.bin/openssl/man/dgst.1 index 0a930f1abe..6a2c4a3d31 100644 --- a/secure/usr.bin/openssl/man/dgst.1 +++ b/secure/usr.bin/openssl/man/dgst.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DGST 1" -.TH DGST 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DGST 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/dhparam.1 b/secure/usr.bin/openssl/man/dhparam.1 index 108b332c62..c5a3a70b57 100644 --- a/secure/usr.bin/openssl/man/dhparam.1 +++ b/secure/usr.bin/openssl/man/dhparam.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DHPARAM 1" -.TH DHPARAM 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DHPARAM 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/usr.bin/openssl/man/dsa.1 b/secure/usr.bin/openssl/man/dsa.1 index 0124ed79b7..ce1de6a3be 100644 --- a/secure/usr.bin/openssl/man/dsa.1 +++ b/secure/usr.bin/openssl/man/dsa.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSA 1" -.TH DSA 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSA 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/dsaparam.1 b/secure/usr.bin/openssl/man/dsaparam.1 index 9f225ff960..9bc1dcdd77 100644 --- a/secure/usr.bin/openssl/man/dsaparam.1 +++ b/secure/usr.bin/openssl/man/dsaparam.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "DSAPARAM 1" -.TH DSAPARAM 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH DSAPARAM 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/ec.1 b/secure/usr.bin/openssl/man/ec.1 index af4dc31cf5..580b8268ae 100644 --- a/secure/usr.bin/openssl/man/ec.1 +++ b/secure/usr.bin/openssl/man/ec.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "EC 1" -.TH EC 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH EC 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/ecparam.1 b/secure/usr.bin/openssl/man/ecparam.1 index d1cfdbf0d4..8961aa5495 100644 --- a/secure/usr.bin/openssl/man/ecparam.1 +++ b/secure/usr.bin/openssl/man/ecparam.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ECPARAM 1" -.TH ECPARAM 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ECPARAM 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/enc.1 b/secure/usr.bin/openssl/man/enc.1 index 56f73b1d71..62a3844a96 100644 --- a/secure/usr.bin/openssl/man/enc.1 +++ b/secure/usr.bin/openssl/man/enc.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ENC 1" -.TH ENC 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ENC 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/errstr.1 b/secure/usr.bin/openssl/man/errstr.1 index 212c5d8cd6..f56f22a2f2 100644 --- a/secure/usr.bin/openssl/man/errstr.1 +++ b/secure/usr.bin/openssl/man/errstr.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "ERRSTR 1" -.TH ERRSTR 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH ERRSTR 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l diff --git a/secure/usr.bin/openssl/man/gendsa.1 b/secure/usr.bin/openssl/man/gendsa.1 index c480af70e3..8435e2dbce 100644 --- a/secure/usr.bin/openssl/man/gendsa.1 +++ b/secure/usr.bin/openssl/man/gendsa.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "GENDSA 1" -.TH GENDSA 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH GENDSA 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/genpkey.1 b/secure/usr.bin/openssl/man/genpkey.1 index 9ecb217658..6a7c358b1d 100644 --- a/secure/usr.bin/openssl/man/genpkey.1 +++ b/secure/usr.bin/openssl/man/genpkey.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "GENPKEY 1" -.TH GENPKEY 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH GENPKEY 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l @@ -304,9 +304,3 @@ Generate \s-1DH\s0 key from parameters: .Vb 1 \& openssl genpkey \-paramfile dhp.pem \-out dhkey.pem .Ve -.SH "POD ERRORS" -.IX Header "POD ERRORS" -Hey! \fBThe above document had some coding errors, which are explained below:\fR -.IP "Around line 117:" 4 -.IX Item "Around line 117:" -You forgot a '=back' before '=head1' diff --git a/secure/usr.bin/openssl/man/genrsa.1 b/secure/usr.bin/openssl/man/genrsa.1 index 89e3bf90c4..59d8e4e619 100644 --- a/secure/usr.bin/openssl/man/genrsa.1 +++ b/secure/usr.bin/openssl/man/genrsa.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "GENRSA 1" -.TH GENRSA 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH GENRSA 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/nseq.1 b/secure/usr.bin/openssl/man/nseq.1 index 9ffadf4dfd..9d72fa238a 100644 --- a/secure/usr.bin/openssl/man/nseq.1 +++ b/secure/usr.bin/openssl/man/nseq.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "NSEQ 1" -.TH NSEQ 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH NSEQ 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l diff --git a/secure/usr.bin/openssl/man/ocsp.1 b/secure/usr.bin/openssl/man/ocsp.1 index 5e993f4c06..2e3ecbfdd0 100644 --- a/secure/usr.bin/openssl/man/ocsp.1 +++ b/secure/usr.bin/openssl/man/ocsp.1 @@ -124,7 +124,7 @@ .\" ======================================================================== .\" .IX Title "OCSP 1" -.TH OCSP 1 "2012-01-18" "1.0.0g" "OpenSSL" +.TH OCSP 1 "2012-03-14" "1.0.1" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/openssl.1 b/secure/usr.bin/openssl/man/openssl.1
index 1338a2f55a..e80f7a0f13 100644
--- a/secure/usr.bin/openssl/man/openssl.1
+++ b/secure/usr.bin/openssl/man/openssl.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "OPENSSL 1"
-.TH OPENSSL 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH OPENSSL 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
@@ -357,16 +357,16 @@ X.509 Certificate Data Management.
 .IP "\fBsha1\fR" 10
 .IX Item "sha1"
 \&\s-1SHA\-1\s0 Digest
-.IP "\fBsha224\fR" 4
+.IP "\fBsha224\fR" 10
 .IX Item "sha224"
 \&\s-1SHA\-224\s0 Digest
-.IP "\fBsha256\fR" 4
+.IP "\fBsha256\fR" 10
 .IX Item "sha256"
 \&\s-1SHA\-256\s0 Digest
-.IP "\fBsha384\fR" 4
+.IP "\fBsha384\fR" 10
 .IX Item "sha384"
 \&\s-1SHA\-384\s0 Digest
-.IP "\fBsha512\fR" 4
+.IP "\fBsha512\fR" 10
 .IX Item "sha512"
 \&\s-1SHA\-512\s0 Digest
 .SS "\s-1ENCODING\s0 \s-1AND\s0 \s-1CIPHER\s0 \s-1COMMANDS\s0"
@@ -457,12 +457,3 @@ The \fBlist\-\fR\fI\s-1XXX\s0\fR\fB\-algorithms\fR pseudo-commands were added in
 the \fBno\-\fR\fI\s-1XXX\s0\fR pseudo-commands were added in OpenSSL 0.9.5a.
 For notes on the availability of other commands, see their individual
 manual pages.
-.SH "POD ERRORS"
-.IX Header "POD ERRORS"
-Hey! \fBThe above document had some coding errors, which are explained below:\fR
-.IP "Around line 292:" 4
-.IX Item "Around line 292:"
-\&'=item' outside of any '=over'
-.IP "Around line 308:" 4
-.IX Item "Around line 308:"
-You forgot a '=back' before '=head2'
diff --git a/secure/usr.bin/openssl/man/passwd.1 b/secure/usr.bin/openssl/man/passwd.1
index 769d527688..6a153c7cab 100644
--- a/secure/usr.bin/openssl/man/passwd.1
+++ b/secure/usr.bin/openssl/man/passwd.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PASSWD 1"
-.TH PASSWD 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PASSWD 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkcs12.1 b/secure/usr.bin/openssl/man/pkcs12.1
index 82d2ce7fba..0d3b0f1b76 100644
--- a/secure/usr.bin/openssl/man/pkcs12.1
+++ b/secure/usr.bin/openssl/man/pkcs12.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKCS12 1"
-.TH PKCS12 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKCS12 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkcs7.1 b/secure/usr.bin/openssl/man/pkcs7.1
index e2a767426b..5e7909e48f 100644
--- a/secure/usr.bin/openssl/man/pkcs7.1
+++ b/secure/usr.bin/openssl/man/pkcs7.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKCS7 1"
-.TH PKCS7 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKCS7 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkcs8.1 b/secure/usr.bin/openssl/man/pkcs8.1
index cc8175cdde..a54d5cc716 100644
--- a/secure/usr.bin/openssl/man/pkcs8.1
+++ b/secure/usr.bin/openssl/man/pkcs8.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKCS8 1"
-.TH PKCS8 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKCS8 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkey.1 b/secure/usr.bin/openssl/man/pkey.1
index a85b7cad03..11c5b9871a 100644
--- a/secure/usr.bin/openssl/man/pkey.1
+++ b/secure/usr.bin/openssl/man/pkey.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKEY 1"
-.TH PKEY 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKEY 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkeyparam.1 b/secure/usr.bin/openssl/man/pkeyparam.1
index 441c74f0b3..8e2438d611 100644
--- a/secure/usr.bin/openssl/man/pkeyparam.1
+++ b/secure/usr.bin/openssl/man/pkeyparam.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKEYPARAM 1"
-.TH PKEYPARAM 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKEYPARAM 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/pkeyutl.1 b/secure/usr.bin/openssl/man/pkeyutl.1
index e04904d34b..45261e5ae2 100644
--- a/secure/usr.bin/openssl/man/pkeyutl.1
+++ b/secure/usr.bin/openssl/man/pkeyutl.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "PKEYUTL 1"
-.TH PKEYUTL 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH PKEYUTL 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/rand.1 b/secure/usr.bin/openssl/man/rand.1
index 3e1d9ab76d..4757ff3c30 100644
--- a/secure/usr.bin/openssl/man/rand.1
+++ b/secure/usr.bin/openssl/man/rand.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "RAND 1"
-.TH RAND 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH RAND 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/req.1 b/secure/usr.bin/openssl/man/req.1
index 4a0938d641..e5f5ce98de 100644
--- a/secure/usr.bin/openssl/man/req.1
+++ b/secure/usr.bin/openssl/man/req.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "REQ 1"
-.TH REQ 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH REQ 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/rsa.1 b/secure/usr.bin/openssl/man/rsa.1
index 22856b6fc4..1747142342 100644
--- a/secure/usr.bin/openssl/man/rsa.1
+++ b/secure/usr.bin/openssl/man/rsa.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "RSA 1"
-.TH RSA 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH RSA 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/rsautl.1 b/secure/usr.bin/openssl/man/rsautl.1
index babb09a862..b34f7c6607 100644
--- a/secure/usr.bin/openssl/man/rsautl.1
+++ b/secure/usr.bin/openssl/man/rsautl.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "RSAUTL 1"
-.TH RSAUTL 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH RSAUTL 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/s_client.1 b/secure/usr.bin/openssl/man/s_client.1
index 5def1942f1..6f823c0ea5 100644
--- a/secure/usr.bin/openssl/man/s_client.1
+++ b/secure/usr.bin/openssl/man/s_client.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "S_CLIENT 1"
-.TH S_CLIENT 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH S_CLIENT 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/s_server.1 b/secure/usr.bin/openssl/man/s_server.1
index f73e82b39d..1af91f664b 100644
--- a/secure/usr.bin/openssl/man/s_server.1
+++ b/secure/usr.bin/openssl/man/s_server.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "S_SERVER 1"
-.TH S_SERVER 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH S_SERVER 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/s_time.1 b/secure/usr.bin/openssl/man/s_time.1
index 88382041a8..71dcebdffc 100644
--- a/secure/usr.bin/openssl/man/s_time.1
+++ b/secure/usr.bin/openssl/man/s_time.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "S_TIME 1"
-.TH S_TIME 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH S_TIME 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/sess_id.1 b/secure/usr.bin/openssl/man/sess_id.1
index 082ab05982..2d98cba2dd 100644
--- a/secure/usr.bin/openssl/man/sess_id.1
+++ b/secure/usr.bin/openssl/man/sess_id.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "SESS_ID 1"
-.TH SESS_ID 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH SESS_ID 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/smime.1 b/secure/usr.bin/openssl/man/smime.1
index 4d1e808e44..4319bd0bbf 100644
--- a/secure/usr.bin/openssl/man/smime.1
+++ b/secure/usr.bin/openssl/man/smime.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "SMIME 1"
-.TH SMIME 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH SMIME 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/speed.1 b/secure/usr.bin/openssl/man/speed.1
index 6af03fee14..aeed7306a5 100644
--- a/secure/usr.bin/openssl/man/speed.1
+++ b/secure/usr.bin/openssl/man/speed.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "SPEED 1"
-.TH SPEED 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH SPEED 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/spkac.1 b/secure/usr.bin/openssl/man/spkac.1
index a5fafb963e..d53345eb10 100644
--- a/secure/usr.bin/openssl/man/spkac.1
+++ b/secure/usr.bin/openssl/man/spkac.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "SPKAC 1"
-.TH SPKAC 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH SPKAC 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/ts.1 b/secure/usr.bin/openssl/man/ts.1
index 3db9cd08cf..7d6863335e 100644
--- a/secure/usr.bin/openssl/man/ts.1
+++ b/secure/usr.bin/openssl/man/ts.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "TS 1"
-.TH TS 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH TS 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/tsget.1 b/secure/usr.bin/openssl/man/tsget.1
index 03d99d149a..fd349e2b66 100644
--- a/secure/usr.bin/openssl/man/tsget.1
+++ b/secure/usr.bin/openssl/man/tsget.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "TSGET 1"
-.TH TSGET 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH TSGET 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/verify.1 b/secure/usr.bin/openssl/man/verify.1
index 39b71e1b53..3afd88179e 100644
--- a/secure/usr.bin/openssl/man/verify.1
+++ b/secure/usr.bin/openssl/man/verify.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "VERIFY 1"
-.TH VERIFY 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH VERIFY 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/version.1 b/secure/usr.bin/openssl/man/version.1
index b64a008eef..980137c91e 100644
--- a/secure/usr.bin/openssl/man/version.1
+++ b/secure/usr.bin/openssl/man/version.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "VERSION 1"
-.TH VERSION 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH VERSION 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/x509.1 b/secure/usr.bin/openssl/man/x509.1
index 702855c06d..80ed69e7c5 100644
--- a/secure/usr.bin/openssl/man/x509.1
+++ b/secure/usr.bin/openssl/man/x509.1
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "X509 1"
-.TH X509 1 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH X509 1 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
diff --git a/secure/usr.bin/openssl/man/x509v3_config.5 b/secure/usr.bin/openssl/man/x509v3_config.5
index 7fa76c016b..43ecbf8552 100644
--- a/secure/usr.bin/openssl/man/x509v3_config.5
+++ b/secure/usr.bin/openssl/man/x509v3_config.5
@@ -124,7 +124,7 @@
 .\" ========================================================================
 .\"
 .IX Title "X509V3_CONFIG 5"
-.TH X509V3_CONFIG 5 "2012-01-18" "1.0.0g" "OpenSSL"
+.TH X509V3_CONFIG 5 "2012-03-14" "1.0.1" "OpenSSL"
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
-- 
2.41.0