diff --git a/gcc/config/arm/crypto.md b/gcc/config/arm/crypto.md
index 6071ea17eac..020dfba7dcf 100644
--- a/gcc/config/arm/crypto.md
+++ b/gcc/config/arm/crypto.md
@@ -22,7 +22,7 @@
 (define_insn "crypto_<CRYPTO_AESMC:crypto_pattern>"
   [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
         (unspec:<crypto_mode>
-          [(match_operand:<crypto_mode> 1 "register_operand" "w")]
+         [(match_operand:<crypto_mode> 1 "register_operand" "w")]
          CRYPTO_AESMC))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q1"
@@ -30,12 +30,12 @@ (define_insn "crypto_<CRYPTO_AESMC:crypto_pattern>"
 )
 
 (define_insn "crypto_<CRYPTO_AES:crypto_pattern>"
-  [(set (match_operand:V16QI 0 "register_operand" "=w")
-        (unspec:V16QI
-         [(xor:V16QI
-           (match_operand:V16QI 1 "register_operand" "%0")
-           (match_operand:V16QI 2 "register_operand" "w"))]
-         CRYPTO_AES))]
+  [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
+        (unspec:<crypto_mode>
+         [(xor:<crypto_mode>
+           (match_operand:<crypto_mode> 1 "register_operand" "%0")
+           (match_operand:<crypto_mode> 2 "register_operand" "w"))]
+         CRYPTO_AES))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2"
   [(set_attr "type" "<crypto_type>")]
@@ -44,17 +44,16 @@ (define_insn "crypto_<CRYPTO_AES:crypto_pattern>"
 ;; When AESE/AESMC fusion is enabled we really want to keep the two together
 ;; and enforce the register dependency without scheduling or register
 ;; allocation messing up the order or introducing moves inbetween.
-;; Mash the two together during combine. 
+;; Mash the two together during combine.
 
 (define_insn "*aarch32_crypto_aese_fused"
   [(set (match_operand:V16QI 0 "register_operand" "=w")
         (unspec:V16QI
-         [(unspec:V16QI
-          [(xor:V16QI
-            (match_operand:V16QI 1 "register_operand" "%0")
-            (match_operand:V16QI 2 "register_operand" "w"))]
-          UNSPEC_AESE)]
-         UNSPEC_AESMC))]
+         [(unspec:V16QI [(xor:V16QI
+                          (match_operand:V16QI 1 "register_operand" "%0")
+                          (match_operand:V16QI 2 "register_operand" "w"))]
+           UNSPEC_AESE)]
+         UNSPEC_AESMC))]
   "TARGET_CRYPTO
    && arm_fusion_enabled_p (tune_params::FUSE_AES_AESMC)"
   "aese.8\\t%q0, %q2\;aesmc.8\\t%q0, %q0"
@@ -65,17 +64,16 @@ (define_insn "*aarch32_crypto_aese_fused"
 ;; When AESD/AESIMC fusion is enabled we really want to keep the two together
 ;; and enforce the register dependency without scheduling or register
 ;; allocation messing up the order or introducing moves inbetween.
-;; Mash the two together during combine. 
+;; Mash the two together during combine.
(define_insn "*aarch32_crypto_aesd_fused" [(set (match_operand:V16QI 0 "register_operand" "=w") (unspec:V16QI - [(unspec:V16QI - [(xor:V16QI - (match_operand:V16QI 1 "register_operand" "%0") - (match_operand:V16QI 2 "register_operand" "w"))] - UNSPEC_AESD)] - UNSPEC_AESIMC))] + [(unspec:V16QI [(xor:V16QI + (match_operand:V16QI 1 "register_operand" "%0") + (match_operand:V16QI 2 "register_operand" "w"))] + UNSPEC_AESD)] + UNSPEC_AESIMC))] "TARGET_CRYPTO && arm_fusion_enabled_p (tune_params::FUSE_AES_AESMC)" "aesd.8\\t%q0, %q2\;aesimc.8\\t%q0, %q0" @@ -86,9 +84,9 @@ (define_insn "*aarch32_crypto_aesd_fused" (define_insn "crypto_" [(set (match_operand: 0 "register_operand" "=w") (unspec: - [(match_operand: 1 "register_operand" "0") - (match_operand: 2 "register_operand" "w")] - CRYPTO_BINARY))] + [(match_operand: 1 "register_operand" "0") + (match_operand: 2 "register_operand" "w")] + CRYPTO_BINARY))] "TARGET_CRYPTO" ".\\t%q0, %q2" [(set_attr "type" "")] @@ -96,18 +94,20 @@ (define_insn "crypto_" (define_insn "crypto_" [(set (match_operand: 0 "register_operand" "=w") - (unspec: [(match_operand: 1 "register_operand" "0") - (match_operand: 2 "register_operand" "w") - (match_operand: 3 "register_operand" "w")] - CRYPTO_TERNARY))] + (unspec: + [(match_operand: 1 "register_operand" "0") + (match_operand: 2 "register_operand" "w") + (match_operand: 3 "register_operand" "w")] + CRYPTO_TERNARY))] "TARGET_CRYPTO" ".\\t%q0, %q2, %q3" [(set_attr "type" "")] ) -/* The vec_select operation always selects index 0 from the lower V2SI subreg - of the V4SI, adjusted for endianness. Required due to neon_vget_lane and - neon_set_lane that change the element ordering in memory for big-endian. */ +;; The vec_select operation always selects index 0 from the lower V2SI +;; subreg of the V4SI, adjusted for endianness. Required due to +;; neon_vget_lane and neon_set_lane that change the element ordering +;; in memory for big-endian. 
(define_expand "crypto_sha1h" [(set (match_operand:V4SI 0 "register_operand") @@ -122,10 +122,10 @@ (define_expand "crypto_sha1h" (define_insn "crypto_sha1h_lb" [(set (match_operand:V4SI 0 "register_operand" "=w") (unspec:V4SI - [(vec_select:SI + [(vec_select:SI (match_operand:V4SI 1 "register_operand" "w") (parallel [(match_operand:SI 2 "immediate_operand" "i")]))] - UNSPEC_SHA1H))] + UNSPEC_SHA1H))] "TARGET_CRYPTO && INTVAL (operands[2]) == NEON_ENDIAN_LANE_N (V2SImode, 0)" "sha1h.32\\t%q0, %q1" [(set_attr "type" "crypto_sha1_fast")] @@ -133,9 +133,9 @@ (define_insn "crypto_sha1h_lb" (define_insn "crypto_vmullp64" [(set (match_operand:TI 0 "register_operand" "=w") - (unspec:TI [(match_operand:DI 1 "register_operand" "w") - (match_operand:DI 2 "register_operand" "w")] - UNSPEC_VMULLP64))] + (unspec:TI [(match_operand:DI 1 "register_operand" "w") + (match_operand:DI 2 "register_operand" "w")] + UNSPEC_VMULLP64))] "TARGET_CRYPTO" "vmull.p64\\t%q0, %P1, %P2" [(set_attr "type" "crypto_pmull")] @@ -148,10 +148,10 @@ (define_insn "crypto_vmullp64" (define_expand "crypto_" [(set (match_operand:V4SI 0 "register_operand") (unspec: - [(match_operand: 1 "register_operand") - (match_operand: 2 "register_operand") - (match_operand: 3 "register_operand")] - CRYPTO_SELECTING))] + [(match_operand: 1 "register_operand") + (match_operand: 2 "register_operand") + (match_operand: 3 "register_operand")] + CRYPTO_SELECTING))] "TARGET_CRYPTO" { rtx op4 = GEN_INT (NEON_ENDIAN_LANE_N (V2SImode, 0)); @@ -162,13 +162,13 @@ (define_expand "crypto_" (define_insn "crypto__lb" [(set (match_operand:V4SI 0 "register_operand" "=w") - (unspec: - [(match_operand: 1 "register_operand" "0") - (vec_select:SI - (match_operand: 2 "register_operand" "w") - (parallel [(match_operand:SI 4 "immediate_operand" "i")])) - (match_operand: 3 "register_operand" "w")] - CRYPTO_SELECTING))] + (unspec: + [(match_operand: 1 "register_operand" "0") + (vec_select:SI + (match_operand: 2 "register_operand" "w") + (parallel [(match_operand:SI 4 "immediate_operand" "i")])) + (match_operand: 3 "register_operand" "w")] + CRYPTO_SELECTING))] "TARGET_CRYPTO && INTVAL (operands[4]) == NEON_ENDIAN_LANE_N (V2SImode, 0)" ".\\t%q0, %q2, %q3" [(set_attr "type" "")]