public inbox for gcc-patches@gcc.gnu.org
* [patch] Use NOTE_P, CALL_P, etc. instead of GET_CODE in a bunch of files
@ 2013-03-22 19:10 Steven Bosscher
  2013-03-23  0:45 ` Steven Bosscher
  0 siblings, 1 reply; 3+ messages in thread
From: Steven Bosscher @ 2013-03-22 19:10 UTC (permalink / raw)
  To: GCC Patches

[-- Attachment #1: Type: text/plain, Size: 699 bytes --]

Hello,

This is an almost completely mechanical replacement of GET_CODE(thing)
== ... with the equivalent predicate macro from rtl.h. This particular
set of files fell victim to my plans for GCC 4.9 to make jump table
data (what JUMP_TABLE_DATA_P matches) a separate rtx code instead of a
JUMP_INSN. The script I used is included in the patch. I checked the
script's changes, cleaned them up, and modified a few places by hand.
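
For reference, the predicates are thin wrappers around GET_CODE;
paraphrasing rtl.h (the exact definitions there may be laid out a
little differently), they amount to:

  /* Paraphrased from rtl.h -- each predicate just tests the rtx code
     of the insn-chain element X.  */
  #define NOTE_P(X)          (GET_CODE (X) == NOTE)
  #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
  #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
  #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
  #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
  #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)

so the sed replacement below should be purely textual, with no change
in behavior.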

Bootstrapped&tested on powerpc64-unknown-linux-gnu and on
ia64-unknown-linux-gnu. Also did a quick cross to a few of the
affected ports (sparc, bfin).
OK for trunk?

Can I please have pre-approval for similar changes to other files,
subject to posting and waiting at least 2 days for
comments/objections?

Ciao!
Steven

[-- Attachment #2: replace_GET_CODE.diff --]
[-- Type: application/octet-stream, Size: 46226 bytes --]

	* config/avr/avr.c, config/bfin/bfin.c, config/c6x/c6x.c,
	config/epiphany/epiphany.c, config/frv/frv.c, config/ia64/ia64.c,
	config/iq2000/iq2000.c, config/mcore/mcore.c, config/mep/mep.c,
	config/pa/pa.c, config/rs6000/rs6000.c, config/s390/s390.c,
	config/sparc/sparc.c, config/spu/spu.c, config/stormy16/stormy16.c,
	config/v850/v850.c, config/xtensa/xtensa.c,
	dwarf2out.c, hw-doloop.c, resource.c: Where applicable, use the
	predicates NOTE_P, NONJUMP_INSN_P, JUMP_P, CALL_P, LABEL_P, and
	BARRIER_P instead of GET_CODE.

-------- 8< --------
#!/usr/bin/bash

for f in 				\
	config/avr/avr.c		\
	config/bfin/bfin.c		\
	config/c6x/c6x.c		\
	config/epiphany/epiphany.c	\
	config/frv/frv.c		\
	config/ia64/ia64.c		\
	config/iq2000/iq2000.c		\
	config/mcore/mcore.c		\
	config/mep/mep.c		\
	config/pa/pa.c			\
	config/rs6000/rs6000.c		\
	config/s390/s390.c		\
	config/sparc/sparc.c		\
	config/spu/spu.c		\
	config/stormy16/stormy16.c	\
	config/v850/v850.c		\
	config/xtensa/xtensa.c		\
	dwarf2out.c			\
	hw-doloop.c			\
	resource.c			;
do
	sed --in-place \
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == NOTE/NOTE_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= NOTE/\! NOTE_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == INSN/NONJUMP_INSN_P (\1)/" 	\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= INSN/\! NONJUMP_INSN_P (\1)/" 	\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == JUMP_INSN/JUMP_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= JUMP_INSN/\! JUMP_P (\1)/" 	\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == CALL_INSN/CALL_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= CALL_INSN/\! CALL_P (\1)/" 	\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == CODE_LABEL/LABEL_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= CODE_LABEL/\! LABEL_P (\1)/" 	\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) == BARRIER/BARRIER_P (\1)/" 		\
		-e "s/GET_CODE (\([a-zA-Z0-9_->()\s]\+\)) \!= BARRIER/\! BARRIER_P (\1)/" 	\
		$f
done
-------- 8< --------

Index: config/avr/avr.c
===================================================================
--- config/avr/avr.c	(revision 196975)
+++ config/avr/avr.c	(working copy)
@@ -7629,9 +7629,9 @@ _reg_unused_after (rtx insn, rtx reg)
 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
 	      rtx set = single_set (this_insn);
 
-	      if (GET_CODE (this_insn) == CALL_INSN)
+	      if (CALL_P (this_insn))
 		code = CALL_INSN;
-	      else if (GET_CODE (this_insn) == JUMP_INSN)
+	      else if (JUMP_P (this_insn))
 		{
 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
 		    return 0;
Index: config/bfin/bfin.c
===================================================================
--- config/bfin/bfin.c	(revision 196975)
+++ config/bfin/bfin.c	(working copy)
@@ -3887,8 +3887,7 @@ gen_one_bundle (rtx slot[3])
       rtx t = NEXT_INSN (slot[0]);
       while (t != slot[1])
 	{
-	  if (GET_CODE (t) != NOTE
-	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
+	  if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
 	    return false;
 	  t = NEXT_INSN (t);
 	}
@@ -3898,8 +3897,7 @@ gen_one_bundle (rtx slot[3])
       rtx t = NEXT_INSN (slot[1]);
       while (t != slot[2])
 	{
-	  if (GET_CODE (t) != NOTE
-	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
+	  if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
 	    return false;
 	  t = NEXT_INSN (t);
 	}
Index: config/c6x/c6x.c
===================================================================
--- config/c6x/c6x.c	(revision 196975)
+++ config/c6x/c6x.c	(working copy)
@@ -4848,7 +4848,7 @@ reorg_split_calls (rtx *call_labels)
 {
   unsigned int reservation_mask = 0;
   rtx insn = get_insns ();
-  gcc_assert (GET_CODE (insn) == NOTE);
+  gcc_assert (NOTE_P (insn));
   insn = next_real_insn (insn);
   while (insn)
     {
Index: config/epiphany/epiphany.c
===================================================================
--- config/epiphany/epiphany.c	(revision 196975)
+++ config/epiphany/epiphany.c	(working copy)
@@ -2386,7 +2386,7 @@ epiphany_mode_after (int entity, int last_mode, rt
      calls.  */
   if (entity == EPIPHANY_MSW_ENTITY_AND || entity == EPIPHANY_MSW_ENTITY_OR)
     {
-      if (GET_CODE (insn) == CALL_INSN)
+      if (CALL_P (insn))
 	return 0;
       return last_mode;
     }
Index: config/frv/frv.c
===================================================================
--- config/frv/frv.c	(revision 196975)
+++ config/frv/frv.c	(working copy)
@@ -1408,7 +1408,7 @@ frv_function_contains_far_jump (void)
 {
   rtx insn = get_insns ();
   while (insn != NULL
-	 && !(GET_CODE (insn) == JUMP_INSN
+	 && !(JUMP_P (insn)
 	      /* Ignore tablejump patterns.  */
 	      && GET_CODE (PATTERN (insn)) != ADDR_VEC
 	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
@@ -1446,7 +1446,7 @@ frv_function_prologue (FILE *file, HOST_WIDE_INT s
 	 simply emit a different assembly directive because bralr and jmpl
 	 execute in different units.  */
       for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
-	if (GET_CODE (insn) == JUMP_INSN)
+	if (JUMP_P (insn))
 	  {
 	    rtx pattern = PATTERN (insn);
 	    if (GET_CODE (pattern) == PARALLEL
@@ -2649,7 +2649,7 @@ frv_print_operand_jump_hint (rtx insn)
   HOST_WIDE_INT prob = -1;
   enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
 
-  gcc_assert (GET_CODE (insn) == JUMP_INSN);
+  gcc_assert (JUMP_P (insn));
 
   /* Assume any non-conditional jump is likely.  */
   if (! any_condjump_p (insn))
@@ -7387,7 +7387,7 @@ frv_pack_insn_p (rtx insn)
        - There's no point putting a call in its own packet unless
 	 we have to.  */
   if (frv_packet.num_insns > 0
-      && GET_CODE (insn) == INSN
+      && NONJUMP_INSN_P (insn)
       && GET_MODE (insn) == TImode
       && GET_CODE (PATTERN (insn)) != COND_EXEC)
     return false;
@@ -7430,7 +7430,7 @@ frv_insert_nop_in_packet (rtx insn)
 
   packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
   last = frv_packet.insns[frv_packet.num_insns - 1];
-  if (GET_CODE (last) != INSN)
+  if (! NONJUMP_INSN_P (last))
     {
       insn = emit_insn_before (PATTERN (insn), last);
       frv_packet.insns[frv_packet.num_insns - 1] = insn;
@@ -7492,7 +7492,7 @@ frv_for_each_packet (void (*handle_packet) (void))
 
 	  default:
 	    /* Calls mustn't be packed on a TOMCAT.  */
-	    if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
+	    if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
 	      frv_finish_packet (handle_packet);
 
 	    /* Since the last instruction in a packet determines the EH
@@ -7913,7 +7913,7 @@ frv_optimize_membar_local (basic_block bb, struct
   CLEAR_HARD_REG_SET (used_regs);
 
   for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
-    if (GET_CODE (insn) == CALL_INSN)
+    if (CALL_P (insn))
       {
 	/* We can't predict what a call will do to volatile memory.  */
 	memset (next_io, 0, sizeof (struct frv_io));
Index: config/ia64/ia64.c
===================================================================
--- config/ia64/ia64.c	(revision 196975)
+++ config/ia64/ia64.c	(working copy)
@@ -5470,7 +5470,7 @@ ia64_print_operand (FILE * file, rtx x, int code)
 	    else
 	      which = ".sptk";
 	  }
-	else if (GET_CODE (current_output_insn) == CALL_INSN)
+	else if (CALL_P (current_output_insn))
 	  which = ".sptk";
 	else
 	  which = ".dptk";
@@ -6811,8 +6811,7 @@ group_barrier_needed (rtx insn)
       memset (rws_insn, 0, sizeof (rws_insn));
 
       /* Don't bundle a call following another call.  */
-      if ((pat = prev_active_insn (insn))
-	  && GET_CODE (pat) == CALL_INSN)
+      if ((pat = prev_active_insn (insn)) && CALL_P (pat))
 	{
 	  need_barrier = 1;
 	  break;
@@ -6826,8 +6825,7 @@ group_barrier_needed (rtx insn)
 	flags.is_branch = 1;
 
       /* Don't bundle a jump following a call.  */
-      if ((pat = prev_active_insn (insn))
-	  && GET_CODE (pat) == CALL_INSN)
+      if ((pat = prev_active_insn (insn)) && CALL_P (pat))
 	{
 	  need_barrier = 1;
 	  break;
@@ -6929,20 +6927,20 @@ emit_insn_group_barriers (FILE *dump)
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == CODE_LABEL)
+      if (LABEL_P (insn))
 	{
 	  if (insns_since_last_label)
 	    last_label = insn;
 	  insns_since_last_label = 0;
 	}
-      else if (GET_CODE (insn) == NOTE
+      else if (NOTE_P (insn)
 	       && NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK)
 	{
 	  if (insns_since_last_label)
 	    last_label = insn;
 	  insns_since_last_label = 0;
 	}
-      else if (GET_CODE (insn) == INSN
+      else if (NONJUMP_INSN_P (insn)
 	       && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
 	       && XINT (PATTERN (insn), 1) == UNSPECV_INSN_GROUP_BARRIER)
 	{
@@ -6983,13 +6981,13 @@ emit_all_insn_group_barriers (FILE *dump ATTRIBUTE
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == BARRIER)
+      if (BARRIER_P (insn))
 	{
 	  rtx last = prev_active_insn (insn);
 
 	  if (! last)
 	    continue;
-	  if (GET_CODE (last) == JUMP_INSN
+	  if (JUMP_P (last)
 	      && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
 	    last = prev_active_insn (last);
 	  if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -7487,7 +7485,7 @@ ia64_variable_issue (FILE *dump ATTRIBUTE_UNUSED,
       int needed = group_barrier_needed (insn);
       
       gcc_assert (!needed);
-      if (GET_CODE (insn) == CALL_INSN)
+      if (CALL_P (insn))
 	init_insn_group_barriers ();
       stops_p [INSN_UID (insn)] = stop_before_p;
       stop_before_p = 0;
@@ -7576,7 +7574,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx i
 	       && last_scheduled_insn
 	       && scheduled_good_insn (last_scheduled_insn))))
       || (last_scheduled_insn
-	  && (GET_CODE (last_scheduled_insn) == CALL_INSN
+	  && (CALL_P (last_scheduled_insn)
 	      || unknown_for_bundling_p (last_scheduled_insn))))
     {
       init_insn_group_barriers ();
@@ -7594,7 +7592,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx i
 	  state_transition (curr_state, dfa_stop_insn);
 	  if (TARGET_EARLY_STOP_BITS)
 	    *sort_p = (last_scheduled_insn == NULL_RTX
-		       || GET_CODE (last_scheduled_insn) != CALL_INSN);
+		       || ! CALL_P (last_scheduled_insn));
 	  else
 	    *sort_p = 0;
 	  return 1;
@@ -8936,9 +8934,9 @@ ia64_add_bundle_selector_before (int template0, rt
 	{
 	  do
 	    insn = next_active_insn (insn);
-	  while (GET_CODE (insn) == INSN
+	  while (NONJUMP_INSN_P (insn)
 		 && get_attr_empty (insn) == EMPTY_YES);
-	  if (GET_CODE (insn) == CALL_INSN)
+	  if (CALL_P (insn))
 	    note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
 	  else if (note)
 	    {
@@ -9372,13 +9370,13 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBU
        insn != current_sched_info->next_tail;
        insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == BARRIER)
+      if (BARRIER_P (insn))
 	{
 	  rtx last = prev_active_insn (insn);
 
 	  if (! last)
 	    continue;
-	  if (GET_CODE (last) == JUMP_INSN
+	  if (JUMP_P (last)
 	      && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
 	    last = prev_active_insn (last);
 	  if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -9445,8 +9443,7 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBU
 	  else if (recog_memoized (insn) >= 0
 		   && important_for_bundling_p (insn))
 	    seen_good_insn = 1;
-	  need_barrier_p = (GET_CODE (insn) == CALL_INSN
-			    || unknown_for_bundling_p (insn));
+	  need_barrier_p = (CALL_P (insn) || unknown_for_bundling_p (insn));
 	}
     }
 }
@@ -9590,7 +9587,7 @@ emit_predicate_relation_info (void)
       rtx head = BB_HEAD (bb);
 
       /* We only need such notes at code labels.  */
-      if (GET_CODE (head) != CODE_LABEL)
+      if (! LABEL_P (head))
 	continue;
       if (NOTE_INSN_BASIC_BLOCK_P (NEXT_INSN (head)))
 	head = NEXT_INSN (head);
@@ -9618,7 +9615,7 @@ emit_predicate_relation_info (void)
 
       while (1)
 	{
-	  if (GET_CODE (insn) == CALL_INSN
+	  if (CALL_P (insn)
 	      && GET_CODE (PATTERN (insn)) == COND_EXEC
 	      && find_reg_note (insn, REG_NORETURN, NULL_RTX))
 	    {
@@ -9766,7 +9763,7 @@ ia64_reorg (void)
       if (insn)
 	{
 	  /* Skip over insns that expand to nothing.  */
-	  while (GET_CODE (insn) == INSN
+	  while (NONJUMP_INSN_P (insn)
 		 && get_attr_empty (insn) == EMPTY_YES)
 	    {
 	      if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
@@ -9774,7 +9771,7 @@ ia64_reorg (void)
 		saw_stop = 1;
 	      insn = prev_active_insn (insn);
 	    }
-	  if (GET_CODE (insn) == CALL_INSN)
+	  if (CALL_P (insn))
 	    {
 	      if (! saw_stop)
 		emit_insn (gen_insn_group_barrier (GEN_INT (3)));
@@ -10184,7 +10181,7 @@ ia64_asm_unwind_emit (FILE *asm_out_file, rtx insn
 	}
     }
 
-  if (GET_CODE (insn) == NOTE || ! RTX_FRAME_RELATED_P (insn))
+  if (NOTE_P (insn) || ! RTX_FRAME_RELATED_P (insn))
     return;
 
   /* Look for the ALLOC insn.  */
Index: config/iq2000/iq2000.c
===================================================================
--- config/iq2000/iq2000.c	(revision 196975)
+++ config/iq2000/iq2000.c	(working copy)
@@ -381,8 +381,7 @@ iq2000_fill_delay_slot (const char *ret, enum dela
   /* Make sure that we don't put nop's after labels.  */
   next_insn = NEXT_INSN (cur_insn);
   while (next_insn != 0
-	 && (GET_CODE (next_insn) == NOTE
-	     || GET_CODE (next_insn) == CODE_LABEL))
+	 && (NOTE_P (next_insn) || LABEL_P (next_insn)))
     next_insn = NEXT_INSN (next_insn);
 
   dslots_load_total += num_nops;
@@ -391,7 +390,7 @@ iq2000_fill_delay_slot (const char *ret, enum dela
       || operands == 0
       || cur_insn == 0
       || next_insn == 0
-      || GET_CODE (next_insn) == CODE_LABEL
+      || LABEL_P (next_insn)
       || (set_reg = operands[0]) == 0)
     {
       dslots_number_nops = 0;
@@ -1533,8 +1532,8 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUT
       iq2000_load_reg4 = 0;
     }
 
-  if (   (GET_CODE (insn) == JUMP_INSN
-       || GET_CODE (insn) == CALL_INSN
+  if (   (JUMP_P (insn)
+       || CALL_P (insn)
        || (GET_CODE (PATTERN (insn)) == RETURN))
 	   && NEXT_INSN (PREV_INSN (insn)) == insn)
     {
@@ -1544,7 +1543,7 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUT
     }
   
   if (TARGET_STATS
-      && (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN))
+      && (JUMP_P (insn) || CALL_P (insn)))
     dslots_jump_total ++;
 }
 \f
@@ -2285,8 +2284,8 @@ iq2000_adjust_insn_length (rtx insn, int length)
   /* A unconditional jump has an unfilled delay slot if it is not part
      of a sequence.  A conditional jump normally has a delay slot.  */
   if (simplejump_p (insn)
-      || (   (GET_CODE (insn) == JUMP_INSN
-	   || GET_CODE (insn) == CALL_INSN)))
+      || (   (JUMP_P (insn)
+	   || CALL_P (insn))))
     length += 4;
 
   return length;
Index: config/mcore/mcore.c
===================================================================
--- config/mcore/mcore.c	(revision 196975)
+++ config/mcore/mcore.c	(working copy)
@@ -914,10 +914,10 @@ mcore_is_dead (rtx first, rtx reg)
      to assume that it is live.  */
   for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == JUMP_INSN)
+      if (JUMP_P (insn))
 	return 0;	/* We lose track, assume it is alive.  */
 
-      else if (GET_CODE(insn) == CALL_INSN)
+      else if (CALL_P (insn))
 	{
 	  /* Call's might use it for target or register parms.  */
 	  if (reg_referenced_p (reg, PATTERN (insn))
@@ -926,7 +926,7 @@ mcore_is_dead (rtx first, rtx reg)
 	  else if (dead_or_set_p (insn, reg))
             return 1;
 	}
-      else if (GET_CODE (insn) == INSN)
+      else if (NONJUMP_INSN_P (insn))
 	{
 	  if (reg_referenced_p (reg, PATTERN (insn)))
             return 0;
@@ -2254,7 +2254,7 @@ is_cond_candidate (rtx insn)
      changed into a conditional.  Only bother with SImode items.  If 
      we wanted to be a little more aggressive, we could also do other
      modes such as DImode with reg-reg move or load 0.  */
-  if (GET_CODE (insn) == INSN)
+  if (NONJUMP_INSN_P (insn))
     {
       rtx pat = PATTERN (insn);
       rtx src, dst;
@@ -2305,9 +2305,9 @@ is_cond_candidate (rtx insn)
       */            
 
     }
-  else if (GET_CODE (insn) == JUMP_INSN &&
-	   GET_CODE (PATTERN (insn)) == SET &&
-	   GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
+  else if (JUMP_P (insn)
+	   && GET_CODE (PATTERN (insn)) == SET
+	   && GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
     return COND_BRANCH_INSN;
 
   return COND_NO;
@@ -2328,7 +2328,7 @@ emit_new_cond_insn (rtx insn, int cond)
 
   pat = PATTERN (insn);
 
-  if (GET_CODE (insn) == INSN)
+  if (NONJUMP_INSN_P (insn))
     {
       dst = SET_DEST (pat);
       src = SET_SRC (pat);
@@ -2449,9 +2449,9 @@ conditionalize_block (rtx first)
   /* Check that the first insn is a candidate conditional jump.  This is
      the one that we'll eliminate.  If not, advance to the next insn to
      try.  */
-  if (GET_CODE (first) != JUMP_INSN ||
-      GET_CODE (PATTERN (first)) != SET ||
-      GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
+  if (! JUMP_P (first)
+      || GET_CODE (PATTERN (first)) != SET
+      || GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
     return NEXT_INSN (first);
 
   /* Extract some information we need.  */
Index: config/mep/mep.c
===================================================================
--- config/mep/mep.c	(revision 196975)
+++ config/mep/mep.c	(working copy)
@@ -4882,7 +4882,7 @@ mep_reorg_regmove (rtx insns)
 
   if (dump_file)
     for (insn = insns; insn; insn = NEXT_INSN (insn))
-      if (GET_CODE (insn) == INSN)
+      if (NONJUMP_INSN_P (insn))
 	before++;
 
   /* We're looking for (set r2 r1) moves where r1 dies, followed by a
@@ -4896,7 +4896,7 @@ mep_reorg_regmove (rtx insns)
       for (insn = insns; insn; insn = next)
 	{
 	  next = next_nonnote_nondebug_insn (insn);
-	  if (GET_CODE (insn) != INSN)
+	  if (! NONJUMP_INSN_P (insn))
 	    continue;
 	  pat = PATTERN (insn);
 
@@ -4912,7 +4912,7 @@ mep_reorg_regmove (rtx insns)
 	      if (dump_file)
 		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
 
-	      while (follow && GET_CODE (follow) == INSN
+	      while (follow && NONJUMP_INSN_P (follow)
 		     && GET_CODE (PATTERN (follow)) == SET
 		     && !dead_or_set_p (follow, SET_SRC (pat))
 		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
@@ -4925,7 +4925,7 @@ mep_reorg_regmove (rtx insns)
 
 	      if (dump_file)
 		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
-	      if (follow && GET_CODE (follow) == INSN
+	      if (follow && NONJUMP_INSN_P (follow)
 		  && GET_CODE (PATTERN (follow)) == SET
 		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
 		{
@@ -5523,8 +5523,7 @@ mep_reorg_erepeat (rtx insns)
 	count = simplejump_p (insn) ? 0 : 1;
 	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
 	  {
-	    if (GET_CODE (prev) == CALL_INSN
-		|| BARRIER_P (prev))
+	    if (CALL_P (prev) || BARRIER_P (prev))
 	      break;
 
 	    if (prev == JUMP_LABEL (insn))
@@ -5543,10 +5542,10 @@ mep_reorg_erepeat (rtx insns)
 		       *after* the label.  */
 		    rtx barrier;
 		    for (barrier = PREV_INSN (prev);
-			 barrier && GET_CODE (barrier) == NOTE;
+			 barrier && NOTE_P (barrier);
 			 barrier = PREV_INSN (barrier))
 		      ;
-		    if (barrier && GET_CODE (barrier) != BARRIER)
+		    if (barrier && ! BARRIER_P (barrier))
 		      break;
 		  }
 		else
@@ -5590,10 +5589,9 @@ mep_reorg_erepeat (rtx insns)
 		if (LABEL_NUSES (prev) == 1)
 		  {
 		    for (user = PREV_INSN (prev);
-			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
+			 user && (INSN_P (user) || NOTE_P (user));
 			 user = PREV_INSN (user))
-		      if (GET_CODE (user) == JUMP_INSN
-			  && JUMP_LABEL (user) == prev)
+		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
 			{
 			  safe = INSN_UID (user);
 			  break;
@@ -5631,8 +5629,8 @@ mep_jmp_return_reorg (rtx insns)
       /* Find the fist real insn the jump jumps to.  */
       label = ret = JUMP_LABEL (insn);
       while (ret
-	     && (GET_CODE (ret) == NOTE
-		 || GET_CODE (ret) == CODE_LABEL
+	     && (NOTE_P (ret)
+		 || LABEL_P (ret)
 		 || GET_CODE (PATTERN (ret)) == USE))
 	ret = NEXT_INSN (ret);
 
@@ -7018,7 +7016,7 @@ mep_bundle_insns (rtx insns)
       if (recog_memoized (insn) >= 0
 	  && get_attr_slot (insn) == SLOT_COP)
 	{
-	  if (GET_CODE (insn) == JUMP_INSN
+	  if (JUMP_P (insn)
 	      || ! last
 	      || recog_memoized (last) < 0
 	      || get_attr_slot (last) != SLOT_CORE
Index: config/pa/pa.c
===================================================================
--- config/pa/pa.c	(revision 196975)
+++ config/pa/pa.c	(working copy)
@@ -3320,7 +3320,7 @@ remove_useless_addtr_insns (int check_notes)
 	  rtx tmp;
 
 	  /* Ignore anything that isn't an INSN or a JUMP_INSN.  */
-	  if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
+	  if (! NONJUMP_INSN_P (insn) && ! JUMP_P (insn))
 	    continue;
 
 	  tmp = PATTERN (insn);
@@ -3359,7 +3359,7 @@ remove_useless_addtr_insns (int check_notes)
 	  rtx tmp, next;
 
 	  /* Ignore anything that isn't an INSN.  */
-	  if (GET_CODE (insn) != INSN)
+	  if (! NONJUMP_INSN_P (insn))
 	    continue;
 
 	  tmp = PATTERN (insn);
@@ -3382,13 +3382,11 @@ remove_useless_addtr_insns (int check_notes)
 	  while (next)
 	    {
 	      /* Jumps, calls and labels stop our search.  */
-	      if (GET_CODE (next) == JUMP_INSN
-		  || GET_CODE (next) == CALL_INSN
-		  || GET_CODE (next) == CODE_LABEL)
+	      if (JUMP_P (next) || CALL_P (next) || LABEL_P (next))
 		break;
 
 	      /* As does another fcmp insn.  */
-	      if (GET_CODE (next) == INSN
+	      if (NONJUMP_INSN_P (next)
 		  && GET_CODE (PATTERN (next)) == SET
 		  && GET_CODE (SET_DEST (PATTERN (next))) == REG
 		  && REGNO (SET_DEST (PATTERN (next))) == 0)
@@ -3398,8 +3396,7 @@ remove_useless_addtr_insns (int check_notes)
 	    }
 
 	  /* Is NEXT_INSN a branch?  */
-	  if (next
-	      && GET_CODE (next) == JUMP_INSN)
+	  if (next && JUMP_P (next))
 	    {
 	      rtx pattern = PATTERN (next);
 
@@ -4160,16 +4157,16 @@ pa_output_function_epilogue (FILE *file, HOST_WIDE
      always point to a valid instruction in the current function.  */
 
   /* Get the last real insn.  */
-  if (GET_CODE (insn) == NOTE)
+  if (NOTE_P (insn))
     insn = prev_real_insn (insn);
 
   /* If it is a sequence, then look inside.  */
-  if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     insn = XVECEXP (PATTERN (insn), 0, 0);
 
   /* If insn is a CALL_INSN, then it must be a call to a volatile
      function (otherwise there would be epilogue insns).  */
-  if (insn && GET_CODE (insn) == CALL_INSN)
+  if (insn && CALL_P (insn))
     {
       fputs ("\tnop\n", file);
       last_address += 4;
@@ -4930,12 +4927,12 @@ pa_adjust_insn_length (rtx insn, int length)
 
   /* Jumps inside switch tables which have unfilled delay slots need
      adjustment.  */
-  if (GET_CODE (insn) == JUMP_INSN
+  if (JUMP_P (insn)
       && GET_CODE (pat) == PARALLEL
       && get_attr_type (insn) == TYPE_BTABLE_BRANCH)
     length += 4;
   /* Block move pattern.  */
-  else if (GET_CODE (insn) == INSN
+  else if (NONJUMP_INSN_P (insn)
 	   && GET_CODE (pat) == PARALLEL
 	   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
 	   && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4944,7 +4941,7 @@ pa_adjust_insn_length (rtx insn, int length)
 	   && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
     length += compute_movmem_length (insn) - 4;
   /* Block clear pattern.  */
-  else if (GET_CODE (insn) == INSN
+  else if (NONJUMP_INSN_P (insn)
 	   && GET_CODE (pat) == PARALLEL
 	   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
 	   && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4952,7 +4949,7 @@ pa_adjust_insn_length (rtx insn, int length)
 	   && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
     length += compute_clrmem_length (insn) - 4;
   /* Conditional branch with an unfilled delay slot.  */
-  else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
+  else if (JUMP_P (insn) && ! simplejump_p (insn))
     {
       /* Adjust a short backwards conditional with an unfilled delay slot.  */
       if (GET_CODE (pat) == SET
@@ -5846,7 +5843,7 @@ pa_output_arg_descriptor (rtx call_insn)
       return;
     }
 
-  gcc_assert (GET_CODE (call_insn) == CALL_INSN);
+  gcc_assert (CALL_P (call_insn));
   for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
        link; link = XEXP (link, 1))
     {
@@ -6641,7 +6638,7 @@ pa_output_lbranch (rtx dest, rtx insn, int xdelay)
   if (xdelay && dbr_sequence_length () != 0)
     {
       /* We can't handle a jump in the delay slot.  */
-      gcc_assert (GET_CODE (NEXT_INSN (insn)) != JUMP_INSN);
+      gcc_assert (! JUMP_P (NEXT_INSN (insn)));
 
       final_scan_insn (NEXT_INSN (insn), asm_out_file,
 		       optimize, 0, NULL);
@@ -7650,7 +7647,7 @@ pa_output_millicode_call (rtx insn, rtx call_dest)
     output_asm_insn ("nop", xoperands);
 
   /* We are done if there isn't a jump in the delay slot.  */
-  if (seq_length == 0 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+  if (seq_length == 0 || ! JUMP_P (NEXT_INSN (insn)))
     return "";
 
   /* This call has an unconditional jump in its delay slot.  */
@@ -7708,7 +7705,7 @@ pa_attr_length_call (rtx insn, int sibcall)
   rtx pat = PATTERN (insn);
   unsigned long distance = -1;
 
-  gcc_assert (GET_CODE (insn) == CALL_INSN);
+  gcc_assert (CALL_P (insn));
 
   if (INSN_ADDRESSES_SET_P ())
     {
@@ -7822,7 +7819,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibca
 	     delay slot.  We can't do this in a sibcall as we don't
 	     have a second call-clobbered scratch register available.  */
 	  if (seq_length != 0
-	      && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+	      && ! JUMP_P (NEXT_INSN (insn))
 	      && !sibcall)
 	    {
 	      final_scan_insn (NEXT_INSN (insn), asm_out_file,
@@ -7866,7 +7863,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibca
 	    indirect_call = 1;
 
 	  if (seq_length != 0
-	      && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+	      && ! JUMP_P (NEXT_INSN (insn))
 	      && !sibcall
 	      && (!TARGET_PA_20
 		  || indirect_call
@@ -8032,7 +8029,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibca
   /* We are done if there isn't a jump in the delay slot.  */
   if (seq_length == 0
       || delay_insn_deleted
-      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+      || ! JUMP_P (NEXT_INSN (insn)))
     return "";
 
   /* A sibcall should never have a branch in the delay slot.  */
@@ -8826,12 +8823,12 @@ int
 pa_jump_in_call_delay (rtx insn)
 {
 
-  if (GET_CODE (insn) != JUMP_INSN)
+  if (! JUMP_P (insn))
     return 0;
 
   if (PREV_INSN (insn)
       && PREV_INSN (PREV_INSN (insn))
-      && GET_CODE (next_real_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
+      && NONJUMP_INSN_P (next_real_insn (PREV_INSN (PREV_INSN (insn)))))
     {
       rtx test_insn = next_real_insn (PREV_INSN (PREV_INSN (insn)));
 
@@ -8928,14 +8925,14 @@ pa_following_call (rtx insn)
 
   /* Find the previous real insn, skipping NOTEs.  */
   insn = PREV_INSN (insn);
-  while (insn && GET_CODE (insn) == NOTE)
+  while (insn && NOTE_P (insn))
     insn = PREV_INSN (insn);
 
   /* Check for CALL_INSNs and millicode calls.  */
   if (insn
-      && ((GET_CODE (insn) == CALL_INSN
+      && ((CALL_P (insn)
 	   && get_attr_type (insn) != TYPE_DYNCALL)
-	  || (GET_CODE (insn) == INSN
+	  || (NONJUMP_INSN_P (insn)
 	      && GET_CODE (PATTERN (insn)) != SEQUENCE
 	      && GET_CODE (PATTERN (insn)) != USE
 	      && GET_CODE (PATTERN (insn)) != CLOBBER
@@ -9000,7 +8997,7 @@ pa_reorg (void)
 	  unsigned int length, i;
 
 	  /* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode.  */
-	  if (GET_CODE (insn) != JUMP_INSN
+	  if (! JUMP_P (insn)
 	      || (GET_CODE (PATTERN (insn)) != ADDR_VEC
 		  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
 	    continue;
@@ -9059,7 +9056,7 @@ pa_reorg (void)
       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 	{
 	  /* Find an ADDR_VEC insn.  */
-	  if (GET_CODE (insn) != JUMP_INSN
+	  if (! JUMP_P (insn)
 	      || (GET_CODE (PATTERN (insn)) != ADDR_VEC
 		  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
 	    continue;
@@ -9140,9 +9137,7 @@ pa_combine_instructions (void)
 
       /* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
 	 Also ignore any special USE insns.  */
-      if ((GET_CODE (anchor) != INSN
-	  && GET_CODE (anchor) != JUMP_INSN
-	  && GET_CODE (anchor) != CALL_INSN)
+      if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
 	  || GET_CODE (PATTERN (anchor)) == USE
 	  || GET_CODE (PATTERN (anchor)) == CLOBBER
 	  || GET_CODE (PATTERN (anchor)) == ADDR_VEC
@@ -9162,14 +9157,14 @@ pa_combine_instructions (void)
 	       floater;
 	       floater = PREV_INSN (floater))
 	    {
-	      if (GET_CODE (floater) == NOTE
-		  || (GET_CODE (floater) == INSN
+	      if (NOTE_P (floater)
+		  || (NONJUMP_INSN_P (floater)
 		      && (GET_CODE (PATTERN (floater)) == USE
 			  || GET_CODE (PATTERN (floater)) == CLOBBER)))
 		continue;
 
 	      /* Anything except a regular INSN will stop our search.  */
-	      if (GET_CODE (floater) != INSN
+	      if (! NONJUMP_INSN_P (floater)
 		  || GET_CODE (PATTERN (floater)) == ADDR_VEC
 		  || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
 		{
@@ -9223,15 +9218,15 @@ pa_combine_instructions (void)
 	    {
 	      for (floater = anchor; floater; floater = NEXT_INSN (floater))
 		{
-		  if (GET_CODE (floater) == NOTE
-		      || (GET_CODE (floater) == INSN
+		  if (NOTE_P (floater)
+		      || (NONJUMP_INSN_P (floater)
 			  && (GET_CODE (PATTERN (floater)) == USE
 			      || GET_CODE (PATTERN (floater)) == CLOBBER)))
 
 		    continue;
 
 		  /* Anything except a regular INSN will stop our search.  */
-		  if (GET_CODE (floater) != INSN
+		  if (! NONJUMP_INSN_P (floater)
 		      || GET_CODE (PATTERN (floater)) == ADDR_VEC
 		      || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
 		    {
@@ -9386,7 +9381,7 @@ pa_can_combine_p (rtx new_rtx, rtx anchor, rtx flo
 int
 pa_insn_refs_are_delayed (rtx insn)
 {
-  return ((GET_CODE (insn) == INSN
+  return ((NONJUMP_INSN_P (insn)
 	   && GET_CODE (PATTERN (insn)) != SEQUENCE
 	   && GET_CODE (PATTERN (insn)) != USE
 	   && GET_CODE (PATTERN (insn)) != CLOBBER
Index: config/rs6000/rs6000.c
===================================================================
--- config/rs6000/rs6000.c	(revision 196975)
+++ config/rs6000/rs6000.c	(working copy)
@@ -17843,9 +17843,8 @@ compute_save_world_info (rs6000_stack_t *info_ptr)
   if (WORLD_SAVE_P (info_ptr))
     {
       rtx insn;
-      for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
-	if ( GET_CODE (insn) == CALL_INSN
-	     && SIBLING_CALL_P (insn))
+      for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
+	if (CALL_P (insn) && SIBLING_CALL_P (insn))
 	  {
 	    info_ptr->world_save_p = 0;
 	    break;
@@ -23837,7 +23836,7 @@ is_load_insn (rtx insn, rtx *load_mem)
   if (!insn || !INSN_P (insn))
     return false;
 
-  if (GET_CODE (insn) == CALL_INSN)
+  if (CALL_P (insn))
     return false;
 
   return is_load_insn1 (PATTERN (insn), load_mem);
@@ -24232,7 +24231,7 @@ insn_must_be_first_in_group (rtx insn)
   enum attr_type type;
 
   if (!insn
-      || GET_CODE (insn) == NOTE
+      || NOTE_P (insn)
       || DEBUG_INSN_P (insn)
       || GET_CODE (PATTERN (insn)) == USE
       || GET_CODE (PATTERN (insn)) == CLOBBER)
@@ -24363,7 +24362,7 @@ insn_must_be_last_in_group (rtx insn)
   enum attr_type type;
 
   if (!insn
-      || GET_CODE (insn) == NOTE
+      || NOTE_P (insn)
       || DEBUG_INSN_P (insn)
       || GET_CODE (PATTERN (insn)) == USE
       || GET_CODE (PATTERN (insn)) == CLOBBER)
Index: config/s390/s390.c
===================================================================
--- config/s390/s390.c	(revision 196975)
+++ config/s390/s390.c	(working copy)
@@ -5738,7 +5738,7 @@ addr_generation_dependency_p (rtx dep_rtx, rtx ins
 {
   rtx target, pat;
 
-  if (GET_CODE (dep_rtx) == INSN)
+  if (NONJUMP_INSN_P (dep_rtx))
       dep_rtx = PATTERN (dep_rtx);
 
   if (GET_CODE (dep_rtx) == SET)
@@ -5978,7 +5978,7 @@ s390_split_branches (void)
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) != JUMP_INSN)
+      if (! JUMP_P (insn))
 	continue;
 
       pat = PATTERN (insn);
@@ -6398,7 +6398,7 @@ s390_find_constant (struct constant_pool *pool, rt
 static rtx
 s390_execute_label (rtx insn)
 {
-  if (GET_CODE (insn) == INSN
+  if (NONJUMP_INSN_P (insn)
       && GET_CODE (PATTERN (insn)) == PARALLEL
       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
       && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
@@ -6603,7 +6603,7 @@ s390_mainpool_start (void)
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == INSN
+      if (NONJUMP_INSN_P (insn)
 	  && GET_CODE (PATTERN (insn)) == SET
 	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
 	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
@@ -6616,7 +6616,7 @@ s390_mainpool_start (void)
 	{
 	  s390_add_execute (pool, insn);
 	}
-      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+      else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 	{
 	  rtx pool_ref = NULL_RTX;
 	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6758,7 +6758,7 @@ s390_mainpool_finish (struct constant_pool *pool)
       if (INSN_P (insn))
 	replace_ltrel_base (&PATTERN (insn));
 
-      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+      if (NONJUMP_INSN_P (insn) || CALL_P (insn))
         {
           rtx addr, pool_ref = NULL_RTX;
           find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6840,7 +6840,7 @@ s390_chunkify_start (void)
 	  s390_add_execute (curr_pool, insn);
 	  s390_add_pool_insn (curr_pool, insn);
 	}
-      else if (GET_CODE (insn) == INSN || CALL_P (insn))
+      else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 	{
 	  rtx pool_ref = NULL_RTX;
 	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6867,7 +6867,7 @@ s390_chunkify_start (void)
 	    }
 	}
 
-      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
+      if (JUMP_P (insn) || LABEL_P (insn))
 	{
 	  if (curr_pool)
 	    s390_add_pool_insn (curr_pool, insn);
@@ -6911,7 +6911,7 @@ s390_chunkify_start (void)
 	     Those will have an effect on code size, which we need to
 	     consider here.  This calculation makes rather pessimistic
 	     worst-case assumptions.  */
-	  if (GET_CODE (insn) == CODE_LABEL)
+	  if (LABEL_P (insn))
 	    extra_size += 6;
 
 	  if (chunk_size < S390_POOL_CHUNK_MIN
@@ -6920,7 +6920,7 @@ s390_chunkify_start (void)
 	    continue;
 
 	  /* Pool chunks can only be inserted after BARRIERs ...  */
-	  if (GET_CODE (insn) == BARRIER)
+	  if (BARRIER_P (insn))
 	    {
 	      s390_end_pool (curr_pool, insn);
 	      curr_pool = NULL;
@@ -6937,7 +6937,7 @@ s390_chunkify_start (void)
 	      if (!section_switch_p)
 		{
 		  /* We can insert the barrier only after a 'real' insn.  */
-		  if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
+		  if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
 		    continue;
 		  if (get_attr_length (insn) == 0)
 		    continue;
@@ -7009,11 +7009,11 @@ s390_chunkify_start (void)
 	 Don't do that, however, if it is the label before
 	 a jump table.  */
 
-      if (GET_CODE (insn) == CODE_LABEL
+      if (LABEL_P (insn)
 	  && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
 	{
 	  rtx vec_insn = next_real_insn (insn);
-	  rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+	  rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
 			PATTERN (vec_insn) : NULL_RTX;
 	  if (!vec_pat
 	      || !(GET_CODE (vec_pat) == ADDR_VEC
@@ -7023,7 +7023,7 @@ s390_chunkify_start (void)
 
       /* If we have a direct jump (conditional or unconditional)
 	 or a casesi jump, check all potential targets.  */
-      else if (GET_CODE (insn) == JUMP_INSN)
+      else if (JUMP_P (insn))
 	{
           rtx pat = PATTERN (insn);
 	  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
@@ -7048,7 +7048,7 @@ s390_chunkify_start (void)
 	      /* Find the jump table used by this casesi jump.  */
 	      rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
 	      rtx vec_insn = next_real_insn (vec_label);
-	      rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+	      rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
 			    PATTERN (vec_insn) : NULL_RTX;
 	      if (vec_pat
 		  && (GET_CODE (vec_pat) == ADDR_VEC
@@ -7082,7 +7082,7 @@ s390_chunkify_start (void)
   /* Insert base register reload insns at every far label.  */
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
-    if (GET_CODE (insn) == CODE_LABEL
+    if (LABEL_P (insn)
         && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
       {
 	struct constant_pool *pool = s390_find_pool (pool_list, insn);
@@ -7128,7 +7128,7 @@ s390_chunkify_finish (struct constant_pool *pool_l
       if (!curr_pool)
 	continue;
 
-      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+      if (NONJUMP_INSN_P (insn) || CALL_P (insn))
         {
           rtx addr, pool_ref = NULL_RTX;
           find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -7181,9 +7181,9 @@ s390_chunkify_cancel (struct constant_pool *pool_l
       rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
       rtx label = NEXT_INSN (curr_pool->pool_insn);
 
-      if (jump && GET_CODE (jump) == JUMP_INSN
-	  && barrier && GET_CODE (barrier) == BARRIER
-	  && label && GET_CODE (label) == CODE_LABEL
+      if (jump && JUMP_P (jump)
+	  && barrier && BARRIER_P (barrier)
+	  && label && LABEL_P (label)
 	  && GET_CODE (PATTERN (jump)) == SET
 	  && SET_DEST (PATTERN (jump)) == pc_rtx
 	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
@@ -7203,7 +7203,7 @@ s390_chunkify_cancel (struct constant_pool *pool_l
     {
       rtx next_insn = NEXT_INSN (insn);
 
-      if (GET_CODE (insn) == INSN
+      if (NONJUMP_INSN_P (insn)
 	  && GET_CODE (PATTERN (insn)) == SET
 	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
 	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
@@ -10080,7 +10080,7 @@ s390_optimize_prologue (void)
 
       next_insn = NEXT_INSN (insn);
 
-      if (GET_CODE (insn) != INSN)
+      if (! NONJUMP_INSN_P (insn))
 	continue;
 
       if (GET_CODE (PATTERN (insn)) == PARALLEL
Index: config/sparc/sparc.c
===================================================================
--- config/sparc/sparc.c	(revision 196975)
+++ config/sparc/sparc.c	(working copy)
@@ -3063,10 +3063,10 @@ emit_cbcond_nop (rtx insn)
   if (!next)
     return 1;
 
-  if (GET_CODE (next) == INSN
+  if (NONJUMP_INSN_P (next)
       && GET_CODE (PATTERN (next)) == SEQUENCE)
     next = XVECEXP (PATTERN (next), 0, 0);
-  else if (GET_CODE (next) == CALL_INSN
+  else if (CALL_P (next)
 	   && GET_CODE (PATTERN (next)) == PARALLEL)
     {
       rtx delay = XVECEXP (PATTERN (next), 0, 1);
@@ -3222,7 +3222,7 @@ eligible_for_return_delay (rtx trial)
   int regno;
   rtx pat;
 
-  if (GET_CODE (trial) != INSN)
+  if (! NONJUMP_INSN_P (trial))
     return 0;
 
   if (get_attr_length (trial) != 1)
@@ -3293,7 +3293,7 @@ eligible_for_sibcall_delay (rtx trial)
 {
   rtx pat;
 
-  if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
+  if (! NONJUMP_INSN_P (trial) || GET_CODE (PATTERN (trial)) != SET)
     return 0;
 
   if (get_attr_length (trial) != 1)
@@ -5424,7 +5424,7 @@ sparc_asm_function_epilogue (FILE *file, HOST_WIDE
 
   last_real_insn = prev_real_insn (insn);
   if (last_real_insn
-      && GET_CODE (last_real_insn) == INSN
+      && NONJUMP_INSN_P (last_real_insn)
       && GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
     last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
 
Index: config/spu/spu.c
===================================================================
--- config/spu/spu.c	(revision 196975)
+++ config/spu/spu.c	(working copy)
@@ -1962,7 +1962,7 @@ struct spu_bb_info
 static struct spu_bb_info *spu_bb_info;
 
 #define STOP_HINT_P(INSN) \
-		(GET_CODE(INSN) == CALL_INSN \
+		(CALL_P(INSN) \
 		 || INSN_CODE(INSN) == CODE_FOR_divmodsi4 \
 		 || INSN_CODE(INSN) == CODE_FOR_udivmodsi4)
 
@@ -2163,7 +2163,7 @@ spu_emit_branch_hint (rtx before, rtx branch, rtx
 static rtx
 get_branch_target (rtx branch)
 {
-  if (GET_CODE (branch) == JUMP_INSN)
+  if (JUMP_P (branch))
     {
       rtx set, src;
 
@@ -2212,7 +2212,7 @@ get_branch_target (rtx branch)
 
       return src;
     }
-  else if (GET_CODE (branch) == CALL_INSN)
+  else if (CALL_P (branch))
     {
       rtx call;
       /* All of our call patterns are in a PARALLEL and the CALL is
Index: config/stormy16/stormy16.c
===================================================================
--- config/stormy16/stormy16.c	(revision 196975)
+++ config/stormy16/stormy16.c	(working copy)
@@ -2441,8 +2441,7 @@ combine_bnp (rtx insn)
 	  if (reg_mentioned_p (reg, and_insn))
 	    return;
 
-	  if (GET_CODE (and_insn) != NOTE
-	      && GET_CODE (and_insn) != INSN)
+	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
 	    return;
 	}
     }
@@ -2461,8 +2460,7 @@ combine_bnp (rtx insn)
 	  if (reg_mentioned_p (reg, and_insn))
 	    return;
 
-	  if (GET_CODE (and_insn) != NOTE
-	      && GET_CODE (and_insn) != INSN)
+	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
 	    return;
 	}
 
@@ -2486,8 +2484,7 @@ combine_bnp (rtx insn)
 		break;
 
 	      if (reg_mentioned_p (reg, shift)
-		  || (GET_CODE (shift) != NOTE
-		      && GET_CODE (shift) != INSN))
+		  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
 		{
 		  shift = NULL_RTX;
 		  break;
@@ -2534,8 +2531,7 @@ combine_bnp (rtx insn)
       if (reg_mentioned_p (reg, load))
 	return;
 
-      if (GET_CODE (load) != NOTE
-	  && GET_CODE (load) != INSN)
+      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
 	return;
     }
   if (!load)
Index: config/v850/v850.c
===================================================================
--- config/v850/v850.c	(revision 196975)
+++ config/v850/v850.c	(working copy)
@@ -1133,13 +1133,13 @@ Saved %d bytes (%d uses of register %s) in functio
 	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
 	     INSN_UID (first_insn), INSN_UID (last_insn));
 
-  if (GET_CODE (first_insn) == NOTE)
+  if (NOTE_P (first_insn))
     first_insn = next_nonnote_insn (first_insn);
 
   last_insn = next_nonnote_insn (last_insn);
   for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
     {
-      if (GET_CODE (insn) == INSN)
+      if (NONJUMP_INSN_P (insn))
 	{
 	  rtx pattern = single_set (insn);
 
@@ -1199,7 +1199,7 @@ Saved %d bytes (%d uses of register %s) in functio
 
   /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
   insn = prev_nonnote_insn (first_insn);
-  if (insn && GET_CODE (insn) == INSN
+  if (insn && NONJUMP_INSN_P (insn)
       && GET_CODE (PATTERN (insn)) == SET
       && SET_DEST (PATTERN (insn)) == *p_ep
       && SET_SRC (PATTERN (insn)) == *p_r1)
Index: config/xtensa/xtensa.c
===================================================================
--- config/xtensa/xtensa.c	(revision 196975)
+++ config/xtensa/xtensa.c	(working copy)
@@ -1650,7 +1650,7 @@ xtensa_emit_loop_end (rtx insn, rtx *operands)
 	  {
 	    rtx body = PATTERN (insn);
 
-	    if (GET_CODE (body) == JUMP_INSN)
+	    if (JUMP_P (body))
 	      {
 		output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
 		done = 1;
Index: dwarf2out.c
===================================================================
--- dwarf2out.c	(revision 196975)
+++ dwarf2out.c	(working copy)
@@ -5059,7 +5059,7 @@ add_var_loc_to_decl (tree decl, rtx loc_note, cons
   if (temp->last
       && temp->first == temp->last
       && TREE_CODE (decl) == PARM_DECL
-      && GET_CODE (temp->first->loc) == NOTE
+      && NOTE_P (temp->first->loc)
       && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
       && DECL_INCOMING_RTL (decl)
       && NOTE_VAR_LOCATION_LOC (temp->first->loc)
@@ -13475,7 +13475,7 @@ dw_loc_list (var_loc_list *loc_list, tree decl, in
 	    *listp = new_loc_list (descr, node->label, endname, secname);
 	    if (TREE_CODE (decl) == PARM_DECL
 		&& node == loc_list->first
-		&& GET_CODE (node->loc) == NOTE
+		&& NOTE_P (node->loc)
 		&& strcmp (node->label, endname) == 0)
 	      (*listp)->force = true;
 	    listp = &(*listp)->dw_loc_next;
@@ -20702,7 +20702,7 @@ dwarf2out_var_location (rtx loc_note)
   next_note = NEXT_INSN (loc_note);
   if (! next_note
       || INSN_DELETED_P (next_note)
-      || GET_CODE (next_note) != NOTE
+      || ! NOTE_P (next_note)
       || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
 	  && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
     next_note = NULL_RTX;
Index: hw-doloop.c
===================================================================
--- hw-doloop.c	(revision 196975)
+++ hw-doloop.c	(working copy)
@@ -365,7 +365,7 @@ discover_loops (bitmap_obstack *loop_stack, struct
       rtx tail = BB_END (bb);
       rtx insn, reg;
 
-      while (tail && GET_CODE (tail) == NOTE && tail != BB_HEAD (bb))
+      while (tail && NOTE_P (tail) && tail != BB_HEAD (bb))
 	tail = PREV_INSN (tail);
 
       if (tail == NULL_RTX)
Index: resource.c
===================================================================
--- resource.c	(revision 196975)
+++ resource.c	(working copy)
@@ -175,14 +175,12 @@ next_insn_no_annul (rtx insn)
 	  && NEXT_INSN (PREV_INSN (insn)) != insn)
 	{
 	  rtx next = NEXT_INSN (insn);
-	  enum rtx_code code = GET_CODE (next);
 
-	  while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
+	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
 		 && INSN_FROM_TARGET_P (next))
 	    {
 	      insn = next;
 	      next = NEXT_INSN (insn);
-	      code = GET_CODE (next);
 	    }
 	}
 


* Re: [patch] Use NOTE_P, CALL_P, etc. instead of GET_CODE in a bunch of files
  2013-03-22 19:10 [patch] Use NOTE_P, CALL_P, etc. instead of GET_CODE in a bunch of files Steven Bosscher
@ 2013-03-23  0:45 ` Steven Bosscher
  2013-03-23  1:57   ` Jeff Law
  0 siblings, 1 reply; 3+ messages in thread
From: Steven Bosscher @ 2013-03-23  0:45 UTC (permalink / raw)
  To: GCC Patches

On Fri, Mar 22, 2013 at 8:09 PM, Steven Bosscher wrote:
> Hello,
>
> This is an almost completely mechanical replacement of GET_CODE(thing)
> == ... with the equivalent predicate macro from rtl.h. This particular
> set of files fell victim to my plans for GCC 4.9 to make jump table
> data (what JUMP_TABLE_DATA_P matches) a separate rtx code instead of a
> JUMP_INSN. The script I used is included in the patch. I checked the
> script's changes, cleaned them up, and modified a few places by hand.
>
> Bootstrapped&tested on powerpc64-unknown-linux-gnu and on
> ia64-unknown-linux-gnu. Also did a quick cross to a few of the
> affected ports (sparc, bfin).
> OK for trunk?
>
> Can I please have pre-approval for similar changes to other files,
> subject to posting and waiting at least 2 days for
> comments/objections?

FWIW, the only other instance where the silly script triggers (apart
from a few wrong INSN_LIST changes) is in mmix.c:

Index: config/mmix/mmix.c
===================================================================
--- config/mmix/mmix.c  (revision 196975)
+++ config/mmix/mmix.c  (working copy)
@@ -1728,7 +1728,7 @@ mmix_print_operand (FILE *stream, rtx x, int code)
       if (CONSTANT_P (modified_x)
          /* Strangely enough, this is not included in CONSTANT_P.
             FIXME: Ask/check about sanity here.  */
-         || GET_CODE (modified_x) == CODE_LABEL)
+         || LABEL_P (modified_x))
        {
          output_addr_const (stream, modified_x);
          return;


It looks like the rest of the gcc/ directory was already completely
converted to the _P idiom.

Ciao!
Steven


* Re: [patch] Use NOTE_P, CALL_P, etc. instead of GET_CODE in a bunch of files
  2013-03-23  0:45 ` Steven Bosscher
@ 2013-03-23  1:57   ` Jeff Law
  0 siblings, 0 replies; 3+ messages in thread
From: Jeff Law @ 2013-03-23  1:57 UTC (permalink / raw)
  To: Steven Bosscher; +Cc: GCC Patches

On 03/22/2013 06:44 PM, Steven Bosscher wrote:
> On Fri, Mar 22, 2013 at 8:09 PM, Steven Bosscher wrote:
>> Hello,
>>
>> This is an almost completely mechanical replacement of GET_CODE(thing)
>> == ... with the equivalent predicate macro from rtl.h. This particular
>> set of files fell victim to my plans for GCC 4.9 to make jump table
>> data (what JUMP_TABLE_DATA_P matches) a separate rtx code instead of a
>> JUMP_INSN. The script I used is included in the patch. I checked the
>> script's changes, cleaned them up, and modified a few places by hand.
>>
>> Bootstrapped&tested on powerpc64-unknown-linux-gnu and on
>> ia64-unknown-linux-gnu. Also did a quick cross to a few of the
>> affected ports (sparc, bfin).
>> OK for trunk?
>>
>> Can I please have pre-approval for similar changes to other files,
>> subject to posting and waiting at least 2 days for
>> comments/objections?
>
> FWIW, the only other instance where the silly script triggers (apart
> from a few wrong INSN_LIST changes) is in mmix.c:
I'll pre-approve the mechanical replacements.

jeff

