* Make gimple.h checks conditional with ENABLE_GIMPLE_CHECKING
@ 2010-06-08 23:13 Jan Hubicka
From: Jan Hubicka @ 2010-06-08 23:13 UTC (permalink / raw)
  To: gcc-patches, rguenther

Hi,
this patch conditionalizes the sanity checks in the gimple accessor functions.
I originally intended to go through my list of the most frequent aborts remaining
in the code, but it now seems to me that this would just cause maintenance
problems, since it would be difficult to decide what to check and what not.

The nature of these checks is very similar to the existing tree/RTL and gimple
checking, so I think we should just handle them the same way and put them all
under ENABLE_GIMPLE_CHECKING.
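
To make the pattern concrete, here is a minimal, self-contained sketch
(hypothetical names, not GCC code) of what such a conditional check amounts
to: the assert is only compiled in when the checking macro is defined, so a
release build pays nothing for it.

#include <assert.h>
#include <stdio.h>

struct widget { int kind; int value; };

/* Setter in the style of the gimple accessors: the sanity check is only
   compiled in when ENABLE_WIDGET_CHECKING is defined (say, by a checking
   build); without it the function is just the store.  */
static void
widget_set_value (struct widget *w, int value)
{
#ifdef ENABLE_WIDGET_CHECKING
  assert (w->kind == 1);
#endif
  w->value = value;
}

int
main (void)
{
  struct widget w = { 1, 0 };
  widget_set_value (&w, 42);
  printf ("%d\n", w.value);
  return 0;
}

In GCC itself the corresponding macro is, like the tree and RTL checking
macros, defined (or not) by configure's checking options.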

Bootstrapped/regtested on x86_64-linux.  My build time test actually regressed
to 11m16s (it used to be 10m19s), but I am quite convinced this is due to the
merge of the df-improv branch.
OK?

Honza

	* gimple.h (set_bb_seq): Make check conditional with ENABLE_CHECKING.
	(gimple_set_def_ops, gimple_set_use_ops,
	gimple_set_vuse, gimple_set_vdef,
	gimple_omp_subcode, gimple_omp_set_subcode, gimple_ops, gimple_op,
	gimple_op_ptr, gimple_set_op, gimple_bind_set_block,
	gimple_asm_input_op, gimple_asm_input_op_ptr, gimple_asm_set_input_op,
	gimple_asm_output_op, gimple_asm_output_op_ptr,
	gimple_asm_set_output_op, gimple_asm_clobber_op, 
	gimple_asm_set_clobber_op, gimple_asm_label_op,
	gimple_asm_set_label_op, gimple_try_set_kind, gimple_try_catch_is_cleanup,
	gimple_try_set_catch_is_cleanup, gimple_phi_arg, 
	gimple_switch_num_labels, gimple_switch_set_index, gimple_switch_label,
	gimple_switch_set_label, gimple_omp_for_index, gimple_omp_for_index_ptr,
	gimple_omp_for_set_index, gimple_omp_for_initial, gimple_omp_for_initial_ptr,
	gimple_omp_for_set_initial, gimple_omp_for_final, gimple_omp_for_final_ptr,
	gimple_omp_for_set_final, gimple_omp_for_incr, gimple_omp_for_incr_ptr,
	gimple_omp_for_set_incr, gimple_omp_for_set_cond, gimple_omp_for_cond): Make
	checking conditional with ENABLE_GIMPLE_CHECKING.
	(gimple_phi_set_arg): Likewise; replace memcpy by assignment.
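
As an aside on the gimple_phi_set_arg change: the structure assignment is just
a clearer spelling of the old memcpy.  A small sketch with a hypothetical
argument type (not GCC code):

struct phi_arg_sketch { int def; int flags; };

/* Both forms copy the same bytes; the assignment additionally lets the
   compiler check the operand types.  */
void
copy_arg (struct phi_arg_sketch *args, unsigned index,
          const struct phi_arg_sketch *src)
{
  args[index] = *src;
  /* Equivalent to: memcpy (&args[index], src, sizeof (struct phi_arg_sketch)); */
}
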
Index: gimple.h
===================================================================
--- gimple.h	(revision 160447)
+++ gimple.h	(working copy)
@@ -244,7 +244,9 @@ bb_seq (const_basic_block bb)
 static inline void
 set_bb_seq (basic_block bb, gimple_seq seq)
 {
+#ifdef ENABLE_CHECKING
   gcc_assert (!(bb->flags & BB_RTL));
+#endif
   bb->il.gimple->seq = seq;
 }
 
@@ -1326,7 +1328,9 @@ gimple_def_ops (const_gimple g)
 static inline void
 gimple_set_def_ops (gimple g, struct def_optype_d *def)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_has_ops (g));
+#endif
   g->gsops.opbase.def_ops = def;
 }
 
@@ -1347,7 +1351,9 @@ gimple_use_ops (const_gimple g)
 static inline void
 gimple_set_use_ops (gimple g, struct use_optype_d *use)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_has_ops (g));
+#endif
   g->gsops.opbase.use_ops = use;
 }
 
@@ -1428,7 +1434,9 @@ gimple_vdef_ptr (gimple g)
 static inline void
 gimple_set_vuse (gimple g, tree vuse)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_has_mem_ops (g));
+#endif
   g->gsmembase.vuse = vuse;
 }
 
@@ -1437,7 +1445,9 @@ gimple_set_vuse (gimple g, tree vuse)
 static inline void
 gimple_set_vdef (gimple g, tree vdef)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_has_mem_ops (g));
+#endif
   g->gsmembase.vdef = vdef;
 }
 
@@ -1528,8 +1538,10 @@ gimple_references_memory_p (gimple stmt)
 static inline unsigned
 gimple_omp_subcode (const_gimple s)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD
 	      && gimple_code (s) <= GIMPLE_OMP_SINGLE);
+#endif
   return s->gsbase.subcode;
 }
 
@@ -1540,7 +1552,9 @@ gimple_omp_set_subcode (gimple s, unsign
 {
   /* We only have 16 bits for the subcode.  Assert that we are not
      overflowing it.  */
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (subcode < (1 << 16));
+#endif
   s->gsbase.subcode = subcode;
 }
 
@@ -1640,7 +1654,9 @@ gimple_ops (gimple gs)
      of the structure.  Note that those structures that do not
      have an operand vector have a zero offset.  */
   off = gimple_ops_offset_[gimple_statement_structure (gs)];
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (off != 0);
+#endif
 
   return (tree *) ((char *) gs + off);
 }
@@ -1653,7 +1669,7 @@ gimple_op (const_gimple gs, unsigned i)
 {
   if (gimple_has_ops (gs))
     {
-#ifdef ENABLE_CHECKING
+#ifdef ENABLE_GIMPLE_CHECKING
       gcc_assert (i < gimple_num_ops (gs));
 #endif
       return gimple_ops (CONST_CAST_GIMPLE (gs))[i];
@@ -1669,7 +1685,7 @@ gimple_op_ptr (const_gimple gs, unsigned
 {
   if (gimple_has_ops (gs))
     {
-#ifdef ENABLE_CHECKING
+#ifdef ENABLE_GIMPLE_CHECKING
       gcc_assert (i < gimple_num_ops (gs));
 #endif
       return gimple_ops (CONST_CAST_GIMPLE (gs)) + i;
@@ -1683,7 +1699,9 @@ gimple_op_ptr (const_gimple gs, unsigned
 static inline void
 gimple_set_op (gimple gs, unsigned i, tree op)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_has_ops (gs) && i < gimple_num_ops (gs));
+#endif
 
   /* Note.  It may be tempting to assert that OP matches
      is_gimple_operand, but that would be wrong.  Different tuples
@@ -2626,7 +2644,9 @@ static inline void
 gimple_bind_set_block (gimple gs, tree block)
 {
   GIMPLE_CHECK (gs, GIMPLE_BIND);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (block == NULL_TREE || TREE_CODE (block) == BLOCK);
+#endif
   gs->gimple_bind.block = block;
 }
 
@@ -2675,7 +2695,9 @@ static inline tree
 gimple_asm_input_op (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.ni);
+#endif
   return gimple_op (gs, index);
 }
 
@@ -2685,7 +2707,9 @@ static inline tree *
 gimple_asm_input_op_ptr (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.ni);
+#endif
   return gimple_op_ptr (gs, index);
 }
 
@@ -2696,8 +2720,10 @@ static inline void
 gimple_asm_set_input_op (gimple gs, unsigned index, tree in_op)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.ni);
   gcc_assert (TREE_CODE (in_op) == TREE_LIST);
+#endif
   gimple_set_op (gs, index, in_op);
 }
 
@@ -2708,7 +2734,9 @@ static inline tree
 gimple_asm_output_op (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.no);
+#endif
   return gimple_op (gs, index + gs->gimple_asm.ni);
 }
 
@@ -2718,7 +2746,9 @@ static inline tree *
 gimple_asm_output_op_ptr (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.no);
+#endif
   return gimple_op_ptr (gs, index + gs->gimple_asm.ni);
 }
 
@@ -2729,8 +2759,10 @@ static inline void
 gimple_asm_set_output_op (gimple gs, unsigned index, tree out_op)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.no);
   gcc_assert (TREE_CODE (out_op) == TREE_LIST);
+#endif
   gimple_set_op (gs, index + gs->gimple_asm.ni, out_op);
 }
 
@@ -2741,7 +2773,9 @@ static inline tree
 gimple_asm_clobber_op (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.nc);
+#endif
   return gimple_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.no);
 }
 
@@ -2752,8 +2786,10 @@ static inline void
 gimple_asm_set_clobber_op (gimple gs, unsigned index, tree clobber_op)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.nc);
   gcc_assert (TREE_CODE (clobber_op) == TREE_LIST);
+#endif
   gimple_set_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.no, clobber_op);
 }
 
@@ -2763,7 +2799,9 @@ static inline tree
 gimple_asm_label_op (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.nl);
+#endif
   return gimple_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.nc);
 }
 
@@ -2773,8 +2811,10 @@ static inline void
 gimple_asm_set_label_op (gimple gs, unsigned index, tree label_op)
 {
   GIMPLE_CHECK (gs, GIMPLE_ASM);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_asm.nl);
   gcc_assert (TREE_CODE (label_op) == TREE_LIST);
+#endif
   gimple_set_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.nc, label_op);
 }
 
@@ -2987,7 +3027,9 @@ static inline void
 gimple_try_set_kind (gimple gs, enum gimple_try_flags kind)
 {
   GIMPLE_CHECK (gs, GIMPLE_TRY);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
+#endif
   if (gimple_try_kind (gs) != kind)
     gs->gsbase.subcode = (unsigned int) kind;
 }
@@ -2998,7 +3040,9 @@ gimple_try_set_kind (gimple gs, enum gim
 static inline bool
 gimple_try_catch_is_cleanup (const_gimple gs)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_try_kind (gs) == GIMPLE_TRY_CATCH);
+#endif
   return (gs->gsbase.subcode & GIMPLE_TRY_CATCH_IS_CLEANUP) != 0;
 }
 
@@ -3029,7 +3073,9 @@ gimple_try_cleanup (gimple gs)
 static inline void
 gimple_try_set_catch_is_cleanup (gimple g, bool catch_is_cleanup)
 {
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_try_kind (g) == GIMPLE_TRY_CATCH);
+#endif
   if (catch_is_cleanup)
     g->gsbase.subcode |= GIMPLE_TRY_CATCH_IS_CLEANUP;
   else
@@ -3156,7 +3202,9 @@ static inline struct phi_arg_d *
 gimple_phi_arg (gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_PHI);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_phi.capacity);
+#endif
   return &(gs->gimple_phi.args[index]);
 }
 
@@ -3167,8 +3215,10 @@ static inline void
 gimple_phi_set_arg (gimple gs, unsigned index, struct phi_arg_d * phiarg)
 {
   GIMPLE_CHECK (gs, GIMPLE_PHI);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (index <= gs->gimple_phi.nargs);
-  memcpy (gs->gimple_phi.args + index, phiarg, sizeof (struct phi_arg_d));
+#endif
+  gs->gimple_phi.args[index] = *phiarg;
 }
 
 /* Return the region number for GIMPLE_RESX GS.  */
@@ -3215,7 +3265,9 @@ gimple_switch_num_labels (const_gimple g
   unsigned num_ops;
   GIMPLE_CHECK (gs, GIMPLE_SWITCH);
   num_ops = gimple_num_ops (gs);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (num_ops > 1);
+#endif
   return num_ops - 1;
 }
 
@@ -3256,7 +3308,9 @@ static inline void
 gimple_switch_set_index (gimple gs, tree index)
 {
   GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (SSA_VAR_P (index) || CONSTANT_CLASS_P (index));
+#endif
   gimple_set_op (gs, 0, index);
 }
 
@@ -3268,7 +3322,9 @@ static inline tree
 gimple_switch_label (const_gimple gs, unsigned index)
 {
   GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_num_ops (gs) > index + 1);
+#endif
   return gimple_op (gs, index + 1);
 }
 
@@ -3278,8 +3334,10 @@ static inline void
 gimple_switch_set_label (gimple gs, unsigned index, tree label)
 {
   GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (gimple_num_ops (gs) > index + 1);
   gcc_assert (label == NULL_TREE || TREE_CODE (label) == CASE_LABEL_EXPR);
+#endif
   gimple_set_op (gs, index + 1, label);
 }
 
@@ -3506,7 +3564,9 @@ static inline tree
 gimple_omp_for_index (const_gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return gs->gimple_omp_for.iter[i].index;
 }
 
@@ -3517,7 +3577,9 @@ static inline tree *
 gimple_omp_for_index_ptr (gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return &gs->gimple_omp_for.iter[i].index;
 }
 
@@ -3528,7 +3590,9 @@ static inline void
 gimple_omp_for_set_index (gimple gs, size_t i, tree index)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   gs->gimple_omp_for.iter[i].index = index;
 }
 
@@ -3539,7 +3603,9 @@ static inline tree
 gimple_omp_for_initial (const_gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return gs->gimple_omp_for.iter[i].initial;
 }
 
@@ -3550,7 +3616,9 @@ static inline tree *
 gimple_omp_for_initial_ptr (gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return &gs->gimple_omp_for.iter[i].initial;
 }
 
@@ -3561,7 +3629,9 @@ static inline void
 gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   gs->gimple_omp_for.iter[i].initial = initial;
 }
 
@@ -3572,7 +3642,9 @@ static inline tree
 gimple_omp_for_final (const_gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return gs->gimple_omp_for.iter[i].final;
 }
 
@@ -3583,7 +3655,9 @@ static inline tree *
 gimple_omp_for_final_ptr (gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return &gs->gimple_omp_for.iter[i].final;
 }
 
@@ -3594,7 +3668,9 @@ static inline void
 gimple_omp_for_set_final (gimple gs, size_t i, tree final)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   gs->gimple_omp_for.iter[i].final = final;
 }
 
@@ -3605,7 +3681,9 @@ static inline tree
 gimple_omp_for_incr (const_gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return gs->gimple_omp_for.iter[i].incr;
 }
 
@@ -3616,7 +3694,9 @@ static inline tree *
 gimple_omp_for_incr_ptr (gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return &gs->gimple_omp_for.iter[i].incr;
 }
 
@@ -3627,7 +3707,9 @@ static inline void
 gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   gs->gimple_omp_for.iter[i].incr = incr;
 }
 
@@ -4129,8 +4211,10 @@ static inline void
 gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (TREE_CODE_CLASS (cond) == tcc_comparison);
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   gs->gimple_omp_for.iter[i].cond = cond;
 }
 
@@ -4141,7 +4225,9 @@ static inline enum tree_code
 gimple_omp_for_cond (const_gimple gs, size_t i)
 {
   GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+#ifdef ENABLE_GIMPLE_CHECKING
   gcc_assert (i < gs->gimple_omp_for.collapse);
+#endif
   return gs->gimple_omp_for.iter[i].cond;
 }
 
