Date: Fri, 08 Feb 2008 17:47:00 -0000
From: Richard Guenther
To: gcc-patches@gcc.gnu.org
Subject: Re: [RFC] Change (flatten) representation of memory references

On Wed, 6 Feb 2008, Richard Guenther wrote:

> On Mon, 4 Feb 2008, Richard Guenther wrote:
>
> > Following the old discussions at
> >
> > http://gcc.gnu.org/ml/gcc/2007-04/msg00096.html
>
> With starting to prototype the proposed MEM_REF scheme I noticed
> a few things that I'd like to add.  First let me summarize the
> idea again.  The idea is to unify all memory reference trees into
> two, MEM_REF and INDIRECT_MEM_REF, with the following main goals:

So, here's a prototype implementation that has, in addition to MEM_REF,
INDIRECT_MEM_REF and IDX_EXPR, also BIT_FIELD_EXPR, to be able to lower
stores to bitfields

  BIT_FIELD_REF <x, size, position> = value;

to

  word = MEM_REF <&x>;
  word = BIT_FIELD_EXPR <word, value, size, position>;
  MEM_REF <&x> = word;

that is, making the read-modify-write cycle explicit and giving the
accessed word an SSA_NAME.

The patch implements full lowering (apart from accesses to gimple
registers and things I missed ;)) and the C execute testsuite is clean
at -O0.  Basic support for optimized compilation is also in, but there
are still a lot of ICEs and miscompiles (aliasing is still hosed, I
believe).  But at least you can give it a try to see how it "feels".

MEM_REFs are dumped as

  MEM_REF <&base_object + offset {alias-set}>

and INDIRECT_MEM_REFs are dumped as

  MEM_REF <pointer + offset {alias-set}>

Richard.

Index: trunk/gcc/gimplify.c =================================================================== *** trunk.orig/gcc/gimplify.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/gimplify.c 2008-02-08 14:29:08.000000000 +0100 *************** gimplify_init_constructor (tree *expr_p, *** 3451,3456 **** --- 3451,3483 ---- return GS_ALL_DONE; } + /* Build a MEM_REF or INDIRECT_MEM_REF of type TYPE with BASE, OFFSET and + ALIAS_SET. Performs simple combining on the specified base.
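+ For illustration (an editorial summary of the cases handled below, using the dump syntax from the mail above): a MEM_REF whose base is the INDIRECT_REF *p is folded to an INDIRECT_MEM_REF with base p, an INDIRECT_MEM_REF whose base is the ADDR_EXPR &a is folded to a MEM_REF with base a, and a base that is itself a (INDIRECT_)MEM_REF is merged into a single reference by summing the two byte offsets.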
*/ + + tree + build_gimple_mem_ref (enum tree_code code, tree type, tree base, tree offset, + alias_set_type alias_set) + { + if (code == MEM_REF + && TREE_CODE (base) == INDIRECT_REF) + return build_gimple_mem_ref (INDIRECT_MEM_REF, type, + TREE_OPERAND (base, 0), offset, alias_set); + else if (code == INDIRECT_MEM_REF + && TREE_CODE (base) == ADDR_EXPR) + return build_gimple_mem_ref (MEM_REF, type, + TREE_OPERAND (base, 0), offset, alias_set); + else if (code == MEM_REF + && (TREE_CODE (base) == MEM_REF + || TREE_CODE (base) == INDIRECT_MEM_REF)) + return build_gimple_mem_ref (TREE_CODE (base), type, TREE_OPERAND (base, 0), + size_binop (PLUS_EXPR, + offset, TREE_OPERAND (base, 1)), + alias_set); + + return build3 (code, type, base, offset, + build_int_cst (integer_type_node, alias_set)); + } + /* Given a pointer value OP0, return a simplified version of an indirection through OP0, or NULL_TREE if no simplification is possible. Note that the resulting type may be different from *************** gimplify_expr (tree *expr_p, tree *pre_p *** 6024,6029 **** --- 6051,6072 ---- ret = GS_ALL_DONE; break; + case IDX_EXPR: + { + enum gimplify_status r0, r1, r2; + + r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, + post_p, is_gimple_val, fb_rvalue); + r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, + post_p, is_gimple_val, fb_rvalue); + r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, + post_p, is_gimple_val, fb_rvalue); + + ret = MIN (MIN (r0, r1), r2); + break; + } + + case POINTER_PLUS_EXPR: /* Convert ((type *)A)+offset into &A->field_of_type_and_offset. The second is gimple immediate saving a need for extra statement. Index: trunk/gcc/tree-gimple.c =================================================================== *** trunk.orig/gcc/tree-gimple.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-gimple.c 2008-02-08 15:13:01.000000000 +0100 *************** is_gimple_formal_tmp_rhs (tree t) *** 74,79 **** --- 74,80 ---- case VECTOR_CST: case OBJ_TYPE_REF: case ASSERT_EXPR: + case IDX_EXPR: return true; default: *************** bool *** 164,170 **** is_gimple_addressable (tree t) { return (is_gimple_id (t) || handled_component_p (t) ! || INDIRECT_REF_P (t)); } /* Return true if T is a GIMPLE minimal invariant. It's a restricted --- 165,172 ---- is_gimple_addressable (tree t) { return (is_gimple_id (t) || handled_component_p (t) ! || INDIRECT_REF_P (t) || TREE_CODE (t) == INDIRECT_MEM_REF ! || TREE_CODE (t) == MEM_REF); } /* Return true if T is a GIMPLE minimal invariant. It's a restricted *************** is_gimple_min_invariant (const_tree t) *** 176,181 **** --- 178,184 ---- switch (TREE_CODE (t)) { case ADDR_EXPR: + case POINTER_PLUS_EXPR: return TREE_INVARIANT (t); case INTEGER_CST: *************** bool *** 420,426 **** is_gimple_min_lval (tree t) { return (is_gimple_id (t) ! || TREE_CODE (t) == INDIRECT_REF); } /* Return true if T is a typecast operation. */ --- 423,429 ---- is_gimple_min_lval (tree t) { return (is_gimple_id (t) ! || TREE_CODE (t) == INDIRECT_REF || TREE_CODE (t) == INDIRECT_MEM_REF); } /* Return true if T is a typecast operation. */ *************** get_base_address (tree t) *** 477,483 **** if (SSA_VAR_P (t) || TREE_CODE (t) == STRING_CST || TREE_CODE (t) == CONSTRUCTOR ! || INDIRECT_REF_P (t)) return t; else return NULL_TREE; --- 480,488 ---- if (SSA_VAR_P (t) || TREE_CODE (t) == STRING_CST || TREE_CODE (t) == CONSTRUCTOR ! || INDIRECT_REF_P (t) ! /* ??? This isn't really the base address. */ ! 
|| TREE_CODE (t) == INDIRECT_MEM_REF) return t; else return NULL_TREE; *************** recalculate_side_effects (tree t) *** 516,521 **** --- 521,527 ---- case tcc_binary: /* a binary arithmetic expression */ case tcc_reference: /* a reference */ case tcc_vl_exp: /* a function call */ + case IDX_EXPR: TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); for (i = 0; i < len; ++i) { Index: trunk/gcc/tree-pretty-print.c =================================================================== *** trunk.orig/gcc/tree-pretty-print.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-pretty-print.c 2008-02-08 12:36:45.000000000 +0100 *************** dump_generic_node (pretty_printer *buffe *** 956,961 **** --- 956,973 ---- pp_string (buffer, ">"); break; + case BIT_FIELD_EXPR: + pp_string (buffer, "BIT_FIELD_EXPR <"); + dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false); + pp_string (buffer, ", "); + dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false); + pp_string (buffer, ", "); + dump_generic_node (buffer, TREE_OPERAND (node, 2), spc, flags, false); + pp_string (buffer, ", "); + dump_generic_node (buffer, TREE_OPERAND (node, 3), spc, flags, false); + pp_string (buffer, ">"); + break; + case ARRAY_REF: case ARRAY_RANGE_REF: op0 = TREE_OPERAND (node, 0); *************** dump_generic_node (pretty_printer *buffe *** 1484,1489 **** --- 1496,1531 ---- pp_string (buffer, ">"); break; + case MEM_REF: + case INDIRECT_MEM_REF: + pp_string (buffer, "MEM <"); + if (TREE_CODE (node) == MEM_REF) + pp_string (buffer, "&"); + dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false); + if (!integer_zerop (TREE_OPERAND (node, 1))) + { + pp_string (buffer, " + "); + dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false); + } + pp_string (buffer, " {"); + { + char tmp[16]; + snprintf (tmp, 16, "%lu", (unsigned long)MEM_REF_ALIAS_SET (node)); + pp_string (buffer, tmp); + } + pp_string (buffer, "}>"); + break; + + case IDX_EXPR: + pp_string (buffer, "IDX <"); + dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false); + pp_string (buffer, " + "); + dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false); + pp_string (buffer, " * "); + dump_generic_node (buffer, TREE_OPERAND (node, 2), spc, flags, false); + pp_string (buffer, ">"); + break; + case VA_ARG_EXPR: pp_string (buffer, "VA_ARG_EXPR <"); dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false); Index: trunk/gcc/tree.def =================================================================== *** trunk.orig/gcc/tree.def 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree.def 2008-02-08 12:34:59.000000000 +0100 *************** DEFTREECODE (TRANSLATION_UNIT_DECL, "tra *** 379,384 **** --- 379,393 ---- /* References to storage. */ + /* A memory reference with the specified type at the specified location. + Operand 0 is the object that is used as the base for the reference. + Operand 1 is the offset in bytes relative to the base. + Operand 2 is the effective alias set of this memory reference. */ + DEFTREECODE (MEM_REF, "mem_ref", tcc_reference, 3) + + /* Like MEM_REF, but the base object is dereferenced. */ + DEFTREECODE (INDIRECT_MEM_REF, "indirect_mem_ref", tcc_reference, 3) + /* Value is structure or union component. Operand 0 is the structure or union (an expression). Operand 1 is the field (a node of type FIELD_DECL).
*************** DEFTREECODE (PLUS_EXPR, "plus_expr", tcc *** 620,625 **** --- 629,641 ---- DEFTREECODE (MINUS_EXPR, "minus_expr", tcc_binary, 2) DEFTREECODE (MULT_EXPR, "mult_expr", tcc_binary, 2) + /* Multiply-add expression with non-commutative operands. + Operand 0 is a tree for the first summand; + Operand 1 is a tree for the first multiplicand of the second summand; + Operand 2 is a tree for the second multiplicand of the second summand. + The expression computed is op0 + op1 * op2. */ + DEFTREECODE (IDX_EXPR, "idx_expr", tcc_expression, 3) + /* Pointer addition. The first operand is always a pointer and the second operand is an integer of type sizetype. */ DEFTREECODE (POINTER_PLUS_EXPR, "pointer_plus_expr", tcc_binary, 2) *************** DEFTREECODE (ADDR_EXPR, "addr_expr", tcc *** 774,779 **** --- 790,807 ---- descriptor of type ptr_mode. */ DEFTREECODE (FDESC_EXPR, "fdesc_expr", tcc_expression, 2) + /* Given a word, a value and a bitfield position and size within + the word, produce the value that results from replacing the + described part of the word with the value. + Operand 0 is a tree for the word of integral type; + Operand 1 is a tree for the value of integral type; + Operand 2 is a tree giving the constant number of bits being + referenced, which is less than or equal to the precision of the value; + Operand 3 is a tree giving the constant position of the first referenced + bit such that the sum of operands 2 and 3 is less than or equal to the + precision of the word. */ + DEFTREECODE (BIT_FIELD_EXPR, "bitfield_expr", tcc_expression, 4) + /* Given two real or integer operands of the same type, returns a complex value of the corresponding complex type. */ DEFTREECODE (COMPLEX_EXPR, "complex_expr", tcc_binary, 2) Index: trunk/gcc/gimple-low.c =================================================================== *** trunk.orig/gcc/gimple-low.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/gimple-low.c 2008-02-08 18:31:58.000000000 +0100 *************** struct tree_opt_pass pass_mark_used_bloc *** 809,811 **** --- 809,1226 ---- TODO_dump_func, /* todo_flags_finish */ 0 /* letter */ }; + + static tree + lm_get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, + HOST_WIDE_INT *pbitpos, tree *poffset, + enum machine_mode *pmode, int *punsignedp, + int *pvolatilep, int *pbitrefp, tree *typep) + { + tree size_tree = 0; + enum machine_mode mode = VOIDmode; + tree offset = size_zero_node; + tree bit_offset = bitsize_zero_node; + + *typep = TREE_TYPE (exp); + + /* First get the mode, signedness, and size. We do this from just the + outermost expression. */ + if (TREE_CODE (exp) == COMPONENT_REF) + { + size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); + if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) + mode = DECL_MODE (TREE_OPERAND (exp, 1)); + else + *typep = DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)); + + *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1)); + } + else if (TREE_CODE (exp) == BIT_FIELD_REF) + { + size_tree = TREE_OPERAND (exp, 1); + *punsignedp = BIT_FIELD_REF_UNSIGNED (exp); + + /* For vector types, with the correct size of access, use the mode of + inner type. */ + if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE + && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))) + && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp)))) + mode = TYPE_MODE (TREE_TYPE (exp)); + + /* ??? BIT_FIELD_REFs are created from fold during optimization of bitfield + compares. The following type ends up being a structure type for + this reason sometimes.
We should stop fold from creating such + references (see gcc.c-torture/execute/bf64-1.c). */ + *typep = TREE_TYPE (TREE_OPERAND (exp, 0)); + } + else + { + mode = TYPE_MODE (TREE_TYPE (exp)); + *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); + + if (mode == BLKmode) + size_tree = TYPE_SIZE (TREE_TYPE (exp)); + else + *pbitsize = GET_MODE_BITSIZE (mode); + } + + if (size_tree != 0) + { + if (! host_integerp (size_tree, 1)) + mode = BLKmode, *pbitsize = -1; + else + *pbitsize = tree_low_cst (size_tree, 1); + } + + *pmode = mode; + *pbitrefp = 0; + *pvolatilep = 0; + + /* Compute cumulative bit-offset for nested component-refs and array-refs, + and find the ultimate containing object. */ + while (1) + { + switch (TREE_CODE (exp)) + { + case BIT_FIELD_REF: + bit_offset = size_binop (PLUS_EXPR, bit_offset, + TREE_OPERAND (exp, 2)); + gcc_assert (!*pbitrefp); /* ??? */ + *pbitrefp = 1; + break; + + case COMPONENT_REF: + { + tree field = TREE_OPERAND (exp, 1); + tree this_offset = component_ref_field_offset (exp); + double_int bytes, bits; + + offset = size_binop (PLUS_EXPR, offset, this_offset); + + /* We actually do not care about all the fancy + DECL_OFFSET_ALIGN - simply account exceeding + alignment to the regular offset here. */ + gcc_assert (TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST); + bytes = double_int_udivmod (tree_to_double_int (DECL_FIELD_BIT_OFFSET (field)), + DECL_BIT_FIELD (field) + ? tree_to_double_int (TYPE_SIZE (DECL_BIT_FIELD_TYPE (field))) + : uhwi_to_double_int (BITS_PER_UNIT), + TRUNC_DIV_EXPR, &bits); + if (DECL_BIT_FIELD (field)) + { + bytes = double_int_mul (bytes, + tree_to_double_int (TYPE_SIZE_UNIT (DECL_BIT_FIELD_TYPE (field)))); + bit_offset = size_binop (PLUS_EXPR, bit_offset, + double_int_to_tree (bitsizetype, bits)); + gcc_assert (!*pbitrefp); /* ??? */ + *pbitrefp = 1; + } + else + gcc_assert (double_int_zero_p (bits)); + offset = size_binop (PLUS_EXPR, offset, double_int_to_tree (sizetype, bytes)); + } + break; + + case ARRAY_REF: + case ARRAY_RANGE_REF: + { + tree index = TREE_OPERAND (exp, 1); + tree low_bound = array_ref_low_bound (exp); + tree unit_size = array_ref_element_size (exp); + tree tem; + + /* We assume all arrays have sizes that are a multiple of a byte. + First subtract the lower bound, if any, in the type of the + index, then convert to sizetype and multiply by the size of + the array element. */ + if (! integer_zerop (low_bound)) + index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), + index, low_bound); + + tem = size_binop (MULT_EXPR, + fold_convert (sizetype, index), unit_size); + /* Build this IDX expr here if not everything collapses down + to a constant. */ + if (TREE_CODE (tem) != INTEGER_CST + || TREE_CODE (offset) != INTEGER_CST) + { + /* ??? convert to sizetype introduces extra IVs, but + mixed-type IDX chaining introduces other problems. */ + offset = build3 (IDX_EXPR, sizetype, offset, + fold_convert (sizetype, index), unit_size); + } + else + offset = size_binop (PLUS_EXPR, offset, tem); + } + break; + + case REALPART_EXPR: + break; + + case IMAGPART_EXPR: + offset = size_binop (PLUS_EXPR, offset, + TYPE_SIZE_UNIT (TREE_TYPE (exp))); + break; + + case VIEW_CONVERT_EXPR: + break; + + default: + goto done; + } + + /* If any reference in the chain is volatile, the effect is volatile. 
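+ (Presumably this walker is adapted from get_inner_reference in expr.c; the volatile handling matches the one there.)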
*/ + if (TREE_THIS_VOLATILE (exp)) + *pvolatilep = 1; + + exp = TREE_OPERAND (exp, 0); + } + done: + + *pbitpos = tree_low_cst (bit_offset, 0); + *poffset = offset; + + return exp; + } + + static void + lower_mem_lvalue (tree *expr_p, tree *rhs_p, tree_stmt_iterator tsi) + { + tree ref = *expr_p; + tree type, base, offset, pre = NULL_TREE, post = NULL_TREE; + HOST_WIDE_INT bitpos, bitsize; + enum machine_mode mode; + int uns, vol = 0, bitf; + + /* get base and offset */ + base = lm_get_inner_reference (ref, &bitsize, &bitpos, &offset, + &mode, &uns, &vol, &bitf, &type); + + if (vol) + gcc_assert (!bitf); + + /* Do not lower gimple register accesses. */ + if (DECL_P (base) + && (TREE_CODE (TREE_TYPE (base)) == COMPLEX_TYPE + || TREE_CODE (TREE_TYPE (base)) == VECTOR_TYPE) + && (DECL_GIMPLE_REG_P (base) || !TREE_ADDRESSABLE (base))) + return; + + /* If we have a bitfield access, lower it. */ + if (bitf) + { + tree tmp, ref, stmt; + + /* Load the word containing the bitfield. */ + tmp = create_tmp_var (type, "MEML"); + ref = build_gimple_mem_ref (MEM_REF, type, base, + offset, get_alias_set (*expr_p)); + stmt = build_gimple_modify_stmt (tmp, ref); + + /* Gimplify the offset part and add the word load. */ + push_gimplify_context (); + gimplify_expr (&TREE_OPERAND (ref, 1), &pre, &post, + is_gimple_val, fb_rvalue); + pop_gimplify_context (NULL); + gcc_assert (!post); + if (pre) + tsi_link_before (&tsi, pre, TSI_SAME_STMT); + tsi_link_before (&tsi, stmt, TSI_SAME_STMT); + + /* Combine the word with the bitfield to write. */ + stmt = build_gimple_modify_stmt (tmp, + build4 (BIT_FIELD_EXPR, type, tmp, *rhs_p, + bitsize_int (bitsize), bitsize_int (bitpos))); + tsi_link_before (&tsi, stmt, TSI_SAME_STMT); + + /* Replace the bitfield write with a write of the full word. */ + *expr_p = unshare_expr (ref); + *rhs_p = tmp; + + return; + } + + gcc_assert (bitpos == 0); + *expr_p = build_gimple_mem_ref (MEM_REF, TREE_TYPE (*expr_p), base, offset, + get_alias_set (*expr_p)); + if (vol) + TREE_THIS_VOLATILE (*expr_p) = true; + + /* We need to gimplify the offset part. + ??? Here (and at gathering the offset) the magic producing + the chain of IDX expressions has to happen. */ + push_gimplify_context (); + gimplify_expr (&TREE_OPERAND (*expr_p, 1), &pre, &post, + is_gimple_val, fb_rvalue); + pop_gimplify_context (NULL); + gcc_assert (!post); + if (pre) + tsi_link_before (&tsi, pre, TSI_SAME_STMT); + } + + static void + lower_mem_rvalue (tree *expr_p, tree_stmt_iterator tsi) + { + tree ref = *expr_p; + tree type, base, offset, pre = NULL_TREE, post = NULL_TREE; + HOST_WIDE_INT bitpos, bitsize; + enum machine_mode mode; + int uns, vol = 0, bitf; + bool address_p = false; + + if ((TREE_CODE (ref) == NOP_EXPR + || TREE_CODE (ref) == CONVERT_EXPR) + && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) + { + expr_p = &TREE_OPERAND (ref, 0); + ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); + address_p = true; + } + else if (TREE_CODE (ref) == ADDR_EXPR) + { + ref = TREE_OPERAND (ref, 0); + address_p = true; + } + if (!REFERENCE_CLASS_P (ref)) + return; + + /* get base and offset */ + base = lm_get_inner_reference (ref, &bitsize, &bitpos, &offset, + &mode, &uns, &vol, &bitf, &type); + + if (address_p || vol) + gcc_assert (!bitf); + + /* Do not lower gimple register accesses. 
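+ (Presumably because lowering would turn the access into a MEM_REF of the decl, i.e. a memory access, and the complex or vector variable could then no longer be rewritten into SSA form.)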
*/ + if (!address_p + && DECL_P (base) + && (TREE_CODE (TREE_TYPE (base)) == COMPLEX_TYPE + || TREE_CODE (TREE_TYPE (base)) == VECTOR_TYPE) + && (DECL_GIMPLE_REG_P (base) || !TREE_ADDRESSABLE (base))) + return; + + /* If we have a bitfield load, lower it to a word load and a + bitfield extract. */ + if (bitf) + { + tree tmp, ref, stmt; + + tmp = create_tmp_var (type, "MEML"); + ref = build_gimple_mem_ref (MEM_REF, type, base, + offset, get_alias_set (*expr_p)); + stmt = build_gimple_modify_stmt (tmp, ref); + + /* Gimplify the offset part and add the word load. */ + push_gimplify_context (); + gimplify_expr (&TREE_OPERAND (ref, 1), &pre, &post, + is_gimple_val, fb_rvalue); + pop_gimplify_context (NULL); + gcc_assert (!post); + if (pre) + tsi_link_before (&tsi, pre, TSI_SAME_STMT); + tsi_link_before (&tsi, stmt, TSI_SAME_STMT); + + /* Replace the bitfield access with the bitfield extract. */ + *expr_p = build3 (BIT_FIELD_REF, TREE_TYPE (*expr_p), + tmp, bitsize_int (bitsize), bitsize_int (bitpos)); + BIT_FIELD_REF_UNSIGNED (*expr_p) = uns; + + return; + } + + gcc_assert (bitpos == 0); + + if (address_p) + { + bool invariant = TREE_INVARIANT (*expr_p); + if (TREE_CODE (base) == INDIRECT_REF) + base = TREE_OPERAND (base, 0); + else + base = build1 (ADDR_EXPR, TREE_TYPE (*expr_p), base); + /* Do not fold the POINTER_PLUS_EXPR to simplify re-gimplification + below. */ + *expr_p = build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), base, offset); + /* ??? recompute_tree_invariant_for_addr_expr doesn't handle + POINTER_PLUS_EXPR yet, nor is it structured to recurse into + both of its arguments. Just copy over the flag for now. */ + TREE_INVARIANT (*expr_p) = invariant; + } + else + { + *expr_p = build_gimple_mem_ref (MEM_REF, TREE_TYPE (*expr_p), base, + offset, get_alias_set (*expr_p)); + if (vol) + TREE_THIS_VOLATILE (*expr_p) = true; + } + + /* We need to gimplify the offset part. + ??? Here (and at gathering the offset) the magic producing + the chain of IDX expressions has to happen. */ + push_gimplify_context (); + gimplify_expr (&TREE_OPERAND (*expr_p, 1), &pre, &post, + is_gimple_val, fb_rvalue); + pop_gimplify_context (NULL); + gcc_assert (!post); + if (pre) + tsi_link_before (&tsi, pre, TSI_SAME_STMT); + } + + static unsigned int + lower_mem_exprs (void) + { + tree stmts = DECL_SAVED_TREE (cfun->decl); + tree_stmt_iterator tsi; + + for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi)) + { + tree stmt = tsi_stmt (tsi); + + switch (TREE_CODE (stmt)) + { + case GIMPLE_MODIFY_STMT: + lower_mem_rvalue (&GIMPLE_STMT_OPERAND (stmt, 1), tsi); + if (REFERENCE_CLASS_P (GIMPLE_STMT_OPERAND (stmt, 0))) + lower_mem_lvalue (&GIMPLE_STMT_OPERAND (stmt, 0), + &GIMPLE_STMT_OPERAND (stmt, 1), tsi); + if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) != CALL_EXPR) + break; + + /* Fallthrough. */ + case CALL_EXPR: + { + tree call = get_call_expr_in (stmt); + int i; + + for (i = 0; i < call_expr_nargs (call); ++i) + lower_mem_rvalue (&CALL_EXPR_ARG (call, i), tsi); + } + break; + + case ASM_EXPR: + /* ???
lower constraints */ + break; + + default:; + } + } + + return 0; + } + + struct tree_opt_pass pass_lower_mem = + { + "memlower", /* name */ + NULL, /* gate */ + lower_mem_exprs, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + PROP_gimple_lcf, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func, /* todo_flags_finish */ + 0 /* letter */ + }; + Index: trunk/gcc/passes.c =================================================================== *** trunk.orig/gcc/passes.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/passes.c 2008-02-07 16:22:16.000000000 +0100 *************** init_optimization_passes (void) *** 482,487 **** --- 482,488 ---- NEXT_PASS (pass_lower_cf); NEXT_PASS (pass_refactor_eh); NEXT_PASS (pass_lower_eh); + NEXT_PASS (pass_lower_mem); NEXT_PASS (pass_build_cfg); NEXT_PASS (pass_lower_complex_O0); NEXT_PASS (pass_lower_vector); Index: trunk/gcc/tree-pass.h =================================================================== *** trunk.orig/gcc/tree-pass.h 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-pass.h 2008-02-07 16:22:16.000000000 +0100 *************** extern struct tree_opt_pass pass_mudflap *** 246,251 **** --- 246,252 ---- extern struct tree_opt_pass pass_mudflap_2; extern struct tree_opt_pass pass_remove_useless_stmts; extern struct tree_opt_pass pass_lower_cf; + extern struct tree_opt_pass pass_lower_mem; extern struct tree_opt_pass pass_refactor_eh; extern struct tree_opt_pass pass_lower_eh; extern struct tree_opt_pass pass_build_cfg; Index: trunk/gcc/tree-flow.h =================================================================== *** trunk.orig/gcc/tree-flow.h 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-flow.h 2008-02-07 16:22:16.000000000 +0100 *************** tree force_gimple_operand (tree, tree *, *** 1125,1130 **** --- 1125,1131 ---- tree force_gimple_operand_bsi (block_stmt_iterator *, tree, bool, tree, bool, enum bsi_iterator_update); tree gimple_fold_indirect_ref (tree); + tree build_gimple_mem_ref (enum tree_code, tree, tree, tree, alias_set_type); /* In tree-ssa-structalias.c */ bool find_what_p_points_to (tree); Index: trunk/gcc/tree.h =================================================================== *** trunk.orig/gcc/tree.h 2008-02-07 16:11:41.000000000 +0100 --- trunk/gcc/tree.h 2008-02-07 16:22:16.000000000 +0100 *************** struct tree_constructor GTY(()) *** 1650,1655 **** --- 1650,1659 ---- #define TMR_ORIGINAL(NODE) (TREE_OPERAND (TARGET_MEM_REF_CHECK (NODE), 5)) #define TMR_TAG(NODE) (TREE_OPERAND (TARGET_MEM_REF_CHECK (NODE), 6)) + /* MEM_REF and INDIRECT_MEM_REF accessors. */ + #define MEM_REF_ALIAS_SET(NODE) (TREE_INT_CST_LOW (TREE_OPERAND ((NODE), 2))) + #define MEM_REF_ALIGN(NODE) (/* FIXME */) + /* The operands of a BIND_EXPR. */ #define BIND_EXPR_VARS(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 0)) #define BIND_EXPR_BODY(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 1)) Index: trunk/gcc/expr.c =================================================================== *** trunk.orig/gcc/expr.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/expr.c 2008-02-08 18:13:50.000000000 +0100 *************** get_inner_reference (tree exp, HOST_WIDE *** 5994,5999 **** --- 5994,6007 ---- goto done; break; + case MEM_REF: + offset = size_binop (PLUS_EXPR, offset, TREE_OPERAND (exp, 1)); + break; + + /* ??? For INDIRECT_MEM_REF we have an offset, but cannot really + just return operand0 as base object. 
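+ (Operand 0 is the pointer value being dereferenced, not a piece of storage, so it cannot stand in for the accessed object.)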
Return the complete + INDIRECT_MEM_REF as base for now. */ + default: goto done; } *************** handled_component_p (const_tree t) *** 6177,6182 **** --- 6185,6191 ---- case VIEW_CONVERT_EXPR: case REALPART_EXPR: case IMAGPART_EXPR: + case MEM_REF: return 1; default: *************** expand_expr_real_1 (tree exp, rtx target *** 7483,7488 **** --- 7492,7562 ---- return expand_constructor (exp, target, modifier, false); + case INDIRECT_MEM_REF: + /* FIXME. For now expand via pointer arithmetic. */ + if (integer_zerop (TREE_OPERAND (exp, 1))) + subexp0 = TREE_OPERAND (exp, 0); + else + subexp0 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 0)), + TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1)); + return expand_expr (build1 (INDIRECT_REF, TREE_TYPE (exp), subexp0), + target, tmode, modifier); + + case MEM_REF: + /* It happens that the base object is not addressable, try handling + the simple cases here as well. */ + if (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))) + { + /* FIXME. This is all not too great. */ + tree ref; + gcc_assert (host_integerp (TREE_OPERAND (exp, 1), 1)); + ref = build3 (BIT_FIELD_REF, TREE_TYPE (exp), TREE_OPERAND (exp, 0), + TYPE_SIZE (TREE_TYPE (exp)), + size_binop (MULT_EXPR, + fold_convert (bitsizetype, TREE_OPERAND (exp, 1)), + build_int_cst (bitsizetype, BITS_PER_UNIT))); + BIT_FIELD_REF_UNSIGNED (ref) = true; + return expand_expr (ref, target, tmode, modifier); + } + /* FIXME. For now expand via pointer arithmetic. */ + subexp0 = build_fold_addr_expr (TREE_OPERAND (exp, 0)); + if (!integer_zerop (TREE_OPERAND (exp, 1))) + subexp0 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (subexp0), + subexp0, TREE_OPERAND (exp, 1)); + return expand_expr (build1 (INDIRECT_REF, TREE_TYPE (exp), subexp0), + target, tmode, modifier); + + case BIT_FIELD_EXPR: + { + rtx tem, op0; + + /* tem = op0 */ + tem = gen_reg_rtx (tmode); + op0 = expand_expr (TREE_OPERAND (exp, 0), tem, tmode, EXPAND_NORMAL); + if (op0 != tem) + emit_move_insn (tem, op0); + + /* BIT_FIELD_REF = op1 */ + expand_expr (build_gimple_modify_stmt ( + build3 (BIT_FIELD_REF, void_type_node, + make_tree (TREE_TYPE (TREE_OPERAND (exp, 0)), tem), + TREE_OPERAND (exp, 2), TREE_OPERAND (exp, 3)), + TREE_OPERAND (exp, 1)), const0_rtx, tmode, EXPAND_NORMAL); + + emit_move_insn (target, tem); + return target; + } + + case IDX_EXPR: + /* ??? Fancy expansion possible? Expansion from inside + a MEM_REF could be done like expansion of TARGET_MEM_REF. */ + return expand_expr (fold_build2 (PLUS_EXPR, TREE_TYPE (exp), + TREE_OPERAND (exp, 0), + fold_build2 (MULT_EXPR, TREE_TYPE (exp), + TREE_OPERAND (exp, 1), + TREE_OPERAND (exp, 2))), + target, tmode, modifier); + case MISALIGNED_INDIRECT_REF: case ALIGN_INDIRECT_REF: case INDIRECT_REF: Index: trunk/gcc/alias.c =================================================================== *** trunk.orig/gcc/alias.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/alias.c 2008-02-07 16:22:16.000000000 +0100 *************** get_alias_set (tree t) *** 507,512 **** --- 507,516 ---- { tree inner = t; + if (TREE_CODE (t) == MEM_REF + || TREE_CODE (t) == INDIRECT_MEM_REF) + return MEM_REF_ALIAS_SET (t); + /* Remove any nops, then give the language a chance to do something with this tree before we look at it. 
*/ STRIP_NOPS (t); Index: trunk/gcc/tree-dfa.c =================================================================== *** trunk.orig/gcc/tree-dfa.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-dfa.c 2008-02-08 16:06:11.000000000 +0100 *************** get_ref_base_and_extent (tree exp, HOST_ *** 905,910 **** --- 905,923 ---- In the following it will only grow (or become -1). */ maxsize = bitsize; + /* First handle MEM_REF trees; ideally we would walk the SSA + chain to constrain maxsize if the offset is not constant. */ + if (TREE_CODE (exp) == MEM_REF + || TREE_CODE (exp) == INDIRECT_MEM_REF) + { + if (host_integerp (TREE_OPERAND (exp, 1), 0)) + bit_offset = TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT; + else + maxsize = -1; + exp = TREE_OPERAND (exp, 0); + goto done; + } + /* Compute cumulative bit-offset for nested component-refs and array-refs, and find the ultimate containing object. */ while (1) *************** get_ref_base_and_extent (tree exp, HOST_ *** 995,1000 **** --- 1008,1020 ---- /* ??? We probably should give up here and bail out. */ break; + case MEM_REF: + if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) + bit_offset += tree_low_cst (TREE_OPERAND (exp, 1), 0) * BITS_PER_UNIT; + else + /* Variable access. */ + maxsize = -1; + default: goto done; } Index: trunk/gcc/tree-ssa-operands.c =================================================================== *** trunk.orig/gcc/tree-ssa-operands.c 2008-02-06 22:06:50.000000000 +0100 --- trunk/gcc/tree-ssa-operands.c 2008-02-08 14:42:49.000000000 +0100 *************** get_expr_operands (tree stmt, tree *expr *** 2146,2151 **** --- 2146,2211 ---- get_tmr_operands (stmt, expr, flags); return; + case MEM_REF: + case INDIRECT_MEM_REF: + { + tree ref; + HOST_WIDE_INT offset, size, maxsize; + bool none = true; + + if (TREE_THIS_VOLATILE (expr)) + s_ann->has_volatile_ops = true; + + /* This component reference becomes an access to all of the + subvariables it can touch, if we can determine that, but + *NOT* the real one. If we can't determine which fields we + could touch, the recursion will eventually get to a + variable and add *all* of its subvars, or whatever is the + minimum correct subset. */ + ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); + if (code == MEM_REF + && SSA_VAR_P (ref) && get_subvars_for_var (ref)) + { + subvar_t svars = get_subvars_for_var (ref); + unsigned int i; + tree subvar; + + for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i) + { + bool exact; + + if (overlap_subvar (offset, maxsize, subvar, &exact)) + { + int subvar_flags = flags; + none = false; + add_stmt_operand (&subvar, s_ann, subvar_flags); + } + } + + if (!none) + flags |= opf_no_vops; + + if ((DECL_P (ref) && TREE_THIS_VOLATILE (ref)) + || (TREE_CODE (ref) == SSA_NAME + && TREE_THIS_VOLATILE (SSA_NAME_VAR (ref)))) + s_ann->has_volatile_ops = true; + } + else if (code == INDIRECT_MEM_REF) + { + get_indirect_ref_operands (stmt, expr, flags, expr, offset, + maxsize, false); + flags |= opf_no_vops; + } + + /* Even if we found subvars above, we need to make sure we see the + immediate uses of d in s.a[d] in case s.a has subvars, as we + would miss them otherwise.
*/ + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); + + return; + } + case ARRAY_REF: case ARRAY_RANGE_REF: case COMPONENT_REF: *************** get_expr_operands (tree stmt, tree *expr *** 2264,2269 **** --- 2324,2334 ---- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; + case BIT_FIELD_EXPR: + gcc_assert (TREE_CODE (TREE_OPERAND (expr, 2)) == INTEGER_CST + && TREE_CODE (TREE_OPERAND (expr, 3)) == INTEGER_CST); + /* Fallthrough. */ + case TRUTH_AND_EXPR: case TRUTH_OR_EXPR: case TRUTH_XOR_EXPR: *************** get_expr_operands (tree stmt, tree *expr *** 2277,2282 **** --- 2342,2355 ---- return; } + case IDX_EXPR: + { + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); + return; + } + case DOT_PROD_EXPR: case REALIGN_LOAD_EXPR: { Index: trunk/gcc/tree-inline.c =================================================================== *** trunk.orig/gcc/tree-inline.c 2008-01-30 11:05:59.000000000 +0100 --- trunk/gcc/tree-inline.c 2008-02-08 18:29:15.000000000 +0100 *************** copy_body_r (tree *tp, int *walk_subtree *** 680,686 **** } } } ! else if (TREE_CODE (*tp) == INDIRECT_REF) { /* Get rid of *& from inline substitutions that can happen when a pointer argument is an ADDR_EXPR. */ --- 680,687 ---- } } } ! else if (TREE_CODE (*tp) == INDIRECT_REF ! || TREE_CODE (*tp) == INDIRECT_MEM_REF) { /* Get rid of *& from inline substitutions that can happen when a pointer argument is an ADDR_EXPR. */ *************** copy_body_r (tree *tp, int *walk_subtree *** 688,693 **** --- 689,705 ---- tree *n; n = (tree *) pointer_map_contains (id->decl_map, decl); + if (n && TREE_CODE (*n) == ADDR_EXPR) + { + tree old = *tp; + *tp = build_gimple_mem_ref (MEM_REF, TREE_TYPE (*tp), + unshare_expr (TREE_OPERAND (*n, 0)), + TREE_OPERAND (*tp, 1), + MEM_REF_ALIAS_SET (*tp)); + TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old); + *walk_subtrees = 0; + return NULL; + } if (n) { tree new; *************** estimate_num_insns_1 (tree *tp, int *wal *** 2199,2205 **** } /* Assume that constants and references counts nothing. These should be majorized by amount of operations among them we count later ! and are common target of CSE and similar optimizations. */ else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x)) return NULL; --- 2211,2218 ---- } /* Assume that constants and references counts nothing. These should be majorized by amount of operations among them we count later ! and are common target of CSE and similar optimizations. ! ??? This is no longer true with MEM_REF and INDIRECT_MEM_REF. */ else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x)) return NULL; *************** estimate_num_insns_1 (tree *tp, int *wal *** 2408,2413 **** --- 2421,2436 ---- d->count += 1; break; + case IDX_EXPR: + d->count += 2; + break; + + case BIT_FIELD_EXPR: + /* This one is possibly quite expensive, as it requires masking, + shifting and oring. */ + d->count += 3; + break; + case SWITCH_EXPR: /* Take into account cost of the switch + guess 2 conditional jumps for each case label. 
Index: trunk/gcc/tree-eh.c =================================================================== *** trunk.orig/gcc/tree-eh.c 2008-01-22 14:19:09.000000000 +0100 --- trunk/gcc/tree-eh.c 2008-02-08 15:07:18.000000000 +0100 *************** tree_could_trap_p (tree expr) *** 1944,1952 **** --- 1944,1960 ---- return !in_array_bounds_p (expr); + case MEM_REF: + base = TREE_OPERAND (expr, 0); + if (tree_could_trap_p (base)) + return true; + + /* Fallthrough. */ + case INDIRECT_REF: case ALIGN_INDIRECT_REF: case MISALIGNED_INDIRECT_REF: + case INDIRECT_MEM_REF: return !TREE_THIS_NOTRAP (expr); case ASM_EXPR: Index: trunk/gcc/tree-ssa-alias.c =================================================================== *** trunk.orig/gcc/tree-ssa-alias.c 2008-01-29 11:43:26.000000000 +0100 --- trunk/gcc/tree-ssa-alias.c 2008-02-08 15:30:19.000000000 +0100 *************** count_ptr_derefs (tree *tp, int *walk_su *** 1893,1899 **** return NULL_TREE; } ! if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr) count_p->count++; return NULL_TREE; --- 1893,1900 ---- return NULL_TREE; } ! if ((INDIRECT_REF_P (*tp) || TREE_CODE (*tp) == INDIRECT_MEM_REF) ! && TREE_OPERAND (*tp, 0) == count_p->ptr) count_p->count++; return NULL_TREE; *************** find_used_portions (tree *tp, int *walk_ *** 3984,3989 **** --- 3985,3991 ---- case REALPART_EXPR: case IMAGPART_EXPR: case COMPONENT_REF: + case MEM_REF: case ARRAY_REF: { HOST_WIDE_INT bitsize; Index: trunk/gcc/tree-ssa-loop-im.c =================================================================== *** trunk.orig/gcc/tree-ssa-loop-im.c 2008-01-21 17:20:38.000000000 +0100 --- trunk/gcc/tree-ssa-loop-im.c 2008-02-08 14:58:02.000000000 +0100 *************** for_each_index (tree *addr_p, bool (*cbc *** 196,201 **** --- 196,213 ---- return false; break; + case MEM_REF: + nxt = &TREE_OPERAND (*addr_p, 0); + if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data)) + return false; + break; + + case INDIRECT_MEM_REF: + nxt = &TREE_OPERAND (*addr_p, 0); + if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data)) + return false; + return cbck (*addr_p, nxt, data); + case VAR_DECL: case PARM_DECL: case STRING_CST: Index: trunk/gcc/tree-ssa-sccvn.c =================================================================== *** trunk.orig/gcc/tree-ssa-sccvn.c 2008-02-08 14:50:31.000000000 +0100 --- trunk/gcc/tree-ssa-sccvn.c 2008-02-08 14:50:55.000000000 +0100 *************** copy_reference_ops_from_ref (tree ref, V *** 547,552 **** --- 547,557 ---- temp.op0 = TREE_OPERAND (ref, 1); temp.op1 = TREE_OPERAND (ref, 3); break; + case MEM_REF: + case INDIRECT_MEM_REF: + /* Record offset as operand. */ + temp.op0 = TREE_OPERAND (ref, 1); + break; case STRING_CST: case INTEGER_CST: case COMPLEX_CST: Index: trunk/gcc/tree-ssa-sink.c =================================================================== *** trunk.orig/gcc/tree-ssa-sink.c 2008-01-14 16:01:39.000000000 +0100 --- trunk/gcc/tree-ssa-sink.c 2008-02-08 16:07:15.000000000 +0100 *************** is_hidden_global_store (tree stmt) *** 188,194 **** return true; } ! else if (INDIRECT_REF_P (lhs)) { tree ptr = TREE_OPERAND (lhs, 0); struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr); --- 188,195 ---- return true; } ! else if (INDIRECT_REF_P (lhs) ! 
|| TREE_CODE (lhs) == INDIRECT_MEM_REF) { tree ptr = TREE_OPERAND (lhs, 0); struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr); Index: trunk/gcc/tree-ssa-structalias.c =================================================================== *** trunk.orig/gcc/tree-ssa-structalias.c 2008-01-29 11:43:26.000000000 +0100 --- trunk/gcc/tree-ssa-structalias.c 2008-02-08 15:33:34.000000000 +0100 *************** get_constraint_for (tree t, VEC (ce_s, h *** 2905,2916 **** --- 2905,2924 ---- { switch (TREE_CODE (t)) { + case INDIRECT_MEM_REF: + { + get_constraint_for (TREE_OPERAND (t, 0), results); + /* ??? offset? */ + do_deref (results); + return; + } case INDIRECT_REF: { get_constraint_for (TREE_OPERAND (t, 0), results); do_deref (results); return; } + case MEM_REF: case ARRAY_REF: case ARRAY_RANGE_REF: case COMPONENT_REF: Index: trunk/gcc/fold-const.c =================================================================== *** trunk.orig/gcc/fold-const.c 2008-02-08 18:21:39.000000000 +0100 --- trunk/gcc/fold-const.c 2008-02-08 18:21:51.000000000 +0100 *************** build_fold_addr_expr_with_type_1 (tree t *** 7866,7871 **** --- 7866,7876 ---- if (TREE_TYPE (t) != ptrtype) t = build1 (NOP_EXPR, ptrtype, t); } + else if (TREE_CODE (t) == INDIRECT_MEM_REF) + { + return fold_build2 (POINTER_PLUS_EXPR, ptrtype, + TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)); + } else if (!in_fold) { tree base = t; Index: trunk/gcc/tree-ssa-ccp.c =================================================================== *** trunk.orig/gcc/tree-ssa-ccp.c 2008-01-13 17:28:55.000000000 +0100 --- trunk/gcc/tree-ssa-ccp.c 2008-02-08 18:06:26.000000000 +0100 *************** fold_stmt_r (tree *expr_p, int *walk_sub *** 2190,2195 **** --- 2190,2208 ---- t = maybe_fold_tmr (expr); break; + case INDIRECT_MEM_REF: + if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) + { + t = build_gimple_mem_ref (MEM_REF, TREE_TYPE (expr), + TREE_OPERAND (TREE_OPERAND (expr, 0), 0), + TREE_OPERAND (expr, 1), + MEM_REF_ALIAS_SET (expr)); + *walk_subtrees = 0; + } + else + t = NULL_TREE; + break; + case COND_EXPR: if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0))) {