rpms/gcc/devel gcc43-fortran-debug10.patch, NONE, 1.1 gcc43-fortran-debug11.patch, NONE, 1.1 gcc43-pr37248.patch, NONE, 1.1 .cvsignore, 1.241, 1.242 gcc43.spec, 1.39, 1.40 sources, 1.244, 1.245

Jakub Jelinek jakub at fedoraproject.org
Fri Aug 29 11:16:37 UTC 2008


Author: jakub

Update of /cvs/pkgs/rpms/gcc/devel
In directory cvs1.fedora.phx.redhat.com:/tmp/cvs-serv425

Modified Files:
	.cvsignore gcc43.spec sources 
Added Files:
	gcc43-fortran-debug10.patch gcc43-fortran-debug11.patch 
	gcc43-pr37248.patch 
Log Message:
4.3.2-1

gcc43-fortran-debug10.patch:

--- NEW FILE gcc43-fortran-debug10.patch ---
2008-08-26  Jakub Jelinek  <jakub at redhat.com>

	* dwarf2out.c (gen_const_die): New function.
	(size_of_die, value_format, output_die): Output larger
	dw_val_class_vec using DW_FORM_block2 or DW_FORM_block4.
	(native_encode_initializer): New function.
	(tree_add_const_value_attribute): Call it.
	(gen_decl_die, dwarf2out_decl): Handle CONST_DECLs if is_fortran ().

	* trans-decl.c (check_constant_initializer,
	gfc_emit_parameter_debug_info): New functions.
	(gfc_generate_module_vars, gfc_generate_function_code): Emit
	PARAMETERs and unreferenced variables with initializers into
	debug info.

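The value_format change above is easiest to see in isolation: once a DW_AT_const_value block grows past 255 bytes its length no longer fits the one-byte prefix of DW_FORM_block1, so the attribute has to step up to DW_FORM_block2 or DW_FORM_block4. The following is a minimal standalone sketch, not GCC code; the DW_FORM_* numbers are the DWARF-defined encodings and constant_size is a simplified stand-in for the dwarf2out.c helper of the same name.

/* Sketch of the DW_FORM selection added to value_format ()/size_of_die ().
   Not GCC code; the enum values are the DWARF form encodings.  */
#include <stdio.h>

enum dw_form { DW_FORM_block2 = 0x03, DW_FORM_block4 = 0x04, DW_FORM_block1 = 0x0a };

/* Simplified stand-in for dwarf2out.c's constant_size ().  */
static unsigned
constant_size (unsigned long value)
{
  if (value <= 0xff)
    return 1;
  if (value <= 0xffff)
    return 2;
  return 4;
}

static enum dw_form
block_form (unsigned long len)
{
  switch (constant_size (len))
    {
    case 1: return DW_FORM_block1;
    case 2: return DW_FORM_block2;
    default: return DW_FORM_block4;
    }
}

int
main (void)
{
  unsigned long sizes[] = { 16, 200, 4096, 100000 };
  unsigned i;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    printf ("%lu-byte constant -> form 0x%02x, %u-byte length prefix\n",
            sizes[i], (unsigned) block_form (sizes[i]), constant_size (sizes[i]));
  return 0;
}
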
--- gcc/fortran/trans-decl.c.jj	2008-08-26 21:43:36.000000000 +0200
+++ gcc/fortran/trans-decl.c	2008-08-26 22:54:24.000000000 +0200
@@ -3232,6 +3232,135 @@ gfc_trans_use_stmts (gfc_namespace * ns)
 }
 
 
+/* Return true if expr is a constant initializer that gfc_conv_initializer
+   will handle.  */
+
+static bool
+check_constant_initializer (gfc_expr *expr, gfc_typespec *ts, bool array,
+			    bool pointer)
+{
+  gfc_constructor *c;
+  gfc_component *cm;
+
+  if (pointer)
+    return true;
+  else if (array)
+    {
+      if (expr->expr_type == EXPR_CONSTANT || expr->expr_type == EXPR_NULL)
+	return true;
+      else if (expr->expr_type == EXPR_STRUCTURE)
+	return check_constant_initializer (expr, ts, false, false);
+      else if (expr->expr_type != EXPR_ARRAY)
+	return false;
+      for (c = expr->value.constructor; c; c = c->next)
+	{
+	  if (c->iterator)
+	    return false;
+	  if (c->expr->expr_type == EXPR_STRUCTURE)
+	    {
+	      if (!check_constant_initializer (c->expr, ts, false, false))
+		return false;
+	    }
+	  else if (c->expr->expr_type != EXPR_CONSTANT)
+	    return false;
+	}
+      return true;
+    }
+  else switch (ts->type)
+    {
+    case BT_DERIVED:
+      if (expr->expr_type != EXPR_STRUCTURE)
+	return false;
+      cm = expr->ts.derived->components;
+      for (c = expr->value.constructor; c; c = c->next, cm = cm->next)
+	{
+	  if (!c->expr || cm->allocatable)
+	    continue;
+	  if (!check_constant_initializer (c->expr, &cm->ts,
+					   cm->dimension,
+					   cm->pointer))
+	    return false;
+	}
+      return true;
+    default:
+      return expr->expr_type == EXPR_CONSTANT;
+    }
+}
+
+/* Emit debug info for parameters and unreferenced variables with
+   initializers.  */
+
+static void
+gfc_emit_parameter_debug_info (gfc_symbol *sym)
+{
+  tree decl;
+
+  if (sym->attr.flavor != FL_PARAMETER
+      && (sym->attr.flavor != FL_VARIABLE || sym->attr.referenced))
+    return;
+
+  if (sym->backend_decl != NULL
+      || sym->value == NULL
+      || sym->attr.use_assoc
+      || sym->attr.dummy
+      || sym->attr.result
+      || sym->attr.function
+      || sym->attr.intrinsic
+      || sym->attr.pointer
+      || sym->attr.allocatable
+      || sym->attr.cray_pointee
+      || sym->attr.threadprivate
+      || sym->attr.is_bind_c
+      || sym->attr.subref_array_pointer
+      || sym->attr.assign)
+    return;
+
+  if (sym->ts.type == BT_CHARACTER)
+    {
+      gfc_conv_const_charlen (sym->ts.cl);
+      if (sym->ts.cl->backend_decl == NULL
+	  || TREE_CODE (sym->ts.cl->backend_decl) != INTEGER_CST)
+	return;
+    }
+  else if (sym->ts.type == BT_DERIVED && sym->ts.derived->attr.alloc_comp)
+    return;
+
+  if (sym->as)
+    {
+      int n;
+
+      if (sym->as->type != AS_EXPLICIT)
+	return;
+      for (n = 0; n < sym->as->rank; n++)
+	if (sym->as->lower[n]->expr_type != EXPR_CONSTANT
+	    || sym->as->upper[n] == NULL
+	    || sym->as->upper[n]->expr_type != EXPR_CONSTANT)
+	  return;
+    }
+
+  if (!check_constant_initializer (sym->value, &sym->ts,
+				   sym->attr.dimension, false))
+    return;
+
+  /* Create the decl for the variable or constant.  */
+  decl = build_decl (sym->attr.flavor == FL_PARAMETER ? CONST_DECL : VAR_DECL,
+		     gfc_sym_identifier (sym), gfc_sym_type (sym));
+  if (sym->attr.flavor == FL_PARAMETER)
+    TREE_READONLY (decl) = 1;
+  gfc_set_decl_location (decl, &sym->declared_at);
+  if (sym->attr.dimension)
+    GFC_DECL_PACKED_ARRAY (decl) = 1;
+  DECL_CONTEXT (decl) = sym->ns->proc_name->backend_decl;
+  TREE_STATIC (decl) = 1;
+  TREE_USED (decl) = 1;
+  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
+    TREE_PUBLIC (decl) = 1;
+  DECL_INITIAL (decl)
+    = gfc_conv_initializer (sym->value, &sym->ts, TREE_TYPE (decl),
+			    sym->attr.dimension, 0);
+  debug_hooks->global_decl (decl);
+}
+
 /* Generate all the required code for module variables.  */
 
 void
@@ -3252,6 +3381,7 @@ gfc_generate_module_vars (gfc_namespace 
   cur_module = NULL;
 
   gfc_trans_use_stmts (ns);
+  gfc_traverse_ns (ns, gfc_emit_parameter_debug_info);
 }
 
 
@@ -3787,6 +3917,7 @@ gfc_generate_function_code (gfc_namespac
     }
 
   gfc_trans_use_stmts (ns);
+  gfc_traverse_ns (ns, gfc_emit_parameter_debug_info);
 }
 
 void
--- gcc/dwarf2out.c.jj	2008-08-26 21:43:31.000000000 +0200
+++ gcc/dwarf2out.c	2008-08-26 21:43:42.000000000 +0200
@@ -5093,6 +5093,7 @@ static void gen_unspecified_parameters_d
 static void gen_formal_types_die (tree, dw_die_ref);
 static void gen_subprogram_die (tree, dw_die_ref);
 static void gen_variable_die (tree, dw_die_ref);
+static void gen_const_die (tree, dw_die_ref);
 static void gen_label_die (tree, dw_die_ref);
 static void gen_lexical_block_die (tree, dw_die_ref, int);
 static void gen_inlined_subroutine_die (tree, dw_die_ref, int);
@@ -7564,8 +7565,10 @@ size_of_die (dw_die_ref die)
 	  size += 1 + 2*HOST_BITS_PER_LONG/HOST_BITS_PER_CHAR; /* block */
 	  break;
 	case dw_val_class_vec:
-	  size += 1 + (a->dw_attr_val.v.val_vec.length
-		       * a->dw_attr_val.v.val_vec.elt_size); /* block */
+	  size += constant_size (a->dw_attr_val.v.val_vec.length
+				 * a->dw_attr_val.v.val_vec.elt_size)
+		  + a->dw_attr_val.v.val_vec.length
+		    * a->dw_attr_val.v.val_vec.elt_size; /* block */
 	  break;
 	case dw_val_class_flag:
 	  size += 1;
@@ -7764,7 +7767,18 @@ value_format (dw_attr_ref a)
     case dw_val_class_long_long:
       return DW_FORM_block1;
     case dw_val_class_vec:
-      return DW_FORM_block1;
+      switch (constant_size (a->dw_attr_val.v.val_vec.length
+			     * a->dw_attr_val.v.val_vec.elt_size))
+	{
+	case 1:
+	  return DW_FORM_block1;
+	case 2:
+	  return DW_FORM_block2;
+	case 4:
+	  return DW_FORM_block4;
+	default:
+	  gcc_unreachable ();
+	}
     case dw_val_class_flag:
       return DW_FORM_flag;
     case dw_val_class_die_ref:
@@ -8056,7 +8070,8 @@ output_die (dw_die_ref die)
 	    unsigned int i;
 	    unsigned char *p;
 
-	    dw2_asm_output_data (1, len * elt_size, "%s", name);
+	    dw2_asm_output_data (constant_size (len * elt_size),
+				 len * elt_size, "%s", name);
 	    if (elt_size > sizeof (HOST_WIDE_INT))
 	      {
 		elt_size /= 2;
@@ -11762,6 +11777,150 @@ add_location_or_const_value_attribute (d
   tree_add_const_value_attribute (die, decl);
 }
 
+/* Helper function for tree_add_const_value_attribute.  Natively encode
+   initializer INIT into an array.  Return true if successful.  */
+
+static bool
+native_encode_initializer (tree init, unsigned char *array, int size)
+{
+  tree type;
+
+  if (init == NULL_TREE)
+    return false;
+
+  STRIP_NOPS (init);
+  switch (TREE_CODE (init))
+    {
+    case STRING_CST:
+      type = TREE_TYPE (init);
+      if (TREE_CODE (type) == ARRAY_TYPE)
+	{
+	  tree enttype = TREE_TYPE (type);
+	  enum machine_mode mode = TYPE_MODE (enttype);
+
+	  if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
+	    return false;
+	  if (int_size_in_bytes (type) != size)
+	    return false;
+	  if (size > TREE_STRING_LENGTH (init))
+	    {
+	      memcpy (array, TREE_STRING_POINTER (init),
+		      TREE_STRING_LENGTH (init));
+	      memset (array + TREE_STRING_LENGTH (init),
+		      '\0', size - TREE_STRING_LENGTH (init));
+	    }
+	  else
+	    memcpy (array, TREE_STRING_POINTER (init), size);
+	  return true;
+	}
+      return false;
+    case CONSTRUCTOR:
+      type = TREE_TYPE (init);
+      if (int_size_in_bytes (type) != size)
+	return false;
+      if (TREE_CODE (type) == ARRAY_TYPE)
+	{
+	  HOST_WIDE_INT min_index;
+	  unsigned HOST_WIDE_INT cnt;
+	  int curpos = 0, fieldsize;
+	  constructor_elt *ce;
+
+	  if (TYPE_DOMAIN (type) == NULL_TREE
+	      || !host_integerp (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0))
+	    return false;
+
+	  fieldsize = int_size_in_bytes (TREE_TYPE (type));
+	  if (fieldsize <= 0)
+	    return false;
+
+	  min_index = tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0);
+	  memset (array, '\0', size);
+	  for (cnt = 0;
+	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (init), cnt, ce);
+	       cnt++)
+	    {
+	      tree val = ce->value;
+	      tree index = ce->index;
+	      int pos = curpos;
+	      if (index && TREE_CODE (index) == RANGE_EXPR)
+		pos = (tree_low_cst (TREE_OPERAND (index, 0), 0) - min_index)
+		      * fieldsize;
+	      else if (index)
+		pos = tree_low_cst (index, 0) * fieldsize;
+
+	      if (val)
+		{
+		  STRIP_NOPS (val);
+		  if (!native_encode_initializer (val, array + pos, fieldsize))
+		    return false;
+		}
+	      curpos = pos + fieldsize;
+	      if (index && TREE_CODE (index) == RANGE_EXPR)
+		{
+		  int count = tree_low_cst (TREE_OPERAND (index, 1), 0)
+			      - tree_low_cst (TREE_OPERAND (index, 0), 0);
+		  while (count-- > 0)
+		    {
+		      if (val)
+			memcpy (array + curpos, array + pos, fieldsize);
+		      curpos += fieldsize;
+		    }
+		}
+	      gcc_assert (curpos <= size);
+	    }
+	  return true;
+	}
+      else if (TREE_CODE (type) == RECORD_TYPE
+	       || TREE_CODE (type) == UNION_TYPE)
+	{
+	  tree field = NULL_TREE;
+	  unsigned HOST_WIDE_INT cnt;
+	  constructor_elt *ce;
+
+	  if (int_size_in_bytes (type) != size)
+	    return false;
+
+	  if (TREE_CODE (type) == RECORD_TYPE)
+	    field = TYPE_FIELDS (type);
+
+	  for (cnt = 0;
+	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (init), cnt, ce);
+	       cnt++, field = field ? TREE_CHAIN (field) : 0)
+	    {
+	      tree val = ce->value;
+	      int pos, fieldsize;
+
+	      if (ce->index != 0)
+		field = ce->index;
+
+	      if (val)
+		STRIP_NOPS (val);
+
+	      if (field == NULL_TREE || DECL_BIT_FIELD (field))
+		return false;
+
+	      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
+		  && TYPE_DOMAIN (TREE_TYPE (field))
+		  && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
+		return false;
+	      else if (DECL_SIZE_UNIT (field) == NULL_TREE
+		       || !host_integerp (DECL_SIZE_UNIT (field), 0))
+		return false;
+	      fieldsize = tree_low_cst (DECL_SIZE_UNIT (field), 0);
+	      pos = int_byte_position (field);
+	      gcc_assert (pos + fieldsize <= size);
+	      if (val
+		  && !native_encode_initializer (val, array + pos, fieldsize))
+		return false;
+	    }
+	  return true;
+	}
+      return false;
+    default:
+      return native_encode_expr (init, array, size) == size;
+    }
+}
+
 /* If we don't have a copy of this variable in memory for some reason (such
    as a C++ member constant that doesn't have an out-of-line definition),
    we should tell the debugger about the constant value.  */
@@ -11781,6 +11940,18 @@ tree_add_const_value_attribute (dw_die_r
   rtl = rtl_for_decl_init (init, type);
   if (rtl)
     add_const_value_attribute (var_die, rtl);
+  /* If the host and target are sane, try harder.  */
+  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8)
+    {
+      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
+      if (size > 0 && (int) size == size)
+	{
+	  unsigned char *array = GGC_CNEWVEC (unsigned char, size);
+
+	  if (native_encode_initializer (init, array, size))
+	    add_AT_vec (var_die, DW_AT_const_value, size, 1, array);
+	}
+    }
 }
 
 /* Convert the CFI instructions for the current function into a
@@ -13743,6 +13914,24 @@ gen_variable_die (tree decl, dw_die_ref 
     tree_add_const_value_attribute (var_die, decl);
 }
 
+/* Generate a DIE to represent a named constant.  */
+
+static void
+gen_const_die (tree decl, dw_die_ref context_die)
+{
+  dw_die_ref const_die;
+  tree type = TREE_TYPE (decl);
+
+  const_die = new_die (DW_TAG_constant, context_die, decl);
+  add_name_and_src_coords_attributes (const_die, decl);
+  add_type_attribute (const_die, type, 1, 0, context_die);
+  if (TREE_PUBLIC (decl))
+    add_AT_flag (const_die, DW_AT_external, 1);
+  if (DECL_ARTIFICIAL (decl))
+    add_AT_flag (const_die, DW_AT_artificial, 1);
+  tree_add_const_value_attribute (const_die, decl);
+}
+
 /* Generate a DIE to represent a label identifier.  */
 
 static void
@@ -14883,8 +15072,20 @@ gen_decl_die (tree decl, dw_die_ref cont
       break;
 
     case CONST_DECL:
-      /* The individual enumerators of an enum type get output when we output
-	 the Dwarf representation of the relevant enum type itself.  */
+      if (!is_fortran ())
+	{
+	  /* The individual enumerators of an enum type get output when we output
+	     the Dwarf representation of the relevant enum type itself.  */
+	  break;
+	}
+
+      /* Emit its type.  */
+      gen_type_die (TREE_TYPE (decl), context_die);
+
+      /* And its containing namespace.  */
+      context_die = declare_in_namespace (decl, context_die);
+
+      gen_const_die (decl, context_die);
       break;
 
     case FUNCTION_DECL:
@@ -15229,6 +15430,15 @@ dwarf2out_decl (tree decl)
 	return;
       break;
 
+    case CONST_DECL:
+      if (debug_info_level <= DINFO_LEVEL_TERSE)
+	return;
+      if (!is_fortran ())
+	return;
+      if (TREE_STATIC (decl) && decl_function_context (decl))
+	context_die = lookup_decl_die (DECL_CONTEXT (decl));
+      break;
+
     case NAMESPACE_DECL:
       if (debug_info_level <= DINFO_LEVEL_TERSE)
 	return;

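As a rough illustration of what native_encode_initializer above does for an array CONSTRUCTOR, here is a toy, self-contained C sketch (not GCC internals): the initializer is flattened into a raw byte image, ranged entries (the RANGE_EXPR case) are replicated element by element, and that image is what would end up in DW_AT_const_value. The struct elt layout and the Fortran line in the comment are purely illustrative assumptions.

/* Toy model of flattening an array initializer into a byte image,
   the way native_encode_initializer does; ranges are expanded one
   element at a time.  Not GCC code.  */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct elt { int first; int last; int32_t value; };  /* index (or range) + value */

static int
encode_array (const struct elt *elts, int n, unsigned char *array, int size)
{
  int fieldsize = sizeof (int32_t);
  int i, idx;

  memset (array, 0, size);
  for (i = 0; i < n; i++)
    for (idx = elts[i].first; idx <= elts[i].last; idx++)
      {
        int pos = idx * fieldsize;
        if (pos < 0 || pos + fieldsize > size)
          return 0;  /* initializer does not fit the image */
        memcpy (array + pos, &elts[i].value, fieldsize);
      }
  return 1;
}

int
main (void)
{
  /* Roughly:  integer, parameter :: a(0:5) = (/ 7, 7, 7, 9, 0, 0 /)  */
  struct elt init[] = { { 0, 2, 7 }, { 3, 3, 9 } };
  unsigned char image[6 * sizeof (int32_t)];
  unsigned i;

  if (encode_array (init, 2, image, sizeof image))
    {
      for (i = 0; i < sizeof image; i++)
        printf ("%02x%s", image[i], i % 4 == 3 ? " " : "");
      printf ("\n");
    }
  return 0;
}
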
gcc43-fortran-debug11.patch:

--- NEW FILE gcc43-fortran-debug11.patch ---
2008-08-28  Jakub Jelinek  <jakub at redhat.com>

	* dwarf2out.c (descr_info_loc): Handle VAR_DECL.

	* trans.h (struct lang_type): Add span.
	(GFC_TYPE_ARRAY_SPAN): Define.
	* trans-decl.c (gfc_get_symbol_decl): For subref array pointers,
	copy TREE_STATIC from decl to span instead of setting it
	unconditionally, set DECL_ARTIFICIAL, fix type of initializer
	and set GFC_TYPE_ARRAY_SPAN on decl's type.
	* trans-types.c (gfc_get_array_descr_info): If
	GFC_TYPE_ARRAY_SPAN is non-NULL, use it as element size.

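A short sketch of why the new span field matters follows; this is hypothetical illustration code, not GCC or debugger source. For a Fortran pointer that targets one component of a derived-type array (a subref array pointer), consecutive target elements sit a whole structure apart, so the element size a consumer of the array descriptor should use is the span, i.e. the byte stride, rather than the size of the component type. That stride is what gfc_get_array_descr_info now reports when GFC_TYPE_ARRAY_SPAN is set.

/* Illustration only: walking  p => a(:)%y  by hand.  The stride between
   consecutive pointer targets is sizeof (struct rec), not sizeof (double),
   which is what the span decl records.  */
#include <stdio.h>
#include <stddef.h>

struct rec { double x; double y; };     /* stand-in for a derived type */

int
main (void)
{
  struct rec a[4] = { { 1, 10 }, { 2, 20 }, { 3, 30 }, { 4, 40 } };
  char *base = (char *) &a[0].y;        /* data pointer of  p => a(:)%y  */
  size_t span = sizeof (struct rec);    /* byte stride between p(i) and p(i+1) */
  int i;

  for (i = 0; i < 4; i++)
    printf ("p(%d) = %g\n", i + 1, *(double *) (base + (size_t) i * span));
  return 0;
}
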
--- gcc/fortran/trans.h.jj	2008-08-26 21:43:04.000000000 +0200
+++ gcc/fortran/trans.h	2008-08-28 09:58:01.000000000 +0200
@@ -605,6 +605,7 @@ struct lang_type		GTY(())
   tree offset;
   tree dtype;
   tree dataptr_type;
+  tree span;
 };
 
 struct lang_decl		GTY(())
@@ -657,6 +658,7 @@ struct lang_decl		GTY(())
 #define GFC_TYPE_ARRAY_DTYPE(node) (TYPE_LANG_SPECIFIC(node)->dtype)
 #define GFC_TYPE_ARRAY_DATAPTR_TYPE(node) \
   (TYPE_LANG_SPECIFIC(node)->dataptr_type)
+#define GFC_TYPE_ARRAY_SPAN(node) (TYPE_LANG_SPECIFIC(node)->span)
 
 /* Build an expression with void type.  */
 #define build1_v(code, arg) build1(code, void_type_node, arg)
--- gcc/fortran/trans-decl.c.jj	2008-08-26 22:54:24.000000000 +0200
+++ gcc/fortran/trans-decl.c	2008-08-28 10:54:28.000000000 +0200
@@ -1105,10 +1105,12 @@ gfc_get_symbol_decl (gfc_symbol * sym)
       span = build_decl (VAR_DECL, create_tmp_var_name ("span"),
 			 gfc_array_index_type);
       gfc_finish_var_decl (span, sym);
-      TREE_STATIC (span) = 1;
-      DECL_INITIAL (span) = build_int_cst (NULL_TREE, 0);
+      TREE_STATIC (span) = TREE_STATIC (decl);
+      DECL_ARTIFICIAL (span) = 1;
+      DECL_INITIAL (span) = build_int_cst (gfc_array_index_type, 0);
 
       GFC_DECL_SPAN (decl) = span;
+      GFC_TYPE_ARRAY_SPAN (TREE_TYPE (decl)) = span;
     }
 
   sym->backend_decl = decl;
--- gcc/fortran/trans-types.c.jj	2008-08-26 21:43:04.000000000 +0200
+++ gcc/fortran/trans-types.c	2008-08-28 10:23:39.000000000 +0200
@@ -2289,7 +2289,10 @@ gfc_get_array_descr_info (const_tree typ
   else
     info->base_decl = base_decl = build_decl (VAR_DECL, NULL_TREE, ptype);
 
-  elem_size = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (etype));
+  if (GFC_TYPE_ARRAY_SPAN (type))
+    elem_size = GFC_TYPE_ARRAY_SPAN (type);
+  else
+    elem_size = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (etype));
   field = TYPE_FIELDS (TYPE_MAIN_VARIANT (type));
   data_off = byte_position (field);
   field = TREE_CHAIN (field);
--- gcc/dwarf2out.c.jj	2008-08-26 21:43:42.000000000 +0200
+++ gcc/dwarf2out.c	2008-08-28 10:35:38.000000000 +0200
@@ -12232,6 +12232,8 @@ descr_info_loc (tree val, tree base_decl
     case NOP_EXPR:
     case CONVERT_EXPR:
       return descr_info_loc (TREE_OPERAND (val, 0), base_decl);
+    case VAR_DECL:
+      return loc_descriptor_from_tree_1 (val, 0);
     case INTEGER_CST:
       if (host_integerp (val, 0))
 	return int_loc_descriptor (tree_low_cst (val, 0));

gcc43-pr37248.patch:

--- NEW FILE gcc43-pr37248.patch ---
2008-08-29  Jakub Jelinek  <jakub at redhat.com>

	PR middle-end/37248
	PR middle-end/36449
	* fold-const.c (make_bit_field_ref): Change bitpos and bitsize
	arguments to HOST_WIDE_INT.
	(fold_truthop): Change first_bit and end_bit to HOST_WIDE_INT.

	* g++.dg/opt/pr36449.C: New test.

2008-08-29  Jakub Jelinek  <jakub at redhat.com>

	Revert:
	2008-06-11  Richard Guenther  <rguenther at suse.de>
	PR middle-end/36449
	* fold-const.c (fold_truthop): Remove code generating
	BIT_FIELD_REFs of structure bases.
	(fold_binary): Likewise.
	(make_bit_field_ref): Remove.
	(optimize_bit_field_compare): Remove.
	(all_ones_mask_p): Remove.

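The code being restored merges comparisons of adjacent fields into a single wider load and compare; the g++.dg/opt/pr36449.C test added below exercises exactly that on two adjacent 16-bit fields. The following conceptual sketch, not GCC output, shows the before and after shapes by hand. It assumes the two shorts are laid out contiguously with no padding, which holds on the usual ABIs, and uses memcpy so the wide access stays well defined.

/* Conceptual before/after of the fold_truthop transformation.  */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct pair { short a; short b; };      /* two adjacent 16-bit fields */

static int
cmp_separate (const struct pair *p, const struct pair *q)
{
  return p->a == q->a && p->b == q->b;  /* two narrow compares */
}

static int
cmp_merged (const struct pair *p, const struct pair *q)
{
  uint32_t x, y;                        /* one 32-bit compare over both fields */

  memcpy (&x, p, sizeof x);
  memcpy (&y, q, sizeof y);
  return x == y;
}

int
main (void)
{
  struct pair p = { 36, 38 }, q = { 36, 38 };

  printf ("%d %d\n", cmp_separate (&p, &q), cmp_merged (&p, &q));
  return 0;
}
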
--- gcc/fold-const.c	(revision 136662)
+++ gcc/fold-const.c	(revision 136661)
@@ -109,9 +109,12 @@ static int twoval_comparison_p (tree, tr
 static tree eval_subst (tree, tree, tree, tree, tree);
 static tree pedantic_omit_one_operand (tree, tree, tree);
 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
+static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
+static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
 				    enum machine_mode *, int *, int *,
 				    tree *, tree *);
+static int all_ones_mask_p (const_tree, int);
 static tree sign_bit_p (tree, const_tree);
 static int simple_operand_p (const_tree);
 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
@@ -3848,6 +3851,202 @@ distribute_real_division (enum tree_code
   return NULL_TREE;
 }
 
+/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
+   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
+
+static tree
+make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
+		    HOST_WIDE_INT bitpos, int unsignedp)
+{
+  tree result;
+
+  if (bitpos == 0)
+    {
+      tree size = TYPE_SIZE (TREE_TYPE (inner));
+      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
+	   || POINTER_TYPE_P (TREE_TYPE (inner)))
+	  && host_integerp (size, 0) 
+	  && tree_low_cst (size, 0) == bitsize)
+	return fold_convert (type, inner);
+    }
+
+  result = build3 (BIT_FIELD_REF, type, inner,
+		   size_int (bitsize), bitsize_int (bitpos));
+
+  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
+
+  return result;
+}
+
+/* Optimize a bit-field compare.
+
+   There are two cases:  First is a compare against a constant and the
+   second is a comparison of two items where the fields are at the same
+   bit position relative to the start of a chunk (byte, halfword, word)
+   large enough to contain it.  In these cases we can avoid the shift
+   implicit in bitfield extractions.
+
+   For constants, we emit a compare of the shifted constant with the
+   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
+   compared.  For two fields at the same position, we do the ANDs with the
+   similar mask and compare the result of the ANDs.
+
+   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
+   COMPARE_TYPE is the type of the comparison, and LHS and RHS
+   are the left and right operands of the comparison, respectively.
+
+   If the optimization described above can be done, we return the resulting
+   tree.  Otherwise we return zero.  */
+
+static tree
+optimize_bit_field_compare (enum tree_code code, tree compare_type,
+			    tree lhs, tree rhs)
+{
+  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
+  tree type = TREE_TYPE (lhs);
+  tree signed_type, unsigned_type;
+  int const_p = TREE_CODE (rhs) == INTEGER_CST;
+  enum machine_mode lmode, rmode, nmode;
+  int lunsignedp, runsignedp;
+  int lvolatilep = 0, rvolatilep = 0;
+  tree linner, rinner = NULL_TREE;
+  tree mask;
+  tree offset;
+
+  /* Get all the information about the extractions being done.  If the bit size
+     if the same as the size of the underlying object, we aren't doing an
+     extraction at all and so can do nothing.  We also don't want to
+     do anything if the inner expression is a PLACEHOLDER_EXPR since we
+     then will no longer be able to replace it.  */
+  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
+				&lunsignedp, &lvolatilep, false);
+  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
+      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
+    return 0;
+
+ if (!const_p)
+   {
+     /* If this is not a constant, we can only do something if bit positions,
+	sizes, and signedness are the same.  */
+     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
+				   &runsignedp, &rvolatilep, false);
+
+     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
+	 || lunsignedp != runsignedp || offset != 0
+	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
+       return 0;
+   }
+
+  /* See if we can find a mode to refer to this field.  We should be able to,
+     but fail if we can't.  */
+  nmode = get_best_mode (lbitsize, lbitpos,
+			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+				TYPE_ALIGN (TREE_TYPE (rinner))),
+			 word_mode, lvolatilep || rvolatilep);
+  if (nmode == VOIDmode)
+    return 0;
+
+  /* Set signed and unsigned types of the precision of this mode for the
+     shifts below.  */
+  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
+  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
+
+  /* Compute the bit position and size for the new reference and our offset
+     within it. If the new reference is the same size as the original, we
+     won't optimize anything, so return zero.  */
+  nbitsize = GET_MODE_BITSIZE (nmode);
+  nbitpos = lbitpos & ~ (nbitsize - 1);
+  lbitpos -= nbitpos;
+  if (nbitsize == lbitsize)
+    return 0;
+
+  if (BYTES_BIG_ENDIAN)
+    lbitpos = nbitsize - lbitsize - lbitpos;
+
+  /* Make the mask to be used against the extracted field.  */
+  mask = build_int_cst_type (unsigned_type, -1);
+  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
+  mask = const_binop (RSHIFT_EXPR, mask,
+		      size_int (nbitsize - lbitsize - lbitpos), 0);
+
+  if (! const_p)
+    /* If not comparing with constant, just rework the comparison
+       and return.  */
+    return fold_build2 (code, compare_type,
+			fold_build2 (BIT_AND_EXPR, unsigned_type,
+				     make_bit_field_ref (linner,
+							 unsigned_type,
+							 nbitsize, nbitpos,
+							 1),
+				     mask),
+			fold_build2 (BIT_AND_EXPR, unsigned_type,
+				     make_bit_field_ref (rinner,
+							 unsigned_type,
+							 nbitsize, nbitpos,
+							 1),
+				     mask));
+
+  /* Otherwise, we are handling the constant case. See if the constant is too
+     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
+     this not only for its own sake, but to avoid having to test for this
+     error case below.  If we didn't, we might generate wrong code.
+
+     For unsigned fields, the constant shifted right by the field length should
+     be all zero.  For signed fields, the high-order bits should agree with
+     the sign bit.  */
+
+  if (lunsignedp)
+    {
+      if (! integer_zerop (const_binop (RSHIFT_EXPR,
+					fold_convert (unsigned_type, rhs),
+					size_int (lbitsize), 0)))
+	{
+	  warning (0, "comparison is always %d due to width of bit-field",
+		   code == NE_EXPR);
+	  return constant_boolean_node (code == NE_EXPR, compare_type);
+	}
+    }
+  else
+    {
+      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
+			      size_int (lbitsize - 1), 0);
+      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
+	{
+	  warning (0, "comparison is always %d due to width of bit-field",
+		   code == NE_EXPR);
+	  return constant_boolean_node (code == NE_EXPR, compare_type);
+	}
+    }
+
+  /* Single-bit compares should always be against zero.  */
+  if (lbitsize == 1 && ! integer_zerop (rhs))
+    {
+      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
+      rhs = build_int_cst (type, 0);
+    }
+
+  /* Make a new bitfield reference, shift the constant over the
+     appropriate number of bits and mask it with the computed mask
+     (in case this was a signed field).  If we changed it, make a new one.  */
+  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
+  if (lvolatilep)
+    {
+      TREE_SIDE_EFFECTS (lhs) = 1;
+      TREE_THIS_VOLATILE (lhs) = 1;
+    }
+
+  rhs = const_binop (BIT_AND_EXPR,
+		     const_binop (LSHIFT_EXPR,
+				  fold_convert (unsigned_type, rhs),
+				  size_int (lbitpos), 0),
+		     mask, 0);
+
+  return build2 (code, compare_type,
+		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
+		 rhs);
+}
+
 /* Subroutine for fold_truthop: decode a field reference.
 
    If EXP is a comparison reference, we return the innermost reference.
@@ -3939,6 +4138,27 @@ decode_field_reference (tree exp, HOST_W
   return inner;
 }
 
+/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
+   bit positions.  */
+
+static int
+all_ones_mask_p (const_tree mask, int size)
+{
+  tree type = TREE_TYPE (mask);
+  unsigned int precision = TYPE_PRECISION (type);
+  tree tmask;
+
+  tmask = build_int_cst_type (signed_type_for (type), -1);
+
+  return
+    tree_int_cst_equal (mask,
+			const_binop (RSHIFT_EXPR,
+				     const_binop (LSHIFT_EXPR, tmask,
+						  size_int (precision - size),
+						  0),
+				     size_int (precision - size), 0));
+}
+
 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
    represents the sign bit of EXP's type.  If EXP represents a sign
    or zero extension, also test VAL against the unextended type.
@@ -5264,15 +5484,15 @@ fold_truthop (enum tree_code code, tree 
   tree ll_inner, lr_inner, rl_inner, rr_inner;
   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
-  HOST_WIDE_INT xll_bitpos, xrl_bitpos;
-  HOST_WIDE_INT lnbitsize, lnbitpos;
+  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
+  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
-  enum machine_mode lnmode;
+  enum machine_mode lnmode, rnmode;
   tree ll_mask, lr_mask, rl_mask, rr_mask;
   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
   tree l_const, r_const;
-  tree lntype, result;
-  int first_bit, end_bit;
+  tree lntype, rntype, result;
+  HOST_WIDE_INT first_bit, end_bit;
   int volatilep;
   tree orig_lhs = lhs, orig_rhs = rhs;
@@ -5510,6 +5730,118 @@ fold_truthop (enum tree_code code, tree 
 	}
     }
 
+  /* If the right sides are not constant, do the same for it.  Also,
+     disallow this optimization if a size or signedness mismatch occurs
+     between the left and right sides.  */
+  if (l_const == 0)
+    {
+      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
+	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
+	  /* Make sure the two fields on the right
+	     correspond to the left without being swapped.  */
+	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
+	return 0;
+
+      first_bit = MIN (lr_bitpos, rr_bitpos);
+      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
+      rnmode = get_best_mode (end_bit - first_bit, first_bit,
+			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
+			      volatilep);
+      if (rnmode == VOIDmode)
+	return 0;
+
+      rnbitsize = GET_MODE_BITSIZE (rnmode);
+      rnbitpos = first_bit & ~ (rnbitsize - 1);
+      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
+      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
+
+      if (BYTES_BIG_ENDIAN)
+	{
+	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
+	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
+	}
+
+      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
+			     size_int (xlr_bitpos), 0);
+      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
+			     size_int (xrr_bitpos), 0);
+
+      /* Make a mask that corresponds to both fields being compared.
+	 Do this for both items being compared.  If the operands are the
+	 same size and the bits being compared are in the same position
+	 then we can do this by masking both and comparing the masked
+	 results.  */
+      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
+      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
+      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
+	{
+	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
+				    ll_unsignedp || rl_unsignedp);
+	  if (! all_ones_mask_p (ll_mask, lnbitsize))
+	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
+
+	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
+				    lr_unsignedp || rr_unsignedp);
+	  if (! all_ones_mask_p (lr_mask, rnbitsize))
+	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
+
+	  return build2 (wanted_code, truth_type, lhs, rhs);
+	}
+
+      /* There is still another way we can do something:  If both pairs of
+	 fields being compared are adjacent, we may be able to make a wider
+	 field containing them both.
+
+	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
+	 the mask must be shifted to account for the shift done by
+	 make_bit_field_ref.  */
+      if ((ll_bitsize + ll_bitpos == rl_bitpos
+	   && lr_bitsize + lr_bitpos == rr_bitpos)
+	  || (ll_bitpos == rl_bitpos + rl_bitsize
+	      && lr_bitpos == rr_bitpos + rr_bitsize))
+	{
+	  tree type;
+
+	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
+				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
+	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
+				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
+
+	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
+				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
+	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
+				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
+
+	  /* Convert to the smaller type before masking out unwanted bits.  */
+	  type = lntype;
+	  if (lntype != rntype)
+	    {
+	      if (lnbitsize > rnbitsize)
+		{
+		  lhs = fold_convert (rntype, lhs);
+		  ll_mask = fold_convert (rntype, ll_mask);
+		  type = rntype;
+		}
+	      else if (lnbitsize < rnbitsize)
+		{
+		  rhs = fold_convert (lntype, rhs);
+		  lr_mask = fold_convert (lntype, lr_mask);
+		  type = lntype;
+		}
+	    }
+
+	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
+	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
+
+	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
+	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
+
+	  return build2 (wanted_code, truth_type, lhs, rhs);
+	}
+
+      return 0;
+    }
+
   /* Handle the case of comparisons with constants.  If there is something in
      common between the masks, those bits of the constants must be the same.
      If not, the condition is always false.  Test for this to avoid generating
@@ -5531,7 +5863,19 @@ fold_truthop (enum tree_code code, tree 
 	}
     }
 
-  return NULL_TREE;
+  /* Construct the expression we will return.  First get the component
+     reference we will make.  Unless the mask is all ones the width of
+     that field, perform the mask operation.  Then compare with the
+     merged constant.  */
+  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
+			       ll_unsignedp || rl_unsignedp);
+
+  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
+  if (! all_ones_mask_p (ll_mask, lnbitsize))
+    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
+
+  return build2 (wanted_code, truth_type, result,
+		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
 }
 
 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
@@ -11914,6 +12258,18 @@ fold_binary (enum tree_code code, tree t
 	    return omit_one_operand (type, rslt, arg0);
 	}
 
+      /* If this is a comparison of a field, we may be able to simplify it.  */
+      if ((TREE_CODE (arg0) == COMPONENT_REF
+	   || TREE_CODE (arg0) == BIT_FIELD_REF)
+	  /* Handle the constant case even without -O
+	     to make sure the warnings are given.  */
+	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
+	{
+	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
+	  if (t1)
+	    return t1;
+	}
+
       /* Optimize comparisons of strlen vs zero to a compare of the
 	 first character of the string vs zero.  To wit,
 		strlen(ptr) == 0   =>  *ptr == 0
--- gcc/testsuite/g++.dg/opt/pr36449.C.jj	2008-08-26 11:03:24.000000000 +0200
+++ gcc/testsuite/g++.dg/opt/pr36449.C	2008-08-26 11:00:53.000000000 +0200
@@ -0,0 +1,70 @@
+// PR middle-end/36449
+// { dg-do run }
+// { dg-options "-O3" }
+
+extern "C" void exit (int);
+extern "C" void abort ();
+
+struct R
+{
+  short a;
+  short b;
+};
+
+struct S
+{
+  R e;
+  long f;
+  long g;
+};
+
+struct T
+{
+  short c;
+  short d;
+};
+
+struct U
+{
+  long h[0x1ffffff + 1];
+  T i;
+};
+
+U *j;
+
+void __attribute__((noinline))
+bar ()
+{
+  exit (0);
+}
+
+void __attribute__((noinline))
+foo ()
+{
+  S s;
+
+  s.e.a = 36;
+  s.e.b = 38;
+  if (s.e.a == j->i.c && s.e.b == j->i.d)
+    bar ();
+}
+
+int
+main ()
+{
+  try
+    {
+      j = new U;
+    }
+  catch (...)
+    {
+      return 0;
+    }
+  j->i.c = 36;
+  j->i.d = 38;
+  j->h[0] = 1;
+  j->h[1] = 2;
+  j->h[2] = 3;
+  foo ();
+  abort ();
+}


Index: .cvsignore
===================================================================
RCS file: /cvs/pkgs/rpms/gcc/devel/.cvsignore,v
retrieving revision 1.241
retrieving revision 1.242
diff -u -r1.241 -r1.242
--- .cvsignore	25 Aug 2008 12:07:40 -0000	1.241
+++ .cvsignore	29 Aug 2008 11:16:06 -0000	1.242
@@ -1,2 +1,2 @@
-gcc-4.3.1-20080825.tar.bz2
+gcc-4.3.2-20080829.tar.bz2
 fastjar-0.95.tar.gz


Index: gcc43.spec
===================================================================
RCS file: /cvs/pkgs/rpms/gcc/devel/gcc43.spec,v
retrieving revision 1.39
retrieving revision 1.40
diff -u -r1.39 -r1.40
--- gcc43.spec	25 Aug 2008 12:07:40 -0000	1.39
+++ gcc43.spec	29 Aug 2008 11:16:06 -0000	1.40
@@ -1,6 +1,6 @@
-%define DATE 20080825
-%define gcc_version 4.3.1
-%define gcc_release 8
+%define DATE 20080829
+%define gcc_version 4.3.2
+%define gcc_release 1
 %define _unpackaged_files_terminate_build 0
 %define multilib_64_archs sparc64 ppc64 s390x x86_64
 %define include_gappletviewer 1
@@ -152,6 +152,9 @@
 Patch23: gcc43-fortran-debug7.patch
 Patch24: gcc43-fortran-debug8.patch
 Patch25: gcc43-fortran-debug9.patch
+Patch26: gcc43-fortran-debug10.patch
+Patch27: gcc43-fortran-debug11.patch
+Patch28: gcc43-pr37248.patch
 
 # On ARM EABI systems, we do want -gnueabi to be part of the
 # target triple.
@@ -461,6 +464,9 @@
 %patch23 -p0 -b .fortran-debug7~
 %patch24 -p0 -b .fortran-debug8~
 %patch25 -p0 -b .fortran-debug9~
+%patch26 -p0 -b .fortran-debug10~
+%patch27 -p0 -b .fortran-debug11~
+%patch28 -p0 -b .pr37248~
 
 tar xzf %{SOURCE4}
 
@@ -468,7 +474,7 @@
 tar xjf %{SOURCE10}
 %endif
 
-sed -i -e 's/4\.3\.2/4.3.1/' gcc/BASE-VER
+sed -i -e 's/4\.3\.3/4.3.2/' gcc/BASE-VER
 echo 'Red Hat %{version}-%{gcc_release}' > gcc/DEV-PHASE
 
 cp -a libstdc++-v3/config/cpu/i{4,3}86/atomicity.h
@@ -1272,6 +1278,10 @@
 %{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/ppc-asm.h
 %{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/altivec.h
 %{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/spe.h
+%{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/paired.h
+%{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/ppu_intrinsics.h
+%{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/si2vmx.h
+%{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/include/spu2vmx.h
 %endif
 %{_prefix}/libexec/gcc/%{gcc_target_platform}/%{gcc_version}/collect2
 %{_prefix}/lib/gcc/%{gcc_target_platform}/%{gcc_version}/crt*.o
@@ -1682,6 +1692,17 @@
 %doc rpm.doc/changelogs/libmudflap/ChangeLog*
 
 %changelog
+* Fri Aug 29 2008 Jakub Jelinek <jakub at redhat.com> 4.3.2-1
+- update from gcc-4_3-branch
+  - 4.3.2 release
+  - PRs c++/36741, middle-end/36548, middle-end/36817, middle-end/37125,
+	target/37184, target/37191, target/37197
+- backport further Fortran debuginfo improvements (#460378, #459375)
+- revert removal of adjacent bitfield comparison
+  optimization (PR middle-end/37248)
+- on ppc/ppc64 add paired.h, ppu_intrinsics.h, si2vmx.h and spu2vmx.h
+  headers (#460497)
+
 * Mon Aug 25 2008 Jakub Jelinek <jakub at redhat.com> 4.3.1-8
 - update from gcc-4_3-branch
   - PRs debug/37156, libgcj/8995, libstdc++/37100, target/37101


Index: sources
===================================================================
RCS file: /cvs/pkgs/rpms/gcc/devel/sources,v
retrieving revision 1.244
retrieving revision 1.245
diff -u -r1.244 -r1.245
--- sources	25 Aug 2008 12:07:40 -0000	1.244
+++ sources	29 Aug 2008 11:16:06 -0000	1.245
@@ -1,2 +1,2 @@
-13550eec00d2563c42d1879e1f8f3407  gcc-4.3.1-20080825.tar.bz2
+69f70d92142466361146326f840a6185  gcc-4.3.2-20080829.tar.bz2
 92a70f9e56223b653bce0f58f90cf950  fastjar-0.95.tar.gz



