Here's my current patch for the bitfield reversal feature I've been
working on for a while, with an RX-specific pragma to apply it
"globally".  Could someone please review this?  It would be nice
to get it in before stage1 closes again...
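
To illustrate the intended usage (struct and field names here are just
examples; the attribute and pragma are the ones added by the patch
below):

    /* Force MSB-first allocation regardless of target endianness.  */
    struct __attribute__((bit_order(msb))) mmio_reg {
      unsigned char mode:2;
      unsigned char irq_enable:1;
      unsigned char divider:5;
    };

    /* RX only: make MSB-first the default for subsequent structs.  */
    #pragma bit_order left
    struct another_reg {
      unsigned char mode:2;
      unsigned char pad:6;
    };
    #pragma bit_order native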


Index: gcc/doc/extend.texi
===================================================================
--- gcc/doc/extend.texi (revision 192009)
+++ gcc/doc/extend.texi (working copy)
@@ -5427,12 +5427,74 @@ Note that the type visibility is applied
 associated with the class (vtable, typeinfo node, etc.).  In
 particular, if a class is thrown as an exception in one shared object
 and caught in another, the class must have default visibility.
 Otherwise the two shared objects will be unable to use the same
 typeinfo node and exception handling will break.
 
+@item bit_order
+Normally, GCC allocates bitfields starting from either the least
+significant or the most significant bit of the underlying type, such
+that bitfields are allocated from lowest address to highest address.
+Specifically, big-endian targets allocate the MSB first, while
+little-endian targets allocate the LSB first.  The @code{bit_order}
+attribute overrides this default, allowing you to force allocation to
+be MSB-first, LSB-first, or the opposite of whatever GCC defaults to.
+The @code{bit_order} attribute takes an optional argument:
+
+@table @code
+
+@item native
+This is the default, and also the mode when no argument is given.  GCC
+allocates LSB-first on little-endian targets, and MSB-first on
+big-endian targets.
+
+@item swapped
+Bitfield allocation is the opposite of @code{native}.
+
+@item lsb
+Bits are allocated LSB-first.
+
+@item msb
+Bits are allocated MSB-first.
+
+@end table
+
+A short example demonstrates bitfield allocation:
+
+@example
+struct __attribute__((bit_order(msb))) @{
+  char a:3;
+  char b:3;
+@} foo = @{ 3, 5 @};
+@end example
+
+With LSB-first allocation, @code{foo.a} would be in the 3 least
+significant bits (mask 0x07) and @code{foo.b} in the next 3 bits up
+(mask 0x38).  With the MSB-first allocation requested here,
+@code{foo.a} is in the 3 most significant bits (mask 0xE0) and
+@code{foo.b} in the next 3 bits down (mask 0x1C).
+
+Note that it is entirely up to the programmer to define bitfields that
+make sense when swapped.  Consider:
+
+@example
+struct __attribute__((bit_order(msb))) @{
+  short a:7;
+  char b:6;
+@} foo = @{ 3, 5 @};
+@end example
+
+On some targets, or if the structure is @code{packed}, GCC may use
+only one byte of storage for @code{a} despite its being declared
+@code{short}.  Swapping the bit order of @code{a} would then cause it
+to overlap @code{b}.  Worse, the bitfield @code{b} may span bytes, so
+``swapping'' is no longer well defined, as there is no single
+@code{char} container to swap within.  To avoid such problems, the
+programmer should either fully define each underlying type, or ensure
+that the target's ABI allocates enough space for each underlying type
+regardless of how much of it is used.
+
 @end table
 
 To specify multiple attributes, separate them by commas within the
 double parentheses: for example, @samp{__attribute__ ((aligned (16),
 packed))}.
 
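To sanity-check the masks documented above, a small test program along
these lines can be used (not part of the patch; it naturally requires a
compiler with this feature, and the expected bytes follow directly from
the masks given in the documentation):

    #include <stdio.h>
    #include <string.h>

    struct __attribute__((bit_order(msb))) s {
      char a:3;
      char b:3;
    };

    int
    main (void)
    {
      struct s x = { 3, 5 };
      unsigned char raw;

      memcpy (&raw, &x, 1);
      /* MSB-first: a == 3 lands in mask 0xE0 (0x60) and b == 5 in
         mask 0x1C (0x14), so this prints 0x74.  LSB-first allocation
         would give 0x03 | 0x28 == 0x2b.  */
      printf ("0x%02x\n", raw);
      return 0;
    }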
Index: gcc/c-family/c-common.c
===================================================================
--- gcc/c-family/c-common.c     (revision 192009)
+++ gcc/c-family/c-common.c     (working copy)
@@ -310,12 +310,13 @@ struct visibility_flags visibility_optio
 
 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *);
 static tree check_case_value (tree);
 static bool check_case_bounds (tree, tree, tree *, tree *);
 
 static tree handle_packed_attribute (tree *, tree, tree, int, bool *);
+static tree handle_bitorder_attribute (tree *, tree, tree, int, bool *);
 static tree handle_nocommon_attribute (tree *, tree, tree, int, bool *);
 static tree handle_common_attribute (tree *, tree, tree, int, bool *);
 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
 static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
 static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
@@ -601,12 +602,14 @@ const unsigned int num_c_common_reswords
 const struct attribute_spec c_common_attribute_table[] =
 {
   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
        affects_type_identity } */
   { "packed",                 0, 0, false, false, false,
                              handle_packed_attribute , false},
+  { "bit_order",              0, 1, false, true, false,
+                             handle_bitorder_attribute , false},
   { "nocommon",               0, 0, true,  false, false,
                              handle_nocommon_attribute, false},
   { "common",                 0, 0, true,  false, false,
                              handle_common_attribute, false },
   /* FIXME: logically, noreturn attributes should be listed as
      "false, true, true" and apply to function types.  But implementing this
@@ -6237,12 +6240,42 @@ handle_packed_attribute (tree *node, tre
       *no_add_attrs = true;
     }
 
   return NULL_TREE;
 }
 
+/* Handle a "bit_order" attribute; arguments as in
+   struct attribute_spec.handler.  */
+
+static tree
+handle_bitorder_attribute (tree *ARG_UNUSED (node), tree ARG_UNUSED (name),
+                          tree ARG_UNUSED (args),
+                          int ARG_UNUSED (flags), bool *no_add_attrs)
+{
+  tree bmode;
+  const char *bname;
+
+  /* Allow no arguments to mean "native".  */
+  if (args == NULL_TREE)
+    return NULL_TREE;
+
+  bmode = TREE_VALUE (args);
+  if (TREE_CODE (bmode) != IDENTIFIER_NODE)
+    {
+      error ("argument of %qE attribute is not an identifier", name);
+      *no_add_attrs = true;
+      return NULL_TREE;
+    }
+
+  bname = IDENTIFIER_POINTER (bmode);
+  if (strcmp (bname, "msb")
+      && strcmp (bname, "lsb")
+      && strcmp (bname, "swapped")
+      && strcmp (bname, "native"))
+    {
+      error ("%qE is not a valid bit_order - use lsb, msb, native, or "
+            "swapped", bmode);
+      *no_add_attrs = true;
+    }
+
+  return NULL_TREE;
+}
+
 /* Handle a "nocommon" attribute; arguments as in
    struct attribute_spec.handler.  */
 
 static tree
 handle_nocommon_attribute (tree *node, tree name,
                           tree ARG_UNUSED (args),
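
As a quick illustration of what the handler accepts and rejects (the
diagnostic text is the one added above; the struct names are just
examples):

    struct __attribute__((bit_order))           s1 { int a:4; int b:4; };
    struct __attribute__((bit_order(swapped)))  s2 { int a:4; int b:4; };
    struct __attribute__((bit_order(sideways))) s3 { int a:4; int b:4; };
    /* s3 draws: error: 'sideways' is not a valid bit_order - use lsb,
       msb, native, or swapped  */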
Index: gcc/stor-layout.c
===================================================================
--- gcc/stor-layout.c   (revision 192009)
+++ gcc/stor-layout.c   (working copy)
@@ -1738,12 +1738,92 @@ finalize_type_size (tree type)
          TYPE_ALIGN (variant) = align;
          TYPE_USER_ALIGN (variant) = user_align;
          SET_TYPE_MODE (variant, mode);
        }
     }
 }
+
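+/* Reverse the allocation order of every bitfield in RLI->t by
+   mirroring each field's position within its underlying type's
+   container.  */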
+static void
+reverse_bitfield_layout (record_layout_info rli)
+{
+  tree field, oldtype, oldbtype;
+
+  for (field = TYPE_FIELDS (rli->t); field; field = TREE_CHAIN (field))
+    {
+      tree type = TREE_TYPE (field);
+      tree bit, byte, bmod, byte_offset;
+
+      if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
+       return;
+      if (TREE_CODE (field) != FIELD_DECL)
+       continue;
+
+      oldtype = TREE_TYPE (DECL_FIELD_BIT_OFFSET (field));
+      oldbtype = TREE_TYPE (DECL_FIELD_OFFSET (field));
+
+      bit = DECL_FIELD_BIT_OFFSET (field);
+      byte = DECL_FIELD_OFFSET (field);
+
+      /* Sometimes, the next field might be in the next type-size
+        container.  We have to calculate which *container* it's in,
+        and swap within that container.  Example: { char a:5; char
+        b:5; } will put B in the next char, but the byte/bit numbers
+        might show that as "bit 8 of byte 0".  */
+      bmod = size_binop (FLOOR_DIV_EXPR, bit, TYPE_SIZE (type));
+      bmod = size_binop (MULT_EXPR, bmod, TYPE_SIZE (type));
+      bit = size_binop (MINUS_EXPR, bit, bmod);
+
+      byte_offset = size_binop (FLOOR_DIV_EXPR, bmod,
+                               bitsize_int (BITS_PER_UNIT));
+      byte_offset = fold_convert (sizetype, byte_offset);
+      byte = size_binop (PLUS_EXPR, byte, byte_offset);
+
+      DECL_FIELD_BIT_OFFSET (field)
+       = size_binop (MINUS_EXPR,
+                     size_binop (MINUS_EXPR, TYPE_SIZE (type),
+                                 DECL_SIZE (field)),
+                     bit);
+      DECL_FIELD_OFFSET (field) = byte;
+
+      TREE_TYPE (DECL_FIELD_BIT_OFFSET (field)) = oldtype;
+      TREE_TYPE (DECL_FIELD_OFFSET (field)) = oldbtype;
+    }
+}
+
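+/* Return nonzero if the "bit_order" attribute on RLI->t requests the
+   opposite of the target's default bitfield allocation order.  */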
+static int
+reverse_bitfields_p (record_layout_info rli)
+{
+  tree st, arg;
+  const char *mode;
+
+  st = rli->t;
+
+  arg = lookup_attribute ("bit_order", TYPE_ATTRIBUTES (st));
+
+  if (!arg)
+    return 0;
+  /* An attribute with no argument means "native", i.e. no reversal;
+     it must not be dereferenced below.  */
+  if (!TREE_VALUE (arg))
+    return 0;
+  arg = TREE_VALUE (TREE_VALUE (arg));
+  if (!arg)
+    return 0;
+
+  mode = IDENTIFIER_POINTER (arg);
+
+  if (strcmp (mode, "swapped") == 0)
+    return 1;
+  if (BYTES_BIG_ENDIAN)
+    {
+      if (strcmp (mode, "lsb") == 0)
+       return 1;
+    }
+  else
+    {
+      if (strcmp (mode, "msb") == 0)
+       return 1;
+    }
+
+  return 0;
+}
 
 /* Return a new underlying object for a bitfield started with FIELD.  */
 
 static tree
 start_bitfield_representative (tree field)
 {
@@ -1940,12 +2020,24 @@ finish_bitfield_layout (record_layout_in
                     || operand_equal_p (DECL_FIELD_OFFSET (repr),
                                         DECL_FIELD_OFFSET (field), 0)))
            {
              finish_bitfield_representative (repr, prev);
              repr = start_bitfield_representative (field);
            }
+
+         /* If the bit_order attribute has been used on this
+            structure, the fields might not be in bit order.  In that
+            case, we need a separate representative for each field.
+            Note that DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET are
+            trees, so they must be compared as integer constants, not
+            as pointers.  */
+         else if (tree_int_cst_lt (DECL_FIELD_OFFSET (field),
+                                   DECL_FIELD_OFFSET (repr))
+                  || (tree_int_cst_equal (DECL_FIELD_OFFSET (field),
+                                          DECL_FIELD_OFFSET (repr))
+                      && tree_int_cst_lt (DECL_FIELD_BIT_OFFSET (field),
+                                          DECL_FIELD_BIT_OFFSET (repr))))
+           {
+             finish_bitfield_representative (repr, prev);
+             repr = start_bitfield_representative (field);
+           }
        }
       else
        continue;
 
       if (repr)
        DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
@@ -1965,12 +2057,15 @@ finish_bitfield_layout (record_layout_in
 
 void
 finish_record_layout (record_layout_info rli, int free_p)
 {
   tree variant;
 
+  if (reverse_bitfields_p (rli))
+    reverse_bitfield_layout (rli);
+
   /* Compute the final size.  */
   finalize_record_size (rli);
 
   /* Compute the TYPE_MODE for the record.  */
   compute_record_mode (rli->t);
 
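To make the container arithmetic in reverse_bitfield_layout above
easier to review, here is the same computation redone on plain integers
(a standalone sketch, assuming BITS_PER_UNIT == 8; reverse_one and its
arguments are just stand-ins for the tree-level values):

    #include <stdio.h>

    /* TYPE_BITS = TYPE_SIZE, FIELD_BITS = DECL_SIZE, and BIT/BYTE =
       DECL_FIELD_BIT_OFFSET/DECL_FIELD_OFFSET from the patch.  */
    static void
    reverse_one (int type_bits, int field_bits, int bit, int byte)
    {
      int bmod = (bit / type_bits) * type_bits; /* container start, in bits */
      bit -= bmod;                              /* offset within container */
      byte += bmod / 8;                         /* renormalized byte offset */
      bit = (type_bits - field_bits) - bit;     /* mirror within container */
      printf ("byte %d, bit %d\n", byte, bit);
    }

    int
    main (void)
    {
      /* struct { char a:5; char b:5; }: b is recorded as "bit 8 of
         byte 0", i.e. in the second char-sized container.  */
      reverse_one (8, 5, 0, 0);  /* a -> byte 0, bit 3 */
      reverse_one (8, 5, 8, 0);  /* b -> byte 1, bit 3 */
      return 0;
    }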
Index: gcc/varasm.c
===================================================================
--- gcc/varasm.c        (revision 192009)
+++ gcc/varasm.c        (working copy)
@@ -4716,25 +4716,17 @@ output_constructor_array_range (oc_local
 
       /* Count its size.  */
       local->total_bytes += fieldsize;
     }
 }
 
-/* Helper for output_constructor.  From the current LOCAL state, output a
-   field element that is not true bitfield or part of an outer one.  */
-
-static void
-output_constructor_regular_field (oc_local_state *local)
+/* Helper for output_constructor_regular_field.  Compute the byte
+   position of the field currently described by LOCAL.  */
+
+static HOST_WIDE_INT
+constructor_regular_field_bytepos (oc_local_state *local)
 {
-  /* Field size and position.  Since this structure is static, we know the
-     positions are constant.  */
-  unsigned HOST_WIDE_INT fieldsize;
   HOST_WIDE_INT fieldpos;
-
-  unsigned int align2;
-
   if (local->index != NULL_TREE)
     {
       /* Perform the index calculation in modulo arithmetic but
         sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
         but we are using an unsigned sizetype.  */
       unsigned prec = TYPE_PRECISION (sizetype);
@@ -4745,12 +4737,29 @@ output_constructor_regular_field (oc_loc
                  * idx.low);
     }
   else if (local->field != NULL_TREE)
     fieldpos = int_byte_position (local->field);
   else
     fieldpos = 0;
+  return fieldpos;
+}
+
+/* Helper for output_constructor.  From the current LOCAL state, output a
+   field element that is not true bitfield or part of an outer one.  */
+
+static void
+output_constructor_regular_field (oc_local_state *local)
+{
+  /* Field size and position.  Since this structure is static, we know the
+     positions are constant.  */
+  unsigned HOST_WIDE_INT fieldsize;
+  HOST_WIDE_INT fieldpos;
+
+  unsigned int align2;
+
+  fieldpos = constructor_regular_field_bytepos (local);
 
   /* Output any buffered-up bit-fields preceding this element.  */
   if (local->byte_buffer_in_use)
     {
       assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
       local->total_bytes++;
@@ -5001,18 +5010,49 @@ output_constructor_bitfield (oc_local_st
 }
 
 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
    Generate at least SIZE bytes, padding if necessary.  OUTER designates the
    caller output state of relevance in recursive invocations.  */
 
+enum constructor_what { WHAT_ARRAY, WHAT_REGULAR, WHAT_BITFIELD };
+
+typedef struct {
+  unsigned HOST_WIDE_INT cnt;
+  tree val;
+  tree index;
+  tree field;
+  enum constructor_what what_to_do;
+} constructor_field_list;
+
+static int
+constructor_field_sort (const void *va, const void *vb)
+{
+  const constructor_field_list *a = (const constructor_field_list *) va;
+  const constructor_field_list *b = (const constructor_field_list *) vb;
+
+  /* A field that's exactly a whole number of bytes might show up as a
+     "regular" type instead of a "field" byte.  We can tell the
+     difference here, because those will have FIELD set.  Just
+     preserve the original order for non-field components.  */
+  if (!a->field || !b->field)
+    return a->cnt < b->cnt ? -1 : a->cnt > b->cnt ? 1 : 0;
+
+  /* For two fields, compare byte offset first, then bit offset.
+     Compare explicitly rather than subtracting, so the result cannot
+     overflow int.  */
+  if (int_byte_position (a->field) != int_byte_position (b->field))
+    return (int_byte_position (a->field) < int_byte_position (b->field)
+           ? -1 : 1);
+  if (int_bit_position (a->field) != int_bit_position (b->field))
+    return (int_bit_position (a->field) < int_bit_position (b->field)
+           ? -1 : 1);
+  return 0;
+}
+
 static unsigned HOST_WIDE_INT
 output_constructor (tree exp, unsigned HOST_WIDE_INT size,
                    unsigned int align, oc_outer_state * outer)
 {
   unsigned HOST_WIDE_INT cnt;
   constructor_elt *ce;
+  constructor_field_list *constructor_fields;
+  unsigned HOST_WIDE_INT constructor_field_count;
 
   oc_local_state local;
 
   /* Setup our local state to communicate with helpers.  */
   local.exp = exp;
   local.size = size;
@@ -5043,12 +5083,15 @@ output_constructor (tree exp, unsigned H
      more one).  */
 
   local.field = NULL_TREE;
   if (TREE_CODE (local.type) == RECORD_TYPE)
     local.field = TYPE_FIELDS (local.type);
 
+  constructor_field_count
+    = VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp));
+  constructor_fields
+    = XNEWVEC (constructor_field_list, constructor_field_count);
+
   for (cnt = 0;
        VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), cnt, ce);
        cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
     {
       local.val = ce->value;
       local.index = NULL_TREE;
@@ -5069,41 +5112,72 @@ output_constructor (tree exp, unsigned H
                 : "<anonymous>");
 
       /* Eliminate the marker that makes a cast not be an lvalue.  */
       if (local.val != NULL_TREE)
        STRIP_NOPS (local.val);
 
-      /* Output the current element, using the appropriate helper ...  */
+      constructor_fields[cnt].cnt = cnt;
+      constructor_fields[cnt].val = local.val;
+      constructor_fields[cnt].index = local.index;
+      constructor_fields[cnt].field = local.field;
 
       /* For an array slice not part of an outer bitfield.  */
       if (!outer
          && local.index != NULL_TREE
          && TREE_CODE (local.index) == RANGE_EXPR)
-       output_constructor_array_range (&local);
+       constructor_fields[cnt].what_to_do = WHAT_ARRAY;
 
       /* For a field that is neither a true bitfield nor part of an outer one,
         known to be at least byte aligned and multiple-of-bytes long.  */
       else if (!outer
               && (local.field == NULL_TREE
                   || !CONSTRUCTOR_BITFIELD_P (local.field)))
-       output_constructor_regular_field (&local);
+       constructor_fields[cnt].what_to_do = WHAT_REGULAR;
 
       /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
         supported for scalar fields, so we may need to convert first.  */
       else
         {
          if (TREE_CODE (local.val) == REAL_CST)
            local.val
              = fold_unary (VIEW_CONVERT_EXPR,
                            build_nonstandard_integer_type
                            (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
                            local.val);
+         /* Store the converted value back so the sorted second pass
+            sees the INTEGER_CST, not the original REAL_CST.  */
+         constructor_fields[cnt].val = local.val;
+         constructor_fields[cnt].what_to_do = WHAT_BITFIELD;
+       }
+    }
+
+  qsort (constructor_fields, constructor_field_count,
+        sizeof (constructor_field_list), constructor_field_sort);
+
+  for (cnt = 0; cnt < constructor_field_count; cnt++)
+    {
+      /* Output the current element, using the appropriate helper ...  */
+      local.val = constructor_fields[cnt].val;
+      local.index = constructor_fields[cnt].index;
+      local.field = constructor_fields[cnt].field;
+
+      switch (constructor_fields[cnt].what_to_do)
+       {
+       case WHAT_ARRAY:
+         output_constructor_array_range (&local);
+         break;
+       case WHAT_REGULAR:
+         output_constructor_regular_field (&local);
+         break;
+       case WHAT_BITFIELD:
          output_constructor_bitfield (&local, outer);
+         break;
        }
     }
 
+  XDELETEVEC (constructor_fields);
+
   /* If we are not at toplevel, save the pending data for our caller.
      Otherwise output the pending data and padding zeros as needed. */
   if (outer)
     outer->byte = local.byte;
   else
     {
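
The two-pass rewrite above is needed because with a reversed bit order
the CONSTRUCTOR's element order no longer matches memory order, and the
byte buffer in output_constructor only moves forward.  A standalone
sketch of the comparator's effect (elt and elt_cmp are hypothetical
stand-ins mirroring constructor_field_sort):

    #include <stdio.h>
    #include <stdlib.h>

    /* One constructor element: declaration order (cnt) versus memory
       position (bytepos/bitpos).  */
    struct elt { int cnt; int bytepos; int bitpos; };

    static int
    elt_cmp (const void *va, const void *vb)
    {
      const struct elt *a = (const struct elt *) va;
      const struct elt *b = (const struct elt *) vb;
      if (a->bytepos != b->bytepos)
        return a->bytepos < b->bytepos ? -1 : 1;
      return a->bitpos - b->bitpos;
    }

    int
    main (void)
    {
      /* { char a:3; char b:3; } with bit_order(msb): a lands at bit 5
         and b at bit 2, so declaration order != memory order.  */
      struct elt e[2] = { { 0, 0, 5 }, { 1, 0, 2 } };
      qsort (e, 2, sizeof (e[0]), elt_cmp);
      printf ("emit field %d first\n", e[0].cnt); /* field 1, i.e. b */
      return 0;
    }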
Index: gcc/config.gcc
===================================================================
--- gcc/config.gcc      (revision 192009)
+++ gcc/config.gcc      (working copy)
@@ -2119,12 +2119,13 @@ rl78-*-elf*)
        target_has_targetm_common=no
        c_target_objs="rl78-c.o"
        cxx_target_objs="rl78-c.o"
        tmake_file="${tmake_file} rl78/t-rl78"
        ;;
 rx-*-elf*)
+       c_target_objs="rx-c.o"
+       cxx_target_objs="rx-c.o"
        tm_file="dbxelf.h elfos.h newlib-stdint.h ${tm_file}"
        tmake_file="${tmake_file} rx/t-rx"
        ;;
 s390-*-linux*)
        default_gnu_indirect_function=yes
        tm_file="s390/s390.h dbxelf.h elfos.h gnu-user.h linux.h glibc-stdint.h 
s390/linux.h"
Index: gcc/config/rx/rx.h
===================================================================
--- gcc/config/rx/rx.h  (revision 192009)
+++ gcc/config/rx/rx.h  (working copy)
@@ -49,12 +49,14 @@
        builtin_define ("__RX_AS100_SYNTAX__"); \
       else                                     \
        builtin_define ("__RX_GAS_SYNTAX__");   \
     }                                           \
   while (0)
 
+/* The pragma machinery lives in rx-c.c; declare its entry points here
+   (rx.h is included via tm.h) so both rx.c and rx-c.c see them.  */
+extern void rx_register_pragmas (void);
+extern void rx_note_pragma_bitorder (const char *);
+
+#define REGISTER_TARGET_PRAGMAS() rx_register_pragmas ()
+
 #undef  CC1_SPEC
 #define CC1_SPEC "\
   %{mas100-syntax:%{gdwarf*:%e-mas100-syntax is incompatible with -gdwarf}} \
   %{mcpu=rx200:%{fpu:%erx200 cpu does not have FPU hardware}}"
 
 #undef  STARTFILE_SPEC
Index: gcc/config/rx/rx.c
===================================================================
--- gcc/config/rx/rx.c  (revision 192009)
+++ gcc/config/rx/rx.c  (working copy)
@@ -1157,12 +1157,69 @@ static bool
 rx_return_in_msb (const_tree valtype)
 {
   return TARGET_BIG_ENDIAN_DATA
     && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
 }
 
+#define BITORDER_DEFAULT       0
+#define BITORDER_LEFT          1
+#define BITORDER_RIGHT         2
+
+static int rx_bitorder = BITORDER_DEFAULT;
+
+#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
+#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rx_set_default_type_attributes
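+/* Implement TARGET_SET_DEFAULT_TYPE_ATTRIBUTES.  Attach the bit_order
+   attribute selected by #pragma bit_order to struct and union types
+   that do not already carry an explicit one.  */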
+static void
+rx_set_default_type_attributes (tree node)
+{
+  tree attr, type_attr_list;
+  const char *bit_order;
+
+  if (TREE_CODE (node) != RECORD_TYPE
+      && TREE_CODE (node) != UNION_TYPE)
+    return;
+
+  type_attr_list = TYPE_ATTRIBUTES (node);
+
+  for (attr = type_attr_list; attr; attr = TREE_CHAIN (attr))
+    {
+      if (strcmp (IDENTIFIER_POINTER (TREE_PURPOSE (attr)),
+                 "bit_order") == 0)
+       return;
+    }
+
+  if (rx_bitorder == BITORDER_LEFT)
+    bit_order = "msb";
+  else if (rx_bitorder == BITORDER_RIGHT)
+    bit_order = "lsb";
+  else
+    return;
+
+  type_attr_list = tree_cons (get_identifier ("bit_order"),
+                             build_tree_list (NULL_TREE,
+                                              get_identifier (bit_order)),
+                             type_attr_list);
+
+  TYPE_ATTRIBUTES (node) = type_attr_list;
+}
+
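+/* Record the bitfield allocation order requested by #pragma
+   bit_order; consumed by rx_set_default_type_attributes above.  */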
+void
+rx_note_pragma_bitorder (const char *mode)
+{
+  if (mode == NULL)
+    rx_bitorder = BITORDER_DEFAULT;
+  else if (strcmp (mode, "left") == 0)
+    rx_bitorder = BITORDER_LEFT;
+  else if (strcmp (mode, "right") == 0)
+    rx_bitorder = BITORDER_RIGHT;
+  else if (strcmp (mode, "native") == 0)
+    rx_bitorder = BITORDER_DEFAULT;
+  else
+    error ("pragma bit_order only takes left or right");
+}
+
 /* Returns true if the provided function has the specified attribute.  */
 
 static inline bool
 has_func_attr (const_tree decl, const char * func_attr)
 {
   if (decl == NULL_TREE)
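
For reference, the intended pragma/attribute interaction on RX (a
sketch assuming this patch; the pragma only installs a default, and
rx_set_default_type_attributes skips types that already carry an
explicit attribute):

    #pragma bit_order left   /* MSB-first becomes the default */

    /* Gets bit_order("msb") attached automatically.  */
    struct defaulted { char a:3; char b:3; };

    /* An explicit attribute still wins over the pragma.  */
    struct __attribute__((bit_order(lsb))) pinned { char a:3; char b:3; };

    #pragma bit_order native /* restore the target default */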
Index: gcc/config/rx/t-rx
===================================================================
--- gcc/config/rx/t-rx  (revision 192009)
+++ gcc/config/rx/t-rx  (working copy)
@@ -24,6 +24,13 @@ MULTILIB_OPTIONS    = m64bit-doubles  no
 MULTILIB_DIRNAMES   =  64-bit-double  no-fpu-libs   big-endian-data   pid
 
 MULTILIB_MATCHES    = nofpu=mnofpu  nofpu=mcpu?rx200
 
 MULTILIB_EXCEPTIONS =
 MULTILIB_EXTRA_OPTS = 
+
+rx-c.o: $(srcdir)/config/rx/rx-c.c \
+  $(srcdir)/config/rx/rx-protos.h $(CONFIG_H) $(SYSTEM_H) coretypes.h \
+  $(TM_H) $(TREE_H) $(TM_P_H) $(FLAGS_H) $(C_COMMON_H) $(GGC_H) \
+  $(TARGET_H) $(TARGET_DEF_H) $(CPPLIB_H) $(C_PRAGMA_H)
+       $(COMPILER) -c $(ALL_COMPILERFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) \
+               $(srcdir)/config/rx/rx-c.c
Index: gcc/config/rx/rx-c.c
===================================================================
--- gcc/config/rx/rx-c.c        (revision 0)
+++ gcc/config/rx/rx-c.c        (revision 0)
@@ -0,0 +1,86 @@
+/* Subroutines used for macro/preprocessor support on the RX.
+   Copyright (C) 2012 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 3, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "tm_p.h"
+#include "flags.h"
+#include "c-family/c-common.h"
+#include "ggc.h"
+#include "target.h"
+#include "target-def.h"
+#include "cpplib.h"
+#include "c-family/c-pragma.h"
+
+/* Implement '#pragma bit_order'.  The pragma takes an optional
+   argument (left, right, or native) to force the bitfield allocation
+   order for subsequent structure definitions.  */
+static void
+rx_pragma_bitorder (cpp_reader * reader ATTRIBUTE_UNUSED)
+{
+  tree mode;
+  enum cpp_ttype type;
+
+  type = pragma_lex (&mode);
+  if (type == CPP_EOF)
+    {
+      rx_note_pragma_bitorder (NULL);
+      return;
+    }
+  if (type == CPP_NAME)
+    {
+      rx_note_pragma_bitorder (IDENTIFIER_POINTER (mode));
+
+      type = pragma_lex (&mode);
+      if (type != CPP_EOF)
+        error ("junk at end of #pragma bit_order");
+      return;
+    }
+  error ("malformed #pragma bit_order [left|right|native]");
+}
+
+/* Additional pragmas purely for compatibility with existing RXC I/O
+   headers.  */
+
+#define SET_GLOBAL_ALIGNMENT(N) maximum_field_alignment = (N)
+
+static void
+rx_pragma_unpack (cpp_reader * reader ATTRIBUTE_UNUSED)
+{
+  SET_GLOBAL_ALIGNMENT (4*8);
+}
+
+static void
+rx_pragma_packoption (cpp_reader * reader ATTRIBUTE_UNUSED)
+{
+  SET_GLOBAL_ALIGNMENT (initial_max_fld_align);
+}
+
+void
+rx_register_pragmas (void)
+{
+  c_register_pragma (NULL, "bit_order", rx_pragma_bitorder);
+  c_register_pragma (NULL, "unpack", rx_pragma_unpack);
+  c_register_pragma (NULL, "packoption", rx_pragma_packoption);
+}
