diff -Naur pytables-1.3-ds-devel/setup.py ds/pytables-1.3-devel/setup.py
--- pytables-1.3-ds-devel/setup.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/setup.py	2006-01-17 13:53:23.000000000 +0100
@@ -354,16 +354,30 @@
     if LFLAGS:
         lflags_arg = LFLAGS
 
-    # HDF5 library (mandatory)
+    # HDF5 and HDF5_HL library (mandatory)
     (dirstub, dirheader) = (None, None)
+    (dirstub_hl, dirheader_hl) = (None, None)
     if HDF5_DIR:
         (dirstub, dirheader) = check_lib_win("HDF5", HDF5_DIR, "hdf5dll.dll",
                                              "dll", "hdf5dll.lib",  # Stubs
                                              "include", "H5public.h") # Headers
-    if dirstub and dirheader:
+
+        (dirstub_hl, dirheader_hl) = check_lib_win("HDF5_HL", HDF5_DIR, "hdf5_hldll.dll",
+                                                   "dll", "hdf5_hldll.lib",  # Stubs
+                                                   "include", "H5DS.h") # Headers
+
+    if dirstub and dirheader and dirstub_hl and dirheader_hl:
         lib_dirs.append(dirstub)
+        if dirstub_hl != dirstub:
+            lib_dirs.append(dirstub_hl)
+
         inc_dirs.append(dirheader)
+        if dirheader_hl != dirheader:
+            inc_dirs.append(dirheader_hl)
+
         libnames.append("hdf5dll")
+        libnames.append("hdf5_hldll")
+
     else:
         print "Unable to locate all the required HDF5 files"
         print """
diff -Naur pytables-1.3-ds-devel/src/arraytypes.c ds/pytables-1.3-devel/src/arraytypes.c
--- pytables-1.3-ds-devel/src/arraytypes.c	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/src/arraytypes.c	2006-01-17 11:02:06.000000000 +0100
@@ -185,7 +185,8 @@
       * R. Sassolas */
       break;      
     } else {
-      fprintf(stderr, "this H5T_COMPOUND class is neither a complex number nor a 'REFERENCE_LIST'.\n");
+      fprintf(stderr, "this H5T_COMPOUND class is neither "
+              "a complex number nor a 'REFERENCE_LIST'.\n");
       goto out;
     }
     break; /* case H5T_COMPOUND */
diff -Naur pytables-1.3-ds-devel/src/hdf5Extension.pyx ds/pytables-1.3-devel/src/hdf5Extension.pyx
--- pytables-1.3-ds-devel/src/hdf5Extension.pyx	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/src/hdf5Extension.pyx	2006-01-25 13:45:55.000000000 +0100
@@ -358,6 +358,8 @@
   hid_t H5Aget_type(hid_t attr_id)
   hssize_t H5Sget_simple_extent_npoints(hid_t space_id)
   hid_t H5Rdereference(hid_t dataset, H5R_type_t ref_type, void *ref)
+  herr_t H5Rcreate(void *ref, hid_t loc_id, char *name, H5R_type_t ref_type, hid_t space_id)
+  ssize_t H5Iget_name(hid_t id, char *name, size_t size)
 
 # Functions from HDF5 HL Lite
 cdef extern from "H5ATTR.h":
@@ -390,9 +392,6 @@
 # 2005-11-09
 # R. Sassolas
 cdef extern from "H5DS.h":
-
-  herr_t  H5DSset_scale( hid_t dsid,
-                         char *dimname)
   
   herr_t  H5DSattach_scale( hid_t did,
                             hid_t dsid,
@@ -405,12 +404,6 @@
   htri_t H5DSis_attached( hid_t did,
                           hid_t dsid,
                           unsigned int idx)
-
-# herr_t  H5DSiterate_scales( hid_t did,
-#                             unsigned int dim,
-#                             int *idx,
-#                             H5DS_iterate_t visitor,
-#                             void *visitor_data)
   
   herr_t  H5DSset_label( hid_t did,
                          unsigned int idx,
@@ -519,7 +512,8 @@
   object H5UIget_info(hid_t loc_id, char *name, char *byteorder)
 
   object get_attribute_string_sys(hid_t loc_id, char *attr_name)
-
+  object get_filter_names( hid_t loc_id, char *dset_name,
+                           hid_t dataset_id)
 
 #-----------------------------------------------------------------------------
 
@@ -695,16 +689,149 @@
       if ret < 0:
         raise HDF5ExtError("Problems closing the file '%s'" % self.name)
 
+  # functions added for Dimension Scale stuff
+  # allow a PyTable object to be built from its reference
+  # 2005-12-19
+  # R. Sassolas
+
+  def _openArray_from_ref(self, hobj_ref_t ref):
+    """Open the array corresponding to a reference."""
+    
+    cdef hid_t array_id
+    cdef hid_t sid
+    cdef hid_t tid
+    cdef hsize_t *dims
+    cdef hsize_t *maxdims
+    cdef hsize_t *dims_chunk
+    cdef object shape
+    cdef herr_t ret
+    cdef hid_t base_type_id
+    cdef H5T_class_t class_id
+    cdef char byteorder[16]
+    cdef int i, enumtype
+    cdef int extdim
+    cdef char flavor[256]
+
+    # First get an ID corresponding to the reference    
+    array_id = H5Rdereference(self.file_id, H5R_OBJECT, <void *>&ref)
+    if array_id < 0:
+      raise HDF5ExtError(\
+      "Error in retrieving an ID corresponding to this reference.")
+    
+    # Then get the infos on the Array corresponding to this ID
+    # (inspired from Array._openArray)
+    
+    sid = H5Dget_space(array_id)
+    if sid < 0:
+      raise HDF5ExtError(\
+      "Error in retrieving the space ID of the Array.")
+    
+    tid = H5Dget_type(array_id)
+    if tid < 0:
+      raise HDF5ExtError(\
+      "Error in retrieving the type ID of the Array.")
+      
+    rank = H5Sget_simple_extent_ndims(sid)
+    if rank < 0:
+      raise HDF5ExtError(\
+      "Error in retrieving the rank of the Array.")
+
+    dims = <hsize_t *>malloc(rank*sizeof(hsize_t))
+    maxdims = <hsize_t *>malloc(rank*sizeof(hsize_t))
+    
+    ret = H5ARRAYget_info(array_id, tid,
+                          dims, maxdims,
+                          &base_type_id, &class_id, byteorder)
+    
+    if ret < 0:
+      raise HDF5ExtError("Unable to get array info.")
+
+
+    extdim = -1  # default is non-chunked Array
+    # Get the extendeable dimension (if any)
+    for i from 0 <= i < rank:
+      if maxdims[i] == -1:
+        extdim = i
+        break
+
+    strcpy(flavor, "NumArray")  # Default value
+    if self._isPTFile:
+      H5ATTRget_attribute_string(array_id, "FLAVOR", flavor)
+
+    # Allocate space for the dimension chunking info
+    dims_chunk = <hsize_t *>malloc(rank * sizeof(hsize_t))
+    if ( (H5ARRAYget_chunksize(array_id, rank,
+                               dims_chunk)) < 0):
+      # H5ARRAYget_chunksize frees dims_chunk
+      dims_chunk = NULL
+      if extdim >= 0:
+        raise HDF5ExtError, "Problems getting the chunksizes!"
+
+    # Get the array type
+    type_size = getArrayType(base_type_id, &enumtype)
+    if type_size < 0:
+      raise TypeError, "HDF5 class %d not supported. Sorry!" % class_id
+
+    shape = []
+    chunksizes = []
+    for i from 0 <= i < rank:
+      shape.append(dims[i])
+      if dims_chunk:
+        chunksizes.append(<int>dims_chunk[i])
+    shape = tuple(shape)
+    chunksizes = tuple(chunksizes)
+
+    type = naEnumToNAType.get(enumtype, None)
+    return (array_id, tid, base_type_id, extdim, flavor, 
+            type, naEnumToNASType[enumtype], 
+            shape, type_size, byteorder, chunksizes)
+  
+ 
+    # Then read the Array thanks to the infos collected in previous step
+    # (inspired from Array.readArray)
+
+#    nrows = na_shape[0]
+#    if nrows < 0:
+#      raise HDF5ExtError(\
+#      "Error in retrieving first dimension of the Array.")
+#
+#    # create the numarray that will be returned    
+#    naarr = numarray.array(None, type=na_type, shape=na_shape)
+          
+#    buflen = NA_getBufferPtrAndSize(naarr._data, 1, &rbuf)
+
+    # Strangely I get a 'segmentation fault' from Python when I uncomment
+    # the two lines concerning Theads. I can't figure out why... 
+    #Py_BEGIN_ALLOW_THREADS
+#    ret = H5ARRAYread(array_id, tid, 0, nrows, 1,
+#                      -1, rbuf)
+    #Py_END_ALLOW_THREADS
+
+    # Release ressources
+
+#    if ret < 0:
+#      raise HDF5ExtError("Problems reading the array data.")
+
+    # Convert some HDF5 types to Numarray after reading.
+    # (inspired from Array._convertTypes)
+#    stype = str(na_type)
+#    if stype == 'Time64':
+#      convertTime64(naarr, len(naarr), 1)
+    
+#    return naarr     
+   
+    
 
 cdef class AttributeSet:
   cdef hid_t   parent_id, dataset_id
   cdef char    *name
 
   def _g_new(self, node):
-    # Initialize the C attributes of Node object
-    self.name =  PyString_AsString(node._v_hdf5name)
-    # The parent group id of the node
-    self.parent_id = node._v_parent._v_objectID
+    if node._v_hdf5name:
+        # Initialize the C attributes of Node object
+        self.name =  PyString_AsString(node._v_hdf5name)
+        # The parent group id of the node
+        self.parent_id = node._v_parent._v_objectID
     self.dataset_id = node._v_objectID
 
   def __g_listAttr(self):
@@ -948,6 +1075,17 @@
         l=[]
         for i in xrange(nelmts):
           ref = dsbuf[i].ref
+          
+          # The creation of the PyTable object corresponding to this reference
+          # remains to be done ...
+          
+          file = self._v_node._v_file
+            
+          array_id, type_id, base_type_id, extdim, flavor, \
+          type, stype, shape, type_size, \
+          byteorder, chunksize = \
+              file._openArray_from_ref(ref)
+          
           dim_idx = dsbuf[i].dim_idx 
           
           l.append((ref, dim_idx))
@@ -962,7 +1100,8 @@
   
     elif class_id == H5T_VLEN:
       if str(attrname)== 'DIMENSION_LIST':
-        # get the number of dimensions of the dataset which owns this attribute 'DIMENSION_LIST'
+        # get the number of dimensions of the dataset which
+        # owns this attribute 'DIMENSION_LIST'
         rank = H5Sget_simple_extent_ndims(H5Dget_space(self.dataset_id))
 
         aid = H5Aopen_name(self.dataset_id,'DIMENSION_LIST')
@@ -990,7 +1129,91 @@
           l2=[]
           for j in xrange(length):
             ref = <hobj_ref_t>(p[j])
-            l2.append(ref)
+            # Now build the Dimension Scale corresponding to this reference
+            file = self._v_node._v_file
+            
+            array_id, type_id, base_type_id, extdim, flavor, \
+            type, stype, shape, itemsize, \
+            byteorder, chunksize = \
+                file._openArray_from_ref(ref)
+
+            # create the Atom of the DimensionScale
+            
+            atom_shape = list(shape)
+            atom_shape.__delitem__(extdim)
+            atom_shape.insert(extdim, 0)
+            atom_shape = tuple(atom_shape)
+            
+            atom = file.createAtom(type, atom_shape, flavor)
+            ds = file.createDimensionScale("", "", atom, dataset_id = array_id) 
+
+            # forge a pathname for the Dimension Scale
+            pathname = self._v_node._v_pathname
+            ds._v_pathname = pathname + "/ATTRS" + "/DIMENSION_LIST" + \
+                             "/REF_" + str(i) + "-" + str(j)
+
+            # Parameters which would have been initialized by 
+            # tables.Node.__init__()
+            ds._v_file = file
+
+            # Set the different IDs
+            # The function "_set_ids" is to my point of view a very dangerous
+            # function, but I didn't manage to set these IDs in a different
+            # way 
+            ds._set_ids(array_id, type_id, base_type_id)
+            
+            # Parameters which would have been initialized by 
+            # hdf5Extension.Array._openArray()
+            ds.extdim = extdim
+            ds.flavor = flavor
+            
+            # Parameters which would have been initialized by
+            # tables.EArray._g_open()
+            ds.type = type
+            ds.stype = stype
+            ds.shape = shape
+            ds.itemsize = itemsize
+            ds.byteorder = byteorder
+            ds._v_chunksize = chunksize
+            ds.nrows = shape[extdim]
+
+            # Inspired from tables.Leaf._g_getFilters()
+            # Create a filters instance with default values
+            filters = file.createFilters()
+            filtersDict = get_filter_names(0, "", ds.objectID)
+                         
+            if filtersDict:
+                for name in filtersDict:
+                    if name.startswith("lzo"):
+                        filters.complib = "lzo"
+                        filters.complevel = filtersDict[name][0]
+                    elif name.startswith("ucl"):
+                        filters.complib = "ucl"
+                        filters.complevel = filtersDict[name][0]
+                    elif name.startswith("bzip2"):
+                        filters.complib = "bzip2"
+                        filters.complevel = filtersDict[name][0]
+                    elif name.startswith("deflate"):
+                        filters.complib = "zlib"
+                        filters.complevel = filtersDict[name][0]
+                    elif name.startswith("szip"):
+                        filters.complib = "szip"
+                        #filters.complevel = filtersDict[name][0]
+                        filters.complevel = 1  # Because there is not a compression
+                                               # level equivalent for szip
+                    elif name.startswith("shuffle"):
+                        filters.shuffle = 1
+                    elif name.startswith("fletcher32"):
+                        filters.fletcher32 = 1
+
+            ds.__dict__['filters'] = filters
+            
+            # Other parameters which would have been initialized by
+            # tables.EArray._g_open() 
+            (ds._v_maxTuples, computedChunksize) = ds._calcTuplesAndChunks(
+            atom, extdim, ds.nrows, filters.complevel)
+                        
+            l2.append(ds)
 
           l.append(l2)  
 
@@ -1021,9 +1244,9 @@
 
     # Check for multimensional attributes (if file.format_version > "1.4")
     if hasattr(node._v_file, "format_version"):
-      format_version = node._v_file.format_version
+        format_version = node._v_file.format_version
     else:
-      format_version = None
+        format_version = None
 
     if format_version is not None:
       if format_version < "1.4":
@@ -1127,11 +1350,17 @@
   cdef char  *name
   cdef hid_t  parent_id
 
-  def _g_new(self, where, name, init):
-    self.name = strdup(name)
-    """The name of this node in its parent group."""
-    self.parent_id = where._v_objectID
-    """The identifier of the parent group."""
+  def _g_new(self, where, name, init, dataset_id):
+    if dataset_id ==0:
+        self.name = strdup(name)
+        """The name of this node in its parent group."""
+        self.parent_id = where._v_objectID
+        """The identifier of the parent group."""
+    else:
+        # dataset_id != 0:
+        # The object is created from a reference
+        self.name = ""
+        self.parent_id = 0
 
 
   def _g_delete(self):
@@ -1268,13 +1497,33 @@
   cdef hid_t   type_id
   cdef hid_t   base_type_id
 
-  def _g_new(self, where, name, init):
+  def _g_new(self, where, name, init, dataset_id):
     if init:
       # Put this info to 0 just when the class is initialized
       self.dataset_id = -1
       self.type_id = -1
       self.base_type_id = -1
-    super(Leaf, self)._g_new(where, name, init)
+    super(Leaf, self)._g_new(where, name, init, dataset_id)
+
+
+  # There should be a nicer way to do so ...
+  def _set_ids(self, dataset_id, type_id, base_type_id):
+    """
+    This function is reserved for PyTables objects created from a reference.
+    It allow some IDs to be set : the 'dataset_id', the 'type_id' and the
+    'base_type_id'.
+     
+    """
+    if not self.hdf5name:
+        self.dataset_id = dataset_id
+        self.type_id = type_id
+        self.base_type_id = base_type_id 
+    else:
+        error_msg = \
+"This function is reserved for PyTables objects created from a reference. \
+Read the docstring for more info."
+        raise ValueError, error_msg
+
 
   def _g_close(self):
     # Close dataset in HDF5 space
@@ -1718,37 +1967,36 @@
   # added Functions for Dimension Scale
   # 2005-11-09
   # R. Sassolas
-  
-  def to_scale(self, char *dimension_name=""):
+
+  def get_reference(self):
     """
-    Convert current dataset (self) to a Dimension Scale, with optional name, 'dimension_name'.
+    Get the reference of an Array (the adress of its values in the hdf5 file).
+    
     """
-    cdef hid_t dataset_id
+    
+    cdef hobj_ref_t ref
+    cdef hid_t loc_id
     cdef herr_t ret
     
-    dataset_id = self.dataset_id
-    ret = H5DSset_scale(dataset_id, dimension_name)
+    loc_id = self.dataset_id
+    
+    ret = H5Rcreate(&ref, loc_id, ".", H5R_OBJECT, -1)
     if ret < 0:
-      raise HDF5ExtError("Problems converting the %s into a Dimension Scale." % self.__class__.__name__)
-    return ret
+        raise HDF5ExtError("Error getting the reference \
+for the dataset %s" % self.name)
 
-#  def _iterate_scales(self, unsigned int dimension_index, int index, H5DS_iterate_t visitor, visitor_data):
-#     """
-#     Iterates the operation 'visitor' through the scales attached to dimension 'dimension_index' of current dataset (self).
-#     """
-#     cdef hid_t dataset_id
-#     cdef int *index_p
-#     cdef herr_t ret
-#        
-#     dataset_id = self.dataset_id
-#     index_p = &index
-#    
-#     ret = H5DSiterate_scales(dataset_id, dimension_index, index_p, visitor, visitor_data_p)
-#     return ret
+    return ref
+    
 
   def set_label(self, unsigned int dimension_index, char *label):
-    """
-    Set label for the dimension 'dimension_index' of current dataset (self) to the value 'label'.
+    """Set a label for a dimension.
+
+    Keyword arguments:
+    
+    dimension_index -- the index of the dimension
+    
+    label -- the label
+
     """
     cdef hid_t dataset_id
     cdef herr_t ret
@@ -1756,17 +2004,19 @@
     dataset_id = self.dataset_id
     
     if self.rank<=dimension_index:
-      error_msg = "The Array specified doesn't have %d dimensions." % (dimension_index+1)
-      raise AttributeError, error_msg
+      error_msg = "The Array specified \
+doesn't have %d dimensions." % (dimension_index+1)
+      raise ValueError, error_msg
     
     ret = H5DSset_label(dataset_id, dimension_index, label)
     if ret <0:
-      raise HDF5ExtError("Problems setting the label %s to the dimension %d of the dataset." % (label,dimension_index))
+      raise HDF5ExtError("Problems setting the label %s to \
+the dimension %d of the Array." % (label,dimension_index))
+
+    return ret
   
   def is_scale(self):
-    """
-    Determines whether current Array (self) is a Dimension Scale.
-    """
+    """Determines whether an Array is a Dimension Scale."""
     cdef hid_t dataset_id
     cdef htri_t ret
     
@@ -1774,45 +2024,97 @@
     
     ret = H5DSis_scale(dataset_id)
     if ret <0:
-      raise HDF5ExtError("Problems determining whether the dataset is a Dimension Scale.")
+      raise HDF5ExtError("Problems determining \
+whether the Array is a Dimension Scale.")
+    
     return ret
 
   def get_scale_number(self, unsigned int dimension_index):
-    """
-    Determines how many Dimension Scales are attached to dimension 'dimension_index' of current Array (self).
+    """Determines how many Dimension Scales are attached to a dimension
+
+    Keyword arguments:
+    
+    dimension_index -- the index of the dimension
+
     """
     cdef hid_t dataset_id
     cdef int num_scales
     
     dataset_id = self.dataset_id
     
-    if self.rank<dimension_index:
-      error_msg = "The Array specified doesn't have %d dimensions" % (dimension_index+1)
-      raise AttributeError, error_msg
+    if self.rank<dimension_index+1:
+      error_msg = \
+"The Array specified doesn't have %d dimensions." % (dimension_index+1)
+      raise ValueError, error_msg
     
     num_scales = H5DSget_num_scales(dataset_id, dimension_index)
     if num_scales <0:
-      raise HDF5ExtError("Problems determining the number of Dimension Scales attached to dimension %d of the dataset." % dimension_index)
+      raise HDF5ExtError("Problems determining the number of Dimension Scales \
+attached to dimension %d of the Array." % dimension_index)
+    
     return num_scales
 
 
+  def clear_scale(self, hid_t dimension_scale_id, hobj_ref_t ref,
+                  unsigned int dimension_index):
+    """Detach a Dimension Scale from a dimension of an Array.
+
+    Keyword arguments:
+    
+    dimension_scale_id -- the object ID of the Dimension Scale
+    
+    ref -- the reference of the Dimension Scale
+    
+    dimension_index -- the index of the dimension
+    
+    Remark: The Dimension Scale is specified either by its ID or by its 
+    reference
+
+    """
+    
+    cdef herr_t ret
+    cdef hid_t dataset_id
+    cdef char *name
+    
+    dataset_id = self.dataset_id
+
+    close_dimension_scale_id = False
+    if not dimension_scale_id:
+        file_id = self._v_file._getFileId()
+        dimension_scale_id = H5Rdereference(file_id, H5R_OBJECT, <void *>&ref)
+        if dimension_scale_id < 0:
+            raise HDF5ExtError(\
+"Error in retrieving an ID corresponding to the reference %d" % ref)
+        # the ID returned by HRdereference will have to be released
+        close_dimension_scale_id = True
+     
+    ret = H5DSdetach_scale(dataset_id, dimension_scale_id, dimension_index)
+    if ret <0:
+      raise HDF5ExtError("Problems detaching the Dimension Scale \
+from the dimension %d of the dataset" % dimension_index)
+    
+    # If needed, release the ID of the Array
+    if close_dimension_scale_id:
+        H5Dclose(dimension_scale_id)
+    
+    return ret
+
+
 # A new class to better distinguish between Arrays and Dimension Scales
 # 2005-12-05
 # R. Sassolas
 cdef class DimensionScale(Array):
-  """Dimension Scale are merely datasets with some extra metadata"""
+  """Dimension Scale are merely Arrays with some extra metadata"""
 
-  def _dereference(self, hobj_ref_t ref):
-    cdef hid_t retvalue
+  def attach_to(self, hid_t dataset_id, unsigned int dimension_index):
+    """Attach a Dimension Scale to a dimension of an Array.
 
-    retvalue = H5Rdereference(self.dataset_id, H5R_OBJECT, <void *>&ref)
-    if retvalue <0  :
-      raise HDF5ExtError("Problem occured while dereferencing %d" % ref)
-    return retvalue
+    Keyword arguments:
+    
+    dataset_id -- the object ID of the Array
+    
+    dimension_index -- the index of the dimension
 
-  def attach_to(self, hid_t dataset_id, unsigned int dimension_index):
-    """
-    Attach current Dimension Scale (self) to dimension 'dimension_index' of dataset 'dataset_id'.
     """
     cdef hid_t dimension_scale_id
     cdef herr_t ret
@@ -1821,26 +2123,63 @@
 
     ret = H5DSattach_scale(dataset_id, dimension_scale_id, dimension_index)
     if ret <0:
-      raise HDF5ExtError("Problems attaching the Dimension Scale to the dimension %d of the dataset." % dimension_index)
+      raise HDF5ExtError("Problems attaching the Dimension Scale \
+to the dimension %d of the dataset." % dimension_index)
+    
     return ret
 
-  def detach_from(self, hid_t dataset_id, hid_t dimension_scale_id, unsigned int dimension_index):
-    """
-    Detach Dimension Scale 'dimension_scale_id' from dimension 'dimension_index' of dataset 'dataset_id'.
+  def detach_from(self, hid_t dataset_id, hobj_ref_t ref,
+                  unsigned int dimension_index):
+    """Detach a Dimension Scale from a dimension of an Array.
+
+    Keyword arguments:
+    
+    dataset_id -- the object ID of the Array
+    
+    ref -- the reference of the Array
+    
+    dimension_index -- the index of the dimension
+    
+    Remark: The Array is specified either by its ID or by its reference
+
     """
-    # the ID of the Dimension Scale is not retrieved (but it could be) from 'self' 
-    # because this function is to be called both from 'DimensionScale' and 'Array' instances
     
     cdef herr_t ret
+    cdef hid_t dimension_scale_id
+    cdef char *name
+    
+    dimension_scale_id = self.dataset_id
+
+    close_dataset_id = False
+    if not dataset_id:
+        file_id = self._v_file._getFileId()
+        dataset_id = H5Rdereference(file_id, H5R_OBJECT, <void *>&ref)
+        if dataset_id < 0:
+            raise HDF5ExtError(\
+"Error in retrieving an ID corresponding to the reference %d." % ref)
+        # the ID returned by HRdereference will have to be released
+        close_dataset_id = True
      
     ret = H5DSdetach_scale(dataset_id, dimension_scale_id, dimension_index)
     if ret <0:
-      raise HDF5ExtError("Problems detaching the Dimension Scale to the dimension %d of the dataset" % dimension_index)
+      raise HDF5ExtError("Problems detaching the Dimension Scale \
+from the dimension %d of the dataset" % dimension_index)
+    
+    # If needed, release the ID of the Array
+    if close_dataset_id:
+        H5Dclose(dataset_id)
+        
     return ret
 
   def is_attached_to(self, hid_t dataset_id, unsigned int dimension_index):
-    """
-    Report if current Dimension Scale (self) is currently attached to dimension 'dimension_index' of dataset 'dataset_id'.
+    """Report if a Dimension Scale is attached to a dimension of an Array.
+
+    Keyword arguments:
+    
+    dataset_id -- the object ID of the Array
+    
+    dimension_index -- the index of the dimension
+
     """
     cdef hid_t dimension_scale_id
     cdef htri_t ret
@@ -1849,7 +2188,9 @@
     
     ret = H5DSis_attached(dataset_id, dimension_scale_id, dimension_index)
     if ret <0:
-      raise HDF5ExtError("Problems determining whether the Dimension Scale is attached to dimension %d of the dataset" % dimension_index)
+      raise HDF5ExtError("Problems determining whether the Dimension Scale \
+is attached to the dimension %d of the dataset" % dimension_index)
+    
     return ret
   
 
diff -Naur pytables-1.3-ds-devel/src/TableExtension.pyx ds/pytables-1.3-devel/src/TableExtension.pyx
--- pytables-1.3-ds-devel/src/TableExtension.pyx	2005-11-01 14:22:35.000000000 +0100
+++ ds/pytables-1.3-devel/src/TableExtension.pyx	2006-01-20 09:33:59.000000000 +0100
@@ -195,7 +195,7 @@
   cdef hid_t   parent_id  # XXX from Node
   cdef hid_t   dataset_id, type_id, disk_type_id
 
-  def _g_new(self, where, name, init):
+  def _g_new(self, where, name, init, dataset_id):
     self.name = strdup(name)  # XXX from Node._g_new()
     # The parent group id for this object
     self.parent_id = where._v_objectID  # XXX from Node._g_new()
diff -Naur pytables-1.3-ds-devel/src/utils.c ds/pytables-1.3-devel/src/utils.c
--- pytables-1.3-ds-devel/src/utils.c	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/src/utils.c	2006-01-23 15:16:44.000000000 +0100
@@ -139,7 +139,7 @@
  */
 
 PyObject *get_filter_names( hid_t loc_id,
-			    const char *dset_name)
+			    const char *dset_name, hid_t dataset_id)
 {
  hid_t    dset;
  hid_t    dcpl;           /* dataset creation property list */
@@ -154,13 +154,19 @@
  PyObject *filters;
  PyObject *filter_values;
 
- /* Open the dataset. */
- if ( (dset = H5Dopen( loc_id, dset_name )) < 0 ) {
-   goto out;
- }
+ if ( dataset_id == 0)
+ {
+   /* Open the dataset. */
+   if ( (dset = H5Dopen( loc_id, dset_name )) < 0 ) {
+     goto out;
+   }
 
- /* Get the properties container */
- dcpl = H5Dget_create_plist(dset);
+   /* Get the properties container */
+   dcpl = H5Dget_create_plist(dset);
+ }
+ else
+   dcpl = H5Dget_create_plist(dataset_id);
+   
  /* Collect information about filters on chunked storage */
  if (H5D_CHUNKED==H5Pget_layout(dcpl)) {
    filters = PyDict_New();
@@ -193,7 +199,8 @@
  }
 
  H5Pclose(dcpl);
- H5Dclose(dset);
+ if ( dataset_id == 0 )
+   H5Dclose(dset);
 
 return filters;
 
@@ -596,6 +603,7 @@
 
 }
 
+
 /* The next provides functions to support a complex datatype.
    HDF5 does not provide an atomic type class for complex numbers
    so we make one from a HDF5 compound type class.
@@ -638,7 +646,7 @@
    instead of a nested type should not be a big issue (I hope!) :-/
    F. Altet 2005-05-23 */
 int is_complex(hid_t type_id) {
-  hid_t class_id, base_type_id, base_class_id;
+  hid_t class_id, base_type_id;
   hid_t class1, class2;
   char *colname1, *colname2;
   int result = 0;
@@ -678,7 +686,7 @@
 int is_reference_list(hid_t type_id) {
   hid_t class_id;
   hid_t class0, class1;
-  int i, result=0;
+  int result=0;
   hsize_t nfields;
   
   class_id = H5Tget_class(type_id);
diff -Naur pytables-1.3-ds-devel/src/utilsExtension.pyx ds/pytables-1.3-devel/src/utilsExtension.pyx
--- pytables-1.3-ds-devel/src/utilsExtension.pyx	2005-10-31 18:42:21.000000000 +0100
+++ ds/pytables-1.3-devel/src/utilsExtension.pyx	2006-01-23 15:14:22.000000000 +0100
@@ -224,7 +224,7 @@
   int    is_complex(hid_t type_id)
   herr_t set_order(hid_t type_id, char *byteorder)
   herr_t get_order(hid_t type_id, char *byteorder)
-  object get_filter_names( hid_t loc_id, char *dset_name)
+  object get_filter_names( hid_t loc_id, char *dset_name, hid_t dataset_id)
 
   H5T_class_t getHDF5ClassID(hid_t loc_id, char *name, H5D_layout_t *layout,
                              hid_t *type_id, hid_t *dataset_id)
@@ -573,7 +573,7 @@
 
 def getFilters(parent_id, name):
   "Get a dictionary with the filter names and cd_values"
-  return get_filter_names(parent_id, name)
+  return get_filter_names(parent_id, name, 0)
 
 
 # This is used by several <Leaf>._convertTypes() methods.
diff -Naur pytables-1.3-ds-devel/src/utils.h ds/pytables-1.3-devel/src/utils.h
--- pytables-1.3-ds-devel/src/utils.h	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/src/utils.h	2006-01-23 15:14:34.000000000 +0100
@@ -54,7 +54,8 @@
 
 PyObject *createNamesTuple(char *buffer[], int nelements);
 
-PyObject *get_filter_names( hid_t loc_id, const char *dset_name);
+PyObject *get_filter_names( hid_t loc_id, const char *dset_name,
+                            hid_t dataset_id);
 
 PyObject *Giterate(hid_t parent_id, hid_t loc_id, const char *name);
 
diff -Naur pytables-1.3-ds-devel/tables/Array.py ds/pytables-1.3-devel/tables/Array.py
--- pytables-1.3-ds-devel/tables/Array.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/Array.py	2006-01-19 17:17:52.000000000 +0100
@@ -57,7 +57,7 @@
 # list of the different arrays - or more accurately the list of their IDs
 # 2005-11-28
 # R. Sassolas
-ARRAY_LIST = ["ARRAY", "CARRAY"]
+ARRAY_LIST = ["ARRAY", "CARRAY", "EARRAY"]
 
 
 
@@ -145,9 +145,9 @@
         return buffersize
 
 
-    def __init__(self, parentNode, name,
+    def __init__(self, parentNode, name, 
                  object=None, title="",
-                 log=True):
+                 log=True, dataset_id = 0):
         """Create the instance Array.
 
         Keyword arguments:
@@ -242,7 +242,8 @@
         """The index of the enlargeable dimension."""
 
         # Ordinary arrays have no filters: leaf is created with default ones.
-        super(Array, self).__init__(parentNode, name, new, Filters(), log)
+        super(Array, self).__init__(parentNode, name, new, Filters(), log,
+                                    dataset_id)
 
 
     def _g_create(self):
@@ -395,7 +396,9 @@
             flavor = "String"
         else:
             raise TypeError, \
-"""The object '%s' is not in the list of supported objects (NumArray, CharArray, Numeric, homogeneous list or homogeneous tuple, int, float or str). Sorry, but this object is not supported.""" % (arr)
+"""The object '%s' is not in the list of supported objects 
+(NumArray, CharArray, Numeric, homogeneous list or homogeneous tuple,
+int, float or str). Sorry, but this object is not supported.""" % (arr)
 
         # We always want a contiguous buffer
         # (no matter if has an offset or not; that will be corrected later)
@@ -479,13 +482,14 @@
                         else:
                             # Warn the user
                             warnings.warn( \
-"""The object on-disk has Numeric flavor, but Numeric is not installed locally. Returning a numarray object instead!.""")
+"""The object on-disk has Numeric flavor, but Numeric is not installed locally.
+Returning a numarray object instead!.""")
                             # Default to numarray
                             self.listarr = numarray.swapaxes(self.listarr,
                                                              self.extdim, 0)
                     else:
                         self.listarr = numarray.swapaxes(self.listarr,
-			                                 self.extdim, 0)
+                                                         self.extdim, 0)
                 self._row = -1
                 self._startb = self._stopb
             self._row += 1
@@ -724,10 +728,10 @@
         "Private part of Leaf.copy() for each kind of leaf"
         # Get the slice of the array
         # (non-buffered version)
-	if self.shape:
+        if self.shape:
             arr = self[start:stop:step]
-	else:
-	    arr = self[()]
+        else:
+            arr = self[()]
         # Build the new Array object
         object = Array(group, name, arr, title=title, log=log)
         nbytes = self.itemsize
@@ -749,377 +753,253 @@
   byteorder = %r""" % (self, self.type, self.stype, self.shape, self.itemsize,
                        self.nrows, self.flavor, self.byteorder)
 
-    # Before deleting an Array or a Dimension Scale, detach all
-    # 2005-1129
+
+    # functions redefined to implement Dimension Scales
+    # 2005-12-21
     # R. Sassolas
-    def _g_delete(self):
+
+    # Before deleting an Array or a Dimension Scale, detach all
+    def _f_remove(self, recursive=False):
+        """
+        Remove this node from the hierarchy.
+
+        If the node has children, recursive removal must be stated by
+        giving `recursive` a true value, or a `NodeError` will be
+        raised.
+        """
+            
         _classId = str(self._c_classId)
         if _classId in ARRAY_LIST:
-	    self.clear_scales2()
-	elif _classId == "DIMENSION_SCALE":
-	    self.detach_from_all2()
-	 
-	# Finally call the inherited method
-	hdf5Extension.Array._g_delete(self)
-
-    # When an Array is moved, the attribute 'REF_LIST' of all the Dimension Scale
-    # that are attached to it have to be updated.
-    def move(self, newparent=None, newname=None, overwrite=False):
-        try:
-	    dim_list = self.attrs.DIM_LIST
-	    old_path_name = self._v_pathname
-	    dim_list_found = True	    
-	except:
-	    # no Dimension Scale is attached to current Array : there are
-	    # no attributes to update
-	    dim_list_found = False
-	
-	# move the Array    
-	Leaf.move(self, newparent, newname, overwrite)
-	
-	if dim_list_found:
-	    new_path_name = self._v_pathname
-	    if new_path_name != old_path_name:
-	        dim_list_length = dim_list.__len__()
-	        for i in xrange(dim_list_length):
-	            l = dim_list[i]
-		    l_length = l.__len__()
-		    for j in xrange(l_length):
-		        # Retrieve a Dimension Scale and its attribute 'REF_LIST'
-		        ds_path_name = l[j]
-		        ds = _parse(self,ds_path_name)
-		        ref_list = ds.attrs.REF_LIST
-		    
-		        # Replace the pathnames in the list 
-		        index = ref_list.index((old_path_name, i))
-		        ref_list.insert(index, (new_path_name,i))
-		        ref_list.__delitem__(index+1)
-		    
-		        # Update the attribute
-		        ds.attrs._g__setattr('REF_LIST', ref_list)	
+            self.clear_all_scales()
+        elif _classId == "DIMENSION_SCALE":
+            self.detach_from_all()
+
+        # Finally call the inherited method
+        super(Array, self)._f_remove(recursive)
+
+
+    # Before copying an Array, detach all
+    def _f_copy(self, newparent=None, newname=None,
+                overwrite=False, recursive=False, **kwargs):
+        """
+        Copy this node and return the new one.
+
+        Creates and returns a copy of the node, maybe in a different
+        place in the hierarchy.  `newparent` can be a `Group` object or
+        a pathname in string form.  If it is not specified or ``None``,
+        the current parent group is chosen as the new parent.  `newname`
+        must be a string with a new name.  If it is not specified or
+        ``None``, the current name is chosen as the new name.  If
+        `recursive` copy is stated, all descendents are copied as well.
+
+        Copying a node across databases is supported but can not be
+        undone.  Copying a node over itself is not allowed, nor it is
+        recursively copying a node into itself.  These result in a
+        `NodeError`.  Copying over another existing node is similarly
+        not allowed, unless the optional `overwrite` argument is true,
+        in which case that node is recursively removed before copying.
+
+        Additional keyword arguments may be passed to customize the
+        copying process.  For instance, title and filters may be
+        changed, user attributes may be or may not be copied, data may
+        be subsampled, stats may be collected, etc.  See the
+        documentation for the particular node type.
+
+        Using only the first argument is equivalent to copying the node
+        to a new location without changing its name.  Using only the
+        second argument is equivalent to making a copy of the node in
+        the same group.
+        """
+
+        # First call the inherited method and store the returned Array
+        temp = super(Array, self)._f_copy(newparent, newname,
+                                          overwrite, recursive, **kwargs)
+        # Then reset its "DIM_LIST" attribute
+        if temp.attrs.__contains__('DIM_LIST'):
+            temp.attrs._g__delattr('DIM_LIST')
+        
+        if temp.attrs.__contains__('DIMENSION_LIST'):
+            temp.attrs._g__delattr('DIMENSION_LIST')
+        
+        # Finally return this Array
+        return temp        
+
 
     # functions added to implement Dimension Scales
-    # 2005-111-15
-    # R. Sassolas
     
-    def to_scale(self, dimension_name=""):
-        """Turn current Array into a Dimension Scale, with an optionnal name."""
-        if self.is_scale() == False:
-	    hdf5Extension.Array.to_scale(self,dimension_name)
-	    
-	    # Add the attribute name to the list of attribute so that it can  be obtained later. 
-	    if(self.attrs):
-	        self.attrs._addAttrName('NAME')
-		
-	    # This allow some kind of "cast" in Python". It might no be very nice, but it works well !
-	    self.__class__=DimensionScale	    
-	else:
-	    error_msg = "Current object seems to already be a valid Dimension Scale."
-	    raise AttributeError, error_msg
-    
-    # The point of this method is to add the attribute for labels ('DIMENSION_LABELS')
-    # in the dictionnary gathering (among others) the names of the attributes owned by the Array.
-    # It can indeed not be added in a standard way. 
+    # The point of this method is to add the attribute for labels
+    # ('DIMENSION_LABELS') in the dictionnary gathering (among others)
+    # the names of the attributes owned by the Array.
+    # It can indeed not be added in a standard way.
     def set_label(self, dimension_index, label):
-        """Set a label for a dimension."""
-	if(self.attrs):
-	    self.attrs._addAttrName('DIMENSION_LABELS')
-	return hdf5Extension.Array.set_label(self,dimension_index, label)
+        """Set a label for a dimension.
 
-    # This method makes up for the original Hdf5 method which bears the same name
-    # Remark : is this method really usefull ?
-    def get_label(self, dimension_index):
-        """Get the label of a dimension."""
-	label_list_found = False
-	try:
-	    label_list = self.attrs.DIMENSION_LABELS
-	    label_list_found = True
-	except:
-	    return ""
-	
-	if label_list_found :
-	    rank = self.shape.__len__()
-	    if dimension_index>rank-1:
-	        error_msg = "The Array specified doesn't have %d dimensions." % (dimension_index+1)
-                raise AttributeError, error_msg
-	    return label_list[dimension_index]
+        Keyword arguments:
+	    dimension_index -- the index of the dimension
+	    label -- the label for the dimension
 
-    # version which updates neither the 'REF_LIST' nor the 'DIM_LIST'
-    # def clear_scales(self):
-    #    """Get rid of all the scales attached to an Array."""
-    #	 if not self.is_scale():
-    #        rank=self.shape.__len__()
-    #	     i=0
-    #	     num = self.get_scale_number(i)
-    #        while i<rank-1 and num==0:
-    #            i=i+1
-    #		 num = self.get_scale_number(i)                
-    #	     if num==0: # there are no scale attached to current Array
-    #	         pass
-    #	     else:
-    #	         scale_list = self.attrs.DIMENSION_LIST
-    #	         while i<rank:
-    #	             num = self.get_scale_number(i)
-    #	             if num!=0:
-    #                    for j in xrange(num):
-    #		             ref = scale_list[i][0][j]
-    #		             dsid = self._dereference(ref)
-    #		             hdf5Extension.DimensionScale.detach_from(self, self.objectID, dsid, i)        
-    #		     i=i+1		    
-    #	 else:
-    #	     error_msg = "Current object is not an Array but a Dimension Scale. No scales can be attached to it."
-    #	     raise AttributeError, error_msg
+	    """
+        if(self.attrs):
+            self.attrs._addAttrName('DIMENSION_LABELS')
+        return hdf5Extension.Array.set_label(self,dimension_index, label)
 
-    # version which do update the 'REF_LIST' and 'DIM_LIST'
-    def clear_scales2(self):
-        """Get rid of all the scales attached to an Array."""
-	if not self.is_scale():
-            try:
-	        dim_list = self.attrs.DIM_LIST
-		dim_list_found = True
-	    except:
-	        # if the attribute 'DIM_LIST' can't be found, it means that current Dimension Scale (self)
-                # has not yet been attached to any dimension. So there's nothing to do.
-	        dim_list_found = False
-	    
-	    if dim_list_found:
-	        dim_list_length = dim_list.__len__()
-      	        for i in xrange(dim_list_length):
-	            l=dim_list[i]
-		    l_length = l.__len__()
-	            for j in xrange(l_length):
-		        path_name = l[j]
-		        ds = _parse(self, path_name)
-		        dsid = ds.objectID
-		    
-		        # update the attribute 'REF_LIST' of the Dimension Scale which is to be detached
-		        ref_list = ds.attrs.REF_LIST
-		        ref_list.remove((self._v_pathname,i))
-		        ds.attrs._g__setattr('REF_LIST',ref_list)
-	    
-	                hdf5Extension.DimensionScale.detach_from(ds, self.objectID, dsid, i)        
-
-	        # finally clear the attribute 'DIM_LIST' of current Array
-	        empty_list = _generate_empty_dim_list(dim_list_length)
-	        self.attrs._g__setattr('DIM_LIST', empty_list)	    
-	    		    
-	else:
-	    error_msg = "No scale can be attached to a Dimension Scale."
-	    raise AttributeError, error_msg
 
+    # This method makes up for the original Hdf5 method which bears
+    # the same name
+    # Remark : is this method really usefull ?
+    def get_label(self, dimension_index):
+        """Get the label of a dimension.
 
+        Keyword arguments:
+        dimension_index -- the index of the dimension
 
-# a small utility function which returns the object specified by the pathname 'name'
-# 'array' allow the file name to be retrieved
-# 2005-12-1
-# R. Sassolas
-def _parse(array, name):
-    path=(name.replace("/","."))
-    ds = eval("array._v_file.root"+path)
-    return ds
-
-# An other small utility function which generates an empty list with respect
-# to the format of the attribute 'DIM_LIST'
-def _generate_empty_dim_list(size):
-    l=[]
-    for i in xrange(size):
-        l.append([])
-    return l
+	    """
+        label_list_found = False
+        try:
+            label_list = self.attrs.DIMENSION_LABELS
+            label_list_found = True
+        except:
+            return ""
+        
+        if label_list_found :
+            rank = self.shape.__len__()
+            if dimension_index>rank-1:
+                error_msg = \
+"The Array specified doesn't have %d dimensions." % (dimension_index+1)
+                raise AttributeError, error_msg
+            return label_list[dimension_index]
 
+    def clear_scale(self, ds, dimension_index):
+        """Detach a Dimension Scale from a dimension of an Array.
 
+        Keyword arguments:
+	    ds -- the Dimension Scale to be detached
+	    dimension_index -- the index of the dimension
 
-# new class for Dimension Scales
-# Remark : this class has to be in the same Python file as the Array class, otherwise there's a loop in the imports
-# (Array import DimensionScale and DimensionScale import Array), which makes Python crash.
-# 2005-11-15
-# R. Sassolas
-class DimensionScale(Array, hdf5Extension.DimensionScale):
-    """
-    Represents a HDF5 Dimension Scale.
-    
-    """
-    
-    _c_classId = 'DIMENSION_SCALE'
-    
-    # When an Dimension Scale is moved, the Arrays it is attached to must have
-    # their attributes 'DIM_LIST' updated
-    def move(self, newparent=None, newname=None, overwrite=False):
+	    """
+        error_msg = \
+"The object %s doesn't seem to be a valid Dimension Scale." % ds.__str__()
         try:
-	    ref_list = self.attrs.REF_LIST
-	    old_path_name = self._v_pathname
-	    ref_list_found = True	    
-	except:
-	    # Current Dimension Scale is attached to no Array
-	    # There are no attribute to update
-	    ref_list_found = False
-	
-	# move the Dimension Scale    
-	Leaf.move(self, newparent, newname, overwrite)
-	
-	if ref_list_found:
-	    new_path_name = self._v_pathname
-	    if new_path_name != old_path_name:
-		ref_list_length = ref_list.__len__()
-	        for i in xrange(ref_list_length):
-	            # Retrieve an Array and its attribute 'DIM_LIST'
-		    array_path_name = ref_list[i][0]
-		    dimension_index = ref_list[i][1]
-		    array = _parse(self, array_path_name)
-		    dim_list = array.attrs.DIM_LIST
-		
-		    # Replace the pathnames in the list
-		    l = dim_list[dimension_index]
-		    index = l.index(old_path_name)
-		    l.insert(index, new_path_name)
-		    l.__delitem__(index+1)
-		    dim_list[dimension_index] = l
-		
-		    # Update the attribute
-		    array.attrs._g__setattr('DIM_LIST', dim_list)
-    
-    def attach_to(self, array, dimension_index):
-        """Attach a Dimension Scale to one dimension of an Array."""
+            if ds.is_scale():
+                
+                if ds.is_attached_to(self, dimension_index):
+                    ds_id = ds.objectID
 
-        # If current Dimension Scale (self) is already attached to the dimension specified, nothing is done
-        if not self.is_attached_to(array, dimension_index):
-          _objectID = array.objectID
-	  
-	  # Check the second argument (the first arguments have already been tested by is_attached_to())	
-	  _shape = array.shape
-	  if self.shape[0] != array.shape[dimension_index]:
-	      error_msg = "The shape of this Dimension Scale is not compatible with the dimension specified."   
-	      raise ValueError, error_msg
-	  
-	  hdf5Extension.DimensionScale.attach_to(self, _objectID, dimension_index)
-	
-	  # add the names of the attribute in the AttributeSet list of attributes.
-          if(self.attrs):
-	    self.attrs._addAttrName('REFERENCE_LIST')
-	  if(array.attrs):
-	    array.attrs._addAttrName('DIMENSION_LIST')
-	    
-	  # new attributes, more user-friendly than the ones from hdf5...
-	  # 'REF_LIST' is to make up for 'REFERENCE_LIST'
-          try:
-              ref_list = self.attrs.REF_LIST
-          except: # the attribute doesn't exist yet
-              ref_list = []	      
-          ref_list.append((array._v_pathname,dimension_index))
-          self.attrs._g__setattr('REF_LIST', ref_list)
-	
-	  # 'DIM_LIST' is to make up for 'DIMENSION_LIST'
-	  try:
-              dim_list = array.attrs.DIM_LIST
-          except: # the attribute doesn't exist yet, initialize it.
-              rank=array.shape.__len__()
-	      dim_list=_generate_empty_dim_list(rank)
-	      
-	  # get the list of Dimension Scales attached to dimension dimension_index,
-	  # and add the pathname of current Dimension Scale
-	  l = dim_list[dimension_index]
-	  l.append(self._v_pathname)
-	  dim_list[dimension_index]=l
-	      
-	  array.attrs._g__setattr('DIM_LIST', dim_list)
-
-    def  is_attached_to(self, array, dimension_index):
-        """Check if a Dimension Scale is attached to the dimension specified."""
-        # Check current object
-        if self.is_scale() == False:
-            error_msg = "The object %s doesn't seems to be a valid Dimension Scale." % self.__str__()
+                    hdf5Extension.Array.clear_scale(self, ds_id, 0,
+                                                    dimension_index)
+                else:
+                    error_msg2 = \
+"The Dimension Scale %s doesn't seem to be attached to %s." % (ds.name, self.name)
+                    raise AttributeError, error_msg2
+            
+            else:
+                raise AttributeError, error_msg
+        except Exception, m:
+            print m.__str__()
             raise AttributeError, error_msg
-	    
-	# Check the first argument
-	error_msg = "The object %s doesn't seems to be a valid Array." % array.__str__()
-	try: # 'array' may not have an attribute called _c_class_id...
-	    if array._c_classId not in ARRAY_LIST:
-	        raise AttributeError, error_msg
-        except:
-	    raise AttributeError, error_msg
-		
-	# Check the second argument	
-	_shape = array.shape
-	if _shape.__len__() <= dimension_index :
-	    error_msg = "The Array specified doesn't have %d dimensions." % (dimension_index+1)
-	    raise ValueError, error_msg
-	return hdf5Extension.DimensionScale.is_attached_to(self, array.objectID, dimension_index)
-	
-    def detach_from(self, array, dimension_index):
-        """Detach a Dimension Scale from the dimension specified."""    	
-	if self.is_attached_to(array, dimension_index):
-	    _objectID = array.objectID
-	    hdf5Extension.DimensionScale.detach_from(self, _objectID, self.objectID, dimension_index)
-	    
-	    # update 'REF_LIST'
-            ref_list = self.attrs.REF_LIST
-            ref_list.remove((array._v_pathname, dimension_index))
-            self.attrs._g__setattr('REF_LIST', ref_list)
-	    
-	    # update 'DIM_LIST'
-	    dim_list = array.attrs.DIM_LIST
-	    l = dim_list[dimension_index]
-	    l.remove(self._v_pathname)
-            dim_list[dimension_index]=l
-            array.attrs._g__setattr('DIM_LIST',dim_list)
 
-        else:
-            error_msg = "This Dimension Scale is not attached to the dimension specified."
-            raise AttributeError, error_msg
 
-    # version which updates neither the 'REF_LIST' nor the 'DIM_LIST'  
-    # def detach_from_all(self):
-    #     """detaches a Dimension Scale from all the Arrays it is attached to."""
-    #     try:
-    #         reference_list = self.attrs.REFERENCE_LIST
-    #         reference_list_found = True
-    #     except:
-    #         # if the attribute 'REFERENCE_LIST' can't be found, it means that current Dimension Scale (self)
-    #         # has not yet been attached to any dimension. So there's nothing to do.
-    #         reference_list_found = False    
-    #     if reference_list_found:
-    #         reference_number = reference_list.__len__()
-    #         for i in xrange(reference_number):
-    #             reference = reference_list[i]
-    #             did = self._dereference(reference[0])
-    #             dimension_index = reference[1]
-    #             hdf5Extension.DimensionScale.detach_from(self, did, self.objectID, dimension_index)		
-	
-    # version which do update the 'REF_LIST' and 'DIM_LIST'	
-    def detach_from_all2(self):
-        """Detach a Dimension Scale from all the Arrays it is attached to."""
+    def clear_all_scales(self):
+        """Get rid of all the scales attached to an Array."""
+        
         try:
-            ref_list = self.attrs.REF_LIST
-            ref_list_found = True
-	    
-	except:
-            # if the attribute 'REF_LIST' can't be found, it means that current Dimension Scale (self)
-            # has not yet been attached to any dimension. So there's nothing to do.
-            ref_list_found = False
-	
-	if ref_list_found:
-	    ref_number = ref_list.__len__()    
-            for i in xrange(ref_number):
-                ref = ref_list[i]
-                path_name = ref[0]
-                dimension_index = ref[1]
-
-	        array = _parse(self, path_name)
-	        did = array.objectID
-		
-	        # update the attribute 'DIM_LIST' of the Array to be detached
-	        dim_list = array.attrs.DIM_LIST
-	        l=dim_list[dimension_index]
-	        l.remove(self._v_pathname)
-	        dim_list[dimension_index]=l
-	        array.attrs._g__setattr('DIM_LIST', dim_list)
-	    
-	        # detach current Dimension Scale from 'array'	    
-                hdf5Extension.DimensionScale.detach_from(self, did, self.objectID, dimension_index)		
-	
-	    # finally update the attribute 'REF_LIST' of current Dimension Scale
-	    self.attrs._g__setattr('REF_LIST', [])    
+            dimension_list = self.attrs.DIMENSION_LIST
+            dimension_list_found = True
+        except:
+            dimension_list_found = False
+            
+        if dimension_list_found:
+            dimension_list_length = dimension_list.__len__()
+            for i in xrange(dimension_list_length):
+                l=dimension_list[i]
+                l_length = l.__len__()
+                for j in xrange(l_length):
+                    reference = l[j]
+                    
+                    hdf5Extension.Array.clear_scale(self, 0, reference, i)
+
+
+    def generate_dim_list(self):
+        """
+        Generates the 'DIM_LIST' attribute with respect to the 
+        'DIMENSION_LIST' attribute.
+        
+        """
+        
+        # Dimension Scales don't have DIMENSION_LIST or DIM_LIST, so it's 
+        # useless to go further
+        if not self.is_scale():
+            # If the Array doesn't own an attribute 'DIMENSION_LIST', 
+            # there's nothing to do
+            try:
+                dimension_list = self.attrs.DIMENSION_LIST
+                dimension_list_found = True
+            except:
+                dimension_list_found = False
+                        
+            if dimension_list_found:
+                references=[]
+                pathnames=[]
+                lengths = []
+                dim_list = []
+                file = self._v_file
+                for list in dimension_list:
+                    # For a given dimension, 'list' is the list of the 
+                    # references of the Dimension Scales that are attached to 
+                    # this dimension.
+                    references = references + list
+                    lengths.append(list.__len__())    
+                
+                ref_number = references.__len__()
+                ref_found = 0
+                    
+                for i in xrange(ref_number):
+                    pathnames.append("")
+                
+                walkGroups = file.walkGroups('/')
+                try:
+                    while not ref_found == ref_number:
+                        group = walkGroups.next()
+                        listNodes = file.listNodes(group, 'DimensionScale')
+                        i = 0
+                        while not ref_found == ref_number \
+                              and i < listNodes.__len__():
+                              
+                            ds = listNodes[i]
+                            ds_ref = ds.get_reference()
+                            try:
+                                index = references.index(ds_ref)
+                                index_found = True
+                            except ValueError:
+                                # The reference is not in the list
+                                index_found = False
+                            if index_found:
+                                pathnames.__delitem__(index)
+                                pathnames.insert(index, ds._v_pathname)
+                                ref_found = ref_found+1
+                                
+                            i = i+1
+                                                                
+                        
+                except StopIteration:
+                    # Here we have a problem...
+                    missing_ref = ref_number-ref_found
+                    error_msg = \
+"Can't find the Dimension Scales corresponding to %d references" % missing_ref
+                            
+                    raise StopIteration, error_msg
+                
+                # All pathnames have been found : it's time to build the 
+                # 'DIM_LIST'
+                for i in lengths:
+                    dim_list.append(pathnames[:i])
+                    pathnames = pathnames[i:]
+                
+                # The dim_list has been created. Now it just has to be stored 
+                # in the 'DIM_LIST' attribute (which may be overwritten)
+                self.attrs._g__setattr('DIM_LIST', dim_list)
+
 
-	
 
 class ImageArray(Array):
 
diff -Naur pytables-1.3-ds-devel/tables/AttributeSet.py ds/pytables-1.3-devel/tables/AttributeSet.py
--- pytables-1.3-ds-devel/tables/AttributeSet.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/AttributeSet.py	2006-01-25 13:47:01.000000000 +0100
@@ -68,7 +68,9 @@
 # dimension of an Array. A user is not allowed to remove or modify them.
 # 2005-11-22
 # R. Sassolas
-DS_ATTRS = ['REFERENCE_LIST', 'DIMENSION_LIST', 'DIMENSION_LABELS', 'DIM_LIST', 'REF_LIST']
+#DS_ATTRS = ['REFERENCE_LIST', 'DIMENSION_LIST', 'DIMENSION_LABELS']
+DS_ATTRS = ['REFERENCE_LIST', 'DIMENSION_LIST', 'DIMENSION_LABELS',
+            'DIM_LIST', 'REF_LIST']
 
 def issysattrname(name):
     "Check if a name is a system attribute or not"
@@ -145,6 +147,7 @@
         mydict["_v__nodeFile"] = node._v_file
         mydict["_v__nodePath"] = node._v_pathname
         mydict["_v_attrnames"] = self._g_listAttr()
+        
         # Get the file version format. This is an optimization
         # in order to avoid accessing too much to it.
         if hasattr(node._v_file, "format_version"):
@@ -154,6 +157,7 @@
         # Split the attribute list in system and user lists
         mydict["_v_attrnamessys"] = []
         mydict["_v_attrnamesuser"] = []
+        
         for attr in self._v_attrnames:
             # put the attributes on the local dictionary to allow
             # tab-completion
@@ -198,12 +202,12 @@
     # R. Sassolas
     def _addAttrName(self, name):
         """add an attribute name to the list of attributes"""
-	if not name in self._v_attrnames:
-	    self._v_attrnames.append(name)
-	    self._v_attrnames.sort()
-	if not name in self._v_attrnamesuser:
-	    self._v_attrnamesuser.append(name)
-	    self._v_attrnamesuser.sort()
+        if not name in self._v_attrnames:
+            self._v_attrnames.append(name)
+            self._v_attrnames.sort()
+        if not name in self._v_attrnamesuser:
+            self._v_attrnamesuser.append(name)
+            self._v_attrnamesuser.sort()
 
 
     def _f_list(self, attrset="user"):
@@ -284,18 +288,37 @@
 
         # Put this value in local directory
         self.__dict__[name] = retval
+        if name == "DIMENSION_LIST":
+            print "------------- end of AttributeSet.__getattr__ -------------"
         return retval
 
 
-    # Redefine the inherited __getattribute__ method. The attributes for Dimension Scales are
-    # indeed set and modified in a non standard way ; so they can't be got in a standard way either.
+    # Redefine the inherited __getattribute__ method. The attributes for
+    # Dimension Scales are indeed set and modified in a non standard way ;
+    # so they can't be got in a standard way either.
     # 2005-11-22
     # R. Sassolas
     def __getattribute__(self,name):
         if str(name) in DS_ATTRS:
-	    return self.__getattr__(name)
-	else :
-	    return object.__getattribute__(self,name)
+            try:
+                tmp = self.__dict__[name]
+            except:
+                pass
+            else:
+                if name == 'DIMENSION_LIST':
+                    print \
+"------------- the attribute 'DIMENSION_LIST' already exists : \
+so first close all the nodes stored in it -------------"
+
+                    for l in tmp:
+                        for ds in l:
+                            ds.__del__()
+                            print "------------- node closed -------------"
+                print "------------- all nodes closed -------------"            
+
+            return self.__getattr__(name)            
+        else :
+            return object.__getattribute__(self,name)
 
 
     def _g__setattr(self, name, value):
@@ -358,12 +381,22 @@
         number of attributes in a node is going to be exceeded.
         """
 
-        # The attributes for Dimension Scales must not be modified using this method.
-	# 2005-11-22
-	# R. Sassolas
+        # The attribute 'NAME' is reserved for Dimension Scale stuff
+        # but it can be modified using this method
+        if name == 'NAME' and not name in self._v_attrnames:
+            error_msg = \
+"The attribute 'NAME' is reserved for Dimension Scale stuff."
+            raise AttributeError, error_msg
+             
+
+        # The attributes for Dimension Scales must not be
+        # modified using this method.
+        # 2005-11-22
+        # R. Sassolas
         if name in DS_ATTRS:
-	    error_msg = "The attribute %s is reserved for Dimension Scales stuff and is read only." % name
-	    raise ValueError, error_msg
+            error_msg = \
+"The attribute %s is reserved for Dimension Scales and is read only." % name
+            raise AttributeError, error_msg
 
         self._g_checkOpen()
 
@@ -450,25 +483,21 @@
                 "Attribute ('%s') does not exist in node '%s'"
                 % (name, node._v_name))
 
-        # The attributes for Dimension Scales must not be deleted using this method.
-	# 2005-11-22
-	# R. Sassolas
-        if name in DS_ATTRS:
-	    error_msg = "The attribute %s is reserved for Dimension Scales stuff and must not be deleted." % name
-	    raise AttributeError, error_msg
+        # The attributes for Dimension Scales must not 
+        # be deleted using this method.
+        # 2005-11-22
+        # R. Sassolas
+        if name in DS_ATTRS or name == 'NAME':
+            error_msg = \
+"The attribute %s is reserved for Dimension Scales \
+and must not be deleted." % name
+            raise AttributeError, error_msg
 
         # The system attributes are protected
         if name in self._v_attrnamessys:
             raise AttributeError, \
                   "System attribute ('%s') cannot be deleted" % (name)
 
-        # A Dimension Scale can't be turned back into an Array ; its attributes
-	# must not be deleted this way
-	if name in DS_ATTRS:
-	    raise AttributeError(
-	        "Attribute '%s' is an attribute used for Dimension Scales. It cannot be deleted."
-		% name)
-
         nodeFile._checkWritable()
 
         # Remove the PyTables attribute or move it to shadow.
@@ -579,10 +608,12 @@
         # Get this class name
         classname = self.__class__.__name__
         # The attribute names
-        #attrnumber = len([ n for n in self._v_attrnames if not issysattrname(n) ])
+    #attrnames = self._v_attrnames
+        #attrnumber = len([ n for n in attrnames if not issysattrname(n) ])
         # Showing all attributes by default
         attrnumber = len([ n for n in self._v_attrnames ])
-        return "%s._v_attrs (%s), %s attributes" % (pathname, classname, attrnumber)
+        return "%s._v_attrs (%s), %s attributes" % (pathname, classname,
+                                                    attrnumber)
 
     def __repr__(self):
         """A detailed string representation for this object."""
diff -Naur pytables-1.3-ds-devel/tables/CArray.py ds/pytables-1.3-devel/tables/CArray.py
--- pytables-1.3-ds-devel/tables/CArray.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/CArray.py	2006-01-19 15:07:40.000000000 +0100
@@ -39,10 +39,7 @@
     Numeric_imported = False
 
 from tables.Atom import Atom
-# DimensionScale added
-# 2005-11-28
-# R. Sassolas
-from tables.Array import Array, DimensionScale
+from tables.Array import Array
 from tables.utils import processRangeRead
 
 
@@ -116,7 +113,7 @@
     def __init__(self, parentNode, name,
                  shape=None, atom=None,
                  title="", filters=None,
-                 log=True):
+                 log=True, dataset_id = 0):
         """Create CArray instance.
 
         Keyword arguments:
@@ -213,7 +210,8 @@
                 warnings.warn("``shape`` is ``None``: ``atom`` ignored")
 
         # The `Array` class is not abstract enough! :(
-        super(Array, self).__init__(parentNode, name, new, filters, log)
+        super(Array, self).__init__(parentNode, name, new, filters, log,
+                                    dataset_id)
 
 
     def _calcMaxTuples(self, atom, nrows, compress=None):
@@ -378,36 +376,5 @@
   atom = %r
   nrows = %s
   flavor = %r
-  byteorder = %r""" % (self, self.atom, self.nrows, self.flavor, self.byteorder)
-
-# function added to implement Dimension Scales
-    # 2005-111-15
-    # R. Sassolas
-
-    def to_scale(self, dimension_name=""):
-        """Turns the current CArray into a Dimension Scale, with an optionnal name."""
-        if self.is_scale() == False:
-	    self._set_scale(dimension_name)
-	    
-	    # Add the attribute name to the list of attribute so that it can  be obtained later. 
-	    if(self.attrs):
-	        self.attrs._addAttrName('NAME')
-		
-	    # This allow some kind of "cast" in Python". It might no be very nice, but it works well !
-	    self.__class__=CDimensionScale	    
-	else:
-	    error_msg = "Current object seems to already be a valid Dimension Scale."
-	    raise AttributeError, error_msg
-
-
-  
-# Allow Dimension Scale to be extended to Carrays
-# 2005-11-28
-# R. Sassolas
-class CDimensionScale(DimensionScale, CArray):
-    """
-    Represents a HDF5 Dimension Scale.
-    
-    """
-
-    _c_classId = 'DIMENSION_SCALE'
+  byteorder = %r""" % (self, self.atom, self.nrows, self.flavor,
+                       self.byteorder)
diff -Naur pytables-1.3-ds-devel/tables/dimensionscale.py ds/pytables-1.3-devel/tables/dimensionscale.py
--- pytables-1.3-ds-devel/tables/dimensionscale.py	1970-01-01 01:00:00.000000000 +0100
+++ ds/pytables-1.3-devel/tables/dimensionscale.py	2006-01-23 09:43:53.000000000 +0100
@@ -0,0 +1,392 @@
+########################################################################
+#
+#       License: BSD
+#       Created: December 19, 2005
+#       Author:  Remi Sassolas - remi.sassolas@gmail.com
+#
+#       $Id: ???
+#
+########################################################################
+
+"""Here is defined the DimensionScale class.
+
+See DimensionScale class docstring for more info.
+
+Classes:
+
+    DimensionScale
+
+Functions:
+
+
+Misc variables:
+
+    __version__
+
+"""
+
+import numarray.strings as strings
+
+from tables.constants import EXPECTED_ROWS_EARRAY
+import tables.hdf5Extension as hdf5Extension
+from tables.utils import calcBufferSize
+from tables.EArray import EArray
+from tables.utils import processRangeRead
+from tables.Leaf import Leaf
+
+
+__version__ = "???"
+
+
+# default version for EARRAY objects
+#obversion = "1.0"    # initial version
+#obversion = "1.1"    # support for complex datatypes
+#obversion = "1.2"    # This adds support for time datatypes.
+obversion = "1.3"    # This adds support for enumerated datatypes.
+
+# list of the different arrays - or more accurately the list of their IDs
+ARRAY_LIST = ["ARRAY", "CARRAY", "EARRAY"]
+
+class DimensionScale(EArray, hdf5Extension.DimensionScale):
+    """
+    Represents a HDF5 Dimension Scale.
+
+    A Dimension Scale is simply an Array, with some extra data.
+    This extra data allows a Dimension Scale to be attached to
+    a dimension of an Array : to each element along 
+    this dimension correspond a row of the Dimension Scale.
+    
+    """
+    
+    _c_classId = 'DIMENSION_SCALE'
+    
+    # <properties>
+    
+    def _g_getds_name (self):
+        if hasattr(self._v_attrs, 'NAME'):
+            return self._v_attrs.NAME
+        else:
+            return ''
+
+    def _g_setds_name (self, ds_name):
+        self._v_attrs.NAME = ds_name
+
+    ds_name = property(_g_getds_name, _g_setds_name, None,
+                        "The 'NAME' attribute of the Dimension Scale.")
+    
+    # </properties>
+    
+    def __init__(self, parentNode, name, atom=None, title="",
+                 filters=None, expectedrows=EXPECTED_ROWS_EARRAY,
+                 log=True, ds_name="", dataset_id = 0):
+        """Create the instance DimensionScale.
+
+        Keyword arguments:
+
+        atom -- An Atom object representing the shape, type and flavor
+            of the atomic objects to be saved. One of the shape
+            dimensions must be 0. The dimension being 0 means that the
+            resulting EArray object can be extended along it.
+
+        title -- Sets a TITLE attribute on the array entity.
+
+        filters -- An instance of the Filters class that provides
+            information about the desired I/O filters to be applied
+            during the life of this object.
+
+        expectedrows -- In the case of enlargeable arrays this
+            represents an user estimate about the number of row
+            elements that will be added to the growable dimension in
+            the EArray object. If you plan to create both much smaller
+            or much bigger EArrays try providing a guess; this will
+            optimize the HDF5 B-Tree creation and management process
+            time and the amount of memory used.
+        
+        ds_name -- Sets a NAME attribute on the HDF5 array entity.
+
+        """
+        super(DimensionScale, self).__init__(parentNode, name, atom, title,
+                                             filters, expectedrows, log,
+                                             dataset_id)
+   
+    
+    # Before copying a Dimension Scale, detach all
+    def _f_copy(self, newparent=None, newname=None,
+                overwrite=False, recursive=False, **kwargs):
+        """
+        Copy this node and return the new one.
+
+        Creates and returns a copy of the node, maybe in a different
+        place in the hierarchy.  `newparent` can be a `Group` object or
+        a pathname in string form.  If it is not specified or ``None``,
+        the current parent group is chosen as the new parent.  `newname`
+        must be a string with a new name.  If it is not specified or
+        ``None``, the current name is chosen as the new name.  If
+        `recursive` copy is stated, all descendents are copied as well.
+
+        Copying a node across databases is supported but can not be
+        undone.  Copying a node over itself is not allowed, nor it is
+        recursively copying a node into itself.  These result in a
+        `NodeError`.  Copying over another existing node is similarly
+        not allowed, unless the optional `overwrite` argument is true,
+        in which case that node is recursively removed before copying.
+
+        Additional keyword arguments may be passed to customize the
+        copying process.  For instance, title and filters may be
+        changed, user attributes may be or may not be copied, data may
+        be subsampled, stats may be collected, etc.  See the
+        documentation for the particular node type.
+
+        Using only the first argument is equivalent to copying the node
+        to a new location without changing its name.  Using only the
+        second argument is equivalent to making a copy of the node in
+        the same group.
+        """
+               
+        # First call the inherited method and store the returned DS
+        temp = super(Leaf, self)._f_copy(newparent, newname,
+                                          overwrite, recursive, **kwargs)
+        # Then reset its "REF_LIST" attribute
+        if temp.attrs.__contains__('REF_LIST'):
+            temp.attrs._g__delattr('REF_LIST')
+        
+        if temp.attrs.__contains__('REFERENCE_LIST'): 
+            temp.attrs._g__delattr('REFERENCE_LIST')
+        
+        # Finally return this Array
+        return temp
+
+
+    # This function is about the same as the one from tables.EArray
+    def _g_copyWithStats(self, group, name, start, stop, step,
+                         title, filters, log):
+        """Private part of Leaf.copy() for each kind of leaf"""
+        # Build the new Dimension Scale object
+        object = DimensionScale(
+            group, name, atom=self.atom, title=title, filters=filters,
+            expectedrows=self.nrows, log=log, ds_name = self.ds_name)
+        # Now, fill the new earray with values from source
+        nrowsinbuf = self._v_maxTuples
+        # The slices parameter for self.__getitem__
+        slices = [slice(0, dim, 1) for dim in self.shape]
+        # This is a hack to prevent doing innecessary conversions
+        # when copying buffers
+        (start, stop, step) = processRangeRead(self.nrows, start, stop, step)
+        self._v_convert = False
+        # Start the copy itself
+        for start2 in range(start, stop, step*nrowsinbuf):
+            # Save the records on disk
+            stop2 = start2+step*nrowsinbuf
+            if stop2 > stop:
+                stop2 = stop
+            # Set the proper slice in the extensible dimension
+            slices[self.extdim] = slice(start2, stop2, step)
+            object._append(self.__getitem__(tuple(slices)))
+        # Active the conversion again (default)
+        self._v_convert = True
+        nbytes = self.itemsize
+        for i in self.shape:
+            nbytes*=i
+
+        return (object, nbytes)
+
+    
+    def attach_to(self, array, dimension_index):
+        """Attach a Dimension Scale to a dimension of an Array.
+
+        Keyword arguments:
+	    array -- the Array to which the Dimension Scale will be attached 
+    	dimension_index -- the index of the dimension
+
+	    """
+
+        # If current Dimension Scale (self) is already attached to
+        # the dimension specified, nothing is done
+        if not self.is_attached_to(array, dimension_index):
+            _objectID = array.objectID
+          
+            # Check the second argument (the first arguments have
+            # already been tested by is_attached_to())
+            _shape = array.shape
+            if self.shape[0] != array.shape[dimension_index]:
+                error_msg = \
+"The shape of this Dimension Scale is not compatible \
+with the dimension specified."   
+                
+                raise ValueError, error_msg
+          
+            hdf5Extension.DimensionScale.attach_to(self, _objectID,
+                                                 dimension_index)
+        
+            # add the names of the attribute in the
+            # AttributeSet list of attributes.
+            if(self.attrs):
+                self.attrs._addAttrName('REFERENCE_LIST')
+            if(array.attrs):
+                array.attrs._addAttrName('DIMENSION_LIST')
+ 
+
+    def is_attached_to(self, array, dimension_index):
+        """
+        Check if a Dimension Scale is attached to a dimension of an Array.
+
+        Keyword arguments:
+	    array -- the Array to which the Dimension Scale could be attached
+	    dimension_index -- the index of the dimension
+
+        """
+        # Check the first argument
+        error_msg = \
+"The object '%s' doesn't seems to be a valid Array." % array.__str__()
+        try: # 'array' may not have an attribute called _c_class_id...
+            if array._c_classId not in ARRAY_LIST:
+                raise AttributeError, error_msg
+        except:
+            raise AttributeError, error_msg
+                
+        # Check the second argument
+        _shape = array.shape
+        if _shape.__len__() <= dimension_index :
+            error_msg = \
+"The Array specified doesn't have %d dimensions." % (dimension_index+1)
+            raise ValueError, error_msg
+        return hdf5Extension.DimensionScale.is_attached_to(self,
+                                                           array.objectID,
+                                                           dimension_index)
+        
+    def detach_from(self, array, dimension_index):
+        """Detach a Dimension Scale from a dimension of an Array.
+
+        Keyword arguments:
+	    array -- the Array from which the Dimension Scale will be detached
+	    dimension_index -- the index of the dimension
+
+	    """
+        if self.is_attached_to(array, dimension_index):
+            array_id = array.objectID
+            hdf5Extension.DimensionScale.detach_from(self, array_id, 0,
+                                                     dimension_index)
+        else:
+            error_msg = \
+            "This Dimension Scale is not attached to the dimension specified."
+            raise AttributeError, error_msg
+
+        
+    def detach_from_all(self):
+        """Detach a Dimension Scale from all the Arrays it is attached to."""
+        try:
+            reference_list = self.attrs.REFERENCE_LIST
+            reference_list_found = True
+        except:
+            reference_list_found = False
+        
+        if reference_list_found and reference_list:
+            reference_number = reference_list.__len__()    
+            for i in xrange(reference_number):
+                tuple = reference_list[i]
+                reference = tuple[0]
+                dimension_index = tuple[1]
+                                               
+                hdf5Extension.DimensionScale.detach_from(self, 0, reference,
+                                                         dimension_index)
+            self.attrs._g__setattr ('REFERENCE_LIST', None)
+
+
+    def generate_ref_list(self):
+        """
+        Generates the 'REF_LIST' attribute with respect to the 
+        'REFERENCE_LIST' attribute.
+        
+        """
+        
+        try:
+            reference_list = self.attrs.REFERENCE_LIST
+            reference_list_found = True
+        except:
+            reference_list_found = False
+
+        # The to-be-generated REF_LIST
+        ref_list = []
+        
+        if reference_list_found and reference_list:
+            # The list to store the references
+            references = []
+            # The list to store the corresponding dimension indexes
+            dimension_indexes = []
+            # The position in the ref_list
+            position = 0
+            # the list to store the positions
+            positions = []
+            
+            file = self._v_file
+            for tuple in reference_list:
+                ref = tuple[0]
+                dimension_index = tuple[1]
+                
+                references.append(ref)
+                dimension_indexes.append(dimension_index)
+                positions.append(position)
+                position = position+1
+                ref_list.append(None)
+           
+            walkGroups = file.walkGroups('/')
+            try:
+                while not references ==[]:
+                    group = walkGroups.next()
+                    listNodes = file.listNodes(group, 'Array')
+                    nodes_number = listNodes.__len__()
+                    i = 0
+                    while not references == [] and i < nodes_number:
+                        array = listNodes[i]
+                        if not array.is_scale():
+                            array_ref = array.get_reference()
+                            
+                            try:
+                                index = references.index(array_ref)
+                                index_found = True
+                            except ValueError:
+                                # The reference is not in the list
+                                index_found = False
+                            if index_found:
+                                # Remove the reference found from 'references'
+                                references.__delitem__(index)
+                                
+                                # Retrieve the corresponding dimension index, 
+                                # and remove it from 'dimension_indexes'
+                                dimension_index = dimension_indexes[index]
+                                dimension_indexes.__delitem__(index)
+                                
+                                # Retrieve the position in the 'ref_list'
+                                # and remove it from 'positions'
+                                position = positions[index]
+                                positions.__delitem__(index)
+                              
+                                # Finally replace the 'None' object at index 
+                                # 'position' in the ref_list by the tuple
+                                # (ref, dimension_index)
+                                ref_list.__delitem__(position)
+                                ref_list.insert(position, (array._v_pathname,
+                                                               dimension_index))
+                                
+                        i = i+1
+                               
+                # Jump to the next Group
+                        
+            except StopIteration:
+                # Here we have a problem...
+                error_msg = \
+"Can't find the Dimension Scale corresponding to the references %s" \
+% references.__str__()
+                            
+                raise StopIteration, error_msg
+                
+        # The ref_list has been created. Now it just has to be stored 
+        # in the 'REF_LIST' attribute (which may be overwritten)
+        self.attrs._g__setattr('REF_LIST', ref_list)
+        
+    
+    def __repr__(self):
+        return self.objectID.__str__()
+        
+    def __str__(self):
+        return self.objectID.__str__()
diff -Naur pytables-1.3-ds-devel/tables/EArray.py ds/pytables-1.3-devel/tables/EArray.py
--- pytables-1.3-ds-devel/tables/EArray.py	2005-11-10 13:17:03.000000000 +0100
+++ ds/pytables-1.3-devel/tables/EArray.py	2006-01-19 15:07:18.000000000 +0100
@@ -107,7 +107,7 @@
     def __init__(self, parentNode, name,
                  atom=None, title="",
                  filters=None, expectedrows=EXPECTED_ROWS_EARRAY,
-                 log=True):
+                 log=True, dataset_id = 0):
         """Create EArray instance.
 
         Keyword arguments:
@@ -214,7 +214,8 @@
         """The length of the enlargeable dimension of the array."""
 
         # The `Array` class is not abstract enough! :(
-        super(Array, self).__init__(parentNode, name, new, filters, log)
+        super(Array, self).__init__(parentNode, name, new, filters, log,
+                                    dataset_id)
 
 
     def _calcTuplesAndChunks(self, atom, extdim, expectedrows, compress):
@@ -398,12 +399,61 @@
         # Ok. all conditions are met. Return the numarray object
         return naarr
 
+    # added for Dimension Scale stuff
+    # 2006-01-05
+    # R. Sassolas
+    def _check_extensibility(self):
+        """
+        Check that the EArray can be extended or truncated.
+
+        An EArray can not be extended or truncated if there are Dimension Scales
+        attached to its extensible dimension.
+        As for the Dimension Scales (which are EArray as well), they can't be
+        extended or truncated if both their first dimension is the extensible
+        one and this dimension is attached to any Array
+        
+        """
+                
+        _classId = self._c_classId
+        if _classId == 'EARRAY':
+            scale_number = self.get_scale_number(self.extdim)
+            if scale_number != 0:
+                error_msg = "There are %d Dimension Scales attached \
+to the enlargeable dimension." % scale_number
+                raise AttributeError, error_msg
+                
+        elif _classId == 'DIMENSION_SCALE':
+            if self.extdim == 0:
+            # If it is not the first dimension which is enlargeable, 
+            # nothing can go wrong
+                try:
+                    reference_list = self.attrs.REFERENCE_LIST
+                    reference_list_found = True
+                    
+                except:
+                    # The Dimension Scale don't have an attribute 
+                    # 'REFERENCE_LIST' :
+                    # it has not yet been attached to an Array, so
+                    # there's nothing to do
+                    reference_list_found = False
+                    
+                if reference_list_found and reference_list:
+                    reference_list_length = reference_list.__len__()
+                    if reference_list_length != 0:
+                        error_msg = "This Dimension Scale is attached to %d \
+Arrays." % reference_list_length
+                        raise AttributeError, error_msg
+
     def append(self, sequence):
         """Append the sequence to this (enlargeable) object"""
 
         if self._v_file.mode == 'r':
             raise IOError("attempt to write over a file opened in read-only mode")
-
+   
+        self._check_extensibility()
+        # this function returns nothing but may raise exceptions and thus
+        # prevent the sequence to be appended
+        
         # The sequence needs to be copied to make the operation safe
         # to in-place conversion.
         copy = self.stype in ['Time64']
@@ -418,6 +468,11 @@
 
         if size <= 0:
             raise ValueError("`size` must be greater than 0")
+        
+        self._check_extensibility()
+        # this function returns nothing but may raise exceptions and thus
+        # prevent the EArray to be truncated
+        
         self._truncateArray(size)
 
 
diff -Naur pytables-1.3-ds-devel/tables/File.py ds/pytables-1.3-devel/tables/File.py
--- pytables-1.3-ds-devel/tables/File.py	2005-11-08 08:32:02.000000000 +0100
+++ ds/pytables-1.3-devel/tables/File.py	2006-01-25 10:51:02.000000000 +0100
@@ -58,9 +58,11 @@
 from tables.Leaf import Leaf, Filters
 from tables.Table import Table
 from tables.Array import Array
+from tables.dimensionscale import DimensionScale
 from tables.CArray import CArray
 from tables.EArray import EArray
 from tables.VLArray import VLArray
+from tables.Atom import Atom
 
 
 
@@ -438,8 +440,11 @@
     * createTable(where, name, description[, title][, filters]
                   [, expectedrows])
     * createArray(where, name, array[, title])
+    * createCArray(where, name, shape, atom[, title][, filters]
+                   [, compress][, complib][, dataset_id])
     * createEArray(where, name, atom[, title][, filters]
                    [, expectedrows])
+    * createDimensionScale(where, hdf_name, array[, title][, ds_name])
     * createVLArray(where, name, atom[, title][, filters]
                     [, expectedsizeinMB])
     * removeNode(where[, name][, recursive])
@@ -646,6 +651,19 @@
         return self.trMap.get(ptName, ptName)
 
 
+    def createAtom(self, dtype="Float64", shape=1, flavor="NumArray"):
+        """Create an Atomic object instance."""
+        
+        return Atom(dtype, shape, flavor)
+
+
+    def createFilters(self, complevel=0, complib="zlib", 
+                      shuffle=1, fletcher32=0):
+        """Create a Filters object instance."""
+        
+        return Filters(complevel, complib, shuffle, fletcher32)
+
+
     def createGroup(self, where, name, title="", filters=None):
         """Create a new Group instance with name "name" in "where" location.
 
@@ -674,7 +692,7 @@
 
     def createTable(self, where, name, description, title="",
                     filters=None, expectedrows=10000,
-                    compress=None, complib=None):  # Deprecated
+                    compress=None, complib=None, dataset_id=0):  # Deprecated
         """Create a new Table instance with name "name" in "where" location.
 
         "where" parameter can be a path string, or another group
@@ -706,14 +724,15 @@
             management process time and the amount of memory used.
 
         """
+
         parentNode = self.getNode(where)  # Does the parent node exist?
         fprops = _checkFilters(filters, compress, complib)
         return Table(parentNode, name,
                      description=description, title=title,
-                     filters=fprops, expectedrows=expectedrows)
-
-
-    def createArray(self, where, name, object, title=""):
+                     filters=fprops, expectedrows=expectedrows,
+                     dataset_id=dataset_id)
+    
+    def createArray(self, where, name, object, title="", dataset_id=0):
         """Create a new instance Array with name "name" in "where" location.
 
         Keyword arguments:
@@ -734,11 +753,12 @@
         """
         parentNode = self.getNode(where)  # Does the parent node exist?
         return Array(parentNode, name,
-                     object=object, title=title)
-
+                     object=object, title=title, dataset_id=dataset_id)
+    
 
     def createCArray(self, where, name, shape, atom, title="",
-                     filters=None, compress=None, complib=None):
+                     filters=None, compress=None, complib=None,
+                     dataset_id=0):
         """Create a new instance CArray with name "name" in "where" location.
 
         Keyword arguments:
@@ -763,12 +783,13 @@
         parentNode = self.getNode(where)  # Does the parent node exist?
         fprops = _checkFilters(filters, compress, complib)
         return CArray(parentNode, name,
-                      shape=shape, atom=atom, title=title, filters=fprops)
+                      shape=shape, atom=atom, title=title, filters=fprops, 
+                      dataset_id=dataset_id)
 
 
     def createEArray(self, where, name, atom, title="",
                      filters=None, expectedrows=1000,
-                     compress=None, complib=None):
+                     compress=None, complib=None, dataset_id=0):
         """Create a new instance EArray with name "name" in "where" location.
 
         Keyword arguments:
@@ -803,7 +824,47 @@
         fprops = _checkFilters(filters, compress, complib)
         return EArray(parentNode, name,
                       atom=atom, title=title, filters=fprops,
-                      expectedrows=expectedrows)
+                      expectedrows=expectedrows,
+                      dataset_id=dataset_id)
+
+    # added for Dimension Scale stuff
+    # 2005-12-21
+    # R. Sassolas
+    def createDimensionScale(self, where, name, atom, title="",
+                             filters=None, expectedrows=1000,
+                             compress=None, complib=None, ds_name="",
+                             dataset_id=0):
+        """Create a new instance Dimension Scale with name "name"
+        in "where" location.
+
+        Keyword arguments:
+
+        where -- The parent group where the new table will hang
+            from. "where" parameter can be a path string (for example
+            "/level1/leaf5"), or Group instance.
+
+        name -- The name of the new Dimension Scale.
+
+        object -- The (regular) object to be saved. It can be any of
+            NumArray, CharArray, Numeric, List, Tuple, String, Int of
+            Float types, provided that they are regular (i.e. they are
+            not like [[1,2],2]).
+
+        title -- Sets a TITLE attribute on the array entity.
+        
+        ds_name -- Sets a NAME attribute on the array entity.
+
+        """
+        if dataset_id == 0:
+            parentNode = self.getNode(where)  # Does the parent node exist?
+            fprops = _checkFilters(filters, compress, complib)
+        else:
+            parentNode = None
+            fprops = None
+        return DimensionScale(parentNode, name,
+                      atom=atom, title=title, filters=fprops,
+                      expectedrows=expectedrows, ds_name=ds_name,
+                      dataset_id=dataset_id)
 
 
     def createVLArray(self, where, name, atom, title="",
@@ -874,7 +935,33 @@
 
         # Walk down the hierarchy until the last child in the tail is loaded.
         node = parentNode  # maybe `nodePath` was already in memory
+        temp = None
         for childName in pathTail:
+            if childName == "ATTRS":
+                return (parentNode.attrs._g_getAttr("DIMENSION_LIST"))[0][0]
+                
+            if temp and childName in ['REFERENCE_LIST', 'DIMENSION_LIST']:
+                try:
+                    temp = temp._g_getAttr(childName)
+                except:
+                    error_msg = "Failed to retrieve the Node"
+                    raise Exception, error_msg
+                continue
+                
+            if temp and childName.startswith('REF_'):
+                l = childName[4:].split("-")
+                l_length = l.__len__()
+
+                if l_length == 1:
+                    temp = temp[int(l[0])]
+                elif l_length == 2:
+                    temp = temp[int(l[0])][int(l[1])]
+                else:
+                    error_msg = "Failed to retrieve the Node"
+                    raise Exception, error_msg
+                return temp
+                
+            
             # Load the node and use it as a parent for the next one in tail
             # (it puts itself into life via `self._refNode()` when created).
             if not isinstance(parentNode, Group):
diff -Naur pytables-1.3-ds-devel/tables/Group.py ds/pytables-1.3-devel/tables/Group.py
--- pytables-1.3-ds-devel/tables/Group.py	2005-10-27 17:16:14.000000000 +0200
+++ ds/pytables-1.3-devel/tables/Group.py	2006-01-19 18:48:27.000000000 +0100
@@ -1083,7 +1083,7 @@
 
         # hdf5Extension operations (do before setting an AttributeSet):
         #   Update node attributes.
-        self._g_new(ptFile, h5name, init=True)
+        self._g_new(ptFile, h5name, init=True, dataset_id=0)
         #   Open the node and get its object ID.
         self._v_objectID = self._g_open()
 
diff -Naur pytables-1.3-ds-devel/tables/Leaf.py ds/pytables-1.3-devel/tables/Leaf.py
--- pytables-1.3-ds-devel/tables/Leaf.py	2005-09-27 20:43:29.000000000 +0200
+++ ds/pytables-1.3-devel/tables/Leaf.py	2006-01-24 18:15:55.000000000 +0100
@@ -237,27 +237,27 @@
 
     def __init__(self, parentNode, name,
                  new=False, filters=None,
-                 log=True):
+                 log=True, dataset_id = 0):
         self._v_new = new
         """Is this the first time the node has been created?"""
 
-        if new:
-            if filters is None:
-                # If no filter properties have been given,
-                # get the default onesfilter from the parent.
-                filters = parentNode._v_filters
-            self.__dict__['filters'] = filters  # bypass the property
-            # Writing the `Filters` object to an attribute on disk is
-            # not necessary for now, as retrieving the filters using
-            # `utilsExtension.getFilters()` is safer and faster.
-            # Also, cPickling the `filters` attribute is very slow (it
-            # is as much as twice slower than the normal overhead for
-            # creating a Table, for example).
+        if dataset_id == 0:
+            if new:
+                if filters is None:
+                    # If no filter properties have been given,
+                    # get the default filters from the parent.
+                    filters = parentNode._v_filters
+                self.__dict__['filters'] = filters  # bypass the property
+                # Writing the `Filters` object to an attribute on disk is
+                # not necessary for now, as retrieving the filters using
+                # `utilsExtension.getFilters()` is safer and faster.
+                # Also, cPickling the `filters` attribute is very slow (it
+                # is as much as twice slower than the normal overhead for
+                # creating a Table, for example).
 
         # Existing filters need not be read since `filters`
         # is a lazy property that automatically handles their loading.
-
-        super(Leaf, self).__init__(parentNode, name, log)
+        super(Leaf, self).__init__(parentNode, name, log, dataset_id)
 
 
     def _g_getFilters(self):
@@ -294,6 +294,7 @@
 
 
     def _g_copy(self, newParent, newName, recursive, log, **kwargs):
+
         # Compute default arguments.
         start = kwargs.get('start', 0)
         stop = kwargs.get('stop', self.nrows)
@@ -447,7 +448,6 @@
         Besides that, the optional argument `flush` tells whether to
         flush pending data to disk or not before closing.
         """
-
         if not self._f_isOpen():
             return  # the node is already closed
 
diff -Naur pytables-1.3-ds-devel/tables/Node.py ds/pytables-1.3-devel/tables/Node.py
--- pytables-1.3-ds-devel/tables/Node.py	2005-10-28 16:55:40.000000000 +0200
+++ ds/pytables-1.3-devel/tables/Node.py	2006-01-25 13:35:15.000000000 +0100
@@ -226,7 +226,7 @@
     # </properties>
 
 
-    def __init__(self, parentNode, name, log=True):
+    def __init__(self, parentNode, name, log=True, dataset_id=0):
         # Remember to assign these values in the root group constructor
         # if it does not use this method implementation!
 
@@ -248,46 +248,60 @@
 
         validate = new = self._v_new  # set by subclass constructor
 
-        # Is the parent node a group?  Is it open?
-        self._g_checkGroup(parentNode)
-        parentNode._g_checkOpen()
-        file_ = parentNode._v_file
+        if dataset_id == 0:
+            # Is the parent node a group?  Is it open?
+            self._g_checkGroup(parentNode)
+            parentNode._g_checkOpen()
+            file_ = parentNode._v_file
 
-        # Will the file be able to host a new node?
-        if new:
-            file_._checkWritable()
-
-        # Find out the matching HDF5 name.
-        ptname = name  # always the provided one
-        h5name = file_._h5NameFromPTName(ptname)
+            # Will the file be able to host a new node?
+            if new:
+                file_._checkWritable()
 
-        # Will creation be logged?
-        undoEnabled = file_.isUndoEnabled()
-        canUndoCreate = self._c_canUndoCreate
-        if undoEnabled and not canUndoCreate:
-            warnings.warn(
+            # Find out the matching HDF5 name.
+            ptname = name  # always the provided one
+            h5name = file_._h5NameFromPTName(ptname)
+
+            # Will creation be logged?
+            undoEnabled = file_.isUndoEnabled()
+            canUndoCreate = self._c_canUndoCreate
+            if undoEnabled and not canUndoCreate:
+                warnings.warn(
                 "creation can not be undone nor redone for this node",
                 UndoRedoWarning)
 
-        # Bind to the parent node and set location-dependent information.
-        if new:
-            # Only new nodes need to be referenced.
-            # Opened nodes are already known by their parent group.
-            parentNode._g_refNode(self, ptname, validate)
-        self._g_setLocation(parentNode, ptname, h5name)
+            # Bind to the parent node and set location-dependent information.
+            if new:
+                # Only new nodes need to be referenced.
+                # Opened nodes are already known by their parent group.
+                parentNode._g_refNode(self, ptname, validate)
+            self._g_setLocation(parentNode, ptname, h5name)
+
+        else:
+            # dataset_id != 0
+            # The object is created from a reference
+            parentNode = None
+            h5name = ""
 
         try:
             # hdf5Extension operations:
             #   Update node attributes.
-            self._g_new(parentNode, h5name, init=True)
+            self._g_new(parentNode, h5name, init=True, dataset_id=dataset_id)
             #   Create or open the node and get its object ID.
-            if new:
-                self._v_objectID = self._g_create()
+            if dataset_id ==0:
+                if new:
+                    self._v_objectID = self._g_create()
+                else:
+                    self._v_objectID = self._g_open()
+            
+                # This allows extra operations after creating the node.
+                self._g_postInitHook()
+            
             else:
-                self._v_objectID = self._g_open()
+                # dataset_id != 0
+                # The object is created from a reference
+                self._v_objectID = dataset_id
 
-            # This allows extra operations after creating the node.
-            self._g_postInitHook()
         except:
             # If anything happens, the node must be closed
             # to undo every possible registration made so far.
@@ -296,10 +310,11 @@
             self._f_close()
             raise
 
-        # Finally, log creation of the node.
-        # This is made after the ``try`` because the node *has* been created!
-        if new and log and undoEnabled and canUndoCreate:
-            file_._log('CREATE', self._v_pathname)
+        if dataset_id == 0:
+            # Finally, log creation of the node.
+            # This is made after the ``try`` because the node *has* been created!
+            if new and log and undoEnabled and canUndoCreate:
+                file_._log('CREATE', self._v_pathname)
 
 
     def __del__(self):
@@ -319,21 +334,30 @@
         #    revived, the user would also need to force the closed
         #    `Node` out of memory, which is not a trivial task.
         #
+        print "delete node"
         if not self._f_isOpen():
+            print "node not open"
             return
 
+        print "node open"
+
         # If we get here, the `Node` is still open.
+        print "file ?"
         file_ = self._v_file
+        print "file !"
+        print "alive ?"
         if self._v_pathname in file_._aliveNodes:
             # If the node is alive, kill it (to save it).
             file_._killNode(self)
         else:
+            print "dead"
             # The node is already dead and there are no references to it,
             # so follow the usual deletion procedure.
             # This means closing the (still open) node.
             # `self._v__deleting` is asserted so that the node
             # does not try to unreference itself again from the file.
             self._v__deleting = True
+            print "close the node"
             self._f_close()
 
 
@@ -512,21 +536,33 @@
         # Thus, cleanup operations used in ``_f_close()`` in sub-classes
         # must be run *before* calling the method in the superclass.
 
+        print "node still open ?"
         if not self._f_isOpen():
+            print "node closed"
             return  # the node is already closed
 
+        print "node still open"
+
         myDict = self.__dict__
 
         # Close the associated `AttributeSet`
         # only if it has already been placed in the object's dictionary.
+        print "close attribute set ?"
         if '_v_attrs' in myDict:
+            print "yes"
             self._v_attrs._f_close()
+        else:
+            print "no"
 
         # Detach the node from the tree if necessary.
+        print "delLocation"
         self._g_delLocation()
+        print "location deleted"
 
         # Finally, clear all remaining attributes from the object.
+        print "clear the __dict__"
         myDict.clear()
+        print "__dict__ cleared"
 
 
     def _g_remove(self, recursive):
@@ -603,7 +639,7 @@
 
         # hdf5Extension operations:
         #   Update node attributes.
-        self._g_new(newParent, self._v_hdf5name, init=False)
+        self._g_new(newParent, self._v_hdf5name, init=False, dataset_id=0)
         #   Move the node.
         #self._v_parent._g_moveNode(oldPathname, self._v_pathname)
         self._v_parent._g_moveNode(oldParent._v_objectID, oldName,
@@ -747,7 +783,7 @@
         second argument is equivalent to making a copy of the node in
         the same group.
         """
-
+        
         self._g_checkOpen()
         srcFile = self._v_file
         srcParent = self._v_parent
diff -Naur pytables-1.3-ds-devel/tables/Table.py ds/pytables-1.3-devel/tables/Table.py
--- pytables-1.3-ds-devel/tables/Table.py	2005-12-07 19:53:12.000000000 +0100
+++ ds/pytables-1.3-devel/tables/Table.py	2006-01-20 09:50:35.000000000 +0100
@@ -189,7 +189,7 @@
     def __init__(self, parentNode, name,
                  description=None, title="", filters=None,
                  expectedrows=EXPECTED_ROWS_TABLE,
-                 log=True):
+                 log=True, dataset_id=0):
         """Create an instance Table.
 
         Keyword arguments:
@@ -320,7 +320,7 @@
                 "``IsDescription`` subclass, ``Description`` instance, "
                 "dictionary, ``RecArray`` or ``NestedRecArray`` instance""")
 
-        super(Table, self).__init__(parentNode, name, new, filters, log)
+        super(Table, self).__init__(parentNode, name, new, filters, log, dataset_id)
 
 
     def _g_postInitHook(self):
diff -Naur pytables-1.3-ds-devel/tables/tests/test_dimension_scales.py ds/pytables-1.3-devel/tables/tests/test_dimension_scales.py
--- pytables-1.3-ds-devel/tables/tests/test_dimension_scales.py	1970-01-01 01:00:00.000000000 +0100
+++ ds/pytables-1.3-devel/tables/tests/test_dimension_scales.py	2006-01-17 18:00:15.000000000 +0100
@@ -0,0 +1,1607 @@
+"""
+Test module for Dimension Scales under PyTables
+===============================================
+
+:Author:   Remi Sassolas
+:Author:   Ivan Vilata
+:Author:   Francesc Altet
+:Contact:  remi.sassolas@gmail.com
+:Created:  2005-12-14
+:License:  BSD
+"""
+
+import sys
+import unittest
+import os
+import re
+import tempfile
+import warnings
+import numarray
+
+from tables import *
+
+from common import verbose, heavy, cleanup, allequal
+# To delete the internal attributes automagically
+unittest.TestCase.tearDown = cleanup
+
+class FunctionsTestCase(unittest.TestCase):
+
+    def setUp(self):
+        # Create an instance of HDF5 Table
+        self.file = tempfile.mktemp(".h5")
+        self.fileh = openFile(self.file, mode = "w")
+        self.root = self.fileh.root
+
+        # Create some Group objects
+        self.group1 = self.fileh.createGroup(self.root, 'group1',
+                                            "Group 1")
+
+        self.group2 = self.fileh.createGroup(self.root, 'group2',
+                                            "Group 2")
+                
+        # Create some Dimension Scale objects
+        
+        atom1 = Atom(shape=(0,), flavor="NumArray")
+        atom2 = Atom(shape=(0,), flavor="NumArray")
+        atom3 = Atom(shape=(3,0), flavor="NumArray")
+        
+        self.ds1 = \
+        self.fileh.createDimensionScale(self.root, 'ds1', atom1,
+                                        "Title for Dimension Scale 1",
+                                        None, 10, None, None,
+                                        "attribute 'NAME'")
+        self.ds1.append(numarray.array([1]*3))
+
+        self.ds2 = \
+        self.fileh.createDimensionScale(self.group1, 'ds2', atom2,
+                                        "Title for Dimension Scale 2",
+                                        None, 10, None, None)
+        self.ds2.append(numarray.array([2]*2))
+
+        self.ds3 = \
+        self.fileh.createDimensionScale(self.root, 'ds3', atom3)
+        self.ds3.append(numarray.array([[3]*2]*3))
+        
+        # Create an Array object
+        self.data4 = self.fileh.createArray(self.root, 'data4',
+                                          numarray.array([4]*3),
+                                          "Array 4")
+
+        # Create a EArray object
+        
+        atom5 = atom5 = Atom(shape=(0,3), flavor="NumArray")
+        
+        self.data5 = self.fileh.createEArray(self.group2, 'data5',
+                                            atom5, "Array 5")                                      
+        
+        self.data5.append(numarray.array([[5]*3]*2))
+
+    def tearDown(self):
+        self.fileh.close()
+        os.remove(self.file)
+        cleanup(self)
+
+#---------------------------------------
+
+    def test01_is_scale(self):
+        """Checking the function is_scale"""
+
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        assert self.root.ds1.is_scale() == 1
+        assert self.root.group1.ds2.is_scale() == 1
+        assert self.root.ds3.is_scale() == 1
+        assert self.root.data4.is_scale() == 0
+        assert self.root.group2.data5.is_scale() == 0
+
+#---------------------------------------
+        
+    def test02_attach_to(self):
+        """Checking the function that attach a Dimension Scale to an Array"""
+        
+        self.ds1.attach_to(self.data4, 0)
+        self.ds1.attach_to(self.data5, 1)
+        self.ds2.attach_to(self.data5, 0)
+        self.ds3.attach_to(self.data5, 1)       
+
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        assert self.root.ds1.is_attached_to(self.root.data4, 0) == 1
+        assert self.root.ds1.is_attached_to(self.root.group2.data5, 1) == 1
+        assert self.root.group1.ds2.is_attached_to(self.root.group2.data5, 
+                                                   0) == 1
+        assert self.root.ds3.is_attached_to(self.root.group2.data5, 1) == 1
+        
+        assert self.root.ds3.is_attached_to(self.root.data4, 0) == 0
+
+    def test03_attach_to(self):
+        """Checking that a Dimension Scale can only be attached to Arrays"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to attach a Dimension Scale to a String"
+            self.root.ds1.attach_to('a_string', 0)
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")
+            
+    def test04_attach_to(self):
+        """Checking that the Array has enough dimensions"""
+       
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to attach a Dimension Scale to the dim 1 \
+of a one dimensional Array"
+            self.root.ds1.attach_to(self.root.data4, 1)
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")
+            
+    def test05_attach_to(self):
+        """Checking that the dimensions are compatible"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to attach a Dimension Scale to \
+an incompatible dimension"
+            self.root.ds1.attach_to(self.root.group2.data5, 0)
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")     
+
+#---------------------------------------
+        
+    def test06_detach_from(self):
+        """Checking the function that detach Dimension Scales"""
+       
+        self.ds1.attach_to(self.data4, 0)
+        self.ds1.attach_to(self.data5, 1)
+        self.ds2.attach_to(self.data5, 0)
+        self.ds3.attach_to(self.data5, 1)       
+
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        self.root.ds1.detach_from(self.root.data4, 0)
+        self.root.ds1.detach_from(self.root.group2.data5, 1)
+        self.root.group1.ds2.detach_from(self.root.group2.data5, 0)
+        self.root.ds3.detach_from(self.root.group2.data5, 1)    
+        
+        assert self.root.ds1.is_attached_to(self.root.data4, 0) == 0
+        assert self.root.ds1.is_attached_to(self.root.group2.data5, 1) == 0
+        assert self.root.group1.ds2.is_attached_to(self.root.group2.data5, 
+                                                   0) == 0
+        assert self.root.ds3.is_attached_to(self.root.group2.data5, 1) == 0
+        
+        # ds3 was not attached to data4, but one can check anyway
+        assert self.root.ds3.is_attached_to(self.root.data4, 0) == 0
+
+    def test07_detach_from(self):
+        """Checking that a Dimension Scale can only be detached from Arrays"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to detach a Dimension Scale from a String"
+            self.root.ds1.detach_from('a_string', 0)
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")
+            
+    def test08_detach_from(self):
+        """Checking that the Array has enough dimensions"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to detach a Dimension Scale from the dim 1 \
+of a one dimensional Array"
+            self.root.ds1.detach_from(self.root.data4, 1)
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")
+            
+    def test09_detach_from(self):
+        """Checking that the dimensions are compatible"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+            
+        try:
+            if verbose:
+                print "Try to detach a Dimension Scale from \
+an incompatible dimension"
+            self.root.ds1.attach_to(self.root.group2.data5, 0)
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")
+
+#---------------------------------------
+
+    def test10_get_scale_number(self):
+        """Checking the number of Dimension Scales attached to an Array"""
+        
+        self.ds1.attach_to(self.data4, 0)
+        self.ds1.attach_to(self.data5, 1)
+        self.ds2.attach_to(self.data5, 0)
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        assert self.root.data4.get_scale_number(0) == 1 
+        assert self.root.group2.data5.get_scale_number(0) == 1
+        assert self.root.group2.data5.get_scale_number(1) == 2
+
+        # No Dimension Scale can be attached to 
+        # a dimension of a Dimension Scale
+        assert self.root.ds1.get_scale_number(0) == 0
+
+    def test11_get_scale_number(self):
+        """Checking the index of the dimension"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            if verbose:
+                print "Try to get the number of scales attached to \
+dim 1 of a one dimensional Array"
+            self.root.data4.get_scale_number(1)
+
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")
+
+#---------------------------------------
+            
+    def test12_set_label(self):
+        """Checking the function that sets labels for dimensions"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        if verbose:
+            print "Set 'label 1' as a label for dim 0 of data4"
+        self.root.data4.set_label(0,'label 1')
+        if verbose:
+            print "Set 'label 2' as a label for dim 1 of data5"
+        self.root.group2.data5.set_label(1, 'label 2')
+        
+        label_list1 = self.root.data4.attrs.DIMENSION_LABELS
+        if verbose:
+            print "'DIMENSION_LABELS' of data4 : " + label_list1.__str__()
+        label_list2 = self.root.group2.data5.attrs.DIMENSION_LABELS
+        if verbose:
+            print "'DIMENSION_LABELS' of data5 : " + label_list2.__str__()
+        
+        assert label_list1 == ['label 1']
+        assert label_list2 == ['', 'label 2']
+        
+    def test13_set_label(self):
+        """Checking the index of the dimension"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            if verbose:
+                print "Try to set a label for dim 1 \
+of a one dimensional Array"
+            self.root.data4.set_label(1, 'label')
+
+        except ValueError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next ValueError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a ValueError")
+
+#---------------------------------------
+
+    def test14_get_array_from_ref(self):
+        """Checking the function which returns an array from a reference"""
+        
+        self.ds1.attach_to(self.data4, 0)
+        self.ds1.attach_to(self.data5, 1)
+        self.ds2.attach_to(self.data5, 0)
+        self.ds3.attach_to(self.data5, 1)       
+
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        ref1 = self.root.data4.attrs.DIMENSION_LIST[0][0]
+        ref2 = self.root.group2.data5.attrs.DIMENSION_LIST[0][0]
+        ref3 = self.root.group2.data5.attrs.DIMENSION_LIST[1][1]
+        ref4 = self.root.ds1.attrs.REFERENCE_LIST[0][0]
+        ref5 = self.root.ds1.attrs.REFERENCE_LIST[1][0]
+        
+        numarray1 = self.fileh.get_array_from_ref(ref1)
+        numarray2 = self.fileh.get_array_from_ref(ref2)
+        numarray3 = self.fileh.get_array_from_ref(ref3)
+        numarray4 = self.fileh.get_array_from_ref(ref4)
+        numarray5 = self.fileh.get_array_from_ref(ref5)
+        
+        list1 = numarray1.tolist()
+        list2 = numarray2.tolist()
+        list3 = numarray3.tolist()
+        list4 = numarray4.tolist()
+        list5 = numarray5.tolist()
+        
+        assert list1 == [1, 1, 1]
+        assert list2 == [2, 2]
+        assert list3 == [[3, 3], [3, 3], [3, 3]]
+        assert list4 == [4, 4, 4]
+        assert list5 == [[5, 5, 5], [5, 5, 5]]
+                
+#---------------------------------------
+            
+class NotCloseFunctionsTestCase(FunctionsTestCase):
+    close = 0
+
+class CloseFunctionsTestCase(FunctionsTestCase):
+    close = 1
+
+#----------------------------------------------------------------------
+
+class AttributesTestCase(unittest.TestCase):
+
+    def setUp(self):
+        # Create an instance of HDF5 Table
+        self.file = tempfile.mktemp(".h5")
+        self.fileh = openFile(self.file, mode = "w")
+        self.root = self.fileh.root
+
+        # Create some Group objects
+        self.group1 = self.fileh.createGroup(self.root, 'group1',
+                                            "Group 1")
+
+        self.group2 = self.fileh.createGroup(self.root, 'group2',
+                                            "Group 2")
+                
+        # Create some Dimension Scale objects
+        
+        atom1 = Atom(shape=(0,), flavor="NumArray")
+        atom2 = Atom(shape=(0,), flavor="NumArray")
+        atom3 = Atom(shape=(3,0), flavor="NumArray")
+        
+        self.ds1 = \
+        self.fileh.createDimensionScale(self.root, 'ds1', atom1,
+                                        "Title for Dimension Scale 1",
+                                        None, 10, None, None,
+                                        "attribute 'NAME'")
+        self.ds1.append(numarray.array([1]*3))
+
+        self.ds2 = \
+        self.fileh.createDimensionScale(self.group1, 'ds2', atom2,
+                                        "Title for Dimension Scale 2",
+                                        None, 10, None, None)
+        self.ds2.append(numarray.array([2]*2))
+
+        self.ds3 = \
+        self.fileh.createDimensionScale(self.root, 'ds3', atom3)
+        self.ds3.append(numarray.array([[3]*2]*3))
+        
+        # Create an Array object
+        self.data4 = self.fileh.createArray(self.root, 'data4',
+                                          numarray.array([4]*3),
+                                          "Array 4")
+
+        # Create a EArray object
+        
+        atom5 = atom5 = Atom(shape=(0,3), flavor="NumArray")
+        
+        self.data5 = self.fileh.createEArray(self.group2, 'data5',
+                                            atom5, "Array 5")                                      
+        
+        self.data5.append(numarray.array([[5]*3]*2))
+
+    def tearDown(self):
+        self.fileh.close()
+        os.remove(self.file)
+        cleanup(self)
+
+#---------------------------------------
+
+    def test01_scale_title(self):
+        """Checking the attribute 'TITLE'"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        title1 = self.root.ds1.attrs.TITLE
+        assert title1 == 'Title for Dimension Scale 1'
+        
+        title2 = self.root.group1.ds2.attrs.TITLE
+        assert title2 == 'Title for Dimension Scale 2'
+        
+        title3 = self.root.ds3.attrs.TITLE
+        assert title3 == ''
+
+#---------------------------------------
+
+    def test02_scale_name(self):
+        """Checking the attribute 'NAME'"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        name1 = self.root.ds1.attrs.NAME
+        assert name1 == "attribute 'NAME'"
+        
+        name2 = self.root.group1.ds2.attrs.NAME
+        assert name2 == ""
+        
+        name3 = self.root.ds3.attrs.NAME
+        assert name3 == ""
+
+#---------------------------------------
+        
+    def test03_references(self):
+        """Checking the attributes 'REF_LIST' and 'DIM_LIST'"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        if verbose:
+            print "Attach ds1 to dim 1 of data5"
+        self.ds1.attach_to(self.data5, 1)
+        if verbose:
+            print "Attach ds2 to dim 0 of data5"
+        self.ds2.attach_to(self.data5, 0)
+        if verbose:
+            print "Attach ds3 to dim 1 of data5"
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        self.root.ds1.generate_ref_list()
+        self.root.group1.ds2.generate_ref_list()
+        self.root.ds3.generate_ref_list()
+        self.root.data4.generate_dim_list()
+        self.root.group2.data5.generate_dim_list()
+
+        ref_list1 = self.root.ds1.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds1 : " + ref_list1.__str__()
+        assert ref_list1 == [('/data4', 0),('/group2/data5', 1)]
+        
+        ref_list2 = self.root.group1.ds2.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds2 : " + ref_list2.__str__()
+        assert ref_list2 == [('/group2/data5', 0)]
+        
+        ref_list3 = self.root.ds3.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds3 : " + ref_list3.__str__()
+        assert ref_list3 == [('/group2/data5', 1)]
+        
+        dim_list1 = self.root.data4.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data4 : " + dim_list1.__str__()
+        assert dim_list1 == [['/ds1']]
+        
+        dim_list2 = self.root.group2.data5.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data5 : " + dim_list2.__str__()
+        assert dim_list2 == [['/group1/ds2'], ['/ds1', '/ds3']]
+        
+    def test04_references(self):
+        """Checking attributes when a Dimension Scale is detached"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        if verbose:
+            print "Attach ds1 to dim 1 of data5"
+        self.ds1.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        
+        if verbose:
+            self.root.ds1.generate_ref_list()
+            self.root.data4.generate_dim_list()
+            print "'REF_LIST' of ds1 : " + \
+                  self.root.ds1.attrs.REF_LIST.__str__()
+            print "'DIM_LIST' of data4 : " + \
+                  self.root.data4.attrs.DIM_LIST.__str__()
+    
+            print "Detach ds1 from dim 0 of data4"
+            
+        self.root.ds1.detach_from(self.root.data4, 0)
+        
+        self.root.ds1.generate_ref_list()
+        ref_list = self.root.ds1.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds1 : " + ref_list.__str__()
+        assert ref_list == [('/group2/data5', 1)]
+        
+        self.root.data4.generate_dim_list()
+        dim_list = self.root.data4.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data4 : " + dim_list.__str__()
+        assert dim_list == [[]]
+
+#---------------------------------------
+        
+    def test05_clear_all_scales(self):
+        """Checking the function that detach all DS from an Array"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        if verbose:
+            print "Attach ds1 to dim 1 of data5"
+        self.ds1.attach_to(self.data5, 1)
+        if verbose:
+            print "Attach ds2 to dim 0 of data5"
+        self.ds2.attach_to(self.data5, 0)
+        if verbose:
+            print "Attach ds3 to dim 1 of data5"
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        if verbose:
+            self.root.ds1.generate_ref_list()
+            self.root.group1.ds2.generate_ref_list()
+            self.root.ds3.generate_ref_list()
+            self.root.data4.generate_dim_list()
+            self.root.group2.data5.generate_dim_list()
+            
+            print "'REF_LIST' of ds1 : " + \
+                  self.root.ds1.attrs.REF_LIST.__str__()
+            print "'REF_LIST' of ds2 : " + \
+                  self.root.group1.ds2.attrs.REF_LIST.__str__()
+            print "'REF_LIST' of ds3 : " + \
+                  self.root.ds3.attrs.REF_LIST.__str__()
+            print "'DIM_LIST' of data4 : " + \
+                  self.root.data4.attrs.DIM_LIST.__str__()
+            print "'DIM_LIST' of data5 : " + \
+                  self.root.group2.data5.attrs.DIM_LIST.__str__()
+        
+            print "Detach all the DS attached to data5"
+        self.root.group2.data5.clear_all_scales()
+        
+        self.root.ds1.generate_ref_list()
+        ref_list1 = self.root.ds1.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds1 : " + ref_list1.__str__()
+        assert ref_list1 == [('/data4', 0)]
+        
+        self.root.group1.ds2.generate_ref_list()
+        ref_list2 = self.root.group1.ds2.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds2 : " + ref_list2.__str__()
+        assert ref_list2 == []
+        
+        self.root.ds3.generate_ref_list()
+        ref_list3 = self.root.ds3.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds3 : " + ref_list3.__str__()
+        assert ref_list3 == []
+        
+        self.root.data4.generate_dim_list()
+        dim_list1 = self.root.data4.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data4 : " + dim_list1.__str__()
+        assert dim_list1 == [['/ds1']]
+        
+        self.root.group2.data5.generate_dim_list()
+        dim_list2 = self.root.group2.data5.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data5 : " + dim_list2.__str__()
+        assert dim_list2 == [[], []]
+        
+    def test06_detach_from_all(self):
+        """Checking the function that detach a DS from all Arrays"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        if verbose:
+            print "Attach ds1 to dim 1 of data5"
+        self.ds1.attach_to(self.data5, 1)
+        if verbose:
+            print "Attach ds2 to dim 0 of data5"
+        self.ds2.attach_to(self.data5, 0)
+        if verbose:
+            print "Attach ds3 to dim 1 of data5"
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        if verbose:
+            self.root.ds1.generate_ref_list()
+            self.root.group1.ds2.generate_ref_list()
+            self.root.ds3.generate_ref_list()
+            self.root.data4.generate_dim_list()
+            self.root.group2.data5.generate_dim_list()
+            
+            print "'REF_LIST' of ds1 : " + \
+                  self.root.ds1.attrs.REF_LIST.__str__()
+            print "'REF_LIST' of ds2 : " + \
+                  self.root.group1.ds2.attrs.REF_LIST.__str__()
+            print "'REF_LIST' of ds3 : " + \
+                  self.root.ds3.attrs.REF_LIST.__str__()
+            print "'DIM_LIST' of data4 : " + \
+                  self.root.data4.attrs.DIM_LIST.__str__()
+            print "'DIM_LIST' of data5 : " + \
+                  self.root.group2.data5.attrs.DIM_LIST.__str__()
+        
+            print "Detach ds1 from all Arrays"
+        self.root.ds1.detach_from_all()
+        
+        self.root.ds1.generate_ref_list()
+        ref_list1 = self.root.ds1.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds1 : " + ref_list1.__str__()
+        assert ref_list1 == []
+        
+        self.root.group1.ds2.generate_ref_list()
+        ref_list2 = self.root.group1.ds2.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds2 : " + ref_list2.__str__()
+        assert ref_list2 == [('/group2/data5', 0)]
+        
+        self.root.ds3.generate_ref_list()
+        ref_list3 = self.root.ds3.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds3 : " + ref_list3.__str__()
+        assert ref_list3 == [('/group2/data5', 1)]
+        
+        self.root.data4.generate_dim_list()
+        dim_list1 = self.root.data4.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data4 : " + dim_list1.__str__()
+        assert dim_list1 == [[]]
+        
+        self.root.group2.data5.generate_dim_list()
+        dim_list2 = self.root.group2.data5.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data5 : " + dim_list2.__str__()
+        assert dim_list2 == [['/group1/ds2'], ['/ds3']]
+
+#---------------------------------------
+
+    def test07_delete_array(self):
+        """Checking the attributes when an Array is deleted"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        
+        if verbose:
+            print "Delete data4"
+        self.data4.remove()
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.ds1.generate_ref_list()
+        ref_list = self.root.ds1.attrs.REF_LIST
+        if verbose:
+            print "'REF_LIST' of ds1 : " + ref_list.__str__()
+        assert ref_list == []
+        
+    def test08_delete_dimension_scale(self):
+        """Checking the attributes when a Dimension Scale is deleted"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        
+        if verbose:
+            print "Delete ds1"
+        self.ds1.remove()
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.data4.generate_dim_list()
+        dim_list = self.root.data4.attrs.DIM_LIST
+        if verbose:
+            print "'DIM_LIST' of data4 : " + dim_list.__str__()
+        assert dim_list == [[]]
+
+#---------------------------------------
+
+    def test09_copy_array(self):
+        """Checking that the copy is not attached"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        
+        if verbose:
+            print "Copy data4 : the new Array is called data4bis"
+        self.data4.copy(self.root, "data4bis")
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        assert self.root.ds1.is_attached_to(self.root.data4bis, 0) == 0
+        assert self.root.ds1.is_attached_to(self.root.data4, 0) == 1
+        try:
+            dimension_list = self.root.data4bis.attrs.DIMENSION_LIST
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")    
+            
+    def test10_copy_dimension_scale(self):
+        """Checking that the copy is not attached"""
+        
+        if verbose:
+            print "Attach ds1 to dim 0 of data4"
+        self.ds1.attach_to(self.data4, 0)
+        
+        if verbose:
+            print "Copy ds1 : the new Dimension Scale is called ds1bis"
+        self.ds1.copy(self.root, "ds1bis")
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        assert self.root.ds1.is_attached_to(self.root.data4, 0) == 1
+        assert self.root.ds1bis.is_attached_to(self.root.data4, 0) == 0
+        assert self.root.ds1bis._c_classId == "DIMENSION_SCALE"
+        try:
+            reference_list = self.root.ds1bis.attrs.REFERENCE_LIST
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")    
+
+#---------------------------------------
+            
+class NotCloseAttributesTestCase(AttributesTestCase):
+    close = 0
+
+class CloseAttributesTestCase(AttributesTestCase):
+    close = 1
+
+#----------------------------------------------------------------------
+
+class AttributesProtectionTestCase(unittest.TestCase):
+
+    def setUp(self):
+        # Create an instance of HDF5 Table
+        self.file = tempfile.mktemp(".h5")
+        self.fileh = openFile(self.file, mode = "w")
+        self.root = self.fileh.root
+
+        # Create some Group objects
+        self.group1 = self.fileh.createGroup(self.root, 'group1',
+                                            "Group 1")
+
+        self.group2 = self.fileh.createGroup(self.root, 'group2',
+                                            "Group 2")
+                
+        # Create some Dimension Scale objects
+        
+        atom1 = Atom(shape=(0,), flavor="NumArray")
+        atom2 = Atom(shape=(0,), flavor="NumArray")
+        atom3 = Atom(shape=(3,0), flavor="NumArray")
+        
+        self.ds1 = \
+        self.fileh.createDimensionScale(self.root, 'ds1', atom1,
+                                        "Title for Dimension Scale 1",
+                                        None, 10, None, None,
+                                        "attribute 'NAME'")
+        self.ds1.append(numarray.array([1]*3))
+
+        self.ds2 = \
+        self.fileh.createDimensionScale(self.group1, 'ds2', atom2,
+                                        "Title for Dimension Scale 2",
+                                        None, 10, None, None)
+        self.ds2.append(numarray.array([2]*2))
+
+        self.ds3 = \
+        self.fileh.createDimensionScale(self.root, 'ds3', atom3)
+        self.ds3.append(numarray.array([[3]*2]*3))
+        
+        # Create a Array object
+        self.data4 = self.fileh.createArray(self.root, 'data4',
+                                          numarray.array([4]*3),
+                                          "Array 4")
+
+        # Create a EArray object
+        
+        atom5 = Atom(shape=(0,3), flavor="NumArray")
+        
+        self.data5 = self.fileh.createEArray(self.group2, 'data5',
+                                            atom5, "Array 5")                                      
+        
+        self.data5.append(numarray.array([[5]*3]*2))                                      
+
+    def tearDown(self):
+        self.fileh.close()
+        os.remove(self.file)
+        cleanup(self)
+
+#---------------------------------------
+
+    def test01_set_name(self):
+        """Checking that the attribute 'NAME' is reserved for DS"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            if verbose:
+                print "Try to add an attribute called 'NAME' to data4"
+            self.root.data4.attrs.NAME = "some name"
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+            
+    def test02_set_name(self):
+        """Checking that the attribute 'NAME' can be modified"""
+                
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        if verbose:
+            print "Try to set the attribute 'NAME' of ds1 to the value 'name1'"
+        self.root.ds1.attrs.NAME = "name1"
+        
+        name = self.root.ds1.attrs.NAME
+        if verbose:
+            print "'NAME' of ds1 : " + name
+        assert name == 'name1'
+        
+    def test03_del_name(self):
+        """Checking that the attribute 'NAME' can't be removed"""
+         
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        try:
+            if verbose:
+                print "Try to remove the attribute 'NAME' of ds1"
+            del self.root.ds1.attrs.NAME
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+
+#---------------------------------------
+
+    def test04_set_ref_list(self):
+        """Checking that the attribute 'REF_LIST' is reserved for DS"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            if verbose:
+                print "Try to add an attribute called 'REF_LIST' to data4"
+            self.root.data4.attrs.REF_LIST = [1,2,3,4,5,6,7,8,9]
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+            
+    def test05_set_ref_list(self):
+        """Checking that the attribute 'REF_LIST' can't be modified"""
+        
+        if verbose:
+            print "Attach ds1 to data4, so ds1 has an attribute REF_LIST"
+        self.ds1.attach_to(self.data4,0)
+                
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.ds1.generate_ref_list()
+        try:
+            if verbose:
+                print "Try to modify the attribute 'REF_LIST' of ds1"
+            self.root.ds1.attrs.REF_LIST = []
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")
+        
+    def test06_del_ref_list(self):
+        """Checking that the attribute 'REF_LIST' can't be removed"""
+        
+        if verbose:
+            print "Attach ds1 to data4, so ds1 has an attribute REF_LIST"
+        self.ds1.attach_to(self.data4,0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        self.root.ds1.generate_ref_list()
+        try:
+            if verbose:
+                print "Try to remove the attribute 'REF_LIST' of ds1"
+            del self.root.ds1.attrs.REF_LIST
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+
+#---------------------------------------
+
+    def test07_set_dim_list(self):
+        """Checking that the attribute 'DIM_LIST' is reserved for DS"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            if verbose:
+                print "Try to add an attribute called 'DIM_LIST' to data4"
+            self.root.data4.attrs.DIM_LIST = [1,2,3,4,5,6,7,8,9]
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+            
+    def test08_set_dim_list(self):
+        """Checking that the attribute 'DIM_LIST' can't be modified"""
+        
+        if verbose:
+            print "Attach ds1 to data4, so data4 has an attribute DIM_LIST"
+        self.ds1.attach_to(self.data4,0)
+                
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.data4.generate_dim_list()
+        try:
+            if verbose:
+                print "Try to modify the attribute 'DIM_LIST' of data4"
+            self.root.data4.attrs.DIM_LIST = []
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")
+        
+    def test09_del_dim_list(self):
+        """Checking that the attribute 'DIM_LIST' can't be removed"""
+        
+        if verbose:
+            print "Attach ds1 to data4, so data4 has an attribute DIM_LIST"
+        self.ds1.attach_to(self.data4,0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+
+        self.root.data4.generate_dim_list()
+        try:
+            if verbose:
+                print "Try to remove the attribute 'DIM_LIST' of data4"
+            del self.root.data4.attrs.DIM_LIST
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")      
+
+
+#---------------------------------------
+            
+class NotCloseAttributesProtectionTestCase(AttributesProtectionTestCase):
+    close = 0
+
+class CloseAttributesProtectionTestCase(AttributesProtectionTestCase):
+    close = 1
+
+#----------------------------------------------------------------------
+
+class ExtensionTestCase(unittest.TestCase):
+
+    def setUp(self):
+        # Create an instance of HDF5 Table
+        self.file = tempfile.mktemp(".h5")
+        self.fileh = openFile(self.file, mode = "w")
+        self.root = self.fileh.root
+
+        # Create some Group objects
+        self.group1 = self.fileh.createGroup(self.root, 'group1',
+                                            "Group 1")
+
+        self.group2 = self.fileh.createGroup(self.root, 'group2',
+                                            "Group 2")
+                
+        # Create some Dimension Scale objects
+        
+        atom1 = Atom(shape=(0,), flavor="NumArray")
+        atom2 = Atom(shape=(0,), flavor="NumArray")
+        atom3 = Atom(shape=(3,0), flavor="NumArray")
+        
+        self.ds1 = \
+        self.fileh.createDimensionScale(self.root, 'ds1', atom1,
+                                        "Title for Dimension Scale 1",
+                                        None, 10, None, None,
+                                        "attribute 'NAME'")
+        self.ds1.append(numarray.array([1]*3))
+
+        self.ds2 = \
+        self.fileh.createDimensionScale(self.group1, 'ds2', atom2,
+                                        "Title for Dimension Scale 2",
+                                        None, 10, None, None)
+        self.ds2.append(numarray.array([2]*2))
+
+        self.ds3 = \
+        self.fileh.createDimensionScale(self.root, 'ds3', atom3)
+        self.ds3.append(numarray.array([[3]*2]*3))
+        
+        # Create a Array object
+        self.data4 = self.fileh.createArray(self.root, 'data4',
+                                          numarray.array([4]*3),
+                                          "Array 4")
+
+        # Create a EArray object
+        
+        atom5 = Atom(shape=(0,3), flavor="NumArray")
+        
+        self.data5 = self.fileh.createEArray(self.group2, 'data5',
+                                            atom5, "Array 5")                                      
+        
+        self.data5.append(numarray.array([[5]*3]*2))                                      
+
+    def tearDown(self):
+        self.fileh.close()
+        os.remove(self.file)
+        cleanup(self)
+
+#---------------------------------------
+
+    def test01_earray_extension(self):
+        """Checking that an EArray can be extended"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        shape = self.root.group2.data5.shape
+        assert shape == (2L, 3)
+        
+        self.root.group2.data5.append([[1,2,3]])
+        
+        shape = self.root.group2.data5.shape
+        assert shape == (3L, 3)
+
+    def test02_earray_extension(self):
+        """Checking that the extension of an EArray can be prevented"""
+        
+        # Checking that an EArray can't be extended if a DS is attached to
+        # its extdim
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            self.root.group2.data5.append([[1,2,3]])
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")        
+
+    def test03_earray_extension(self):
+        """Checking that once the DS detached the EArray can be extended"""
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.group1.ds2.detach_from(self.root.group2.data5, 0)
+        self.root.group2.data5.append([[1,2,3]])
+        assert self.root.group2.data5.shape == (3L, 3)
+
+#---------------------------------------
+
+    def test04_dimension_scale_extension(self):
+        """Checking that an Dimension Scale can be extended"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        shape = self.root.group1.ds2.shape
+        assert shape == (2L,)
+        
+        self.root.group1.ds2.append([0])
+        
+        shape = self.root.group1.ds2.shape
+        assert shape == (3L,)
+
+    def test05_dimension_scale_extension(self):
+        """Checking that the extension of a DS can be prevented"""
+        
+        # Checking that a DS, if its first dimension is extensible, must be 
+        # detached from all Array before it can be extended
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            self.root.group1.ds2.append([0])
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")        
+
+    def test06_dimension_scale_extension(self):
+        """Checking that once detached the DS can be extended"""
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.group1.ds2.detach_from(self.root.group2.data5, 0)
+        self.root.group1.ds2.append([0])
+        assert self.root.group1.ds2.shape == (3L,)
+    
+    def test07_dimension_scale_extension(self):
+        """Checking that only the first dimension of DS matters"""
+        
+        # Checking that if the extensible dimension of a DS is not the first one,
+        # the DS can be extended
+        
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+                
+        self.root.ds3.append([[0], [1], [2]])
+        assert self.root.ds3.shape == (3, 3L,)    
+        
+#---------------------------------------
+
+    def test08_earray_truncation(self):
+        """Checking that an EArray can be truncated"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        shape = self.root.group2.data5.shape
+        assert shape == (2L, 3)
+        
+        self.root.group2.data5.truncate(1)
+        
+        shape = self.root.group2.data5.shape
+        assert shape == (1L, 3)
+
+    def test09_earray_truncation(self):
+        """Checking that the truncation of an EArray can be prevented"""
+        # Checking that an EArray can't be truncated if a DS is attached to 
+        # its extdim
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            self.root.group2.data5.truncate(1)
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")        
+
+    def test10_earray_truncation(self):
+        """Checking that once the DS is detached the EArray can be truncated"""
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.group1.ds2.detach_from(self.root.group2.data5, 0)
+        self.root.group2.data5.truncate(1)
+        assert self.root.group2.data5.shape == (1L, 3)
+
+#---------------------------------------
+
+    def test11_dimension_scale_truncation(self):
+        """Checking that a Dimension Scale can be truncated"""
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        shape = self.root.group1.ds2.shape
+        assert shape == (2L,)
+        
+        self.root.group1.ds2.truncate(1)
+        
+        shape = self.root.group1.ds2.shape
+        assert shape == (1L,)
+
+    def test12_dimension_scale_truncation(self):
+        """Checking that the truncation of a DS can be prevented"""
+        # Checking that a DS, if its first dimension is extensible, must be 
+        # detached from all Arrays before it can be truncated
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        try:
+            self.root.group1.ds2.truncate(1)
+        
+        except AttributeError:
+            if verbose:
+                (type, value, traceback) = sys.exc_info()
+                print "Great!, the next AttributeError was catched!"
+                print value
+                
+        else:
+            self.fail("expected a AttributeError")        
+
+    def test13_dimension_scale_truncation(self):
+        """Checking that, once detached, the DS can be truncated"""
+        
+        self.ds2.attach_to(self.data5, 0)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+        
+        self.root.group1.ds2.detach_from(self.root.group2.data5, 0)
+        self.root.group1.ds2.truncate(1)
+        assert self.root.group1.ds2.shape == (1L,)
+    
+    def test14_dimension_scale_truncation(self):
+        """Checking that only the first dimension of DS matters"""
+        # Checking that if the extensible dimension of a DS is not the first one,
+        # the DS can be truncated
+        
+        self.ds3.attach_to(self.data5, 1)
+        
+        if self.close:
+            if verbose:
+                print "(closing file version)"
+            self.fileh.close()
+            self.fileh = openFile(self.file, mode = "r+")
+            self.root = self.fileh.root
+                
+        self.root.ds3.truncate(1)
+        assert self.root.ds3.shape == (3, 1L,)    
+    
+
+#---------------------------------------
+            
+class NotCloseExtensionTestCase(ExtensionTestCase):
+    close = 0
+
+class CloseExtensionTestCase(ExtensionTestCase):
+    close = 1
+
+#----------------------------------------------------------------------
+
+def suite():
+    theSuite = unittest.TestSuite()
+    niter = 1
+    #heavy = 1 # Uncomment this only for testing purposes!
+
+    for i in range(niter):
+        theSuite.addTest(unittest.makeSuite(NotCloseFunctionsTestCase))
+        theSuite.addTest(unittest.makeSuite(CloseFunctionsTestCase))
+        theSuite.addTest(unittest.makeSuite(NotCloseAttributesTestCase))
+        theSuite.addTest(unittest.makeSuite(CloseAttributesTestCase))
+        theSuite.addTest(unittest.makeSuite(NotCloseAttributesProtectionTestCase))
+        theSuite.addTest(unittest.makeSuite(CloseAttributesProtectionTestCase))
+        theSuite.addTest(unittest.makeSuite(NotCloseExtensionTestCase))
+        theSuite.addTest(unittest.makeSuite(CloseExtensionTestCase))
+
+    return theSuite
+
+
+if __name__ == '__main__':
+    unittest.main( defaultTest='suite' )
diff -Naur pytables-1.3-ds-devel/tables/tests/test_ds_attributes2.py ds/pytables-1.3-devel/tables/tests/test_ds_attributes2.py
--- pytables-1.3-ds-devel/tables/tests/test_ds_attributes2.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/tests/test_ds_attributes2.py	1970-01-01 01:00:00.000000000 +0100
@@ -1,147 +0,0 @@
-print("=====================================================================")
-print("Test start...")
-print("=====================================================================")
-
-pblms = 0
-
-print("\nImporting PyTables and Numarray...")
-from numarray import *
-from tables import *
-print("Import succeeded.")
-
-print("\nOpening a hdf5 file, creating arrays...")
-# Open a new empty HDF5 file
-fileh3 = openFile("array3.h5", mode = "w")
-# Get the root group
-root = fileh3.root
-
-# create a new group
-g = fileh3.createGroup(root, 'group_1')
-
-# create some Array objects
-a = array([-1, 2, 4], Int16)
-ds1 = fileh3.createArray(root, 'dim_scale1', a, "Signed short 1D array")
-a = array([3, -2], Int16)
-ds2 = fileh3.createArray(root, 'dim_scale2', a, "Signed short 1D array")
-a = array([[-1, 9], [2, 4], [8, -8]], Int16)
-ds3 = fileh3.createArray(g, 'dim_scale3', a, "Signed short 2D array")
-a = array([[-1, 3, 8], [2, 7, 8], [4, 9, 8]], Int16)
-data1 = fileh3.createArray(root, 'data1', a, "Signed short 2D array")
-a = array([[-1, 3], [2, 7], [4, 9]], Int16)
-data2 = fileh3.createArray(root, 'data2', a, "Signed short 2D array")
-a = array([[-1, 9], [3, -8], [-1, -1]], Int16)
-data3 = fileh3.createArray(root, 'data3', a, "Signed short 2D array")
-print("File opened, 6 arrays created : ds1, ds2, ds3, data1, data2 and data3")
-
-print("\n=====================================================================")
-
-print("\nAttribute 'REF_LIST'\
-\nThis attribute is to make up for 'REFERENCE_LIST'. It stores pathnames instead of references.")
-
-print("Turn ds1 into a scale and attach it to dim 0 of data1...")
-ds1.to_scale('dimension_scale1')
-ds1.attach_to(data1,0)
-
-print("\nNow get the attributes of ds1 : %s\
-\n'REF_LIST' stores tuples gathering the pathnames of the Arrays that \
-\nthe Dimension Scale which owns this attribute is attached to, and the \
-\nindex of the dimension concerned" % ds1.attrs.__repr__())
-
-
-print("\nNow :\
-\nturn ds2 and ds3 into Dimension Scales\
-\nattach ds1 to dim 0 of data2\
-\nattach ds1 to dim 0 of data3\
-\nattach ds2 to dim 1 of data2\
-\nattach ds2 to dim 1 of data3\
-\nattach ds3 to dim 1 of data1")
-ds2.to_scale()
-ds3.to_scale()
-ds1.attach_to(data2,0)
-ds1.attach_to(data3,0)
-ds2.attach_to(data2,1)
-ds2.attach_to(data3,1)
-ds3.attach_to(data1,1)
-
-print("\nCheck the 'REF_LIST' of ds1, ds2, ds3 :\
-\n'REF_LIST' of ds1 : %s\
-\n'REF_LIST' of ds1 : %s\
-\n'REF_LIST' of ds1 : %s" % (ds1.attrs.REF_LIST, ds2.attrs.REF_LIST,ds3.attrs.REF_LIST))
-
-print("\n=====================================================================")
-
-print("\nAttribute 'DIM_LIST'\
-\nThis attribute is to make up for 'DIMENSION_LIST'. It stores pathnames instead of references.")
-
-print("\nCheck the 'DIM_LIST' of data1, data2, and data3 :\
-\n'DIM_LIST' of data1 : %s\
-\n'DIM_LIST' of data2 : %s\
-\n'DIM_LIST' of data3 : %s" % (data1.attrs.DIM_LIST, data2.attrs.DIM_LIST, data3.attrs.DIM_LIST))
-print("'DIM_LIST' stores, for each dimension of an Array, the list of the Dimension \
-\nScales attached for this dimension (even if this list is empty). These lists are \
-\ngathered in a list and sorted by index number (the first list is the list of Dimension\
-\nScales attached to dim 0, the second is for dim 1, and so on)")
-
-print("\n=====================================================================")
-
-print("Check that these attributes are 'move-safe'.\
-\nds1 is attached to dim 0 of data2.\
-\nWitness the attribute 'REF_LIST' of ds1 : %s\
-\nor the attribute 'DIM_LIST' of data : %s" % (ds1.attrs.REF_LIST, data2.attrs.DIM_LIST))
-
-print("Pathname of data1 : %s\
-\nMove data1 to 'group1'..." % data1._v_pathname)
-data1.move(g)
-print("Then :\
-\nPathname of data1 : %s\
-\n'REF_LIST' of ds1 : %s\
-\n'DIM_LIST' of data1 : %s" % (data1._v_pathname, ds1.attrs.REF_LIST, data1.attrs.DIM_LIST))
-
-print("\n=====================================================================")
-
-print("Check that these attributes are correctly updated when a Dimension \
-\nScale is attached / detached.")
-
-print("\nGet the attribute 'REF_LIST' of ds3 : %s\
-\nGet the attribute 'DIM_LIST' of data2 : %s" % (ds3.attrs.REF_LIST, data2.attrs.DIM_LIST))
-
-print("Attach ds3 to dim 0 of data2...")
-ds3.attach_to(data2,0)
-print("Check the attribute 'REF_LIST' of ds3 : %s\
-\nCheck the attribute 'DIM_LIST' of data2 : %s" % (ds3.attrs.REF_LIST, data2.attrs.DIM_LIST))
-
-print("\n=====================================================================")
-
-print("New method for Arrays : clear_scales2 which detach all Dimension Scales \
-\nattached to an Array\
-\nIt is called clear_scales2 because there's a first version which uses references.")
-
-print("\nWhich are the Dimension Scales attached to data2 : %s" % data2.attrs.DIM_LIST)
-
-print("\nDetach all these Dimension Scales...")
-data2.clear_scales2()
-print("\nWhich are the Dimension Scales attached to data2  : %s" % data2.attrs.DIM_LIST)
-
-print("Do ds1 and ds2 know that they have been detached ?\
-\n'REF_LIST' of ds1 : %s\
-\n'REF_LIST' of ds2 : %s" % (ds1.attrs.REF_LIST, ds2.attrs.REF_LIST))
-
-print("\n=====================================================================")
-print("New method for Dimension Scales : detach_from_all2 which detach a Dimension Scale \
-\nfrom all the Arrays it is attached to\
-\nIt is called detach_from_all2 because there's a first version which uses references")
-
-print("\nTo which Arrays is ds1 attached : %s" % ds1.attrs.REF_LIST)
-
-print("Detach ds1 from all the Arrays it is attached to...")
-ds1.detach_from_all2()
-print("\nTo which Arrays is ds1 attached : %s" % ds1.attrs.REF_LIST)
-
-print("Do data1 and data3 know that ds1 is no more attached to them ?\
-\n'DIM_LIST' of data1 : %s\
-\n'DIm_LIST' of data3 : %s" % (data1.attrs.DIM_LIST, data3.attrs.DIM_LIST)) 
-
-
-print("\n=====================================================================")
-print("End of the test : %d problem(s) occured" % pblms)
-print("=====================================================================")
diff -Naur pytables-1.3-ds-devel/tables/tests/test_ds_attributes.py ds/pytables-1.3-devel/tables/tests/test_ds_attributes.py
--- pytables-1.3-ds-devel/tables/tests/test_ds_attributes.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/tests/test_ds_attributes.py	1970-01-01 01:00:00.000000000 +0100
@@ -1,154 +0,0 @@
-print("=====================================================================")
-print("Test start...")
-print("=====================================================================")
-
-pblms = 0
-
-print("\nImporting PyTables and Numarray...")
-from numarray import *
-from tables import *
-print("Import succeeded.")
-
-print("\nOpening a hdf5 file, creating arrays...")
-# Open a new empty HDF5 file
-fileh2 = openFile("array2.h5", mode = "w")
-# Get the root group
-root = fileh2.root
-
-# create some Array objects
-a = array([-1, 2, 4], Int16)
-ds1 = fileh2.createArray(root, 'dim_scale1', a, "Signed short 1D array")
-a = array([3, -2], Int16)
-ds2 = fileh2.createArray(root, 'dim_scale2', a, "Signed short 1D array")
-a = array([[-1, 9], [2, 4], [8, -8]], Int16)
-ds3 = fileh2.createArray(root, 'dim_scale3', a, "Signed short 2D array")
-a = array([[-1, 3, 8], [2, 7, 8], [4, 9, 8]], Int16)
-data1 = fileh2.createArray(root, 'data1', a, "Signed short 2D array")
-a = array([[-1, 3], [2, 7], [4, 9]], Int16)
-data2 = fileh2.createArray(root, 'data2', a, "Signed short 2D array")
-a = array([[-1, 9], [3, -8], [-1, -1]], Int16)
-data3 = fileh2.createArray(root, 'data3', a, "Signed short 2D array")
-print("File opened, 6 arrays created : ds1, ds2, ds3, data1, data2 and data3")
-
-print("\n=====================================================================")
-
-print("\nAttribute 'NAME'")
-print("This attribute is a simple string, which is the optionnal parameter of 'to_scale'")
-
-print("Get the attributes of ds1 : %s" % ds1.attrs.__repr__())
-
-print("\nNow turn ds1 into a scale, with optional name 'dimension_scale1'...")
-ds1.to_scale('dimension_scale1')
-
-print("\nNow get the attributes of ds1 again : %s" % ds1.attrs.__repr__())
-
-print("\nThis new attribute 'NAME' can be modified in the standard way :")
-print(">>>ds1.attrs.NAME='new name'")
-ds1.attrs.NAME='new name'
-print("Check the 'NAME' of the Dimension Scale again : %s" % ds1.attrs.NAME)
-
-print("\n=====================================================================")
-
-print("\nAttribute 'REFRENCE_LIST'")
-print("This attribute is a list of tuples (ref, idx) where 'ref' is a reference to an \
-array and 'idx' the index of the dimension (of the referenced array) current Dimension Scale \
-is attached to.")
-
-print("\nAs long as a Dimension Scale is not attached to any dimension, this Dimension Scale \
-doesn't own such an attribute.")
-print("Get the attributes of ds1 : %s" % ds1.attrs.__repr__())
-
-print("\nNow attach ds1 to dim 0 of data1...")
-ds1.attach_to(data1,0)
-
-print("\nGet the attributes of ds1 again : %s" % ds1.attrs.__repr__())
-
-print("\nThis new attribute 'REFERENCE_LIST' can't be modified in the standard way.")
-print("Try and modify 'REFERENCE_LIST' in the standard way...")
-try:
-	ds1.attrs.REFERENCE_LIST=[]
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nTo modify this attribute, use 'attach_to' and 'detach_from' instead.")
-print("Attach ds1 to dim 0 of data2...")
-ds1.attach_to(data2,0)
-print("New 'REFERENCE_LIST' : %s" % ds1.attrs.REFERENCE_LIST)
-print("Detach ds1 from dim 0 of data2...")
-ds1.detach_from(data2,0)
-print("New 'REFERENCE_LIST' : %s" % ds1.attrs.REFERENCE_LIST)
-
-
-print("\n=====================================================================")
-
-print("\nAttribute 'DIMENSION_LIST'")
-print("This attribute is a list of tuples (ref_list, idx) where 'ref_list' is a list of reference to \
-Dimension Scales and 'idx' the index of the dimension (of current array) to witch these Dimension Scales \
-are attached to.")
-
-print("\nAs long as no Dimension Scale is attached to an array, this array doesn't own such an attribute.")
-print("Get the attributes of data2 : %s" % data3.attrs.__repr__())
-
-print("Now attach ds3 to dim 0 of data3...")
-ds3.to_scale()
-ds3.attach_to(data3,0)
-
-print("\nGet the attributes of data3 again : %s" % data3.attrs.__repr__())
-
-print("\nThis new attribute 'DIMENSION_LIST' can't be modified in the standard way.")
-print("Try and modify 'DIMENSION_LIST' in the standard way...")
-try:
-	data3.attrs.DIMENSION_LIST=[]
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nTo modify this attribute, use 'attach_to' and 'detach_from' instead.")
-print("Attach ds2 to dim 1 of data3...")
-ds2.to_scale()
-ds2.attach_to(data3,1)
-print("New 'DIMENSION_LIST' : %s" % data3.attrs.DIMENSION_LIST)
-print("Attach ds1 to dim 0 of data3...")
-ds1.attach_to(data3,0)
-print("New 'DIMENSION_LIST' : %s" % data3.attrs.DIMENSION_LIST)
-print("Detach ds3 from dim 0 of data3...")
-ds3.detach_from(data3,0)
-print("New 'DIMENSION_LIST' : %s" % data3.attrs.DIMENSION_LIST)
-
-print("\n=====================================================================")
-
-print("\nAttribute 'DIMENSION_LABELS'")
-print("This attribute is a list of tuples (lab, idx) where 'lab' is a label and idx the index of the dimension \
-the label of which is 'lab'.")
- 
-print("\nAs long as no label is et for any dimension of an array, this array doesn't own such an attribute.")
-print("Get the attribute of data2 : %s" % data2.attrs.__repr__())
-
-print("Now set 'first dimension' as the label for dim 0 of data2...")
-data2.set_label(0,'first dimension')
-
-print("\nGet the attributes of data2 again : %s" % data2.attrs.__repr__())
-
-print("\nThis new attribute 'DIMENSION_LIST' can't be modified in the standard way.")
-print("Try and modify 'DIMENSION_LABELS' in the standard way...")
-try:
-	data2.attrs.DIMENSION_LABELS=[]
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-	
-print("\nTo modify this attribute, use 'set_label' instead.")
-print("Set 'new label for the first dimension' as a new label for dim 0 of data2...")
-data2.set_label(0, 'new label for the first dimension')
-print("New 'DIMENSION_LABELS' : %s" % data2.attrs.DIMENSION_LABELS)
-print("Set 'label for the second dimension' as a label for dim 1 of data2...")
-data2.set_label(1, 'label for the second dimension')
-print("New 'DIMENSION_LABELS' : %s" % data2.attrs.DIMENSION_LABELS)
-
-print("\n=====================================================================")
-print("End of the test : %d problem(s) occured" % pblms)
-print("=====================================================================")
diff -Naur pytables-1.3-ds-devel/tables/tests/test_ds.py ds/pytables-1.3-devel/tables/tests/test_ds.py
--- pytables-1.3-ds-devel/tables/tests/test_ds.py	2005-12-09 14:33:28.000000000 +0100
+++ ds/pytables-1.3-devel/tables/tests/test_ds.py	1970-01-01 01:00:00.000000000 +0100
@@ -1,210 +0,0 @@
-print("=====================================================================")
-print("Test start...")
-print("=====================================================================")
-
-pblms = 0
-
-print("\nImporting PyTables and Numarray...")
-from numarray import *
-from tables import *
-print("Import succeeded.")
-
-print("\nOpening a hdf5 file, creating arrays...")
-# Open a new empty HDF5 file
-fileh = openFile("array.h5", mode = "w")
-# Get the root group
-root = fileh.root
-
-# create some Array objects
-a = array([-1, 2, 4], Int16)
-ds1 = fileh.createArray(root, 'dim_scale1', a, "Signed short 1D array")
-a = array([3, -2, 8], Int16)
-ds2 = fileh.createArray(root, 'dim_scale2', a, "Signed short 1D array")
-a = array([[-1, 9], [2, 4], [8, -8]], Int16)
-ds3 = fileh.createArray(root, 'dim_scale3', a, "Signed short 2D array")
-a = array([[-1, 3, 8], [2, 7, 8], [4, 9, 8]], Int16)
-data1 = fileh.createArray(root, 'data1', a, "Signed short 2D array")
-a = array([[-1, 3], [2, 7], [4, 9]], Int16)
-data2 = fileh.createArray(root, 'data2', a, "Signed short 2D array")
-print("File opened, 5 arrays created : ds1, ds2, ds3, data1, and data2")
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'is_scale()' : returns 1 if current object is a Dimension Scale, 0 otherwise...")
-print("Is ds1 a Dimension Scale : %d" % ds1.is_scale())
-print("Is ds2 a Dimension Scale : %d" % ds2.is_scale())
-print("Is data1 a Dimension Scale : %d" % data1.is_scale())
-print("This function will be further tested later.")
-
-print("\n=====================================================================")
-
-print("\nTesting the function to_scale'(name='')' : Turns the current Array into a Dimension Scale, with an optional name...")
-print("Turn ds1 into a scale.")
-ds1.to_scale('dimension scale for dimension 0 of dataset1 and dimension 0 of dataset2')
-print("Now is ds1 a Dimension scale : %d" % ds1.is_scale())
-
-print("\nAn exception is raised if current object is already a Dimension Scale.")
-print("Try and turn ds1 into a scale again...")
-try:
-	ds1.to_scale()
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nTurning ds2 and ds3 into scales too...")
-ds2.to_scale('dimension_scale for dimension 1 of dataset1')
-ds3.to_scale('dimension_scale for dimension 0 of dataset2')
-print("Checking that ds2 and ds3 have been successfully turned into scales...")
-print("Now is ds2 a Dimension scale : %d" % ds2.is_scale())
-print("Now is ds3 a Dimension scale : %d" % ds3.is_scale())
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'attach_to(array, dimension_index)' : Attaches current Dimension Scale to one dimension of an Array...")
-print("This function is only available for Dimension Scales.")
-print("Attaching ds1 to dim 0 of data1 and data2...")
-ds1.attach_to(data1,0)
-ds1.attach_to(data2,0)
-print("Attaching ds2 to dim 1 of data1...")
-ds2.attach_to(data1,1)
-print("Attaching ds3 to dim 0 of data2...")
-ds3.attach_to(data2,0)
-
-print("\nAn exception is raised if one try to attach a Dimension Scale to something different from a genuine array (a Dimension Scale is not considered as an array).")
-print("Try and attach ds2 to dim 0 of ds1...")
-try:
-	ds2.attach_to(ds1, 0)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-	
-print("\nAn exception is raised if the dimension_index is too big...")
-print("Try and attach ds2 to dim 2 of data1 which only have two dimensions...")
-try:
-	ds2.attach_to(data1, 2)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-	
-print("\nAn exception is raised if the first dimension of current Dimension scale is incompatible with the specified dimension.")
-print("Try and attach ds1 to dim 1 of data2...")
-try:
-	ds1.attach_to(data2, 1)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'is_attached_to(array, dimension_index)' : checks if current Dimension Scale is attached to the specified dimension...")
-print("This function is only available for Dimension Scales.")
-print("Is ds1 attached to dim 0 of data1 : %d" % ds1.is_attached_to(data1,0))
-print("Is ds1 attached to dim 0 of data2 : %d" % ds1.is_attached_to(data2,0))
-print("Is ds1 attached to dim 1 of data2 : %d" % ds1.is_attached_to(data2,1))
-print("Is ds2 attached to dim 1 of data1 : %d" % ds2.is_attached_to(data1,1))
-print("Is ds3 attached to dim 0 of data2 : %d" % ds3.is_attached_to(data2,0))
-print("Is ds3 attached to dim 1 of data2 : %d" % ds3.is_attached_to(data2,1))
-
-print("\nAn exception is raised if the 'array' specified is not a genuine array (a Dimension Scale is not considered as an array).")
-print("Try and check whether ds2 is attached to dim 0 of ds3")
-try:
-	ds2.is_attached_to(ds3, 0)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nAn exception is raised if the dimension index is too big.")
-print("Try and check whether ds1 is attached to dim 2 of data1...")
-try:
-	ds1.is_attached_to(data1, 2)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'detach_from(array, dimension_index)' : detaches current Dimension Scale from the specified dimension.")
-print("This function is only available for Dimension Scales.")
-print("Is ds1 attached to dim 0 of data1 : %d" % ds1.is_attached_to(data1, 0))
-print("Then detach ds1 from data1...")
-ds1.detach_from(data1,0)
-print("Is ds1 still attached to dim 0 of data 1 : %d" % ds1.is_attached_to(data1, 0))
-print("Re-attach ds1 to dim 0 of data1...")
-ds1.attach_to(data1,0)
-print("Now is ds1 attached to dim 0 of data1 : %d" % ds1.is_attached_to(data1, 0))
-
-print("\nAn exception is raised if the 'array' specified is not a genuine array (a Dimension Scale is not considered as an array).")
-print("Try and detach ds2 from dim 0 of ds3")
-try:
-	ds2.detach_from(ds3, 0)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nAn exception is raised if the dimension index is too big.")
-print("Try and detach ds1 from dim 2 of data1...")
-try:
-	ds1.detach_from(data1, 2)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\nAn exception is raised if current Dimension Scale is not attached to the specified dimension.")
-print("Try and detach ds1 from dim 1 of data1")
-try:
-	ds1.detach_from(data1, 1)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'get_scale_number(dimension_index)' : returns the number of scales attached to the dimension specified.")
-print("Get the number of scales attached to dim 0 of data1 : %d" % data1.get_scale_number(0)) 
-print("Get the number of scales attached to dim 1 of data1 : %d" % data1.get_scale_number(1))
-print("Get the number of scales attached to dim 0 of data2 : %d" % data2.get_scale_number(0))
-print("Get the number of scales attached to dim 1 of data2 : %d" % data2.get_scale_number(1))
-
-print("\n=====================================================================")
-
-print("\nTesting the function 'set_label(dimension_index, label)' : sets a label for the specified dimension.")
-print("\nTesting the function 'get_label(dimension_index)' : gets the label for the specified dimension.")
-print("Set '123456789' as the label for dim 0 of data1...")
-data1.set_label(0,'123456789')
-print("Get the label for dim 0 of data1 : %s" % data1.get_label(0))
-print("Set 'azertyuio' as a new label for dim 0 of data1...")
-data1.set_label(0,"azertyuio")
-print("Get the new label for dim 0 of data1 : %s" % data1.get_label(0))
-
-print("\nAn exception is raised if the dimension index is too big.")
-print("Try and set a label for dim 2 of data2 ...")
-try:
-	data2.set_label(2,"label")
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("Try and get a label for dim 2 of data1 ...")
-try:
-	data1.get_label(2)
-	print("An exception should have been raised...")
-	pblms=pblms+1
-except Exception, m:
-	print("Exception successfully raised : %s" % m.__str__())
-
-print("\n=====================================================================")
-print("End of the test : %d problem(s) occured" % pblms)
-print("=====================================================================")
-
-
-
