leezu commented on a change in pull request #18413:
URL: https://github.com/apache/incubator-mxnet/pull/18413#discussion_r436162599



##########
File path: python/mxnet/gluon/parameter.py
##########
@@ -664,6 +677,40 @@ def cast(self, dtype):
             self._grad = [i.astype(dtype) for i in self._grad]
             autograd.mark_variables(self._data, self._grad, self.grad_req)
 
+    def check_and_setattr(self, **kwargs):

Review comment:
       Should this be a private API? `_check_and_setattr`
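
A minimal sketch of the suggested rename (illustrative only; the body shown is a placeholder, not the actual implementation):

```python
class Parameter:
    # ... existing attributes and methods elided ...

    def _check_and_setattr(self, **kwargs):
        """Validate and set attributes (internal helper).

        The leading underscore marks this as internal to Gluon, so it
        can change later without a deprecation cycle.
        """
        for name, value in kwargs.items():
            setattr(self, name, value)  # placeholder body for illustration
```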

##########
File path: python/mxnet/gluon/parameter.py
##########
@@ -142,6 +142,19 @@ def __repr__(self):
     def grad_req(self):
         return self._grad_req
 
+    @property
+    def name(self):
+        return self._name
+
+    @name.setter
+    def name(self, name):
+        if self._var is not None:
+            warnings.warn("Parameter {} has generated its symbol 
representation, "
+                          "which could be used in some cached graph. "
+                          "Skip the operation that sets its name as {}. 
".format(self.name, name), stacklevel=4)
+            return

Review comment:
       With respect to the symbol names: Note that in the Gluon API we only 
need to ensure we can match (bind) the parameter ndarrays to the inputs. This 
operation is local to the particular HybridBlock. Let's see if we can simplify 
the API further to get rid of the problem.
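   A small sketch of that point (assuming the 1.x-style `mx.nd` API; shapes are illustrative): once a `HybridBlock` is hybridized, its cached graph only has to bind the block's own parameter ndarrays to the graph inputs, so no global parameter name is needed for the match.

```python
import mxnet as mx
from mxnet.gluon import nn

dense = nn.Dense(10, in_units=5)
dense.initialize()
dense.hybridize()

# The first call builds the cached graph; the block's own parameter
# ndarrays are bound to the graph inputs locally, without any global
# name lookup.
out = dense(mx.nd.ones((2, 5)))
print(out.shape)  # (2, 10)
```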
   
   

##########
File path: tests/python/unittest/test_gluon.py
##########
@@ -265,31 +264,30 @@ def test_parameter_str():
     class Net(gluon.Block):
         def __init__(self, **kwargs):
             super(Net, self).__init__(**kwargs)
-            with self.name_scope():
-                self.dense0 = nn.Dense(10, in_units=5, use_bias=False)
+            self.dense0 = nn.Dense(10, in_units=5, use_bias=False)
 
-    net = Net(prefix='net1_')
+    net = Net()
     lines = str(net.collect_params()).splitlines()
-
-    assert lines[0] == 'net1_ ('
-    assert 'net1_dense0_weight' in lines[1]
-    assert '(10, 5)' in lines[1]
-    assert 'float32' in lines[1]
-    assert lines[2] == ')'
-
+    
+    #assert lines[0] == 'net1_ ('

Review comment:
       Delete?

##########
File path: tests/python/unittest/test_gluon_estimator.py
##########
@@ -394,24 +388,6 @@ def test_val_net():
                     val_loss=val_loss,
                     val_net=val_net)
 
-    with pytest.raises(RuntimeError):
-        est.fit(train_data=dataloader,
-                val_data=dataloader,
-                epochs=num_epochs)
-
-    ''' test weight sharing of sequential networks with namescope '''

Review comment:
      Would it be better to test the new weight-sharing method here instead of deleting the test? I.e. `val_net.share_parameters(net.collect_params())`? See the sketch below.
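
A minimal sketch of the restored test (assuming the `share_parameters` API introduced in this PR; layer sizes are illustrative):

```python
from mxnet.gluon import nn

net = nn.Sequential()
net.add(nn.Dense(3, in_units=4), nn.Dense(1, in_units=3))
net.initialize()

# The validation network reuses the training network's parameters
# instead of allocating its own.
val_net = nn.Sequential()
val_net.add(nn.Dense(3, in_units=4), nn.Dense(1, in_units=3))
val_net.share_parameters(net.collect_params())
```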
   

##########
File path: tests/python/unittest/test_profiler.py
##########
@@ -558,40 +557,42 @@ def test_gpu_memory_profiler_gluon():
 
     # Sample gpu_memory_profiler.csv
     # "Attribute Name","Requested Size","Device","Actual Size","Reuse?"
-    # "<unk>:in_arg:data","640","0","4096","0"
-    # "net:arg_grad:net_dense0_bias","512","0","4096","0"
-    # "net:arg_grad:net_dense0_weight","5120","0","8192","0"
-    # "net:arg_grad:net_dense1_bias","256","0","4096","0"
-    # "net:arg_grad:net_dense1_weight","32768","0","32768","0"
-    # "net:arg_grad:net_dense2_bias","128","0","4096","0"
-    # "net:arg_grad:net_dense2_weight","8192","0","8192","0"
-    # "net:dense0:net_dense0_fwd","8192","0","8192","0"
-    # "net:dense0:tanh:net_dense0_tanh_fwd","8192","0","8192","0"
-    # "net:dense1:net_dense1_fwd","4096","0","4096","0"
-    # "net:dense1:tanh:net_dense1_tanh_fwd","4096","0","4096","0"
-    # "net:dense2:net_dense2_fwd","2048","0","4096","0"
-    # "net:dense2:net_dense2_fwd_backward","4096","0","4096","0"
-    # "net:dropout0:net_dropout0_fwd","8192","0","8192","0"
-    # "net:dropout0:net_dropout0_fwd","8192","0","8192","0"
-    # "net:in_arg:net_dense0_bias","512","0","4096","0"
-    # "net:in_arg:net_dense0_weight","5120","0","8192","0"
-    # "net:in_arg:net_dense1_bias","256","0","4096","0"
-    # "net:in_arg:net_dense1_weight","32768","0","32768","0"
-    # "net:in_arg:net_dense2_bias","128","0","4096","0"
-    # "net:in_arg:net_dense2_weight","8192","0","8192","0"
-    # "net:relu0:net_relu0_fwd","2048","0","4096","0"
-    # "net:relu0:net_relu0_fwd_backward","8192","0","8192","0"
-    # "net:relu0:net_relu0_fwd_head_grad","2048","0","4096","0"
-    # "resource:cudnn_dropout_state (dropout-inl.h 
+258)","1671168","0","1671168","0"
-    # "resource:temp_space (fully_connected-inl.h 
+316)","34816","0","36864","0"
+    # 0:0_fwd,8192,0,8192,0
+    # 0_act:0_act_fwd,8192,0,8192,0
+    # 1:1_fwd,8192,0,8192,0
+    # 1:1_fwd,8192,0,8192,0
+    # 2:2_fwd,4096,0,4096,0
+    # 2_act:2_act_fwd,4096,0,4096,0
+    # 3:3_fwd_backward,4096,0,4096,1
+    # 4:4_fwd,2048,0,4096,0
+    # 4:4_fwd_backward,8192,0,8192,0
+    # 4:4_fwd_head_grad,2048,0,4096,0
+    # <unk>:arg_grad:0_bias,512,0,4096,0
+    # <unk>:arg_grad:0_weight,5120,0,8192,0
+    # <unk>:arg_grad:2_bias,256,0,4096,0
+    # <unk>:arg_grad:2_weight,32768,0,32768,0
+    # <unk>:arg_grad:3_bias,128,0,4096,0
+    # <unk>:arg_grad:3_weight,8192,0,8192,0
+    # <unk>:in_arg:0_bias,512,0,4096,0
+    # <unk>:in_arg:0_weight,5120,0,8192,0
+    # <unk>:in_arg:2_bias,256,0,4096,0
+    # <unk>:in_arg:2_weight,32768,0,32768,0
+    # <unk>:in_arg:3_bias,128,0,4096,0
+    # <unk>:in_arg:3_weight,8192,0,8192,0
+    # <unk>:in_arg:data,640,0,4096,0
+    # resource:cudnn_dropout_state (dropout-inl.h +256),786432,0,786432,0
+    # resource:temp_space (fully_connected-inl.h +316),8192,0,8192,0
+    # nvml_amend,639434752,0,639434752,0
 
     # We are only checking for weight parameters here, also making sure that
     # there are no unknown entries in the memory profile.

Review comment:
      This doesn't hold true anymore: the new sample output above is full of `<unk>` entries, so the "no unknown entries" check needs updating. See the sketch below.
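
A hedged sketch of a check consistent with the new sample (the rows are abridged from the sample output above; in the real test they would come from parsing `gpu_memory_profiler.csv`):

```python
# Rows abridged from the sample gpu_memory_profiler.csv above; in the
# real test they would be read from the profiler's CSV output.
rows = [
    "<unk>:in_arg:0_weight,5120,0,8192,0",
    "<unk>:in_arg:0_bias,512,0,4096,0",
    "<unk>:in_arg:2_weight,32768,0,32768,0",
    "<unk>:in_arg:3_weight,8192,0,8192,0",
]

# With the new naming scheme the `<unk>` prefix is expected, so only
# assert that each weight/bias parameter appears somewhere.
for param in ["0_weight", "0_bias", "2_weight", "3_weight"]:
    assert any(param in row for row in rows), param
```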

##########
File path: python/mxnet/gluon/parameter.py
##########
@@ -664,6 +677,40 @@ def cast(self, dtype):
             self._grad = [i.astype(dtype) for i in self._grad]
             autograd.mark_variables(self._data, self._grad, self.grad_req)
 
+    def check_and_setattr(self, **kwargs):
+        """check and set attributes for parameters
+        # TODO add explanations.

Review comment:
      Please add the explanations and delete the TODO.
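
For example, a sketch of what the completed docstring could look like (the described semantics are an assumption based on the method name and the surrounding diff, not the confirmed behavior):

```python
def check_and_setattr(self, **kwargs):
    """Check and set attributes of this Parameter.

    For every keyword argument, verify that the new value is
    compatible with any value already set on this Parameter
    (for example, a previously declared shape or dtype) before
    assigning it.

    Parameters
    ----------
    **kwargs
        Attribute names and values to set, e.g. ``shape``,
        ``dtype`` or ``init``.
    """
```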

##########
File path: python/mxnet/gluon/block.py
##########
@@ -366,36 +327,25 @@ def _alias(self):
 
     @property
     def prefix(self):
-        """Prefix of this :py:class:`Block`."""
+        """Prefix of this :py:class:`Block`.
+        Please call self.set_prefix() to get the correct prefix for the current block.
+        """
         return self._prefix
 
     @property
     def name(self):
-        """Name of this :py:class:`Block`, without '_' in the end."""
-        return self._name
-
-    def name_scope(self):
-        """Returns a name space object managing a child :py:class:`Block` and 
parameter
-        names. Should be used within a ``with`` statement::
-
-            with self.name_scope():
-                self.dense = nn.Dense(20)
-
-        Please refer to
-        `the naming tutorial </api/python/docs/tutorials/packages/gluon/blocks/naming.html>`_

Review comment:
      We need to update the documentation on the website as well. It could be done in a separate PR once the new API is stable.

   cc: @ys2843 What's your recommendation for keeping the tutorials for the stable 1.x version on the website while we start providing tutorials for the upcoming 2.0?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

