rahul003 commented on a change in pull request #11234: [MXNET-535] Add Warmup 
Learning Rate Scheduler and fix bugs in LR Schedulers
URL: https://github.com/apache/incubator-mxnet/pull/11234#discussion_r196620100
 
 

 ##########
 File path: python/mxnet/lr_scheduler.py
 ##########
 @@ -153,18 +153,57 @@ class PolyScheduler(LRScheduler):
 
     """
 
-    def __init__(self, max_update, base_lr=0.01, pwr=2):
-        super(PolyScheduler, self).__init__(base_lr)
+    def __init__(self, max_update, pwr=2, **kwargs):
+        super(PolyScheduler, self).__init__(**kwargs)
         assert isinstance(max_update, int)
         if max_update < 1:
             raise ValueError("maximum number of updates must be strictly 
positive")
         self.base_lr_orig = self.base_lr
         self.max_update = max_update
         self.power = pwr
-        self.base_lr = self.base_lr_orig
 
     def __call__(self, num_update):
         if num_update <= self.max_update:
             self.base_lr = self.base_lr_orig * pow(1.0 - float(num_update) / 
float(self.max_update),
                                                    self.power)
         return self.base_lr
+
+class WarmupScheduler(LRScheduler):
+    """Implement linear warmup starting from lr_begin to given scheduler's 
base_lr.
+
+    Parameters
+    ----------
+    lr_begin: float
+                  learning rate to start increasing from
+    warmup_steps: int
+                  number of warmup steps
+    scheduler: LRScheduler
+              scheduler following the warmup
+    """
+    def __init__(self, lr_begin, warmup_steps, scheduler):
+        super(WarmupScheduler, self).__init__()
+        self.lr_begin = lr_begin
+        self.scheduler = scheduler
+        self.lr_final = self.scheduler.base_lr
+        if self.lr_begin > self.lr_final:
+            raise ValueError("Final lr has to be higher than beginning lr")
+        if warmup_steps <= 0:
+            raise ValueError("Warmup steps has to be positive")
+        self.warmup_steps = warmup_steps
+        self.lrs_updates = {}
 
 Review comment:
   For each batch, the number of calls to __call__ would equal the number 
of learnable parameter arrays. 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to