__init__.py

# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import copy

import paddle

__all__ = ['build_optimizer']


def build_lr_scheduler(lr_config, epochs, step_each_epoch):
    """Build a learning-rate value or schedule from a config dict.

    If the config names a scheduler class from the local `learning_rate`
    module, instantiate and call it; otherwise fall back to the plain
    `learning_rate` value given in the config.
    """
    from . import learning_rate
    lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
    if 'name' in lr_config:
        lr_name = lr_config.pop('name')
        lr = getattr(learning_rate, lr_name)(**lr_config)()
    else:
        lr = lr_config['learning_rate']
    return lr
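
# Illustrative only (not part of the original file): a sketch of the kind of
# `lr` config this function expects. The scheduler name 'Cosine' and the
# field values below are assumptions for illustration, not taken from this
# file.
#
#     lr_config = {'name': 'Cosine', 'learning_rate': 0.001}
#     lr = build_lr_scheduler(lr_config, epochs=100, step_each_epoch=500)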


def build_optimizer(config, epochs, step_each_epoch, parameters):
    """Build an optimizer (and its learning-rate schedule) from a config dict."""
    from . import regularizer, optimizer
    config = copy.deepcopy(config)
    # step 1: build the learning-rate schedule
    lr = build_lr_scheduler(config.pop('lr'), epochs, step_each_epoch)

    # step 2: build the weight-decay regularizer, if one is configured
    if 'regularizer' in config and config['regularizer'] is not None:
        reg_config = config.pop('regularizer')
        reg_name = reg_config.pop('name') + 'Decay'
        reg = getattr(regularizer, reg_name)(**reg_config)()
    else:
        reg = None

    # step 3: build the optimizer itself, with optional gradient clipping
    optim_name = config.pop('name')
    if 'clip_norm' in config:
        clip_norm = config.pop('clip_norm')
        grad_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    else:
        grad_clip = None
    optim = getattr(optimizer, optim_name)(learning_rate=lr,
                                           weight_decay=reg,
                                           grad_clip=grad_clip,
                                           **config)
    return optim(parameters), lr
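
# Illustrative only (not part of the original file): a sketch of how
# build_optimizer might be driven from a training config. The optimizer name
# 'Adam', the regularizer 'L2', and the hyperparameter values are assumptions
# for illustration; `model` stands for any paddle.nn.Layer.
#
#     optim_config = {
#         'name': 'Adam',
#         'beta1': 0.9,
#         'beta2': 0.999,
#         'lr': {'name': 'Cosine', 'learning_rate': 0.001},
#         'regularizer': {'name': 'L2', 'factor': 1e-5},
#     }
#     optim, lr_scheduler = build_optimizer(
#         optim_config, epochs=100, step_each_epoch=500,
#         parameters=model.parameters())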