Python chainer module: optimizers example source code

We extracted the following 8 code examples from open-source Python projects to illustrate how to use the chainer.optimizers module.
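
Before the per-project snippets, here is a minimal end-to-end sketch of the typical chainer.optimizers workflow (the model and data below are hypothetical placeholders, not taken from any of the projects): instantiate an optimizer class, bind it to a Link with setup(), optionally register hooks, and call update() after gradients have been computed.

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import optimizers

# Hypothetical two-layer model, used only to illustrate the optimizer API.
class MLP(chainer.Chain):
    def __init__(self):
        super(MLP, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(None, 100)
            self.l2 = L.Linear(100, 10)

    def __call__(self, x):
        return self.l2(F.relu(self.l1(x)))

model = L.Classifier(MLP())
optimizer = optimizers.Adam(alpha=1e-3)   # any class from chainer.optimizers works here
optimizer.setup(model)                    # bind the optimizer to the model's parameters
optimizer.add_hook(chainer.optimizer.WeightDecay(1e-4))

# One manual update step on dummy data.
x = np.random.rand(8, 784).astype(np.float32)
t = np.random.randint(0, 10, size=8).astype(np.int32)
model.cleargrads()
loss = model(x, t)
loss.backward()
optimizer.update()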

Project: trainer    Author: nutszebra
def save_optimizer(self, optimizer, path=''):
        """Save optimizer model

        Example:

        ::

            path = './test.optimizer'
            self.save_optimizer(optimizer, path)

        Args:
            optimizer (chainer.Optimizer): optimizer to save
            path (str): destination path; if empty, a name is generated from the epoch counter

        Returns:
            bool: True if saving successful
        """

        # fall back to an epoch-numbered file name when no path is given
        if path == '':
            path = str(self.nz_save_optimizer_epoch) + '.optimizer'
        # increment the epoch counter used for auto-generated file names
        self.nz_save_optimizer_epoch += 1
        serializers.save_npz(path, optimizer)
        return True
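
A hedged usage sketch (the trainer instance, optimizer, and model names are placeholders): the saved state can later be restored with chainer.serializers.load_npz into an optimizer that has already been set up on the same model.

# Hypothetical usage of save_optimizer; 'trainer' is the instance owning the method.
trainer.save_optimizer(optimizer, './test.optimizer')

# To restore, create and set up an optimizer of the same type, then load the state.
restored = chainer.optimizers.Adam()
restored.setup(model)
chainer.serializers.load_npz('./test.optimizer', restored)
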
Project: neural_style_synthesizer    Author: dwango
def __init__(self, gpu=-1, optimizer=None, model=None, content_weight=1, texture_weight=1, average_pooling=False):
        self.content_weight = content_weight
        self.texture_weight = texture_weight
        self.average_pooling = average_pooling
        if optimizer is None:
            self.optimizer = chainer.optimizers.Adam(alpha=4.0)
        else:
            self.optimizer = optimizer
        if model is None:
            self.model = neural_art.utility.load_nn("vgg")
        else:
            self.model = model

        if gpu >= 0:
            chainer.cuda.get_device(gpu).use()
            self.xp = chainer.cuda.cupy
            self.model.model.to_gpu()
        else:
            self.xp = numpy
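
If no optimizer is passed, the constructor defaults to Adam(alpha=4.0); any other class from chainer.optimizers can be supplied instead. A hedged construction sketch (ImageSynthesizer is a placeholder name for whatever this class is actually called in the project):

import chainer

# Default: Adam(alpha=4.0) is created inside __init__.
synth = ImageSynthesizer(gpu=-1)

# Override the default with a plain SGD optimizer (hypothetical learning rate).
synth = ImageSynthesizer(gpu=-1, optimizer=chainer.optimizers.SGD(lr=1.0))
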
Project: NlpUtil    Author: trtd56
def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):
        if opt_name == "Adam":
            opt = getattr(optimizers, opt_name)()
        else:
            opt = getattr(optimizers, opt_name)(lr)
        opt.setup(self.model)
        opt.add_hook(optimizer.GradientClipping(g_clip))

        updater = training.StandardUpdater(self.train_iter, opt, device=gpu)
        self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=out_dir)
        self.trainer.extend(extensions.Evaluator(self.test_iter, self.model, device=gpu))
        self.trainer.extend(extensions.dump_graph('main/loss'))
        self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))
        self.trainer.extend(extensions.LogReport())
        self.trainer.extend(extensions.PlotReport(['main/loss', 'validation/main/loss'],
                                                   'epoch', file_name='loss.png'))
        self.trainer.extend(extensions.PlotReport(['main/accuracy', 'validation/main/accuracy'],
                                                   'epoch', file_name='accuracy.png'))
        self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss', 'validation/main/loss',
                                                    'main/accuracy', 'validation/main/accuracy',
                                                    'elapsed_time']))
        self.trainer.extend(extensions.ProgressBar())
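
Adam is special-cased above because its constructor takes alpha rather than lr, whereas SGD-style optimizers accept the learning rate as their first argument. A hedged call sketch ('wrapper' stands for the object owning set_trainer; the argument values are illustrative only):

# Hypothetical call: MomentumSGD(lr=0.01), gradient clipping at 5.0, 20 epochs.
wrapper.set_trainer(out_dir='result', gpu=-1, n_epoch=20, g_clip=5.0,
                    opt_name='MomentumSGD', lr=0.01)
wrapper.trainer.run()
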
Project: lencon    Author: kiyukuta
def _build_optimizer(self, config):
        kwargs = {k: float(v) for k, v in config.items() if k != 'name'}
        o = getattr(chainer.optimizers, config['name'])(**kwargs)
        return o
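
Every config value except 'name' is coerced to float and forwarded as a keyword argument to the chosen optimizer class. A hedged example ('builder' stands for the object owning _build_optimizer; values are illustrative):

config = {'name': 'AdaGrad', 'lr': '0.01', 'eps': '1e-08'}
opt = builder._build_optimizer(config)
# equivalent to chainer.optimizers.AdaGrad(lr=0.01, eps=1e-08)
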
Project: chainer_nmt    Author: odashi
def init_optimizer(args, mdl):
  logger = logging.getLogger(__name__)

  logger.info('Making Adam optimizer:')
  logger.info('* learning rate = %f', args.learning_rate)
  logger.info('* gradient clipping = %f', args.gradient_clipping)
  logger.info('* weight decay = %f', args.weight_decay)

  opt = chainer.optimizers.Adam(alpha=args.learning_rate)
  opt.setup(mdl)
  opt.add_hook(chainer.optimizer.GradientClipping(args.gradient_clipping))
  opt.add_hook(chainer.optimizer.WeightDecay(args.weight_decay))

  return opt
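
A hedged call sketch: args only needs the three attributes read above, so a plain argparse.Namespace works for illustration (the values are placeholders).

import argparse

args = argparse.Namespace(learning_rate=0.001,
                          gradient_clipping=5.0,
                          weight_decay=1e-6)
opt = init_optimizer(args, mdl)   # mdl is an already constructed chainer.Chain
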
Project: chainercmd    Author: mitmul
def get_optimizer(model, method, optimizer_args, weight_decay=None):
    optimizer = getattr(optimizers, method)(**optimizer_args)
    optimizer.setup(model)
    if weight_decay is not None:
        optimizer.add_hook(chainer.optimizer.WeightDecay(weight_decay))
    return optimizer
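
A hedged call sketch with illustrative values: the optimizer class is looked up by name in chainer.optimizers, and WeightDecay is attached only when weight_decay is given.

optimizer = get_optimizer(model, 'MomentumSGD',
                          {'lr': 0.01, 'momentum': 0.9},
                          weight_decay=1e-4)
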
Project: chainer-pspnet    Author: mitmul
def get_optimizer_from_config(model, config):
    opt_config = Optimizer(**config['optimizer'])
    optimizer = getattr(optimizers, opt_config.method)(**opt_config.args)
    optimizer.setup(model)
    if opt_config.weight_decay is not None:
        optimizer.add_hook(
            chainer.optimizer.WeightDecay(opt_config.weight_decay))
    return optimizer
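
Optimizer here is a small project-specific config holder; judging from the attribute accesses, it exposes method, args, and weight_decay. A hedged sketch of the kind of config dict that would drive this function (field names are inferred, values illustrative):

config = {
    'optimizer': {
        'method': 'MomentumSGD',
        'args': {'lr': 0.01, 'momentum': 0.9},
        'weight_decay': 0.0001,
    }
}
optimizer = get_optimizer_from_config(model, config)
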
Project: chainer-pspnet    Author: mitmul
def get_optimizer(model, method, optimizer_args, weight_decay=None):
    optimizer = getattr(optimizers, method)(**optimizer_args)
    optimizer.setup(model)
    if weight_decay is not None:
        optimizer.add_hook(chainer.optimizer.WeightDecay(weight_decay))
    return optimizer