Python chainer.optimizers module: AdaDelta() example source code

We extracted the following 15 code examples from open-source Python projects to illustrate how to use chainer.optimizers.AdaDelta(). Unless an example shows its own imports, the snippets assume "from chainer import optimizers".
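Before the project examples, here is a minimal, self-contained sketch of typical AdaDelta usage in Chainer. It is not taken from any of the projects below; the toy model and random data are illustrative placeholders.

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import optimizers

model = L.Linear(4, 2)                # toy model: a single linear layer
optimizer = optimizers.AdaDelta()     # defaults: rho=0.95, eps=1e-06
optimizer.setup(model)                # register the model's parameters

x = np.random.rand(8, 4).astype(np.float32)           # dummy inputs
t = np.random.randint(0, 2, size=8).astype(np.int32)  # dummy labels

model.cleargrads()                    # clear accumulated gradients
loss = F.softmax_cross_entropy(model(x), t)
loss.backward()                       # backprop
optimizer.update()                    # apply one AdaDelta step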

Project: cnn-text-classification    Author: marevol    | project source | file source
def create_classifier(n_vocab, doc_length, wv_size, filter_sizes, hidden_units, output_channel, initialW, non_static, batch_size, epoch, gpu):
    model = NNModel(n_vocab=n_vocab,
                    doc_length=doc_length,
                    wv_size=wv_size,
                    filter_sizes=filter_sizes,
                    hidden_units=hidden_units,
                    output_channel=output_channel,
                    initialW=initialW,
                    non_static=non_static)
#    optimizer = optimizers.Adam()
    optimizer = optimizers.AdaDelta()
    return (model, ChainerEstimator(model=SoftmaxCrossEntropyClassifier(model),
                                    optimizer=optimizer,
                                    batch_size=batch_size,
                                    device=gpu,
                                    stop_trigger=(epoch, 'epoch')))
Project: ddnn    Author: kunglab    | project source | file source
def get_optimizer(self, name, lr, momentum=0.9):
        if name.lower() == "adam":
            return optimizers.Adam(alpha=lr, beta1=momentum)
        if name.lower() == "smorms3":
            return optimizers.SMORMS3(lr=lr)
        if name.lower() == "adagrad":
            return optimizers.AdaGrad(lr=lr)
        if name.lower() == "adadelta":
            return optimizers.AdaDelta(rho=momentum)
        if name.lower() == "nesterov" or name.lower() == "nesterovag":
            return optimizers.NesterovAG(lr=lr, momentum=momentum)
        if name.lower() == "rmsprop":
            return optimizers.RMSprop(lr=lr, alpha=momentum)
        if name.lower() == "momentumsgd":
            return optimizers.MomentumSGD(lr=lr, momentum=momentum)
        if name.lower() == "sgd":
            return optimizers.SGD(lr=lr)
Project: unrolled-gan    Author: musyoku    | project source | file source
def get_optimizer(name, lr, momentum=0.9):
    if name.lower() == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name.lower() == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name.lower() == "adagrad":
        return optimizers.AdaGrad(lr=lr)
    if name.lower() == "adadelta":
        return optimizers.AdaDelta(rho=momentum)
    if name.lower() == "nesterov" or name.lower() == "nesterovag":
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name.lower() == "rmsprop":
        return optimizers.RMSprop(lr=lr, alpha=momentum)
    if name.lower() == "momentumsgd":
        return optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name.lower() == "sgd":
        return optimizers.SGD(lr=lr)
Project: unrolled-gan    Author: musyoku    | project source | file source
def update_momentum(self, momentum):
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            self.optimizer.rho = momentum
            return
        if isinstance(self.optimizer, optimizers.NesterovAG):
            self.optimizer.momentum = momentum
            return
        if isinstance(self.optimizer, optimizers.RMSprop):
            self.optimizer.alpha = momentum
            return
        if isinstance(self.optimizer, optimizers.MomentumSGD):
            self.optimizer.momentum = momentum
            return
Project: wavenet    Author: musyoku    | project source | file source
def get_optimizer(name, lr, momentum=0.9):
    if name.lower() == "adam":
        return chainer.optimizers.Adam(alpha=lr, beta1=momentum)
    if name.lower() == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name.lower() == "adagrad":
        return chainer.optimizers.AdaGrad(lr=lr)
    if name.lower() == "adadelta":
        return chainer.optimizers.AdaDelta(rho=momentum)
    if name.lower() == "nesterov" or name.lower() == "nesterovag":
        return chainer.optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name.lower() == "rmsprop":
        return chainer.optimizers.RMSprop(lr=lr, alpha=momentum)
    if name.lower() == "momentumsgd":
        return chainer.optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name.lower() == "sgd":
        return chainer.optimizers.SGD(lr=lr)
    raise Exception("unknown optimizer: %s" % name)
Project: wavenet    Author: musyoku    | project source | file source
def update_momentum(self, momentum):
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            self.optimizer.rho = momentum
            return
        if isinstance(self.optimizer, optimizers.NesterovAG):
            self.optimizer.momentum = momentum
            return
        if isinstance(self.optimizer, optimizers.RMSprop):
            self.optimizer.alpha = momentum
            return
        if isinstance(self.optimizer, optimizers.MomentumSGD):
            self.optimizer.momentum = momentum
            return
Project: LSGAN    Author: musyoku    | project source | file source
def update_momentum(self, momentum):
        if isinstance(self._optimizer, optimizers.Adam):
            self._optimizer.beta1 = momentum
            return
        if isinstance(self._optimizer, Eve):
            self._optimizer.beta1 = momentum
            return
        if isinstance(self._optimizer, optimizers.AdaDelta):
            self._optimizer.rho = momentum
            return
        if isinstance(self._optimizer, optimizers.NesterovAG):
            self._optimizer.momentum = momentum
            return
        if isinstance(self._optimizer, optimizers.RMSprop):
            self._optimizer.alpha = momentum
            return
        if isinstance(self._optimizer, optimizers.MomentumSGD):
            self._optimizer.momentum = momentum
            return
Project: adgm    Author: musyoku    | project source | file source
def get_optimizer(name, lr, momentum=0.9):
    if name.lower() == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name.lower() == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name.lower() == "adagrad":
        return optimizers.AdaGrad(lr=lr)
    if name.lower() == "adadelta":
        return optimizers.AdaDelta(rho=momentum)
    if name.lower() == "nesterov" or name.lower() == "nesterovag":
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name.lower() == "rmsprop":
        return optimizers.RMSprop(lr=lr, alpha=momentum)
    if name.lower() == "momentumsgd":
        return optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name.lower() == "sgd":
        return optimizers.SGD(lr=lr)
Project: chainer-deconv    Author: germanRos    | project source | file source
def create(self):
        return optimizers.AdaDelta(eps=1e-5)
Project: chainer-deconv    Author: germanRos    | project source | file source
def setUp(self):
        fd, path = tempfile.mkstemp()
        os.close(fd)
        self.temp_file_path = path

        child = link.Chain(linear=links.Linear(2, 3))
        child.add_param('Wc', (2, 3))
        self.parent = link.Chain(child=child)
        self.parent.add_param('Wp', (2, 3))

        self.optimizer = optimizers.AdaDelta()
        self.optimizer.setup(self.parent)

        self.savez = numpy.savez_compressed if self.compress else numpy.savez
Project: chainer-deconv    Author: germanRos    | project source | file source
def setUp(self):
        fd, path = tempfile.mkstemp()
        os.close(fd)
        self.temp_file_path = path

        child = link.Chain(linear=links.Linear(2, 3))
        child.add_param('Wc', (2, 3))
        self.parent = link.Chain(child=child)
        self.parent.add_param('Wp', (2, 3))

        self.optimizer = optimizers.AdaDelta()
        self.optimizer.setup(self.parent)
Project: mlpnlp-nmt    Author: mlpnlp    | project source | file source
def setOptimizer(args, EncDecAtt):
    # set up the optimizer
    if args.optimizer == 'SGD':
        optimizer = chaOpt.SGD(lr=args.lrate)
        sys.stdout.write(
            '# SET Learning %s: initial learning rate: %e\n' %
            (args.optimizer, optimizer.lr))
    elif args.optimizer == 'Adam':
        # assert 0, "Currently Adam is not supported for asynchronous update"
        optimizer = chaOpt.Adam(alpha=args.lrate)
        sys.stdout.write(
            '# SET Learning %s: initial learning rate: %e\n' %
            (args.optimizer, optimizer.alpha))
    elif args.optimizer == 'MomentumSGD':
        optimizer = chaOpt.MomentumSGD(lr=args.lrate)
        sys.stdout.write(
            '# SET Learning %s: initial learning rate: %e\n' %
            (args.optimizer, optimizer.lr))
    elif args.optimizer == 'AdaDelta':
        # note: args.lrate is reused here as AdaDelta's rho (AdaDelta has no lr)
        optimizer = chaOpt.AdaDelta(rho=args.lrate)
        sys.stdout.write(
            '# SET Learning %s: initial rho: %e\n' %
            (args.optimizer, optimizer.rho))
    else:
        assert 0, "ERROR"

    optimizer.setup(EncDecAtt.model)  # register the model's parameters with the optimizer
    if args.optimizer == 'Adam':
        optimizer.t = 1  # hack to avoid a warning on the first update; not normally recommended

    return optimizer
Project: unrolled-gan    Author: musyoku    | project source | file source
def update_learning_rate(self, lr):
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.alpha = lr
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.alpha = lr
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            # AdaDelta has no learning rate
            return
        self.optimizer.lr = lr
Project: wavenet    Author: musyoku    | project source | file source
def update_learning_rate(self, lr):
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.alpha = lr
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.alpha = lr
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            # AdaDelta has no learning rate
            return
        self.optimizer.lr = lr
Project: LSGAN    Author: musyoku    | project source | file source
def update_learning_rate(self, lr):
        if isinstance(self._optimizer, optimizers.Adam):
            self._optimizer.alpha = lr
            return
        if isinstance(self._optimizer, Eve):
            self._optimizer.alpha = lr
            return
        if isinstance(self._optimizer, optimizers.AdaDelta):
            # AdaDelta has no learning rate
            return
        self._optimizer.lr = lr
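As several snippets above note, AdaDelta keeps per-parameter running averages of squared gradients and updates, so it exposes no learning rate; only rho and eps are tunable. A short sketch of what is and is not available on the optimizer (written against Chainer's documented defaults; the variable name is illustrative):

from chainer import optimizers

opt = optimizers.AdaDelta()
print(opt.rho, opt.eps)  # defaults: 0.95 1e-06
opt.rho = 0.9            # the helpers above reuse their "momentum" argument as rho
# opt.lr would raise AttributeError: AdaDelta has no learning-rate attribute,
# which is why the update_learning_rate methods above simply return for AdaDelta.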