Python chainer module: Link() code examples

The following 47 code examples, extracted from open-source Python projects, illustrate how to use chainer.Link().
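
To set the stage, here is a minimal sketch (assuming Chainer v2 or later; the class name Scale is purely illustrative) of defining a custom chainer.Link with a single learnable parameter, which is the pattern the project snippets below build on.

import numpy as np
import chainer


class Scale(chainer.Link):
    """A toy link holding a single learnable parameter W."""

    def __init__(self):
        super(Scale, self).__init__()
        with self.init_scope():
            self.W = chainer.Parameter(np.ones((2, 3), dtype=np.float32))

    def __call__(self, x):
        # Element-wise scaling of the input by the learned parameter.
        return self.W * x


link = Scale()
print(isinstance(link, chainer.Link))  # True
print(link.W.shape)                    # (2, 3)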

Project: chainer-speech-recognition    Author: musyoku
def layer(self, *layers):
        with self.init_scope():
            for i, layer in enumerate(layers):
                index = i + len(self.layers)

                if isinstance(layer, chainer.Link):
                    setattr(self, "layer_%d" % index, layer)

                if isinstance(layer, GLU):
                    setattr(self, "layer_%d" % index, layer.W)

                if isinstance(layer, Residual):
                    for _index, _layer in enumerate(layer.layers):
                        if isinstance(_layer, chainer.Link):
                            setattr(self, "layer_{}_{}".format(index, _index), _layer)

        self.layers += layers
Project: chainer-speech-recognition    Author: musyoku
def _set_module(self, namespace, module):
        assert isinstance(module, Module)

        for index, layer in enumerate(module.layers):
            if isinstance(layer, chainer.Link):
                super(Module, self).__setattr__("_module_{}_sequential_{}".format(namespace, index), layer)

            if isinstance(layer, Residual):
                for _index, _layer in enumerate(layer.layers):
                    if isinstance(_layer, chainer.Link):
                        super(Module, self).__setattr__("_module_{}_sequential_{}_{}".format(namespace, index, _index), _layer)

        for index, (link_name, link) in enumerate(module.links):
            assert isinstance(link, chainer.Link)
            super(Module, self).__setattr__("_module_{}_link_{}".format(namespace, link_name), link)

        for index, (module_name, module) in enumerate(module.modules):
            assert isinstance(module, Module)
            self._set_module("{}_{}".format(namespace, module_name), module)

        module._locked = True
Project: chainer-deconv    Author: germanRos
def copy(self):
        """Copies the link hierarchy to new one.

        The whole hierarchy rooted by this link is copied. The copy is
        basically shallow, except that the parameter variables are also
        shallowly copied. It means that the parameter variables of copied one
        are different from ones of original link, while they share the data and
        gradient arrays.

        The name of the link is reset on the copy, since the copied instance
        does not belong to the original parent chain (even if exists).

        Returns:
            Link: Copied link object.

        """
        ret = copy.copy(self)
        ret._params = list(self._params)
        ret._persistent = list(self._persistent)
        ret.name = None
        d = ret.__dict__
        for name in ret._params:
            d[name] = copy.copy(d[name])
        return ret
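
As a quick illustration of the behaviour this docstring describes, the sketch below uses the modern Chainer API (exact copy semantics can differ slightly across Chainer versions): the copy gets new parameter Variable objects, but they still share the underlying data array, and the copy's name is reset.

import chainer.links as L

original = L.Linear(3, 2)
clone = original.copy()

print(clone.W is original.W)            # False: the parameter variables differ
print(clone.W.data is original.W.data)  # True: the data array is shared
print(clone.name)                       # None: the name is reset on the copy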
Project: chainer-deconv    Author: germanRos
def test_copyparams(self):
        self.link.x.grad.fill(0)
        self.link.y.grad.fill(1)
        gx = self.link.x.grad.copy()
        gy = self.link.y.grad.copy()

        l = chainer.Link(x=(2, 3), y=2)
        l.x.data.fill(2)
        l.x.grad.fill(3)
        l.y.data.fill(4)
        l.y.grad.fill(5)
        self.link.copyparams(l)
        numpy.testing.assert_array_equal(self.link.x.data, l.x.data)
        numpy.testing.assert_array_equal(self.link.x.grad, gx)
        numpy.testing.assert_array_equal(self.link.y.data, l.y.data)
        numpy.testing.assert_array_equal(self.link.y.grad, gy)
Project: chainer-deconv    Author: germanRos
def test_addgrads(self):
        l1 = chainer.Link(x=(2, 3))
        l2 = chainer.Link(x=2)
        l3 = chainer.Link(x=3)
        c1 = chainer.Chain(l1=l1, l2=l2)
        c2 = chainer.Chain(c1=c1, l3=l3)
        l1.x.grad.fill(1)
        l2.x.grad.fill(2)
        l3.x.grad.fill(3)

        self.l1.x.grad.fill(-1)
        self.l2.x.grad.fill(-2)
        self.l3.x.grad.fill(-3)

        self.c2.addgrads(c2)
        numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
        numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
        numpy.testing.assert_array_equal(self.l3.x.grad, numpy.zeros(3))
Project: chainer-deconv    Author: germanRos
def test_addgrads(self):
        l1 = chainer.Link(x=(2, 3))
        l2 = chainer.Link(x=2)
        l3 = chainer.Link(x=3)
        c1 = chainer.ChainList(l1, l2)
        c2 = chainer.ChainList(c1, l3)
        l1.x.grad.fill(1)
        l2.x.grad.fill(2)
        l3.x.grad.fill(3)

        self.l1.x.grad.fill(-1)
        self.l2.x.grad.fill(-2)
        self.l3.x.grad.fill(-3)

        self.c2.addgrads(c2)
        numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
        numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
        numpy.testing.assert_array_equal(self.l3.x.grad, numpy.zeros(3))
Project: adversarial-autoencoder    Author: musyoku
def __setattr__(self, name, value):
        assert isinstance(value, Residual) is False

        if isinstance(value, Module):
            self.__module_name__ = name

            self.__modules__.append((name, value))
            value.set_parent_module(self)

            self.update_params()
            return super(chainer.Link, self).__setattr__(name, value)   # prevent module from being added to self._children

        if isinstance(value, chainer.Link):
            if name.startswith("_nn_layer_"):
                return self.super__setattr__(name, value)

            self.__links__.append((name, value))

            self.update_params()

            with self.init_scope():
                return self.super__setattr__(name, value)

        super(Module, self).__setattr__(name, value)
Project: chainercv    Author: chainer
def __setattr__(self, name, value):
        if self.within_init_scope and isinstance(value, Link):
            new_name = rename(name)

            if new_name == 'extractor/conv1_1':
                # BGR -> RGB
                value.W.array[:, ::-1] = value.W.array
                print('{:s} -> {:s} (BGR -> RGB)'.format(name, new_name))
            elif new_name.startswith('multibox/loc/'):
                # xy -> yx
                for data in (value.W.array, value.b.array):
                    data = data.reshape((-1, 4) + data.shape[1:])
                    data[:, [1, 0, 3, 2]] = data.copy()
                print('{:s} -> {:s} (xy -> yx)'.format(name, new_name))
            else:
                print('{:s} -> {:s}'.format(name, new_name))
        else:
            new_name = name

        super(SSDCaffeFunction, self).__setattr__(new_name, value)
Project: chainercv    Author: chainer
def _check(self, xp):
        self.assertIsInstance(self.link, chainer.Link)
        self.assertEqual(self.link.xp, xp)

        outputs = self.link('ignored', -1, 'inputs', 1.0)

        if isinstance(self.outputs, tuple):
            originals = self.outputs
            outputs = outputs
        else:
            originals = self.outputs,
            outputs = outputs,

        self.assertEqual(len(originals), len(outputs))

        for orig, out in zip(originals, outputs):
            self.assertIsInstance(out, chainer.Variable)
            self.assertEqual(out.shape, orig.shape)
            self.assertEqual(out.dtype, orig.dtype)

            self.assertEqual(
                chainer.cuda.get_array_module(out.array), xp)
            out.to_cpu()
            np.testing.assert_equal(out.array, orig)
Project: chainer-DPNs    Author: oyam
def __init__(self, *args):
        super(Sequential, self).__init__()
        assert len(args) > 0
        assert not hasattr(self, "layers")
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            self.layers = args[0].values()
            with self.init_scope():
                for key, layer in args[0].items():
                    if isinstance(layer, (chainer.Link, chainer.Chain, chainer.ChainList)):
                        setattr(self, key, layer)
        else:
            self.layers = args
            with self.init_scope():
                for idx, layer in enumerate(args):
                    if isinstance(layer, (chainer.Link, chainer.Chain, chainer.ChainList)):
                        setattr(self, str(idx), layer)
Project: wavenet    Author: rampage644
def layer_params(layer, param_name, attr_name):

    """Return parameters in a flattened array from the given layer or an empty
    array if the parameters are not found.

    Args:
        layer (~chainer.Link): The layer from which parameters are collected.
        param_name (str): Name of the parameter, ``'W'`` or ``'b'``.
        attr_name (str): Name of the attribute, ``'data'`` or ``'grad'``.

    Returns:
        array: Flattened array of parameters.
    """

    if isinstance(layer, chainer.Chain):
        # Nested chainer.Chain, aggregate all underlying statistics
        return layers_params(layer, param_name, attr_name)
    elif not hasattr(layer, param_name):
        return layer.xp.array([])

    params = getattr(layer, param_name)
    params = getattr(params, attr_name)
    return params.flatten()
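
A hypothetical usage of the layer_params helper defined above (the nested-Chain branch that calls layers_params is not exercised here): it returns a flattened parameter array, or an empty array when the parameter does not exist.

import chainer.links as L

fc = L.Linear(4, 3)                             # W has shape (3, 4), b has shape (3,)
print(layer_params(fc, 'W', 'data').shape)      # (12,) -- flattened weights
print(layer_params(fc, 'gamma', 'data').shape)  # (0,)  -- parameter not found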
Project: chainerrl    Author: chainer
def __init__(self, *layers):
        self.layers = layers
        links = [layer for layer in layers if isinstance(layer, chainer.Link)]
        # Cache the signatures because it might be slow
        self.argnames = [set(signature(layer).parameters)
                         for layer in layers]
        self.accept_var_args = [accept_variable_arguments(layer)
                                for layer in layers]
        super().__init__(*links)
Project: chainerrl    Author: chainer
def set_shared_params(a, b):
    """Set shared params to a link.

    Args:
      a (chainer.Link): link whose params are to be replaced
      b (dict): dict that consists of (param_name, multiprocessing.Array)
    """
    assert isinstance(a, chainer.Link)
    for param_name, param in a.namedparams():
        if param_name in b:
            shared_param = b[param_name]
            param.data = np.frombuffer(
                shared_param, dtype=param.data.dtype).reshape(param.data.shape)
Project: chainerrl    Author: chainer
def make_params_not_shared(a):
    """Make a link's params not shared.

    Args:
      a (chainer.Link): link whose params are to be made not shared
    """
    assert isinstance(a, chainer.Link)
    for param in a.params():
        param.data = param.data.copy()
Project: chainerrl    Author: chainer
def assert_params_not_shared(a, b):
    assert isinstance(a, chainer.Link)
    assert isinstance(b, chainer.Link)
    a_params = dict(a.namedparams())
    b_params = dict(b.namedparams())
    for name, a_param in a_params.items():
        b_param = b_params[name]
        assert a_param.data.ctypes.data != b_param.data.ctypes.data
Project: chainerrl    Author: chainer
def extract_params_as_shared_arrays(link):
    assert isinstance(link, chainer.Link)
    shared_arrays = {}
    for param_name, param in link.namedparams():
        shared_arrays[param_name] = mp.RawArray('f', param.data.ravel())
    return shared_arrays
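
A minimal sketch of how the two chainerrl helpers above fit together, assuming both set_shared_params and extract_params_as_shared_arrays from this file are in scope: parameters extracted from one link can be mapped into another link of the same architecture so that both view the same multiprocessing buffers.

import chainer.links as L

master = L.Linear(2, 2)
shared_arrays = extract_params_as_shared_arrays(master)  # {'/W': RawArray, '/b': RawArray}

worker = L.Linear(2, 2)
set_shared_params(worker, shared_arrays)
# worker's parameter arrays are now numpy views over the shared RawArrays,
# so a child process holding the same arrays sees every update.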
Project: chainerrl    Author: chainer
def synchronize_to_shared_objects(obj, shared_memory):
    if isinstance(obj, tuple):
        return tuple(synchronize_to_shared_objects(o, s)
                     for o, s in zip(obj, shared_memory))
    elif isinstance(obj, chainer.Link):
        set_shared_params(obj, shared_memory)
        return obj
    elif isinstance(obj, chainer.Optimizer):
        set_shared_states(obj, shared_memory)
        return obj
    elif isinstance(obj, mp.sharedctypes.Synchronized):
        return shared_memory
    else:
        raise ValueError('')
Project: chainerrl    Author: chainer
def create_simple_link():
    link = chainer.Link()
    with link.init_scope():
        link.param = chainer.Parameter(np.zeros(1))
    return link
Project: async-rl    Author: muupan
def set_shared_params(a, b):
    """
    Args:
      a (chainer.Link): link whose params are to be replaced
      b (dict): dict that consists of (param_name, multiprocessing.Array)
    """
    assert isinstance(a, chainer.Link)
    for param_name, param in a.namedparams():
        if param_name in b:
            shared_param = b[param_name]
            param.data = np.frombuffer(
                shared_param, dtype=param.data.dtype).reshape(param.data.shape)
Project: async-rl    Author: muupan
def extract_params_as_shared_arrays(link):
    assert isinstance(link, chainer.Link)
    shared_arrays = {}
    for param_name, param in link.namedparams():
        shared_arrays[param_name] = mp.RawArray('f', param.data.ravel())
    return shared_arrays
Project: chainer-speech-recognition    Author: musyoku
def add(self, *layers):
        with self.init_scope():
            for i, layer in enumerate(layers):
                index = i + len(self.layers)

                if isinstance(layer, chainer.Link):
                    setattr(self, "_sequential_%d" % index, layer)

                if isinstance(layer, Residual):
                    for _index, _layer in enumerate(layer.layers):
                        if isinstance(_layer, chainer.Link):
                            setattr(self, "_sequential_{}_{}".format(index, _index), _layer)
        self.layers += layers
        self.blocks.append(layers)
Project: chainer-speech-recognition    Author: musyoku
def __setattr__(self, name, value):
        if isinstance(value, Module):
            self.modules.append((name, value))
            self._set_module(name, value)
            return super(chainer.Link, self).__setattr__(name, value)   # prevent module from being added to self._children

        if isinstance(value, chainer.Link):
            assert self._locked is False, "Since this module is owned by another module, it is not possible to add Link."
            with self.init_scope():
                if name.startswith("_sequential_"):
                    return super(Module, self).__setattr__(name, value)
                self.links.append((name, value))
                return super(Module, self).__setattr__(name, value)

        super(Module, self).__setattr__(name, value)
Project: chainer-deconv    Author: germanRos
def copyparams(self, link):
        """Copies all parameters from given link.

        This method copies data arrays of all parameters in the hierarchy. The
        copy is even done across the host and devices. Note that this method
        does not copy the gradient arrays.

        Args:
            link (Link): Source link object.

        """
        src = link.__dict__
        dst = self.__dict__
        for name in self._params:
            dst[name].copydata(src[name])
Project: chainer-deconv    Author: germanRos
def addgrads(self, link):
        """Accumulates gradient values from given link.

        This method adds each gradient array of the given link to corresponding
        gradient array of this link. The accumulation is even done across
        host and different devices.

        Args:
            link (Link): Source link object.

        """
        src = link.__dict__
        dst = self.__dict__
        for name in self._params:
            dst[name].addgrad(src[name])
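
A brief illustration, using the modern Chainer API rather than the v1 link above, of what copyparams and addgrads do for two links with matching parameter shapes: the former copies data arrays, the latter accumulates gradient arrays.

import numpy as np
import chainer.links as L

dst = L.Linear(3, 2)
src = L.Linear(3, 2)

dst.copyparams(src)                    # data arrays are copied, gradients are not
np.testing.assert_array_equal(dst.W.data, src.W.data)

dst.W.grad = np.full_like(dst.W.data, 2.0)
dst.b.grad = np.full_like(dst.b.data, 2.0)
src.W.grad = np.ones_like(src.W.data)
src.b.grad = np.ones_like(src.b.data)
dst.addgrads(src)                      # gradients are accumulated into dst
np.testing.assert_array_equal(dst.W.grad, np.full_like(dst.W.data, 3.0))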
Project: chainer-deconv    Author: germanRos
def add_link(self, name, link):
        """Registers a child link to this chain.

        The registered link is saved and loaded on serialization and
        deserialization, and involved in the optimization. The registered link
        is called a child. The child link is set to an attribute of the chain
        with the given name.

        This method also sets the :attr:`~Link.name` attribute of the
        registered link. If the given link already has the name attribute set,
        then it raises an error.

        Args:
            name (str): Name of the child link. This name is also used as the
                attribute name.
            link (Link): The link object to be registered.

        """
        if link.name is not None:
            raise ValueError(
                'given link is already registered to another chain by name %s'
                % link.name)
        d = self.__dict__
        if name in d:
            raise AttributeError(
                'cannot register a new link %s: attribute exists' % name)
        self._children.append(name)
        link.name = name
        d[name] = link
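
A small usage sketch of Chain.add_link as documented above; on recent Chainer versions this method may emit a deprecation warning, since assigning children inside init_scope() is the preferred style.

import chainer
import chainer.links as L

chain = chainer.Chain()
chain.add_link('fc1', L.Linear(4, 3))
chain.add_link('fc2', L.Linear(3, 2))

print(chain.fc1.name)  # 'fc1' -- add_link also sets the child's name attribute
print([name for name, _ in chain.namedlinks(skipself=True)])  # ['/fc1', '/fc2']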
Project: chainer-deconv    Author: germanRos
def __getitem__(self, index):
        """Returns the child at given index.

        Args:
            index (int): Index of the child in the list.

        Returns:
            Link: The ``index``-th child link.

        """
        return self._children[index]
Project: chainer-deconv    Author: germanRos
def test_initialize(self):
        model = self.model.model
        assert isinstance(model, chainer.Link)
        optimizer = self.create()
        optimizer.setup(model)

        msg = 'optimization target must be a link'
        with six.assertRaisesRegex(self, TypeError, msg):
            optimizer.setup('xxx')
Project: chainer-deconv    Author: germanRos
def setUp(self):
        self.link = chainer.Link(x=(2, 3), y=2)
        self.p = numpy.array([1, 2, 3], dtype='f')
        self.link.add_persistent('p', self.p)
        self.link.name = 'a'
Project: chainer-deconv    Author: germanRos
def test_addgrads(self):
        l = chainer.Link(x=(2, 3), y=2)
        l.x.grad.fill(1)
        l.y.grad.fill(2)

        self.link.x.grad.fill(-1)
        self.link.y.grad.fill(-2)

        self.link.addgrads(l)

        gx_expect = numpy.zeros_like(l.x.grad)
        gy_expect = numpy.zeros_like(l.y.grad)
        numpy.testing.assert_array_equal(self.link.x.grad, gx_expect)
        numpy.testing.assert_array_equal(self.link.y.grad, gy_expect)
Project: chainer-deconv    Author: germanRos
def test_serialize(self):
        serializer = mock.MagicMock(return_value=3)
        l = chainer.Link(x=(2, 3), y=2)
        l.add_persistent('z', 1)
        l.serialize(serializer)
        self.assertEqual(serializer.call_count, 3)
        serializer.assert_any_call('x', l.x.data)
        serializer.assert_any_call('y', l.y.data)
        serializer.assert_any_call('z', 1)

        self.assertEqual(l.z, 3)
Project: chainer-deconv    Author: germanRos
def test_copyparams(self):
        l1 = chainer.Link(x=(2, 3))
        l2 = chainer.Link(x=2)
        l3 = chainer.Link(x=3)
        c1 = chainer.Chain(l1=l1, l2=l2)
        c2 = chainer.Chain(c1=c1, l3=l3)
        l1.x.data.fill(0)
        l2.x.data.fill(1)
        l3.x.data.fill(2)

        self.c2.copyparams(c2)

        numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
        numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
        numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
Project: chainer-deconv    Author: germanRos
def setUp(self):
        self.l1 = chainer.Link(x=(2, 3))
        self.l2 = chainer.Link(x=2)
        self.l3 = chainer.Link(x=3)
        self.c1 = chainer.ChainList(self.l1)
        self.c1.add_link(self.l2)
        self.c2 = chainer.ChainList(self.c1, self.l3)
Project: chainer-deconv    Author: germanRos
def test_copyparams(self):
        l1 = chainer.Link(x=(2, 3))
        l2 = chainer.Link(x=2)
        l3 = chainer.Link(x=3)
        c1 = chainer.ChainList(l1, l2)
        c2 = chainer.ChainList(c1, l3)
        l1.x.data.fill(0)
        l2.x.data.fill(1)
        l3.x.data.fill(2)

        self.c2.copyparams(c2)

        numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
        numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
        numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
Project: adversarial-autoencoder    Author: musyoku
def add(self, *layers):
        with self.init_scope():
            for i, layer in enumerate(layers):
                index = i + len(self.__layers__)

                if isinstance(layer, chainer.Link):
                    setattr(self, "_nn_layer_%d" % index, layer)

                if isinstance(layer, Residual):
                    for _index, _layer in enumerate(layer.__layers__):
                        if isinstance(_layer, chainer.Link):
                            setattr(self, "_nn_layer_{}_res_{}".format(index, _index), _layer)
        self.__layers__ += layers
Project: adversarial-autoencoder    Author: musyoku
def set_submodule_layers(self, namespace, module):
        with self.init_scope():
            for index, layer in enumerate(module.__layers__):
                if isinstance(layer, chainer.Link):
                    self.super__setattr__("_nn_{}_layer_{}".format(namespace, index), layer)

                if isinstance(layer, Residual):
                    for resnet_index, _layer in enumerate(layer.__layers__):
                        if isinstance(_layer, chainer.Link):
                            self.super__setattr__("_nn_{}_layer_{}_res_{}".format(namespace, index, resnet_index), _layer)
Project: adversarial-autoencoder    Author: musyoku
def set_submodule_links(self, namespace, module):
        with self.init_scope():
            for index, (link_name, link) in enumerate(module.__links__):
                assert isinstance(link, chainer.Link)
                self.super__setattr__("_nn_{}_link_{}".format(namespace, link_name), link)
Project: chainercv    Author: chainer
def __setattr__(self, name, value):
        if self.within_init_scope and isinstance(value, Link):
            new_name = rename(name)

            if new_name == 'conv1_1/conv':
                # BGR -> RGB
                value.W.array[:, ::-1] = value.W.array
                print('{:s} -> {:s} (BGR -> RGB)'.format(name, new_name))
            else:
                print('{:s} -> {:s}'.format(name, new_name))
        else:
            new_name = name

        super(VGGCaffeFunction, self).__setattr__(new_name, value)
Project: chainermn    Author: chainer
def create_multi_node_n_step_rnn(
        actual_link, communicator, rank_in=None, rank_out=None):
    """Create a multi node stacked RNN link from a Chainer stacked RNN link.

    Multi node stacked RNN link is used for model-parallel.
    The created link will receive initial hidden states from the process
    specified by ``rank_in`` (or will not receive any if ``None``), execute
    the original RNN computation, and then send the resulting hidden states
    to the process specified by ``rank_out``.

    Compared with a Chainer stacked RNN link, the multi node stacked RNN link
    returns an extra object called ``delegate_variable``.
    If ``rank_out`` is not ``None``, backward computation is expected
    to begin from ``delegate_variable``.
    For details, please refer to ``chainermn.functions.pseudo_connect``.

    The following RNN links can be passed to this function:

    - ``chainer.links.NStepBiGRU``
    - ``chainer.links.NStepBiLSTM``
    - ``chainer.links.NStepBiRNNReLU``
    - ``chainer.links.NStepBiRNNTanh``
    - ``chainer.links.NStepGRU``
    - ``chainer.links.NStepLSTM``
    - ``chainer.links.NStepRNNReLU``
    - ``chainer.links.NStepRNNTanh``

    Args:
        link (chainer.Link): Chainer stacked RNN link
        communicator: ChainerMN communicator
        rank_in (int, or None):
            Rank of the process which sends hidden RNN states to this process.
        rank_out (int, or None):
            Rank of the process to which this process sends hidden RNN states.

    Returns:
        The multi node stacked RNN link based on ``actual_link``.
    """
    chainer.utils.experimental('chainermn.links.create_multi_node_n_step_rnn')
    return _MultiNodeNStepRNN(actual_link, communicator, rank_in, rank_out)
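
A rough sketch of how this function is typically called; it assumes a working MPI environment with chainermn installed and is not runnable as a standalone script, and the rank number used here is purely illustrative.

import chainer.links as L
import chainermn

comm = chainermn.create_communicator('naive')
rnn = L.NStepLSTM(n_layers=2, in_size=16, out_size=32, dropout=0.1)

# Receive no initial hidden state; forward the final hidden states to rank 1.
mn_rnn = chainermn.links.create_multi_node_n_step_rnn(
    rnn, comm, rank_in=None, rank_out=1)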
Project: chainermn    Author: chainer
def add_link(self, link, rank_in=None, rank_out=None):
        """Register one connected link with its inout rank.

        Args:
            link (chainer.Link): The link object to be registered.
            rank_in (int, list, or None):
                Ranks from which it receives data. If None is specified,
                the model does not receive from any machines.
            rank_out (int, list, or None):
                Ranks to which it sends data. If None is specified,
                the model will not send to any machine.
        """
        super(MultiNodeChainList, self).add_link(link)
        if isinstance(rank_in, int):
            rank_in = [rank_in]
        if isinstance(rank_out, int):
            rank_out = [rank_out]

        if rank_out is None:
            for _, _rank_out in self._rank_inouts:
                if _rank_out is None:
                    raise ValueError(
                        'MultiNodeChainList cannot have more than two '
                        'computational graph component whose rank_out is None')

        self._rank_inouts.append((rank_in, rank_out))
Project: chainermn    Author: chainer
def _namedpersistents(model):
    assert isinstance(model, chainer.Link)

    for lname, link in model.namedlinks():
        for pname in link._persistent:
            yield lname + '/' + pname, link.__dict__[pname]
Project: chainer_sklearn    Author: corochann
def setUp(self):
        if self.accfun is None:
            self.link = SklearnWrapperClassifier(chainer.Link())
        else:
            self.link = SklearnWrapperClassifier(chainer.Link(),
                                         accfun=self.accfun)
        self.link.compute_accuracy = self.compute_accuracy

        self.x = numpy.random.uniform(-1, 1, (5, 10)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=5).astype(numpy.int32)
Project: chainer_sklearn    Author: corochann
def set_params(self, **parameters):
        """set_params is used to set Grid parameters"""
        for parameter, value in parameters.items():
            if parameter == 'predictor':
                if isinstance(value, chainer.Link):
                    del self.predictor
                    with self.init_scope():
                        self.predictor = value
                else:
                    assert False, 'predictor is not Chain instance'
            elif parameter in ['lossfun', 'accfun', 'device']:
                setattr(self, parameter, value)
            else:
                self.sk_params.update({parameter: value})
        return self
Project: chainer-EWC    Author: okdshin
def make_variable_list(model):
    variable_list = []
    for child in model.children():
        if isinstance(child, Chain):
            variable_list.extend(make_variable_list(child))
        if isinstance(child, Link):
            variable_list.extend(child.namedparams())
    return variable_list
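
A hypothetical usage of make_variable_list above (the snippet assumes Chain and Link were imported from chainer in its module): it walks the model's direct children and gathers their named parameters.

import chainer
import chainer.links as L


class TwoLayerNet(chainer.Chain):
    def __init__(self):
        super(TwoLayerNet, self).__init__()
        with self.init_scope():
            self.fc1 = L.Linear(4, 3)
            self.fc2 = L.Linear(3, 2)


names = [name for name, _ in make_variable_list(TwoLayerNet())]
print(names)  # e.g. ['/W', '/b', '/W', '/b'] -- paths are relative to each child link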
Project: chainer-neural-style    Author: dsanno
def __fit(self, content_image, style_image, epoch_num, callback=None):
        xp = self.xp
        input_image = None
        height, width = content_image.shape[-2:]
        base_epoch = 0
        old_link = None
        for stride in [4, 2, 1][-self.resolution_num:]:
            if width // stride < 64:
                continue
            content_x = xp.asarray(content_image[:,:,::stride,::stride])
            if self.keep_color:
                style_x = util.luminance_only(xp.asarray(style_image[:,:,::stride,::stride]), content_x)
            else:
                style_x = xp.asarray(style_image[:,:,::stride,::stride])
            content_layer_names = self.content_layer_names
            with chainer.using_config('enable_backprop', False):
                content_layers = self.model(content_x)
            content_layers = [(name, content_layers[name]) for name in content_layer_names]
            style_layer_names = self.style_layer_names
            with chainer.using_config('enable_backprop', False):
                style_layers = self.model(style_x)
            style_grams = [(name, util.gram_matrix(style_layers[name])) for name in style_layer_names]
            if input_image is None:
                if self.initial_image == 'content':
                    input_image = xp.asarray(content_image[:,:,::stride,::stride])
                else:
                    input_image = xp.random.normal(0, 1, size=content_x.shape).astype(np.float32) * 0.001
            else:
                input_image = input_image.repeat(2, 2).repeat(2, 3)
                h, w = content_x.shape[-2:]
                input_image = input_image[:,:,:h,:w]
            link = chainer.Link(x=input_image.shape)
            if self.device_id >= 0:
                link.to_gpu()
            link.x.data[:] = xp.asarray(input_image)
            self.optimizer.setup(link)
            for epoch in six.moves.range(epoch_num):
                loss_info = self.__fit_one(link, content_layers, style_grams)
                if callback:
                    callback(base_epoch + epoch, link.x, loss_info)
            base_epoch += epoch_num
            input_image = link.x.data
        return link.x
Project: chainer-neural-style    Author: dsanno
def __fit(self, content_image, style_image, epoch_num, callback=None):
        xp = self.xp
        input_image = None
        height, width = content_image.shape[-2:]
        base_epoch = 0
        for stride in [4, 2, 1][-self.resolution_num:]:
            if width // stride < 64:
                continue
            content_x = xp.asarray(content_image[:,:,::stride,::stride])
            if self.keep_color:
                style_x = util.luminance_only(xp.asarray(style_image[:,:,::stride,::stride]), content_x)
            else:
                style_x = xp.asarray(style_image[:,:,::stride,::stride])
            content_layer_names = self.content_layer_names
            with chainer.using_config('enable_backprop', False):
                content_layers = self.model(content_x)
            content_layers = [(name, content_layers[name]) for name in content_layer_names]
            style_layer_names = self.style_layer_names
            with chainer.using_config('enable_backprop', False):
                style_layers = self.model(style_x)
            style_patches = []
            for name in style_layer_names:
                patch = util.patch(style_layers[name])
                patch_norm = F.expand_dims(F.sum(patch ** 2, axis=1) ** 0.5, 1)
                style_patches.append((name, patch, patch_norm))
            if input_image is None:
                if self.initial_image == 'content':
                    input_image = xp.asarray(content_image[:,:,::stride,::stride])
                else:
                    input_image = xp.random.uniform(-20, 20, size=content_x.shape).astype(np.float32)
            else:
                input_image = input_image.repeat(2, 2).repeat(2, 3)
                h, w = content_x.shape[-2:]
                input_image = input_image[:,:,:h,:w]
            link = chainer.Link(x=input_image.shape)
            if self.device_id >= 0:
                link.to_gpu()
            link.x.data[:] = xp.asarray(input_image)
            self.optimizer.setup(link)
            for epoch in six.moves.range(epoch_num):
                loss_info = self.__fit_one(link, content_layers, style_patches)
                if callback:
                    callback(base_epoch + epoch, link.x, loss_info)
            base_epoch += epoch_num
            input_image = link.x.data
        return link.x
Project: mlpnlp-nmt    Author: mlpnlp
def __init__(self, n_layers,  # number of layers
                 in_size,  # input dimension
                 out_size,  # output size (hidden-state dimension)
                 dropout_rate,
                 name="",
                 use_cudnn=True):
        weights = []
        direction = 1  # unidirectional only, so the number of directions is fixed to 1
        t_name = name
        if name != "":
            t_name = '%s_' % (name)

        for i in six.moves.range(n_layers):
            for di in six.moves.range(direction):
                weight = chainer.Link()
                for j in six.moves.range(8):
                    if i == 0 and j < 4:
                        w_in = in_size
                    elif i > 0 and j < 4:
                        w_in = out_size * direction
                    else:
                        w_in = out_size
                    weight.add_param('%sw%d' % (t_name, j), (out_size, w_in))
                    weight.add_param('%sb%d' % (t_name, j), (out_size,))
                    getattr(weight, '%sw%d' %
                            (t_name, j)).data[...] = np.random.normal(
                                0, np.sqrt(1. / w_in), (out_size, w_in))
                    getattr(weight, '%sb%d' % (t_name, j)).data[...] = 0
                weights.append(weight)

        super(NStepLSTMpp, self).__init__(*weights)

        self.n_layers = n_layers
        self.dropout_rate = dropout_rate
        self.use_cudnn = use_cudnn
        self.out_size = out_size
        self.direction = direction
        self.ws = [[getattr(w, '%sw0' % (t_name)),
                    getattr(w, '%sw1' % (t_name)),
                    getattr(w, '%sw2' % (t_name)),
                    getattr(w, '%sw3' % (t_name)),
                    getattr(w, '%sw4' % (t_name)),
                    getattr(w, '%sw5' % (t_name)),
                    getattr(w, '%sw6' % (t_name)),
                    getattr(w, '%sw7' % (t_name))] for w in self]
        self.bs = [[getattr(w, '%sb0' % (t_name)),
                    getattr(w, '%sb1' % (t_name)),
                    getattr(w, '%sb2' % (t_name)),
                    getattr(w, '%sb3' % (t_name)),
                    getattr(w, '%sb4' % (t_name)),
                    getattr(w, '%sb5' % (t_name)),
                    getattr(w, '%sb6' % (t_name)),
                    getattr(w, '%sb7' % (t_name))] for w in self]
Project: chainer_sklearn    Author: corochann
def __init__(self,
                 predictor=None,
                 lossfun=softmax_cross_entropy.softmax_cross_entropy,
                 accfun=accuracy.accuracy,
                 device=-1,
                 **sk_params
                 ):
        """

        :param predictor (~chainer.links.Chain): 
        :param lossfun: loss function
        :param accfun: accuracy function. When `None` is set, accuracy is not
        calculated during training and `lossfun` is used for `score`.
        :param device (int): GPU device id. -1 indicates to use CPU.
        :param sk_params (dict): dict of parameters. This is used for 
        `GridSearchCV` and `RandomizedSearchCV` internally. 
        """
        super(SklearnBaseWrapper, self).__init__()
        if predictor is None:
            # Temporary countermeasure to pass `check_estimator`:
            # sklearn requires estimators to support a default constructor.
            # TODO: should dynamically assign n_out instead of using a magic parameter.
            predictor = chainer.links.Linear(None, self._default_n_out)
        if isinstance(predictor, chainer.Link):
            # print('[DEBUG] predictor instance')
            with self.init_scope():
                self.predictor = predictor
            self.predictor_constructor = predictor.__class__
        elif is_function(predictor) or issubclass(predictor, chainer.Link):
            # print('[DEBUG] predictor is constructor')
            self.predictor_constructor = predictor
        else:
            print("[ERROR] predictor should be either Chain class instance or"
                  "function which returns Chain class instance")
            assert False

        self.lossfun = lossfun
        self.accfun = accfun
        self.compute_accuracy = accfun is not None
        self.y = None
        self.loss = None
        self.accuracy = None
        self.inputs = None

        # Ensure initialization, necessary for GridSearch
        self.device = -1
        if hasattr(self, 'predictor'):
            self.predictor.to_cpu()
        self.update_device(device)

        self.sk_params = sk_params