Python chainer.serializers 模块,load_hdf5() 实例源码

我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用 chainer.serializers.load_hdf5()。

项目:chainer-qrnn    作者:musyoku    | 项目源码 | 文件源码
def load_model(dirname):
    """Rebuild a seq2seq model from a saved directory.

    Reads hyper-parameters from ``<dirname>/params.json``, constructs the
    model, and restores trained weights from ``<dirname>/model.hdf5`` when
    that file exists.

    Returns:
        The constructed model, or None when no params file is present.
    """
    model_filename = os.path.join(dirname, "model.hdf5")
    param_filename = os.path.join(dirname, "params.json")

    if not os.path.isfile(param_filename):
        return None

    print("loading {} ...".format(param_filename))
    with open(param_filename, "r") as f:
        try:
            params = json.load(f)
        except Exception as e:
            # Chain the original parse error instead of discarding it.
            raise Exception("could not load {}".format(param_filename)) from e

    model = seq2seq(**params)

    if os.path.isfile(model_filename):
        print("loading {} ...".format(model_filename))
        serializers.load_hdf5(model_filename, model)

    return model
项目:chainer-qrnn    作者:musyoku    | 项目源码 | 文件源码
def load_model(dirname):
    """Rebuild an RNN model from a saved directory.

    Reads hyper-parameters from ``<dirname>/params.json``, constructs the
    model, and restores trained weights from ``<dirname>/model.hdf5`` when
    that file exists.

    Returns:
        The constructed RNNModel, or None when no params file is present.
    """
    model_filename = os.path.join(dirname, "model.hdf5")
    param_filename = os.path.join(dirname, "params.json")

    if not os.path.isfile(param_filename):
        return None

    print("loading {} ...".format(param_filename))
    with open(param_filename, "r") as f:
        try:
            params = json.load(f)
        except Exception as e:
            # Chain the original parse error instead of discarding it.
            raise Exception("could not load {}".format(param_filename)) from e

    qrnn = RNNModel(**params)

    if os.path.isfile(model_filename):
        print("loading {} ...".format(model_filename))
        serializers.load_hdf5(model_filename, qrnn)

    return qrnn
项目:self-driving-cars    作者:musyoku    | 项目源码 | 文件源码
def load(self):
    """Restore value/advantage network and optimizer snapshots, when the files exist.

    Each snapshot is optional; missing files are silently skipped.
    Uses print() functions (Python 3) consistent with the rest of the codebase.
    """
    filename = "fc_value.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.fc_value)
        print("model fc_value loaded successfully.")
    filename = "fc_advantage.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.fc_advantage)
        print("model fc_advantage loaded successfully.")
    filename = "fc_value.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_fc_value)
        print("optimizer fc_value loaded successfully.")
    filename = "fc_advantage.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_fc_advantage)
        print("optimizer fc_advantage loaded successfully.")
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source and print the trees, reusing an embedding cache."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  # Shared across sentences so repeated words reuse their embeddings.
  embed_cache = {}

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = to_vram_words(convert_word_list(line.split(), word_vocab))
      raw = parser.forward(words, None, args.unary_limit, embed_cache)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source and print the trees, reusing an embedding cache."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  # Shared across sentences so repeated words reuse their embeddings.
  embed_cache = {}

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = to_vram_words(convert_word_list(line.split(), word_vocab))
      raw = parser.forward(words, None, args.unary_limit, embed_cache)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source and print the trees (GPU use set by module flag)."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semi_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if USE_GPU:  # module-level flag, not a CLI option in this variant
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semi_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse args.source line by line after resetting parser state; print each tree."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  # Shared across sentences so repeated words reuse their embeddings.
  embed_cache = {}
  parser.reset()

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = to_vram_words(convert_word_list(line.split(), word_vocab))
      raw = parser.forward(words, None, args.unary_limit, embed_cache)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source via the parser's test-mode forward pass."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      # This variant uses the dedicated inference entry point.
      raw = parser.forward_test(words, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source and print labeled trees (no X-bar collapsing)."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semi_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if USE_GPU:  # module-level flag, not a CLI option in this variant
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      # Unlike the other variants this one does not apply combine_xbar.
      tree = restore_labels(raw, phrase_vocab, semi_vocab)
      print('( ' + tree_to_string(tree) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source and print the trees (GPU use set by module flag)."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semi_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if USE_GPU:  # module-level flag, not a CLI option in this variant
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semi_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semi_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semi_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source (words moved to VRAM) and print each tree."""
  trace('loading model ...')
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = to_vram_words(convert_word_list(line.split(), word_vocab))
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:nn_parsers    作者:odashi    | 项目源码 | 文件源码
def test(args):
  """Parse each line of args.source with the trained parser and print each tree."""
  trace('loading model ...')
  # Vocabularies, model spec, and weights all live beside the model prefix.
  word_vocab = Vocabulary.load(args.model + '.words')
  phrase_vocab = Vocabulary.load(args.model + '.phrases')
  semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
  parser = Parser.load_spec(args.model + '.spec')
  if args.use_gpu:
    parser.to_gpu()
  serializers.load_hdf5(args.model + '.weights', parser)

  trace('generating parse trees ...')
  with open(args.source) as fp:
    for line in fp:
      words = convert_word_list(line.split(), word_vocab)
      raw = parser.forward(words, None, args.unary_limit)
      labeled = restore_labels(raw, phrase_vocab, semiterminal_vocab)
      print('( ' + tree_to_string(combine_xbar(labeled)) + ' )')

  trace('finished.')
项目:seq2seq_temporal_attention    作者:aistairc    | 项目源码 | 文件源码
def test(model, test_data, vocab, inv_vocab, modelfile_to_load, params):
    """Decode captions for test videos with beam search and run COCO evaluation.

    Loads trained weights into `model`, writes "<video_id>\\t<caption>" lines
    to the module-level out_test_filename, then converts and scores the
    output with the COCO tools.
    """
    print('Testing ...')
    print('Beam size: {}'.format(params.beam_size))
    print('print output to file:', out_test_filename)
    serializers.load_hdf5(modelfile_to_load, model)
    batch_test = utils_seq2seq.gen_batch_test(test_data, args.feature, 1, vocab, xp)
    # Context manager guarantees the file is closed even if decoding raises.
    with open(out_test_filename, mode='w') as output_file:
        for vid_batch, caption_batch, id_batch in batch_test:
            output = predict(model, params, vocab, inv_vocab, vid_batch,
                             batch_size=1, beam_size=params.beam_size)
            print('%s %s' % (id_batch[0], output))
            output_file.write(id_batch[0] + '\t' + output + '\n')
    utils_coco.convert(out_test_filename, eval_test_filename)
    eval_coco.eval_coco(args.cocotest, eval_test_filename)
项目:seq2seq_temporal_attention    作者:aistairc    | 项目源码 | 文件源码
def test_batch(model, test_data, vocab, inv_vocab, modelfile_to_load):
    """Greedy-decode captions in batches and run COCO evaluation.

    Loads trained weights into `model`, writes per-video captions to
    out_test_filename, dumps COCO-format JSON to eval_test_filename, and
    scores it.

    NOTE(review): batches are generated with params.batch_size_val but
    iterated with args.batchsizeval -- presumably these are the same value;
    confirm, otherwise the tail of each batch is dropped or over-read.
    """
    print('Testing (beam size = 1)...')
    print('print output to file: {}'.format(out_test_filename))
    serializers.load_hdf5(modelfile_to_load, model)
    batch_test = \
        utils_seq2seq.gen_batch_test(test_data, args.feature, params.batch_size_val, vocab, xp)
    caption_out = []
    # Context manager guarantees the file is closed even if decoding raises.
    with open(out_test_filename, mode='w') as output_file:
        for vid_batch_test, caption_batch_test, id_batch_test in batch_test:
            output_test = forward(model, params, vocab, inv_vocab,
                                  vid_batch_test, caption_batch_test,
                                  'test-on-train', args.batchsizeval)
            for ii in range(args.batchsizeval):
                caption_out.append({'image_id': id_batch_test[ii],
                                    'caption': output_test[ii]})
                print('%s %s' % (id_batch_test[ii], output_test[ii]))
                output_file.write(id_batch_test[ii] + '\t' + output_test[ii] + '\n')
    with open(eval_test_filename, mode='w') as f:
        json.dump(caption_out, f)
    eval_coco.eval_coco(args.cocotest, eval_test_filename)
项目:chainer-glu    作者:musyoku    | 项目源码 | 文件源码
def load_model(dirname):
    """Rebuild an RNN model from a saved directory.

    Reads hyper-parameters from ``<dirname>/params.json``, constructs the
    model, and restores trained weights from ``<dirname>/model.hdf5`` when
    that file exists.

    Returns:
        The constructed RNNModel, or None when no params file is present.
    """
    model_filename = os.path.join(dirname, "model.hdf5")
    param_filename = os.path.join(dirname, "params.json")

    if not os.path.isfile(param_filename):
        return None

    print("loading {} ...".format(param_filename))
    with open(param_filename, "r") as f:
        try:
            params = json.load(f)
        except Exception as e:
            # Chain the original parse error instead of discarding it.
            raise Exception("could not load {}".format(param_filename)) from e

    qrnn = RNNModel(**params)

    if os.path.isfile(model_filename):
        print("loading {} ...".format(model_filename))
        serializers.load_hdf5(model_filename, qrnn)

    return qrnn
项目:Emotion_Voice_Recognition_Chainer-    作者:SnowMasaya    | 项目源码 | 文件源码
def __init__(self, x_data, y_data, feature, initmodel, gpu = -1):
        """Set up the emotion-recognition classifier and load a pretrained snapshot.

        Args:
            x_data: feature matrix; cast to float32 and stacked with itself.
            y_data: integer labels; cast to int32 and stacked to match x_data.
            feature: feature-set name ("IS2009" or "IS2010") selecting the
                input layer width.
            initmodel: path to an HDF5 snapshot loaded into the classifier.
            gpu: device id; -1 appears to mean CPU (handled by __set_cpu_or_gpu).
        """
        self.N = 5000
        self.N_test = 766
        self.total = self.N + self.N_test
        # Inverse-frequency class weights (total / per-class sample count).
        # NOTE(review): under Python 3 these divisions yield floats; under
        # Python 2 they truncate to ints -- confirm the intended interpreter.
        self.emotion_weight = {0: self.total / 716, 1: self.total / 325, 2: self.total / 1383, 3: self.total / 743, 4: self.total / 2066, 5: self.total / 74, 6: self.total / 17, 7: self.total / 35, 8: self.total / 404,  9: self.total / 3}
        self.x_data = x_data.astype(np.float32)
        # Data is stacked with itself, doubling the sample count -- presumably
        # deliberate for epoch sizing; verify against the training loop.
        self.x_data = np.vstack((self.x_data, self.x_data))
        self.y_data = y_data.astype(np.int32)
        self.y_data = np.vstack((self.y_data, self.y_data))
        # NOTE(review): no else branch -- an unknown feature name leaves
        # input_layer unset and the model construction below will fail.
        if feature == "IS2009":
            self.input_layer = 384
        elif feature == "IS2010":
            self.input_layer = 1582
        self.n_units = 256
        self.output_layer = 10
        self.model = L.Classifier(net.EmotionRecognitionVoice(self.input_layer, self.n_units, self.output_layer))
        self.gpu = gpu
        self.__set_cpu_or_gpu()
        # Index-to-label mapping for the 10 output classes.
        self.emotion = {0: "Anger", 1: "Happiness", 2: "Excited", 3: "Sadness", 4: "Frustration", 5: "Fear", 6: "Surprise", 7: "Other", 8: "Neutral state", 9: "Disgust"}
        # Init/Resume
        serializers.load_hdf5(initmodel, self.model)
项目:reinforcement-learning    作者:musyoku    | 项目源码 | 文件源码
def load(self):
    """Restore shared/head network and optimizer snapshots from model/, when present.

    Each file is optional; missing snapshots are silently skipped.
    """
    dirname = "model"  # renamed from `dir` to avoid shadowing the builtin
    filename = dirname + "/bddqn_shared_fc.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.shared_fc)
        print("model shared_fc loaded successfully.")

    filename = dirname + "/bddqn_head_fc.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.head_fc)
        print("model head_fc loaded successfully.")

    filename = dirname + "/bddqn_shared_fc.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_shared_fc)
        print("optimizer shared_fc loaded successfully.")

    filename = dirname + "/bddqn_head_fc.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_head_fc)
        print("optimizer head_fc loaded successfully.")
项目:kaggle-dsg-qualification    作者:Ignotus    | 项目源码 | 文件源码
def predict(modelfn, model_vargs, data, batchsize=128, gpu=0):
    """Run the ResNet stored at `modelfn` over `data` in minibatches.

    Args:
        modelfn: path to an HDF5 snapshot of the trained model.
        model_vargs: keyword arguments for the ResNet constructor.
        data: array of inputs; the first axis indexes samples.
        batchsize: samples per forward pass.
        gpu: device id; only GPU execution is implemented.

    Returns:
        numpy int array of length data.shape[0] with predicted class ids.
    """
    assert gpu >= 0, "CPU support not yet implemented"

    model = ResNet(**model_vargs)
    serializers.load_hdf5(modelfn, model)
    model.to_gpu()

    N = data.shape[0]
    prediction = np.zeros(N, dtype='int')

    # range() already yields a final partial batch via slicing, so the
    # original's extra tail pass (which re-predicted the same slice) is gone.
    for i in range(0, N, batchsize):
        x_batch = data[i:i + batchsize]
        x_var = Variable(cuda.to_gpu(x_batch))
        prediction[i:i + batchsize] = cuda.to_cpu(model.predict(x_var))

    return prediction
项目:kaggle-dsg-qualification    作者:Ignotus    | 项目源码 | 文件源码
def predict(modelfn, model_vargs, data, batchsize=128, gpu=0):
    """Run the ResNet stored at `modelfn` over `data` in minibatches.

    Args:
        modelfn: path to an HDF5 snapshot of the trained model.
        model_vargs: keyword arguments for the ResNet constructor.
        data: array of inputs; the first axis indexes samples.
        batchsize: samples per forward pass.
        gpu: device id; only GPU execution is implemented.

    Returns:
        numpy int array of length data.shape[0] with predicted class ids.
    """
    assert gpu >= 0, "CPU support not yet implemented"

    model = ResNet(**model_vargs)
    serializers.load_hdf5(modelfn, model)
    model.to_gpu()

    N = data.shape[0]
    prediction = np.zeros(N, dtype='int')

    # range() already yields a final partial batch via slicing, so the
    # original's extra tail pass (which re-predicted the same slice) is gone.
    for i in range(0, N, batchsize):
        x_batch = data[i:i + batchsize]
        x_var = Variable(cuda.to_gpu(x_batch))
        prediction[i:i + batchsize] = cuda.to_cpu(model.predict(x_var))

    return prediction
项目:double-dqn    作者:musyoku    | 项目源码 | 文件源码
def load(self):
    """Restore the convolutional (and optional fully-connected) network snapshots.

    The fully-connected snapshot is only loaded when that part of the
    network has not been eliminated.
    """
    filename = "conv.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.conv)
        print("convolutional network loaded.")
    # `x is False` replaced with idiomatic negation (flag is a bool here).
    if not self.fcl_eliminated:
        filename = "fc.model"
        if os.path.isfile(filename):
            serializers.load_hdf5(filename, self.fc)
            print("fully-connected network loaded.")
项目:async-rl    作者:muupan    | 项目源码 | 文件源码
def load_model(self, model_filename):
    """Load network weights from a file and sync them into the shared model.

    Also restores the optimizer state from '<model_filename>.opt' when that
    file exists; otherwise warns that only the model was loaded.
    """
    serializers.load_hdf5(model_filename, self.model)
    copy_param.copy_param(target_link=self.model,
                          source_link=self.shared_model)
    opt_filename = model_filename + '.opt'
    if os.path.exists(opt_filename):
        serializers.load_hdf5(opt_filename, self.optimizer)
    else:
        # Original printed this warning on the *exists* branch (inverted)
        # and never loaded anything when the file was actually missing.
        print('WARNING: {0} was not found, so loaded only a model'.format(
            opt_filename))
项目:chainer-speech-recognition    作者:musyoku    | 项目源码 | 文件源码
def load(self, filename):
    """Load serialized parameters from `filename`.

    Returns True on success, False when the file does not exist.
    """
    if not os.path.isfile(filename):
        return False
    print("Loading {} ...".format(filename))
    serializers.load_hdf5(filename, self)
    return True
项目:chainer-speech-recognition    作者:musyoku    | 项目源码 | 文件源码
def load(self, filename):
    """Load serialized parameters from `filename`.

    Returns True on success, False when the file does not exist.
    """
    if not os.path.isfile(filename):
        return False
    print("Loading {} ...".format(filename))
    serializers.load_hdf5(filename, self)
    return True
项目:self-driving-cars    作者:musyoku    | 项目源码 | 文件源码
def load(self):
    """Restore the network and its optimizer snapshots, when the files exist.

    Uses print() functions (Python 3) consistent with the rest of the codebase.
    """
    filename = "fc.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.fc)
        print("model loaded successfully.")
    filename = "fc.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_fc)
        print("optimizer loaded successfully.")
项目:jrm_ssl    作者:Fhrozen    | 项目源码 | 文件源码
def LoadFineTnModel(self, folder, epoch, batch):
    """Load the checkpointed network and switch it into fine-tuning mode."""
    print('Loading model')
    snapshot = '{}/network_epoch{}_batch{}.model'.format(folder, epoch, batch)
    serializers.load_hdf5(snapshot, self.Networks[0])
    self.Networks[0].finetune_network()
项目:jrm_ssl    作者:Fhrozen    | 项目源码 | 文件源码
def LoadResumeModel(self, folder, epoch, batch):
    """Reload a checkpointed network snapshot to resume training."""
    print('Loading model')
    snapshot = '{}/network_epoch{}_batch{}.model'.format(folder, epoch, batch)
    serializers.load_hdf5(snapshot, self.Networks[0])
项目:jrm_ssl    作者:Fhrozen    | 项目源码 | 文件源码
def LoadTraining(self, folder, epoch, batch):
    """Reload the serialized optimizer state for the given checkpoint."""
    print('Loading optimizer')
    state = '{}/network_epoch{}_batch{}.state'.format(folder, epoch, batch)
    serializers.load_hdf5(state, self.Optimizer)
项目:char-classify    作者:ekatek    | 项目源码 | 文件源码
def __init__(self, net_size, model_filename, optimizer_filename):
    """Build the classifier network and its Adam optimizer.

    When a non-empty filename is given, previously saved model/optimizer
    state is restored from it.
    """
    # Underlying neural network model.
    self.model = L.Classifier(BaseNetwork(net_size))
    if model_filename != "":
        serializers.load_hdf5(model_filename, self.model)

    # Underlying optimizer, attached to the model.
    self.optimizer = optimizers.Adam()
    self.optimizer.setup(self.model)
    if optimizer_filename != "":
        serializers.load_hdf5(optimizer_filename, self.optimizer)
项目:ImageCaptioning    作者:rkuga    | 项目源码 | 文件源码
def load_state(self, path, epoch):
    """Restore the classifier network snapshot for `epoch` under ./states/<path>/.

    Returns:
        The epoch number as an int.
    """
    # print() function (Python 3) replaces the Python 2 print statement.
    print("==> loading state %s epoch %s" % (path, epoch))
    serializers.load_hdf5('./states/%s/net_model_classifier_%s.h5' % (path, epoch), self.network)
    return int(epoch)
项目:ImageCaptioning    作者:rkuga    | 项目源码 | 文件源码
def load_state(self, path, epoch):
    """Restore encoder and decoder snapshots for `epoch` under ./states/<path>/.

    Returns:
        The epoch number as an int.
    """
    # print() function (Python 3) replaces the Python 2 print statement.
    print("==> loading state %s epoch %s" % (path, epoch))
    serializers.load_hdf5('./states/%s/net_model_enc_%s.h5' % (path, epoch), self.enc)
    serializers.load_hdf5('./states/%s/net_model_dec_%s.h5' % (path, epoch), self.dec)

    return int(epoch)
项目:ddnn    作者:kunglab    | 项目源码 | 文件源码
def load(self, filename):
    """Load serialized parameters from `filename`, warning when it is absent."""
    if not os.path.isfile(filename):
        print(filename, "not found.")
        return
    print("loading {} ...".format(filename))
    serializers.load_hdf5(filename, self)
项目:adversarial-autoencoder    作者:musyoku    | 项目源码 | 文件源码
def load(self, filename):
    """Load serialized parameters from `filename`.

    Returns True on success, False when the file does not exist.
    """
    if not os.path.isfile(filename):
        return False
    print("Loading {} ...".format(filename))
    serializers.load_hdf5(filename, self)
    return True