Study Notes TF035: Implementing an LSTM-Based Language Model (Part 3)
2017-09-30 17:00:23
class TestConfig(object):
  """Tiny config, for testing."""
  init_scale = 0.1
  learning_rate = 1.0
  max_grad_norm = 1
  num_layers = 1
  num_steps = 2
  hidden_size = 2
  max_epoch = 1
  max_max_epoch = 1
  keep_prob = 1.0
  lr_decay = 0.5
  batch_size = 20
  vocab_size = 10000


def run_epoch(session, model, eval_op=None, verbose=False):
  """Runs the model on the given data and returns its perplexity."""
  start_time = time.time()
  costs = 0.0
  iters = 0
  state = session.run(model.initial_state)

  fetches = {
      "cost": model.cost,
      "final_state": model.final_state,
  }
  if eval_op is not None:
    fetches["eval_op"] = eval_op

  for step in range(model.input.epoch_size):
    feed_dict = {}
    # Feed the final LSTM state of the previous step back in as the initial state.
    for i, (c, h) in enumerate(model.initial_state):
      feed_dict[c] = state[i].c
      feed_dict[h] = state[i].h

    vals = session.run(fetches, feed_dict)
    cost = vals["cost"]
    state = vals["final_state"]

    costs += cost
    iters += model.input.num_steps

    if verbose and step % (model.input.epoch_size // 10) == 10:
      print("%.3f perplexity: %.3f speed: %.0f wps" %
            (step * 1.0 / model.input.epoch_size, np.exp(costs / iters),
             iters * model.input.batch_size / (time.time() - start_time)))

  return np.exp(costs / iters)


raw_data = reader.ptb_raw_data('simple-examples/data/')
train_data, valid_data, test_data, _ = raw_data

config = SmallConfig()
# The evaluation config processes one word at a time (batch_size=1, num_steps=1).
eval_config = SmallConfig()
eval_config.batch_size = 1
eval_config.num_steps = 1

with tf.Graph().as_default():
  initializer = tf.random_uniform_initializer(-config.init_scale,
                                              config.init_scale)

  with tf.name_scope("Train"):
    train_input = PTBInput(config=config, data=train_data, name="TrainInput")
    with tf.variable_scope("Model", reuse=None, initializer=initializer):
      m = PTBModel(is_training=True, config=config, input_=train_input)
      #tf.scalar_summary("Training Loss", m.cost)
      #tf.scalar_summary("Learning Rate", m.lr)

  with tf.name_scope("Valid"):
    valid_input = PTBInput(config=config, data=valid_data, name="ValidInput")
    with tf.variable_scope("Model", reuse=True, initializer=initializer):
      mvalid = PTBModel(is_training=False, config=config, input_=valid_input)
      #tf.scalar_summary("Validation Loss", mvalid.cost)

  with tf.name_scope("Test"):
    test_input = PTBInput(config=eval_config, data=test_data, name="TestInput")
    with tf.variable_scope("Model", reuse=True, initializer=initializer):
      mtest = PTBModel(is_training=False, config=eval_config, input_=test_input)
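The listing stops just after the test model is created; the driver loop that actually calls run_epoch is not shown on this page. As a minimal sketch, continuing inside the same tf.Graph().as_default() block and assuming PTBModel exposes the assign_lr method and train_op attribute used by the standard TensorFlow PTB example, the epoch loop could look like this:

  # Sketch only: assumes m.assign_lr and m.train_op exist as in the standard PTB example.
  sv = tf.train.Supervisor()                  # initializes variables, manages checkpoints
  with sv.managed_session() as session:
    for i in range(config.max_max_epoch):
      # Keep the learning rate flat for max_epoch epochs, then decay it geometrically.
      lr_decay = config.lr_decay ** max(i + 1 - config.max_epoch, 0.0)
      m.assign_lr(session, config.learning_rate * lr_decay)

      train_perplexity = run_epoch(session, m, eval_op=m.train_op, verbose=True)
      print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))

      valid_perplexity = run_epoch(session, mvalid)
      print("Epoch: %d Valid Perplexity: %.3f" % (i + 1, valid_perplexity))

    test_perplexity = run_epoch(session, mtest)
    print("Test Perplexity: %.3f" % test_perplexity)

Because eval_config sets batch_size and num_steps to 1, the test model steps through the data one word at a time, which is slower but yields an exact perplexity measurement.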