sample.py
# -*- coding:utf-8 -*-
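"""Sample new text from a trained seq2seq RNN lyrics model.

Loads the configuration, vocabulary and latest checkpoint saved during
training in --save_dir, generates -n words starting from the --start prime
text, and appends the result to result/sequence.txt.

Example invocation (a sketch, assuming a trained checkpoint already exists
under ./save):

    python sample.py --save_dir save -n 400 --start 如果 --sample combined
"""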
from __future__ import print_function
import numpy as np
import tensorflow as tf
import argparse
import time
import os
from six.moves import cPickle
import codecs
from preprocess import TextParser
from seq2seq_rnn import Model
def main():
    # get arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--save_dir', type=str, default='save',
                        help='model directory to store checkpointed models')
    parser.add_argument('-n', type=int, default=400,
                        help='number of words to sample')
    parser.add_argument('--start', default=u'如果',
                        help='prime text')
    parser.add_argument('--sample', type=str, default='combined',
                        help='three choices: argmax, weighted, combined')
    args = parser.parse_args()
    sample(args)
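
# Note: the --sample flag selects the decoding strategy implemented in
# seq2seq_rnn.Model.sample. From the option names, 'argmax' presumably picks
# the most probable word at each step, 'weighted' samples from the predicted
# distribution, and 'combined' mixes the two; see Model.sample for the
# actual behaviour.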

def sample(args):
    # import the configuration and vocabulary saved during training
    with open(os.path.join(args.save_dir, 'config.pkl'), 'rb') as f:
        saved_args = cPickle.load(f)
    with open(os.path.join(args.save_dir, 'words_vocab.pkl'), 'rb') as f:
        words, vocab = cPickle.load(f)
    # rebuild the trained model for sampling
    model = Model(saved_args, True)
    with tf.Session() as sess:
        # initialize variables and restore the latest checkpoint
        # (tf.global_variables_initializer / tf.global_variables replace the
        # deprecated tf.initialize_all_variables / tf.all_variables)
        sess.run(tf.global_variables_initializer())
        saver = tf.train.Saver(tf.global_variables())
        ckpt = tf.train.get_checkpoint_state(args.save_dir)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
        # sample the new sequence word by word
        literature = model.sample(sess, words, vocab, args.n, args.start, args.sample)
        # make sure the output directory exists before appending the result
        if not os.path.isdir('result'):
            os.makedirs('result')
        with codecs.open('result/sequence.txt', 'a', 'utf-8') as f:
            f.write(literature + '\n\n')
        print(literature)


if __name__ == '__main__':
    main()