rnn_play.py
import tensorflow as tf
import numpy as np
import my_txtutils
target = open("outputs.txt", 'w')  # generated text is also written to this file

# these must match what was saved!
ALPHASIZE = my_txtutils.ALPHASIZE
NLAYERS = 3
INTERNALSIZE = 512
##################################### REPLACE THESE WITH YOUR TRAINED MODEL CHECKPOINT FILES #############################
author = "twitter2_checkpoints/rnn_train_1498315225-15000000"
ncnt = 0  # characters printed since the last newline
with tf.Session() as sess:
    new_saver = tf.train.import_meta_graph('twitter2_checkpoints/rnn_train_1498315225-15000000.meta')
    new_saver.restore(sess, author)

    x = my_txtutils.convert_from_alphabet(ord("L"))  # seed character
    x = np.array([[x]])  # shape [BATCHSIZE, SEQLEN] with BATCHSIZE=1 and SEQLEN=1

    # initial values
    y = x
    h = np.zeros([1, INTERNALSIZE * NLAYERS], dtype=np.float32)  # [BATCHSIZE, INTERNALSIZE * NLAYERS]

    for i in range(1000000000):  # generate (essentially) forever; stop with Ctrl-C
        # 'X:0', 'Yo:0', 'H:0', 'Hin:0', 'pkeep:0' and 'batchsize:0' are tensor names
        # saved in the training graph that was just restored.
        yo, h = sess.run(['Yo:0', 'H:0'], feed_dict={'X:0': y, 'pkeep:0': 1., 'Hin:0': h, 'batchsize:0': 1})

        # If sampling is done from the topn most likely characters, the generated text
        # is more credible and more "english". If topn is not set, it defaults to the full
        # distribution (ALPHASIZE).
        # Recommended: topn = 10 for intermediate checkpoints, topn = 2 or 3 for fully
        # trained checkpoints. (A sketch of such a top-n sampler is given at the end of this file.)
        c = my_txtutils.sample_from_probabilities(yo, topn=2)
        y = np.array([[c]])  # shape [BATCHSIZE, SEQLEN] with BATCHSIZE=1 and SEQLEN=1
        c = chr(my_txtutils.convert_to_alphabet(c))
        print(c, end="")
        target.write(c)

        # keep console output readable: print a newline after 100 characters without one
        if c == '\n':
            ncnt = 0
        else:
            ncnt += 1
        if ncnt == 100:
            print("")
            ncnt = 0

target.close()
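
# ---------------------------------------------------------------------------
# my_txtutils is not included in this file, so the exact implementation of
# sample_from_probabilities is not visible here. As an illustration of the
# top-n sampling described in the loop above, a helper consistent with the call
# sample_from_probabilities(yo, topn=2) could look roughly like the sketch
# below (an assumption, not necessarily the repository's actual code): keep
# only the topn highest-probability characters, renormalize, and draw one index.


def sample_from_probabilities_sketch(probabilities, topn, alphasize):
    """Sketch of top-n sampling: pick one character index from a softmax output."""
    p = np.squeeze(probabilities).astype(np.float64)  # probabilities, shape [alphasize]
    p[np.argsort(p)[:-topn]] = 0                      # zero out all but the topn largest
    p = p / np.sum(p)                                 # renormalize the kept probabilities
    return np.random.choice(alphasize, 1, p=p)[0]     # draw one character index


# Example: c = sample_from_probabilities_sketch(yo, topn=2, alphasize=ALPHASIZE)
# With topn=2, only the two most likely characters can ever be emitted, which keeps
# the output conservative; a larger topn gives more varied but noisier text.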