import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense

# Toy corpus and character-level vocabulary
text = "This is a sample text for language modeling using RNN."
chars = sorted(set(text))
char_to_index = {char: index for index, char in enumerate(chars)}
index_to_char = {index: char for index, char in enumerate(chars)}
text_indices = [char_to_index[char] for char in text]

# Build (sequence, next character) training pairs with a sliding window
seq_length, sequences, next_chars = 20, [], []
for i in range(0, len(text_indices) - seq_length):
    sequences.append(text_indices[i:i + seq_length])
    next_chars.append(text_indices[i + seq_length])
X, y = np.array(sequences), np.array(next_chars)

# Embedding -> SimpleRNN -> softmax over the character vocabulary
model = Sequential([
    Embedding(input_dim=len(chars), output_dim=50, input_length=seq_length),
    SimpleRNN(100, return_sequences=False),
    Dense(len(chars), activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
model.fit(X, y, batch_size=64, epochs=20)

# Greedy (argmax) character-by-character generation from a seed
seed_text = "This is a sample te"
generated_text = seed_text
num_chars_to_generate = 100
for _ in range(num_chars_to_generate):
    seed_indices = [char_to_index[char] for char in seed_text]
    if len(seed_indices) < seq_length:
        diff = seq_length - len(seed_indices)
        seed_indices = [0] * diff + seed_indices  # left-pad short seeds
    seed_indices = np.array(seed_indices).reshape(1, -1)
    next_index = model.predict(seed_indices).argmax()
    next_char = index_to_char[next_index]
    generated_text += next_char
    seed_text = seed_text[1:] + next_char  # slide the window forward
print(generated_text)
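
The generation loop above decodes greedily with argmax, which on such a tiny corpus quickly collapses into repetitive characters (visible in the generated string in the output below). A common variant is to sample the next character from the softmax output using a temperature parameter. The helper below is a minimal sketch of that idea and is not part of the original listing; sample_with_temperature and the temperature value 0.8 are illustrative choices.

import numpy as np

def sample_with_temperature(probs, temperature=0.8):
    # Rescale log-probabilities by the temperature and renormalize
    # into a valid distribution before sampling an index from it.
    logits = np.log(np.asarray(probs, dtype=np.float64) + 1e-9) / temperature
    exp_logits = np.exp(logits - logits.max())
    probs = exp_logits / exp_logits.sum()
    return int(np.random.choice(len(probs), p=probs))

# Drop-in replacement for the argmax line in the generation loop:
# probs = model.predict(seed_indices)[0]
# next_index = sample_with_temperature(probs, temperature=0.8)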
Success #stdin #stdout #stderr 2.38s 240084KB
stdin
Standard input is empty
stdout
Epoch 1/20
34/34 [==============================] - 0s 10ms/sample - loss: 3.1183
Epoch 2/20
34/34 [==============================] - 0s 202us/sample - loss: 3.0351
Epoch 3/20
34/34 [==============================] - 0s 178us/sample - loss: 2.9550
Epoch 4/20
34/34 [==============================] - 0s 171us/sample - loss: 2.8738
Epoch 5/20
34/34 [==============================] - 0s 173us/sample - loss: 2.7884
Epoch 6/20
34/34 [==============================] - 0s 241us/sample - loss: 2.6965
Epoch 7/20
34/34 [==============================] - 0s 290us/sample - loss: 2.5964
Epoch 8/20
34/34 [==============================] - 0s 286us/sample - loss: 2.4873
Epoch 9/20
34/34 [==============================] - 0s 293us/sample - loss: 2.3700
Epoch 10/20
34/34 [==============================] - 0s 297us/sample - loss: 2.2482
Epoch 11/20
34/34 [==============================] - 0s 327us/sample - loss: 2.1278
Epoch 12/20
34/34 [==============================] - 0s 334us/sample - loss: 2.0137
Epoch 13/20
34/34 [==============================] - 0s 335us/sample - loss: 1.9061
Epoch 14/20
34/34 [==============================] - 0s 313us/sample - loss: 1.8012
Epoch 15/20
34/34 [==============================] - 0s 310us/sample - loss: 1.6952
Epoch 16/20
34/34 [==============================] - 0s 321us/sample - loss: 1.5890
Epoch 17/20
34/34 [==============================] - 0s 190us/sample - loss: 1.4873
Epoch 18/20
34/34 [==============================] - 0s 176us/sample - loss: 1.3936
Epoch 19/20
34/34 [==============================] - 0s 204us/sample - loss: 1.3062
Epoch 20/20
34/34 [==============================] - 0s 292us/sample - loss: 1.2217
This is a sample teot for langnnne gonglnng ngnngnngNigtRNsnnNingnnNnngt gsnngnnonngnnNnngoiNgnnNnnnnnNnnnnnNttgNnnnnnn
stderr
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.