# nb.py
#%%
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Sequential, losses, optimizers, datasets
#%%
x = tf.constant([2., 1., 0.1])
layer = layers.Softmax(axis=-1)
layer(x)
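#%%
# Added check: softmax outputs form a probability distribution, so they
# sum to 1 along the chosen axis; the functional tf.nn.softmax gives the
# same result as the layer.
out = layer(x)
print(tf.reduce_sum(out, axis=-1))  # ~1.0
print(tf.nn.softmax(x, axis=-1))    # same values as layer(x)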
#%%
def preprocess(x, y):
    # flatten each 28x28 image into a 784-dim vector
    x = tf.reshape(x, [-1])
    return x, y

# x: [60k, 28, 28],
# y: [60k]
(x, y), (x_test, y_test) = datasets.mnist.load_data()
# x: [0~255] => [0~1.]
x = tf.convert_to_tensor(x, dtype=tf.float32) / 255.
y = tf.convert_to_tensor(y, dtype=tf.int32)
# x_test: [0~255] => [0~1.]
x_test = tf.convert_to_tensor(x_test, dtype=tf.float32) / 255.
y_test = tf.convert_to_tensor(y_test, dtype=tf.int32)

train_db = tf.data.Dataset.from_tensor_slices((x, y))
train_db = train_db.shuffle(1000).map(preprocess).batch(128)
val_db = tf.data.Dataset.from_tensor_slices((x_test, y_test))
val_db = val_db.shuffle(1000).map(preprocess).batch(128)

x, y = next(iter(train_db))
print(x.shape, y.shape)
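#%%
# Added note: y is kept as integer class indices, which matches the
# sparse cross-entropy loss used when compiling below. If one-hot
# targets were preferred, preprocess could produce them instead:
y_onehot = tf.one_hot(y, depth=10)  # [128] -> [128, 10]
print(y_onehot.shape)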
#%%
from tensorflow.keras import layers, Sequential
network = Sequential([
    layers.Dense(3, activation=None),
    layers.ReLU(),
    layers.Dense(2, activation=None),
    layers.ReLU()
])
x = tf.random.normal([4, 3])
network(x)
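#%%
# Added note: a standalone ReLU layer is equivalent to passing the
# activation to Dense directly; net2 below is an illustrative name.
net2 = Sequential([
    layers.Dense(3, activation='relu'),
    layers.Dense(2, activation='relu'),
])
print(net2(x).shape)  # same output shape as network(x): (4, 2)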
#%%
layers_num = 2
network = Sequential([])
for _ in range(layers_num):
    network.add(layers.Dense(3))
    network.add(layers.ReLU())
network.build(input_shape=(None, 4))
network.summary()
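#%%
# Added check: the summary's parameter counts follow from the weight and
# bias shapes: 4*3+3 = 15 for the first Dense(3), 3*3+3 = 12 for the
# second, 27 in total.
print(network.count_params())  # 27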
#%%
for p in network.trainable_variables:
    print(p.name, p.shape)
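#%%
# Added note: for this all-Dense network every variable is trainable;
# layers such as BatchNormalization would also contribute non-trainable
# variables.
print(len(network.trainable_variables), len(network.non_trainable_variables))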
#%%
# Create a 5-layer fully connected network
network = Sequential([layers.Dense(256, activation='relu'),
                      layers.Dense(128, activation='relu'),
                      layers.Dense(64, activation='relu'),
                      layers.Dense(32, activation='relu'),
                      layers.Dense(10)])
network.build(input_shape=(4, 28*28))
network.summary()
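#%%
# Added check: the first layer dominates the total parameter count:
# 784*256+256 = 200,960 of the 244,522 parameters overall.
print(network.count_params())  # 244522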
#%%
# Import the optimizer and loss-function modules
from tensorflow.keras import optimizers, losses
# Adam optimizer with learning rate 0.01; sparse cross-entropy loss,
# since y holds integer class indices, with softmax applied inside the
# loss (from_logits=True) because the final Dense(10) outputs raw logits
network.compile(optimizer=optimizers.Adam(learning_rate=0.01),
                loss=losses.SparseCategoricalCrossentropy(from_logits=True),
                metrics=['accuracy'])  # track accuracy as the metric
#%%
# Train on train_db for 5 epochs, validating on val_db once every 2 epochs
history = network.fit(train_db, epochs=5, validation_data=val_db, validation_freq=2)
#%%
history.history  # inspect the recorded per-epoch losses and metrics
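#%%
# Added sketch (assumes matplotlib is installed): plot the recorded
# training curve. With validation_freq=2, the 'val_loss' and
# 'val_accuracy' lists are shorter than the per-epoch 'loss' list.
import matplotlib.pyplot as plt
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['accuracy'], label='train accuracy')
plt.xlabel('epoch')
plt.legend()
plt.show()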
#%%
# Save the model parameters to a file
network.save_weights('weights.ckpt')
print('saved weights.')
del network  # delete the network object
# Recreate the same network architecture
network = Sequential([layers.Dense(256, activation='relu'),
                      layers.Dense(128, activation='relu'),
                      layers.Dense(64, activation='relu'),
                      layers.Dense(32, activation='relu'),
                      layers.Dense(10)])
network.compile(optimizer=optimizers.Adam(learning_rate=0.01),
                loss=losses.SparseCategoricalCrossentropy(from_logits=True),
                metrics=['accuracy'])
# Read the parameters from the file and load them into the current network
network.load_weights('weights.ckpt')
print('loaded weights!')
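#%%
# Added sketch: save_weights stores parameters only, so the matching
# architecture has to be rebuilt in code before loading, as above.
# Saving the whole model (architecture + weights) avoids that;
# 'model.h5' is an illustrative filename.
network.build(input_shape=(None, 28*28))  # build so the model can be serialized
network.save('model.h5')
restored = keras.models.load_model('model.h5')
restored.summary()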
#%%
# Create a global average pooling layer
global_average_layer = layers.GlobalAveragePooling2D()
# Use a feature-map-shaped tensor as input and test the output
x = tf.random.normal([4, 7, 7, 2048])
out = global_average_layer(x)  # pooling reduces the spatial dimensions
print(out.shape)
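#%%
# Added check: global average pooling averages over the spatial axes,
# i.e. it matches tf.reduce_mean over axes 1 and 2.
print(tf.reduce_mean(x, axis=[1, 2]).shape)  # also (4, 2048)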
#%%
# Create a fully connected layer
fc = layers.Dense(100)
# Use the previous layer's (pooled) output as input and test the output
x = tf.random.normal([4, 2048])
out = fc(x)
print(out.shape)
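#%%
# Added sketch: in a typical classification head the two layers are
# chained; pooling the [4, 7, 7, 2048] feature map and applying the
# Dense layer yields per-image class scores.
x = tf.random.normal([4, 7, 7, 2048])
out = fc(global_average_layer(x))
print(out.shape)  # (4, 100)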
#%%