In [10]:
Copied!
# Imports: stdlib first, then third-party (the scrape duplicated this cell).
import sys

import numpy as np
import tensorflow as tf
In [11]:
Copied!
# Record the interpreter version for reproducibility (duplicate line removed).
print(sys.version)
3.10.10 | packaged by conda-forge | (main, Mar 24 2023, 20:08:06) [GCC 11.3.0]
In [12]:
Copied!
# Record the TensorFlow version the notebook ran under (duplicate line removed).
print(tf.__version__)
2.15.0
Test if TensorFlow can access a GPU
In [13]:
Copied!
# List GPUs visible to TensorFlow; an empty list means no GPU is usable.
# (Duplicate line from the scrape removed.)
tf.config.list_physical_devices('GPU')
Out[13]:
[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]
In [14]:
Copied!
# Name of the default GPU device (e.g. '/device:GPU:0'); empty string if none.
# (Duplicated cell source removed.)
device_name = tf.test.gpu_device_name()
print(device_name)
/device:GPU:0
2024-02-08 09:33:51.770011: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1929] Created device /device:GPU:0 with 9776 MB memory: -> device: 0, name: NVIDIA GeForce RTX 2080 Ti, pci bus id: 0000:3b:00.0, compute capability: 7.5
Run a simple example from the TensorFlow tutorials
The beginner tutorial can be found at: https://www.tensorflow.org/tutorials/quickstart/beginner
Load a dataset
In [15]:
Copied!
# Download MNIST and scale pixel values from [0, 255] to [0, 1].
# (Duplicated cell source removed.)
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
Build a machine learning model
In [16]:
Copied!
# Build a small feed-forward classifier for 28x28 MNIST digits.
# (Duplicated cell source removed; indentation restored.)
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),  # 28x28 image -> 784 vector
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),                   # regularization during training
    tf.keras.layers.Dense(10)                       # one logit per digit class (no softmax)
])

# from_logits=True because the final Dense layer emits raw logits.
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer='adam', loss=loss_fn, metrics=['accuracy'])
Train your model
In [17]:
Copied!
# Train for 5 epochs on the scaled training set (duplicate line removed).
model.fit(x_train, y_train, epochs=5)
Epoch 1/5 1875/1875 [==============================] - 11s 5ms/step - loss: 0.2947 - accuracy: 0.9147 Epoch 2/5 1875/1875 [==============================] - 10s 5ms/step - loss: 0.1429 - accuracy: 0.9578 Epoch 3/5 1875/1875 [==============================] - 10s 5ms/step - loss: 0.1066 - accuracy: 0.9678 Epoch 4/5 1875/1875 [==============================] - 10s 5ms/step - loss: 0.0890 - accuracy: 0.9725 Epoch 5/5 1875/1875 [==============================] - 10s 5ms/step - loss: 0.0748 - accuracy: 0.9766
Out[17]:
<keras.src.callbacks.History at 0x2b2e31914580>
Evaluate your model
In [18]:
Copied!
# Evaluate on the held-out test set; returns [loss, accuracy].
# (Duplicate line from the scrape removed.)
model.evaluate(x_test, y_test, verbose=2)
313/313 - 1s - loss: 0.0775 - accuracy: 0.9751 - 1s/epoch - 4ms/step
Out[18]:
[0.07746326178312302, 0.9750999808311462]