
AI - TensorFlow


TensorFlow is a free and open-source software library for machine learning. It can be used across a range of tasks, but has a particular focus on training and inference of deep neural networks. TensorFlow is a symbolic math library based on dataflow and differentiable programming.
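
As a quick illustration of the differentiable-programming idea, here is a minimal sketch (not from the original post) that builds a small dataflow expression and asks TensorFlow to differentiate it automatically:

import tensorflow as tf

# A trainable value that TensorFlow tracks for differentiation.
w = tf.Variable(3.0)

with tf.GradientTape() as tape:
    # Dataflow expression: y = w^2 + 2w
    y = w * w + 2.0 * w

# dy/dw = 2w + 2, so the gradient is 8.0 at w = 3.0.
print(tape.gradient(y, w).numpy())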




import tensorflow as tf
import pandas as pd

COLUMN_NAMES = [
    'SepalLength',
    'SepalWidth',
    'PetalLength',
    'PetalWidth',
    'Species'
]

1. Import training dataset

training_dataset = pd.read_csv('iris_training.csv', names=COLUMN_NAMES, header=0)
train_x = training_dataset.iloc[:, 0:4]
train_y = training_dataset.iloc[:, 4]

2. Import testing dataset

test_dataset = pd.read_csv('iris_test.csv', names=COLUMN_NAMES, header=0)
test_x = test_dataset.iloc[:, 0:4]
test_y = test_dataset.iloc[:, 4]
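
With the features and labels split out, a small Keras classifier can be trained directly on these frames. The following is a minimal sketch, not part of the original snippet; it assumes the 'Species' column holds integer class labels (0, 1, 2), as in the standard Iris CSV files.

model = tf.keras.Sequential([
    tf.keras.layers.Dense(10, activation='relu', input_shape=(4,)),
    tf.keras.layers.Dense(10, activation='relu'),
    tf.keras.layers.Dense(3)  # one logit per Iris species
])

model.compile(
    optimizer='adam',
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=['accuracy']
)

# Keras accepts the pandas DataFrame / Series directly.
model.fit(train_x, train_y, epochs=50, verbose=0)
print(model.evaluate(test_x, test_y, verbose=0))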

----------------------------

# Load the TensorBoard notebook extension.
%load_ext tensorboard

from datetime import datetime
from packaging import version

import tensorflow as tf
from tensorflow import keras

import numpy as np

print("TensorFlow version: ", tf.__version__)
assert version.parse(tf.__version__).release[0] >= 2, \
    "This notebook requires TensorFlow 2.0 or above."

data_size = 1000
# 80% of the data is for training.
train_pct = 0.8
train_size = int(data_size * train_pct)

# Create some input data between -1 and 1 and randomize it.
x = np.linspace(-1, 1, data_size)
np.random.shuffle(x)

# Generate the output data.
# y = 0.5x + 2 + noise
y = 0.5 * x + 2 + np.random.normal(0, 0.05, (data_size, ))

# Split into test and train pairs.
x_train, y_train = x[:train_size], y[:train_size]
x_test, y_test = x[train_size:], y[train_size:]

logdir = "logs/scalars/" + datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)

model = keras.models.Sequential([
    keras.layers.Dense(16, input_dim=1),
    keras.layers.Dense(1),
])

model.compile(
    loss='mse',  # keras.losses.mean_squared_error
    optimizer=keras.optimizers.SGD(learning_rate=0.2),
)

print("Training ... With default parameters, this takes less than 10 seconds.")

training_history = model.fit(
    x_train,  # input
    y_train,  # output
    batch_size=train_size,
    verbose=0,  # Suppress chatty output; use TensorBoard instead.
    epochs=100,
    validation_data=(x_test, y_test),
    callbacks=[tensorboard_callback],
)

print("Average test loss: ", np.average(training_history.history['loss']))

