Elastic Net Regression
This script shows how to solve elastic net regression with TensorFlow, fitting the linear model y = Ax + b.
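For reference, the elastic net objective used in the listing below combines the mean squared error with both an L1 and an L2 penalty on the coefficient vector A. In this listing both penalty weights are set to 1, and the penalties are averaged over the coefficients rather than summed:

Loss = mean((y - (Ax + b))²) + λ₁·mean(|A|) + λ₂·mean(A²)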
We use the iris dataset; specifically:
```
y = Sepal Length
x = Sepal Width, Petal Length, Petal Width
```

```
#List3-42
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from sklearn import datasets
tf.random.set_seed(42)  # TF2 API; replaces the deprecated tf.set_random_seed
np.random.seed(42)
# Load the data
# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
iris = datasets.load_iris()
x_vals = np.array([[x[1], x[2], x[3]] for x in iris.data])
y_vals = np.array([y[0] for y in iris.data])
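# Note (added): x_vals has shape (150, 3) with columns
# [Sepal Width, Petal Length, Petal Width]; y_vals has shape (150,)
# and holds the Sepal Length targets.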
def model(x, w, b):
    # Declare model operations: y_hat = x @ w + b
    model_output = tf.add(tf.matmul(x, w), b)
    return model_output
def loss1(x, y, w, b):
    # Declare the elastic net loss: MSE plus L1 and L2 penalties on w
    elastic_param1 = tf.constant(1.)
    elastic_param2 = tf.constant(1.)
    l1_a_loss = tf.reduce_mean(tf.abs(w))
    l2_a_loss = tf.reduce_mean(tf.square(w))
    e1_term = tf.multiply(elastic_param1, l1_a_loss)
    e2_term = tf.multiply(elastic_param2, l2_a_loss)
    mse = tf.reduce_mean(tf.square(y - model(x, w, b)))
    loss = tf.expand_dims(mse + e1_term + e2_term, 0)
    return loss
def grad1(x, y, w, b):
    # Differentiate the loss w.r.t. w and b via automatic differentiation
    with tf.GradientTape() as tape:
        loss_1 = loss1(x, y, w, b)
    return tape.gradient(loss_1, [w, b])
# make results reproducible
seed = 13
np.random.seed(seed)
tf.random.set_seed(seed)
# Declare batch size
batch_size = 50
# Create variables for linear regression
# Note: dtype must be passed as a keyword; the second positional
# argument of tf.Variable is `trainable`, not the dtype.
w1 = tf.Variable(tf.random.normal(shape=[3, 1]), dtype=tf.float32)
b1 = tf.Variable(tf.random.normal(shape=[1, 1]), dtype=tf.float32)
optimizer = tf.optimizers.Adam(0.001)
# Training loop
loss_vec = []
for i in range(5000):
    # Draw a random mini-batch of 50 samples
    rand_index = np.random.choice(len(x_vals), size=batch_size)
    rand_x = x_vals[rand_index]
    rand_y = np.transpose([y_vals[rand_index]])
    x = tf.cast(rand_x, tf.float32)
    y = tf.cast(rand_y, tf.float32)
    # Compute gradients and take one optimizer step
    grads1 = grad1(x, y, w1, b1)
    optimizer.apply_gradients(zip(grads1, [w1, b1]))
    # Record the current loss for plotting
    temp_loss1 = loss1(x, y, w1, b1).numpy()
    loss_vec.append(temp_loss1)
    if (i + 1) % 25 == 0:
        print('Step #' + str(i + 1) + ' A = ' + str(w1.numpy()) + ' b = ' + str(b1.numpy()))
        print('Loss = ' + str(temp_loss1))
# Get the optimal coefficients
[[sw_coef], [pl_coef], [pw_coef]] = w1.numpy()
[y_intercept] = b1.numpy()
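# Added illustration (not in the original listing): use the learned
# coefficients to predict Sepal Length for the first sample.
sample = x_vals[0]
pred = sw_coef * sample[0] + pl_coef * sample[1] + pw_coef * sample[2] + y_intercept
print('Predicted Sepal Length:', pred, ' actual:', y_vals[0])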
# Plot loss over time
plt.plot(loss_vec, 'k-')
plt.title('Loss per Generation')
plt.xlabel('Generation')
plt.ylabel('Loss')
plt.show()
```
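As an optional sanity check (not part of the original listing), one could compare the result against scikit-learn's ElasticNet. Note that sklearn parameterizes the penalty with a single alpha plus an l1_ratio and sums (rather than averages) the penalty terms, so the settings below only loosely mirror the two unit penalty weights in the TensorFlow loss above, and the coefficients will be only roughly comparable.

```
# Optional comparison with scikit-learn's ElasticNet.
# alpha=1.0 and l1_ratio=0.5 are assumed settings for illustration;
# they do not exactly reproduce the TensorFlow objective above.
from sklearn.linear_model import ElasticNet

enet = ElasticNet(alpha=1.0, l1_ratio=0.5)
enet.fit(x_vals, y_vals)
print('sklearn coefficients:', enet.coef_)
print('sklearn intercept:', enet.intercept_)
print('TensorFlow coefficients:', w1.numpy().ravel())
print('TensorFlow intercept:', y_intercept)
```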