Help Center/
FunctionGraph/
User Guide/
Dependency Management/
Public Dependency Demos/
Linear Regression with TensorFlow
Updated on 2023-11-16 GMT+08:00
Linear Regression with TensorFlow
Adding TensorFlow on Function Details Page
Figure 1 Adding TensorFlow
Importing TensorFlow to Code
import json
import random

# Import TensorFlow.
import tensorflow as tf


def handler(event, context):
    """FunctionGraph entry point: fit a 1-D linear regression with TensorFlow.

    Draws NUM_SAMPLES noisy samples of ``y = TRUE_W * x + TRUE_b`` with a
    randomly chosen integer slope and intercept, trains a single Dense unit
    with SGD for a fixed number of epochs, and logs the true parameters next
    to the learned variables so they can be compared in the function logs.

    Args:
        event: Invocation payload; echoed back as the JSON response body.
        context: FunctionGraph runtime context (unused here).

    Returns:
        dict: An HTTP-style response envelope echoing ``event`` as JSON.
    """
    TRUE_W = random.randint(0, 9)
    TRUE_b = random.randint(0, 9)
    NUM_SAMPLES = 100

    # Synthetic training data: y = TRUE_W * X + TRUE_b + Gaussian noise.
    X = tf.random.normal(shape=[NUM_SAMPLES, 1]).numpy()
    noise = tf.random.normal(shape=[NUM_SAMPLES, 1]).numpy()
    y = X * TRUE_W + TRUE_b + noise

    # A single Dense unit learns one weight and one bias: y_hat = w*x + b.
    model = tf.keras.layers.Dense(units=1)

    EPOCHS = 20
    LEARNING_RATE = 0.002
    # Fix: create the optimizer ONCE, before the loop. The original sample
    # constructed a new SGD instance on every epoch — harmless for plain SGD,
    # but wasteful, and wrong for any stateful optimizer (momentum, Adam, ...)
    # whose accumulated state would be discarded each iteration.
    optimizer = tf.keras.optimizers.SGD(LEARNING_RATE)

    print("start training")
    for epoch in range(EPOCHS):
        with tf.GradientTape() as tape:
            y_ = model(X)
            # Sum (not mean) of per-sample MSE, matching the original demo.
            loss = tf.reduce_sum(tf.keras.losses.mean_squared_error(y, y_))
        grads = tape.gradient(loss, model.variables)
        optimizer.apply_gradients(zip(grads, model.variables))
        print('Epoch [{}/{}], loss [{:.3f}]'.format(epoch, EPOCHS, loss))
    print("finished")

    # Log true parameters alongside the learned ones for manual comparison.
    print(TRUE_W, TRUE_b)
    print(model.variables)

    return {
        "statusCode": 200,
        "isBase64Encoded": False,
        "body": json.dumps(event),
        "headers": {
            "Content-Type": "application/json"
        }
    }


class Model(object):
    """Hand-rolled linear model ``y = W * x + b``.

    NOTE(review): not referenced by ``handler`` above; kept as the demo's
    illustrative alternative to ``tf.keras.layers.Dense``.
    """

    def __init__(self):
        # Random initial weight and bias, each a 1-element variable.
        self.W = tf.Variable(tf.random.uniform([1]))
        self.b = tf.Variable(tf.random.uniform([1]))

    def __call__(self, x):
        return self.W * x + self.b
Parent topic: Public Dependency Demos
Feedback
Was this page helpful?
Provide feedback. Thank you very much for your feedback. We will continue working to improve the documentation.
The system is busy. Please try again later.