"""
This code is supported by the website: https://www.guanjihuan.com
The newest version of this code is on the web page: https://www.guanjihuan.com/archives/39029

Demo of common neural-network activation functions (sigmoid, tanh, ReLU,
leaky ReLU): computes each one on a 1-D grid and plots it with matplotlib.
"""

import torch
import numpy as np


def compute_activations(x):
    """Return a dict mapping activation names to their values on tensor ``x``.

    Parameters
    ----------
    x : torch.Tensor
        Input values (any shape); each activation is applied element-wise.

    Returns
    -------
    dict[str, torch.Tensor]
        Keys are "sigmoid", "tanh", "relu", "leaky_relu" (insertion order
        matches the original script's plotting order).
    """
    return {
        # torch.sigmoid / torch.tanh replace the deprecated
        # torch.nn.functional.sigmoid / torch.nn.functional.tanh.
        "sigmoid": torch.sigmoid(x),
        "tanh": torch.tanh(x),
        "relu": torch.nn.functional.relu(x),
        # leaky_relu uses the default negative_slope=0.01.
        "leaky_relu": torch.nn.functional.leaky_relu(x),
    }


def plot_activation_functions(x_min=-6.0, x_max=6.0, num_points=100):
    """Plot each activation over [x_min, x_max], one figure per function.

    Defaults reproduce the original script (100 points on [-6, 6]).
    """
    # Local import: keeps the module importable (e.g. for
    # compute_activations) on systems without matplotlib installed.
    import matplotlib.pyplot as plt

    x = torch.from_numpy(np.linspace(x_min, x_max, num_points))
    for name, y in compute_activations(x).items():
        plt.plot(x, y)
        plt.title(name)  # label the figure; the four curves were otherwise indistinguishable
        plt.show()


if __name__ == "__main__":
    # Guarded so importing this module does not pop blocking figure windows.
    plot_activation_functions()