Skip to content

Refine keras.Activation and add tf.keras.activations. #1001

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 5, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 42 additions & 2 deletions src/TensorFlowNET.Core/Keras/Activations/Activations.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,44 @@
namespace Tensorflow.Keras
using Newtonsoft.Json;
using System.Reflection;
using System.Runtime.Versioning;
using Tensorflow.Keras.Common;

namespace Tensorflow.Keras
{
[JsonConverter(typeof(CustomizedActivationJsonConverter))]
public class Activation
{
    /// <summary>
    /// Serialization name of the activation (e.g. "relu", "linear");
    /// written as the JSON value by <see cref="CustomizedActivationJsonConverter"/>.
    /// </summary>
    public string Name { get; set; }

    /// <summary>
    /// The underlying function. The parameters are `features` and `name`.
    /// </summary>
    public Func<Tensor, string, Tensor> ActivationFunction { get; set; }

    /// <summary>
    /// Applies the wrapped activation function to <paramref name="input"/>.
    /// </summary>
    /// <param name="input">Tensor to transform.</param>
    /// <param name="name">Optional op name forwarded to the underlying function.</param>
    public Tensor Apply(Tensor input, string name = null) => ActivationFunction(input, name);

    /// <summary>
    /// Implicitly wraps a raw function as an <see cref="Activation"/>, using the
    /// reflected method name as <see cref="Name"/>. NOTE(review): for lambdas the
    /// reflected name is compiler-generated; prefer constructing an Activation with
    /// an explicit Name when the value must round-trip through serialization.
    /// </summary>
    public static implicit operator Activation(Func<Tensor, string, Tensor> func)
    {
        return new Activation()
        {
            Name = func.GetMethodInfo().Name,
            ActivationFunction = func
        };
    }
}

/// <summary>
/// Public surface of `tf.keras.activations`: exposes the built-in activations
/// and name-based lookup used when deserializing layer configurations.
/// </summary>
public interface IActivationsApi
{
/// <summary>
/// Resolves an activation by its serialization name (e.g. "relu").
/// </summary>
Activation GetActivationFromName(string name);
/// <summary>Identity activation.</summary>
Activation Linear { get; }

/// <summary>Rectified linear unit.</summary>
Activation Relu { get; }

/// <summary>Logistic sigmoid.</summary>
Activation Sigmoid { get; }

/// <summary>Softmax over the last dimension.</summary>
Activation Softmax { get; }

/// <summary>Hyperbolic tangent.</summary>
Activation Tanh { get; }

/// <summary>Mish: x * tanh(softplus(x)).</summary>
Activation Mish { get; }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,27 +26,8 @@ public class ConvolutionalArgs : AutoSerializeLayerArgs
public Shape DilationRate { get; set; } = (1, 1);
[JsonProperty("groups")]
public int Groups { get; set; } = 1;
public Activation Activation { get; set; }
private string _activationName;
[JsonProperty("activation")]
public string ActivationName
{
get
{
if (string.IsNullOrEmpty(_activationName))
{
return Activation.Method.Name;
}
else
{
return _activationName;
}
}
set
{
_activationName = value;
}
}
public Activation Activation { get; set; }
[JsonProperty("use_bias")]
public bool UseBias { get; set; }
[JsonProperty("kernel_initializer")]
Expand Down
22 changes: 1 addition & 21 deletions src/TensorFlowNET.Core/Keras/ArgsDefinition/Core/DenseArgs.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,28 +18,8 @@ public class DenseArgs : LayerArgs
/// <summary>
/// Activation function to use.
/// </summary>
public Activation Activation { get; set; }

private string _activationName;
[JsonProperty("activation")]
public string ActivationName
{
get
{
if (string.IsNullOrEmpty(_activationName))
{
return Activation.Method.Name;
}
else
{
return _activationName;
}
}
set
{
_activationName = value;
}
}
public Activation Activation { get; set; }

/// <summary>
/// Whether the layer uses a bias vector.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,27 +35,8 @@ public class EinsumDenseArgs : AutoSerializeLayerArgs
/// <summary>
/// Activation function to use.
/// </summary>
public Activation Activation { get; set; }
private string _activationName;
[JsonProperty("activation")]
public string ActivationName
{
get
{
if (string.IsNullOrEmpty(_activationName))
{
return Activation.Method.Name;
}
else
{
return _activationName;
}
}
set
{
_activationName = value;
}
}
public Activation Activation { get; set; }

/// <summary>
/// Initializer for the `kernel` weights matrix.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
using System;
using System.Collections.Generic;
using System.Text;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Common
{
Expand Down Expand Up @@ -31,20 +32,19 @@ public override void WriteJson(JsonWriter writer, object? value, JsonSerializer
}
else
{
var token = JToken.FromObject((value as Activation)!.GetType().Name);
var token = JToken.FromObject(((Activation)value).Name);
token.WriteTo(writer);
}
}

public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
{
    // Activations are serialized by name only (see WriteJson), so resolve the
    // name back through the keras activations registry. A null/empty name
    // deserializes to "linear", matching keras' default activation.
    var activationName = serializer.Deserialize<string>(reader);
    if (tf.keras is null)
    {
        throw new RuntimeError("Tensorflow.Keras is not loaded, please install it first.");
    }
    return tf.keras.activations.GetActivationFromName(string.IsNullOrEmpty(activationName) ? "linear" : activationName);
}
}
}
1 change: 1 addition & 0 deletions src/TensorFlowNET.Core/Keras/IKerasApi.cs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ public interface IKerasApi
IInitializersApi initializers { get; }
ILayersApi layers { get; }
ILossesApi losses { get; }
IActivationsApi activations { get; }
IOptimizerApi optimizers { get; }
IMetricsApi metrics { get; }
IModelsApi models { get; }
Expand Down
76 changes: 40 additions & 36 deletions src/TensorFlowNET.Keras/Activations.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,45 +6,61 @@

namespace Tensorflow.Keras
{
public class Activations
public class Activations: IActivationsApi
{
// Registry mapping serialization names (e.g. "relu") to their Activation
// instances; populated once by the static constructor via RegisterActivation.
private static Dictionary<string, Activation> _nameActivationMap;

private static Activation _linear = new Activation()
{
    Name = "linear",
    ActivationFunction = (features, name) => features
};
private static Activation _relu = new Activation()
{
    Name = "relu",
    ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu", name, new ExecuteOpArgs(features))
};
private static Activation _sigmoid = new Activation()
{
    Name = "sigmoid",
    ActivationFunction = (features, name) => tf.Context.ExecuteOp("Sigmoid", name, new ExecuteOpArgs(features))
};
private static Activation _softmax = new Activation()
{
    Name = "softmax",
    ActivationFunction = (features, name) => tf.Context.ExecuteOp("Softmax", name, new ExecuteOpArgs(features))
};
private static Activation _tanh = new Activation()
{
    Name = "tanh",
    ActivationFunction = (features, name) => tf.Context.ExecuteOp("Tanh", name, new ExecuteOpArgs(features))
};
// mish(x) = x * tanh(softplus(x))
private static Activation _mish = new Activation()
{
    Name = "mish",
    ActivationFunction = (features, name) => features * tf.math.tanh(tf.math.softplus(features))
};

/// <summary>
/// Register the name-activation mapping in this static class.
/// </summary>
/// <param name="activation">Activation to register, keyed by its own Name.</param>
private static void RegisterActivation(Activation activation)
{
    _nameActivationMap[activation.Name] = activation;
}

/// <summary>
/// Populates the name → activation registry with the built-in activations.
/// </summary>
static Activations()
{
    _nameActivationMap = new Dictionary<string, Activation>();

    RegisterActivation(_relu);
    RegisterActivation(_linear);
    RegisterActivation(_sigmoid);
    RegisterActivation(_softmax);
    RegisterActivation(_tanh);
    RegisterActivation(_mish);
}

public Activation Linear => _linear;
Expand All @@ -59,7 +75,7 @@ static Activations()

public Activation Mish => _mish;

public static Activation GetActivationByName(string name)
public Activation GetActivationFromName(string name)
{
if (!_nameActivationMap.TryGetValue(name, out var res))
{
Expand All @@ -70,17 +86,5 @@ public static Activation GetActivationByName(string name)
return res;
}
}

public static string GetNameByActivation(Activation activation)
{
if(!_activationNameMap.TryGetValue(activation, out var name))
{
throw new Exception($"Activation {activation} not found");
}
else
{
return name;
}
}
}
}
2 changes: 1 addition & 1 deletion src/TensorFlowNET.Keras/KerasInterface.cs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ public static KerasInterface Instance
public Regularizers regularizers { get; } = new Regularizers();
public ILayersApi layers { get; } = new LayersApi();
public ILossesApi losses { get; } = new LossesApi();
public Activations activations { get; } = new Activations();
public IActivationsApi activations { get; } = new Activations();
public Preprocessing preprocessing { get; } = new Preprocessing();
ThreadLocal<BackendImpl> _backend = new ThreadLocal<BackendImpl>(() => new BackendImpl());
public BackendImpl backend => _backend.Value;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ protected override Tensors Call(Tensors inputs, Tensor state = null, bool? train
throw new NotImplementedException("");

if (activation != null)
return activation(outputs);
return activation.Apply(outputs);

return outputs;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ protected override Tensors Call(Tensors inputs, Tensor state = null, bool? train
}

if (activation != null)
outputs = activation(outputs);
outputs = activation.Apply(outputs);

return outputs;
}
Expand Down
2 changes: 1 addition & 1 deletion src/TensorFlowNET.Keras/Layers/Core/Dense.cs
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ protected override Tensors Call(Tensors inputs, Tensor state = null, bool? train
if (args.UseBias)
outputs = tf.nn.bias_add(outputs, bias);
if (args.Activation != null)
outputs = activation(outputs);
outputs = activation.Apply(outputs);

return outputs;
}
Expand Down
2 changes: 1 addition & 1 deletion src/TensorFlowNET.Keras/Layers/Core/EinsumDense.cs
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ protected override Tensors Call(Tensors inputs, Tensor state = null, bool? train
if (this.bias != null)
ret += this.bias.AsTensor();
if (this.activation != null)
ret = this.activation(ret);
ret = this.activation.Apply(ret);
return ret;
}
/// <summary>
Expand Down
Loading