public class RandomWeights extends Object
Constructor and Description |
---|
RandomWeights() |
Modifier and Type | Method and Description |
---|---|
static void |
gaussian(float[] weights,
float mean,
float std) |
static void |
he(float[] weights,
int numInputs)
He initialization, used for ReLU activations; zero-mean.
|
static void |
initSeed(long seed)
Initializes random number generator with specified seed.
|
static void |
normal(float[] weights) |
static void |
randomize(float[] array)
Initialize the elements of specified array with random numbers with uniform distribution in range [-0.5, 0.5].
|
static void |
uniform(float[] weights,
float min,
float max) |
static void |
uniform(float[] weights,
int numInputs)
Uniform U[-a,a] where a=1/sqrt(in).
|
static void |
widrowHoff(float[] array,
float input,
float hidden) |
static void |
xavier(float[] weights,
int numIn,
int numOut)
Normalized uniform initialization U[-a,a] with a = sqrt(6/(in + out)).
|
public static void initSeed(long seed)
seed
- init value for random generator

public static void randomize(float[] array)
array
- array to initialize

public static void widrowHoff(float[] array, float input, float hidden)
public static void uniform(float[] weights, int numInputs)
weights
- an array of weights to randomize

numInputs
- a number of inputs from previous layer

public static void uniform(float[] weights, float min, float max)
public static void he(float[] weights, int numInputs)
weights
- weights to initialize

numInputs
- number of inputs

public static void gaussian(float[] weights, float mean, float std)
public static void normal(float[] weights)
public static void xavier(float[] weights, int numIn, int numOut)
weights
- weights to initialize

numIn
- size of the previous layer (number of inputs)

numOut
- size of initialized layer (number of outputs)
https://towardsdatascience.com/weight-initialization-techniques-in-neural-networks-26c649eb3b78

Copyright © 2022. All rights reserved.