Made randomization of default Seed initialization thread-safe
Lyrcaxis committed Nov 24, 2024
1 parent ba9a518 commit 4772eb7
Showing 1 changed file with 10 additions and 2 deletions.
LLama/Sampling/DefaultSamplingPipeline.cs: 10 additions, 2 deletions

@@ -20,7 +20,6 @@ public sealed class DefaultSamplingPipeline
     /// </summary>
     public float RepeatPenalty { get; init; } = 1;
-

     /// <summary>
     /// Frequency penalty as described by OpenAI: https://platform.openai.com/docs/api-reference/chat/create<br />
     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text
@@ -156,7 +155,16 @@ public float PresencePenalty
     /// <summary>
     /// Seed to use for random sampling
     /// </summary>
-    public uint Seed { get; set; } = (uint) new Random().Next(0, int.MaxValue);
+    public uint Seed { get; set; } = GetRandomSeed();
+
+
+    private static Random RandomSeedGenerator = new();
+    private static uint GetRandomSeed()
+    {
+        lock (RandomSeedGenerator)
+            return (uint) RandomSeedGenerator.Next(0, int.MaxValue) + (uint) RandomSeedGenerator.Next(0, int.MaxValue);
+    }
+

     /// <inheritdoc />
     protected override SafeLLamaSamplerChainHandle CreateChain(SafeLLamaContextHandle context)
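
For context on the pattern above: System.Random instances are not safe for concurrent use, so the new GetRandomSeed() serializes access to a single shared generator with a lock instead of constructing a fresh Random per property initialization. The standalone sketch below is not part of the repository; the SeedSource class, its NextSeed() method, and the Random.Shared comparison are illustrative assumptions. It shows the same lock-around-a-shared-Random idea exercised from multiple threads.

    using System;
    using System.Linq;

    // Illustrative sketch only: mirrors the lock-around-a-shared-Random pattern used by the
    // commit's GetRandomSeed(); the SeedSource/NextSeed names are hypothetical, not from the repo.
    static class SeedSource
    {
        private static readonly Random Generator = new();

        public static uint NextSeed()
        {
            // System.Random is not thread-safe, so serialize access to the shared instance.
            lock (Generator)
                return (uint)Generator.Next(0, int.MaxValue);
        }
    }

    class Program
    {
        static void Main()
        {
            // Request seeds from many threads at once; the lock keeps the shared Random's
            // internal state consistent under concurrent access.
            var seeds = Enumerable.Range(0, 1000)
                                  .AsParallel()
                                  .Select(_ => SeedSource.NextSeed())
                                  .ToArray();

            Console.WriteLine($"Generated {seeds.Length} seeds, {seeds.Distinct().Count()} distinct.");

            // On .NET 6+, Random.Shared is already thread-safe and avoids the explicit lock.
            Console.WriteLine($"Random.Shared example: {(uint)Random.Shared.Next(0, int.MaxValue)}");
        }
    }

On .NET 6 and later, Random.Shared is a built-in thread-safe generator and would avoid the explicit lock entirely; the locked static field shown in the commit works on older targets as well.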
