Commit

Merge pull request #904 from webitube/feature/FixGrammarSupportV2
* Fixes constrained output (Grammar) support in BatchedExecutorGuidance
martindevans committed Aug 22, 2024
2 parents d9e0d26 + e222c82 commit 4df040a
Showing 1 changed file with 1 addition and 4 deletions.
LLama.Examples/Examples/BatchedExecutorGuidance.cs (5 changes: 1 addition & 4 deletions)
@@ -74,6 +74,7 @@ await AnsiConsole
 // Sample from the "guided" conversation. This sampler will internally use the "guidance" conversation
 // to steer the conversation. See how this is done in GuidedSampler.ProcessLogits (bottom of this file).
 var g = guidedSampler.Sample(executor.Context.NativeHandle, guided.Sample(), []);
+guidedSampler.Accept(executor.Context.NativeHandle, g);
 guidedDecoder.Add(g);

 // Use this token to advance both guided _and_ guidance. Keeping them in sync (except for the initial prompt).
@@ -110,10 +111,6 @@ protected override LLamaToken ProcessTokenDataArray(SafeLLamaContextHandle ctx,
 return candidates.SampleToken(ctx);
 }

-public override void Accept(SafeLLamaContextHandle ctx, LLamaToken token)
-{
-}
-
 public override ISamplingPipeline Clone()
 {
 throw new NotSupportedException();
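
Why the one-line addition matters: the example's GuidedSampler previously overrode Accept with an empty body, and the example loop never called Accept at all, so the tokens actually chosen were never fed back into the sampler, and any grammar attached to the pipeline could not advance past them. The fix deletes the empty override (letting the base pipeline's Accept run) and calls guidedSampler.Accept right after each Sample. Below is a minimal sketch of that pattern; IGrammarState and GrammarConstrainedSampler are hypothetical stand-ins for illustration, not LLamaSharp's actual types.

// Hypothetical grammar state, just enough to show the flow (not LLamaSharp's API).
public interface IGrammarState
{
    void ApplyConstraints(float[] logits); // suppress tokens the grammar cannot accept at this point
    void AcceptToken(int token);           // advance the parser past the chosen token
}

public sealed class GrammarConstrainedSampler
{
    private readonly IGrammarState _grammar;

    public GrammarConstrainedSampler(IGrammarState grammar) => _grammar = grammar;

    public int Sample(float[] logits)
    {
        // Constrain first, then pick a token (greedy argmax keeps the sketch short).
        _grammar.ApplyConstraints(logits);
        var best = 0;
        for (var i = 1; i < logits.Length; i++)
            if (logits[i] > logits[best]) best = i;
        return best;
    }

    // The step the old empty override skipped: without it the grammar state never
    // advances, so later calls to ApplyConstraints work from a stale parser position.
    public void Accept(int token) => _grammar.AcceptToken(token);
}

// Usage mirrors the fixed example loop: sample, then immediately accept.
//   var token = sampler.Sample(logits);
//   sampler.Accept(token);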
