
Commit 69b9945

ho2103 and ggerganov authored
llama.swiftui: fix end of generation bug (#8268)
* fix continuing generating blank lines after getting EOT token or EOS token from LLM
* change variable name to is_done (variable name suggested by ggerganov)
* minor : fix trailing whitespace
* minor : add space

Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
1 parent c3776ca · commit 69b9945

2 files changed: +4 −2 lines changed

examples/llama.swiftui/llama.cpp.swift/LibLlama.swift

Lines changed: 3 additions & 1 deletion
@@ -26,11 +26,12 @@ actor LlamaContext {
     private var context: OpaquePointer
     private var batch: llama_batch
     private var tokens_list: [llama_token]
+    var is_done: Bool = false

     /// This variable is used to store temporarily invalid cchars
     private var temporary_invalid_cchars: [CChar]

-    var n_len: Int32 = 64
+    var n_len: Int32 = 1024
     var n_cur: Int32 = 0

     var n_decode: Int32 = 0

@@ -160,6 +161,7 @@ actor LlamaContext {

         if llama_token_is_eog(model, new_token_id) || n_cur == n_len {
             print("\n")
+            is_done = true
             let new_token_str = String(cString: temporary_invalid_cchars + [0])
             temporary_invalid_cchars.removeAll()
             return new_token_str

examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift

Lines changed: 1 addition & 1 deletion
@@ -132,7 +132,7 @@ class LlamaState: ObservableObject {
        messageLog += "\(text)"

        Task.detached {
-            while await llamaContext.n_cur < llamaContext.n_len {
+            while await !llamaContext.is_done {
                let result = await llamaContext.completion_loop()
                await MainActor.run {
                    self.messageLog += "\(result)"

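To make the before/after behavior easier to see outside the app, here is a minimal sketch of the pattern this commit introduces, assuming a stand-in GeneratorStub actor in place of LlamaContext (the type and its fake token stream are illustrative, not part of llama.cpp): the caller loops on an is_done flag that the generator sets when it sees an end-of-generation token, instead of comparing a token counter against a fixed n_len, which is what previously kept the loop emitting blank output after the EOS/EOT token.

// Minimal sketch (not llama.cpp code); GeneratorStub and its fake tokens are stand-ins.
actor GeneratorStub {
    var is_done: Bool = false
    private let fakeTokens = ["Hello", ",", " world", "<EOS>"]
    private var index = 0

    func completion_loop() -> String {
        let token = fakeTokens[index]
        index += 1
        if token == "<EOS>" {
            is_done = true   // mirrors setting is_done after llama_token_is_eog fires
            return ""
        }
        return token
    }
}

// Top-level code (main.swift; Swift 5.7+ allows `await` here).
let generator = GeneratorStub()
while await !generator.is_done {                     // the fixed loop condition
    let piece = await generator.completion_loop()
    print(piece, terminator: "")
}
print()

Because the stop condition lives inside the generator, the caller no longer needs to know n_len at all; the loop ends on the same iteration in which the end-of-generation token is observed.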