Fix for new llama.cpp API response body
lmg-anon committed Dec 26, 2024
1 parent c50fa87 commit c3daede
Showing 1 changed file with 34 additions and 6 deletions.
mikupad.html: 40 changes (34 additions & 6 deletions)
@@ -1738,26 +1738,54 @@
 			throw new Error(`HTTP ${res.status}`);
 		if (options.stream) {
 			for await (const chunk of parseEventStream(res.body)) {
-				const probs = chunk.completion_probabilities?.[0]?.probs ?? [];
+				const choice = chunk.completion_probabilities?.[0];
+
+				let probs = [];
+				if (choice?.probs) {
+					probs = choice.probs ?? [];
+				} else if (choice?.top_logprobs) {
+					probs = Object.values(choice.top_logprobs).map(({ token, logprob }) => ({
+						tok_str: token,
+						prob: Math.exp(logprob)
+					}));
+				}
 				const prob = probs.find(p => p.tok_str === chunk.content)?.prob;
+
 				yield {
 					content: chunk.content,
 					...(probs.length > 0 ? {
 						prob: prob ?? -1,
-						completion_probabilities: chunk.completion_probabilities
+						completion_probabilities: [{
+							content: chunk.content,
+							probs
+						}]
 					} : {})
 				};
 			}
 		} else {
 			const { completion_probabilities } = await res.json();
 			for (const chunk of completion_probabilities) {
-				const probs = chunk.probs ?? [];
-				const prob = probs.find(p => p.tok_str === chunk.content)?.prob;
+				const token = chunk.content ? chunk.content : chunk.token;
+
+				let probs = [];
+				if (chunk.probs) {
+					probs = chunk.probs ?? [];
+				} else if (chunk.top_logprobs) {
+					probs = Object.values(chunk.top_logprobs).map(({ token, logprob }) => ({
+						tok_str: token,
+						prob: Math.exp(logprob)
+					}));
+				}
+				const prob = probs.find(p => p.tok_str === token)?.prob;
+
 				yield {
-					content: chunk.content,
+					content: token,
 					...(probs.length > 0 ? {
 						prob: prob ?? -1,
-						completion_probabilities: [chunk]
+						completion_probabilities: [{
+							content: token,
+							probs
+						}]
 					} : {})
 				};
 			}
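For context, a minimal sketch (not part of the commit) of the two per-token shapes this patch reconciles: older llama.cpp servers report candidate tokens under probs as { tok_str, prob }, while newer builds report top_logprobs as { token, logprob } and use token instead of content. The entry names oldStyleEntry and newStyleEntry, the sample strings, and the numeric values are illustrative assumptions; only the field names mirror what the patched code reads.

// Illustrative sketch only: hypothetical sample data, not captured llama.cpp output.
// Older response shape: probabilities under "probs" with "tok_str"/"prob".
const oldStyleEntry = {
	content: " the",
	probs: [
		{ tok_str: " the", prob: 0.62 },
		{ tok_str: " a", prob: 0.21 }
	]
};

// Newer response shape: "token" plus "top_logprobs" with "token"/"logprob".
const newStyleEntry = {
	token: " the",
	top_logprobs: [
		{ token: " the", logprob: -0.48 },
		{ token: " a", logprob: -1.56 }
	]
};

// Same normalization the patch applies: map either shape to [{ tok_str, prob }].
function normalizeProbs(entry) {
	if (entry.probs) {
		return entry.probs ?? [];
	} else if (entry.top_logprobs) {
		return Object.values(entry.top_logprobs).map(({ token, logprob }) => ({
			tok_str: token,
			prob: Math.exp(logprob)
		}));
	}
	return [];
}

console.log(normalizeProbs(oldStyleEntry)); // [{ tok_str: " the", prob: 0.62 }, ...]
console.log(normalizeProbs(newStyleEntry)); // [{ tok_str: " the", prob: ~0.62 }, ...]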
