Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-06-19 15:52:08 +00:00)
correctly forward back the log probabilities
commit 0212b1774a
parent bcb96feea6
@@ -44,12 +44,12 @@ size_t huggingface::tgi::backends::TensorRtLlmBackendImpl::StreamTokens(
 
         const auto token = decoded.outputTokenIds[0][0];
         const auto isFinal = decoded.isFinal;
-        // const auto logProb = decoded.logProbs.value()[0][0];
+        const auto logProb = decoded.logProbs.value()[0][0];
 
         ++numTokens;
 
         SPDLOG_DEBUG(FMT_STRING("\tStreamTokens -> {:d} {:.2f} (final = {})"), token, logProb, isFinal);
-        callback(std::move(ctx), token, 1.0, isFinal);
+        callback(std::move(ctx), token, logProb, isFinal);
         SPDLOG_DEBUG("\tStreamTokens -> Post callback");
     } else {
         // TODO : Return rest::Result with error
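For context, the sketch below illustrates the pattern this commit fixes: the streaming loop reads the per-token log probability from the decoded response (decoded.logProbs.value()[0][0]) and forwards that value to the callback instead of a hardcoded 1.0. The types and the streamTokens helper here are illustrative stand-ins, not the actual TGI/TensorRT-LLM API.

// Minimal, self-contained sketch of forwarding per-token log probabilities to a
// streaming callback. DecodedResponse and TokenCallback are assumed stand-in types,
// not the real TensorRT-LLM backend types.
#include <cstdint>
#include <cstdio>
#include <functional>
#include <optional>
#include <vector>

using TokenId = uint32_t;

// Stand-in for one decoded streaming step.
struct DecodedResponse {
    std::vector<std::vector<TokenId>> outputTokenIds;         // [beam][step]
    std::optional<std::vector<std::vector<float>>> logProbs;  // [beam][step], may be absent
    bool isFinal = false;
};

// Callback receives (context, token, logProb, isFinal), mirroring the diff above.
using TokenCallback = std::function<void(void*, TokenId, float, bool)>;

// Stream each decoded token together with its actual log probability.
size_t streamTokens(void* ctx, const std::vector<DecodedResponse>& responses,
                    const TokenCallback& callback) {
    size_t numTokens = 0;
    for (const auto& decoded : responses) {
        const auto token = decoded.outputTokenIds[0][0];
        const auto isFinal = decoded.isFinal;
        // Before the fix the callback got a constant 1.0; now we forward the
        // value the backend actually produced for this token.
        const auto logProb = decoded.logProbs.value()[0][0];
        ++numTokens;
        callback(ctx, token, logProb, isFinal);
    }
    return numTokens;
}

int main() {
    DecodedResponse first;
    first.outputTokenIds = {{42}};
    first.logProbs = std::vector<std::vector<float>>{{-0.12f}};
    first.isFinal = false;

    DecodedResponse last;
    last.outputTokenIds = {{7}};
    last.logProbs = std::vector<std::vector<float>>{{-1.80f}};
    last.isFinal = true;

    streamTokens(nullptr, {first, last},
                 [](void*, TokenId token, float logProb, bool isFinal) {
                     std::printf("token=%u logprob=%.2f final=%d\n", token, logProb, isFinal);
                 });
    return 0;
}

A usage note: guarding the logProbs.value() access (for example, checking has_value() when log probabilities were not requested) would avoid a bad_optional_access, but the sketch keeps the same unconditional access as the patched code.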