Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-04-22 15:32:08 +00:00)
(ffi) do not use reference capture in lambda as we are not capturing anything
This commit is contained in:
parent 3d0e90b631
commit 483f172938
@@ -43,7 +43,7 @@ huggingface::tgi::backends::TensorRtLlmBackendImpl::PullTokens() {
     SPDLOG_DEBUG(FMT_STRING("Pulled out {:d} new tokens"), responses->size());

     // Transform tle::Response to GenerationStep
-    std::ranges::transform(responses.begin(), responses.end(), std::back_inserter(*steps), [&](const tle::Response &r) {
+    std::ranges::transform(responses.begin(), responses.end(), std::back_inserter(*steps), [](const tle::Response &r) {
         const auto reqId = r.getRequestId();
         if (!r.hasError()) {
             const auto result = r.getResult();