amber-laravel/app/Console/Commands/TestLLM.php

<?php

namespace App\Console\Commands;

use App\LLM\Qwen;
use App\Models\Tool;
use App\Repositories\LLM\ChatEnum;
use App\Repositories\LLM\History;
use App\Repositories\LLM\HumanMessage;
use GuzzleHttp\Exception\GuzzleException;
use Illuminate\Console\Command;
use Illuminate\Support\Facades\Storage;

class TestLLM extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'app:testllm';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Interactive chat with the Qwen LLM for testing streaming responses and tool calls';

    /**
     * Execute the console command.
     *
     * @throws GuzzleException
     */
    public function handle()
    {
        $llm = new Qwen();
        $history = new History();

        // Register the available tools and attach a fresh chat history.
        $tools = Tool::get();
        $llm->setTools($tools);
        $llm->setHistory($history);
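
        // Simple REPL: read a question, stream the model's reply, and repeat
        // until the user types 'q', at which point the transcript is saved to disk.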
        while (true) {
            // var_dump($history->getMessages());
            $q = $this->ask('Please enter a question');

            if ($q === 'q') {
                // Dump the full conversation to chat.json on the default storage disk before exiting.
                Storage::put('chat.json', json_encode($history->getMessages(), JSON_UNESCAPED_UNICODE | JSON_PRETTY_PRINT));

                return 0;
            }

            if (empty($q)) {
                // Fall back to a default question when nothing is entered.
                $q = 'Weather in Beijing';
            }

            $history->addMessage(new HumanMessage($q));
            $s = $llm->streamResponse();
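
            // Each streamed item has a role: Tool items report tool-call progress
            // and results, AssistantChunk items carry incremental text, and the
            // final Assistant item holds the complete reply.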
            // Print the stream as it arrives.
            foreach ($s as $item) {
                if ($item->role == ChatEnum::Tool) {
                    if ($item->processing) {
                        $this->info('Running: '.$item->content);
                        echo "\n";
                    } else {
                        $this->info('Result: '.$item->content);
                    }
                } elseif ($item->role == ChatEnum::AssistantChunk) {
                    echo $item->getLastAppend();
                } elseif ($item->role == ChatEnum::Assistant) {
                    echo "\nFull output: ".$item->content;
                }
            }

            echo "\n";
        }
    }
}