<?php
/*
 * Runs a performance test against the node.js server for both serial and
 * parallel requests. Requires PHP 5.5 or greater.
 *
 *     # Basic usage
 *     make perf
 *     # With custom options
 *     REQUESTS=100 PARALLEL=5000 make perf
 */
require __DIR__ . '/bootstrap.php';
use GuzzleHttp\Client;
use GuzzleHttp\Tests\Server;
use GuzzleHttp\Ring\Client\CurlMultiHandler;
use GuzzleHttp\Pool;
// Wait until the server is responding
Server::wait();
// Get custom make variables
$total = isset($_SERVER['REQUESTS']) ? $_SERVER['REQUESTS'] : 1000;
$parallel = isset($_SERVER['PARALLEL']) ? $_SERVER['PARALLEL'] : 100;
$client = new Client(['base_url' => Server::$url]);
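
// Time $total requests sent one at a time: each request waits for the
// previous response before the next is sent.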
$t = microtime(true);
for ($i = 0; $i < $total; $i++) {
    $client->get('/guzzle-server/perf');
}
$totalTime = microtime(true) - $t;
$perRequest = ($totalTime / $total) * 1000;
printf("Serial: %f (%f ms / request) %d total\n",
    $totalTime, $perRequest, $total);
// Create a generator that lazily yields the requests for the pool to send
$reqs = function () use ($client, $total) {
    for ($i = 0; $i < $total; $i++) {
        yield $client->createRequest('GET', '/guzzle-server/perf');
    }
};
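
// Time the same number of requests sent through a Pool, which keeps up to
// $parallel requests in flight at once.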
$t = microtime(true);
Pool::send($client, $reqs(), ['parallel' => $parallel]);
$totalTime = microtime(true) - $t;
$perRequest = ($totalTime / $total) * 1000;
printf("Batch: %f (%f ms / request) %d total with %d in parallel\n",
    $totalTime, $perRequest, $total, $parallel);
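
// Future test: recreate the client with a cURL multi handler so requests can
// be dispatched as futures and completed concurrently; the timer stops only
// after the client is destroyed and all outstanding responses have arrived.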
$handler = new CurlMultiHandler(['max_handles' => $parallel]);
$client = new Client(['handler' => $handler, 'base_url' => Server::$url]);
$t = microtime(true);
for ($i = 0; $i < $total; $i++) {
    $client->get('/guzzle-server/perf', ['future' => true]); // dispatch as a future
}
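// Destroying the client (and its handler) lets any requests still in flight
// finish before the timer stops.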
unset($client);
$totalTime = microtime(true) - $t;
$perRequest = ($totalTime / $total) * 1000;
printf("Future: %f (%f ms / request) %d total\n",
    $totalTime, $perRequest, $total);