feat(updater): optional httpCheck + AI optimisations
parent 48ff8144fe
commit 1fcf0bf1da
2 changed files with 87 additions and 28 deletions
utils.inc.php (106 changed lines)
@@ -2,7 +2,6 @@
 require 'vendor/autoload.php';
 
-use Amp\Future;
 use Amp\Http\Client\HttpClientBuilder;
 use Amp\Http\Client\Request;
 
@@ -484,41 +483,94 @@ function addStatistics()
 }
 
-function searchWikis($path, $pattern, $depth = 1)
+function searchWikis($path, $pattern, $depth = 1, $httpCheck = false)
 {
-    $it = new RecursiveDirectoryIterator($path);
-    $list = array();
-    $httpClient = HttpClientBuilder::buildDefault();
-    $nb = 0;
-    $files = new RecursiveIteratorIterator($it);
-    $files->setMaxDepth($depth);
-    foreach ($files as $file) {
-        if (preg_match('/' . preg_quote($pattern) . '$/i', $file)) {
-            $nb++;
-            $wakkaConfig = [];
-            include_once($file);
-            $list[$nb] = [
-                'PATH' => dirname($file),
+    // Use a more efficient iterator that filters files by name
+    $dirIterator = new RecursiveDirectoryIterator($path, RecursiveDirectoryIterator::SKIP_DOTS);
+    $iterator = new RecursiveIteratorIterator($dirIterator);
+    $iterator->setMaxDepth($depth);
+
+    $list = [];
+    $count = 0;
+
+    // Process filesystem operations first
+    foreach ($iterator as $file) {
+        // Direct basename comparison instead of regex
+        if ($file->isFile() && basename($file) === $pattern) {
+            $count++;
+            $filePath = $file->getPathname();
+
+            // Extract config without using include_once which is slow
+            $wakkaConfig = extractWakkaConfig($filePath);
+
+            $list[$count] = [
+                'PATH' => dirname($filePath),
                 'URL' => $wakkaConfig['base_url'] ?? 'KO',
                 'VERSION' => $wakkaConfig['yeswiki_version'] ?? 'KO',
                 'RELEASE' => $wakkaConfig['yeswiki_release'] ?? 'KO',
             ];
         }
     }
-    try {
-        $responses = Future\awaitAll(array_map(function ($l) use ($httpClient) {
-            return Amp\async(fn () => $httpClient->request(new Request($l['URL'], 'HEAD')));
-        }, $list));
-        foreach ($responses[0] as $key => $response) {
-            $list[$key]['STATUS'] = 'ERROR';
-        }
-        foreach ($responses[1] as $key => $response) {
-            $list[$key]['STATUS'] = $response->getStatus() . ' ' . $response->getReason();
-        }
-    } catch (Exception $e) {
-        // If any one of the requests fails the combo will fail
-        echo $e->getMessage(), "\n";
-    }
+
+    // Process HTTP requests in smaller batches to avoid overwhelming resources
+    if (!empty($list) && $httpCheck) {
+        try {
+            // Process in batches of 10 for better performance
+            $batchSize = 10;
+            $batches = array_chunk($list, $batchSize, true);
+
+            foreach ($batches as $batch) {
+                $httpClient = HttpClientBuilder::buildDefault();
+                $futures = [];
+
+                foreach ($batch as $key => $item) {
+                    if ($item['URL'] !== 'KO') {
+                        $futures[$key] = Amp\async(fn () => $httpClient->request(new Request($item['URL'], 'HEAD')));
+                    }
+                }
+
+                $responses = Amp\Future\awaitAll($futures);
+
+                // Update statuses from responses
+                foreach ($responses[0] as $key => $error) {
+                    $list[$key]['STATUS'] = 'ERROR';
+                }
+
+                foreach ($responses[1] as $key => $response) {
+                    $list[$key]['STATUS'] = $response->getStatus() . ' ' . $response->getReason();
+                }
+            }
+        } catch (Exception $e) {
+            echo $e->getMessage(), "\n";
+        }
+    }
+
     return $list;
 }
+
+/**
+ * Extract configuration from a wakka.config.php file without using include
+ * which is much faster for many files
+ */
+function extractWakkaConfig($filePath)
+{
+    $config = [];
+    $content = file_get_contents($filePath);
+
+    // Extract base_url
+    if (preg_match('/[\'"]base_url[\'"]\s*=>\s*[\'"]([^\'"]+)[\'"]/i', $content, $matches)) {
+        $config['base_url'] = $matches[1];
+    }
+
+    // Extract yeswiki_version
+    if (preg_match('/[\'"]yeswiki_version[\'"]\s*=>\s*[\'"]([^\'"]+)[\'"]/i', $content, $matches)) {
+        $config['yeswiki_version'] = $matches[1];
+    }
+
+    // Extract yeswiki_release
+    if (preg_match('/[\'"]yeswiki_release[\'"]\s*=>\s*[\'"]([^\'"]+)[\'"]/i', $content, $matches)) {
+        $config['yeswiki_release'] = $matches[1];
+    }
+
+    return $config;
+}
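
A note on the batching pattern above, for readers unfamiliar with amphp: the sketch below is a minimal standalone version, assuming amphp/http-client v5 on top of amp v3 (the versions providing Amp\async() and Amp\Future\awaitAll()); the checkUrls() helper and the sample URLs are hypothetical, not part of the commit. awaitAll() resolves to a pair [errors, values], both keyed like the input array, which is why the commit fills 'ERROR' statuses from index 0 and real HTTP statuses from index 1.

<?php

require 'vendor/autoload.php';

use Amp\Http\Client\HttpClientBuilder;
use Amp\Http\Client\Request;

// Hypothetical helper: HEAD-checks a keyed list of URLs, with at most
// $batchSize requests in flight at a time.
function checkUrls(array $urls, int $batchSize = 10): array
{
    $status = [];
    $httpClient = HttpClientBuilder::buildDefault();
    foreach (array_chunk($urls, $batchSize, true) as $batch) {
        $futures = [];
        foreach ($batch as $key => $url) {
            // Amp\async() starts each request without blocking
            $futures[$key] = Amp\async(fn () => $httpClient->request(new Request($url, 'HEAD')));
        }
        // Block until the whole batch settles; failures land in $errors
        [$errors, $responses] = Amp\Future\awaitAll($futures);
        foreach ($errors as $key => $e) {
            $status[$key] = 'ERROR';
        }
        foreach ($responses as $key => $response) {
            $status[$key] = $response->getStatus() . ' ' . $response->getReason();
        }
    }
    return $status;
}

print_r(checkUrls(['a' => 'https://example.org', 'b' => 'https://example.com']));

Unlike this sketch, the commit rebuilds the HttpClient for every batch, presumably so pooled connections from one batch are released before the next one starts.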
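
extractWakkaConfig() trades generality for speed: it pattern-matches plain single- or double-quoted scalar values, so a config that builds base_url by concatenation or from a constant would fall back to 'KO'. A quick illustration, assuming utils.inc.php is loaded; the file content and values are hypothetical:

<?php

// Hypothetical wakka.config.php content, for illustration only
$sample = <<<'CONF'
<?php
$wakkaConfig = array(
    'base_url' => 'https://example.org/?',
    'yeswiki_version' => 'doryphore',
    'yeswiki_release' => '4.5.4',
);
CONF;

$path = tempnam(sys_get_temp_dir(), 'wakka');
file_put_contents($path, $sample);
print_r(extractWakkaConfig($path));
// Array ( [base_url] => https://example.org/? [yeswiki_version] => doryphore [yeswiki_release] => 4.5.4 )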
@@ -51,6 +51,12 @@ if ($isRoot) {
             'description' => 'Depth to scan folders for wikis',
             'defaultValue' => 1
         ],
+        'httpcheck' => [
+            'prefix' => 'http',
+            'longPrefix' => 'httpcheck',
+            'description' => 'Perform an http request to check if the yeswikis are working',
+            'noValue' => true
+        ],
     ]);
     try {
         $climate->arguments->parse();
@@ -68,7 +74,8 @@ if ($isRoot) {
     $output = $climate->arguments->get('output');
     $nobackup = $climate->arguments->get('nobackup');
     $depth = $climate->arguments->get('depth');
-    $matches = searchWikis($path, 'wakka.config.php', $depth);
+    $httpCheck = $climate->arguments->get('httpcheck');
+    $matches = searchWikis($path, 'wakka.config.php', $depth, $httpCheck);
     if (count($matches) == 0) {
         $climate->info('No yeswiki found on path ' . $path . ' with depth ' . $depth);
         exit;