* pass arbitrary CLI arguments to update daemon via updater.sh entrypoint

* add configurable log level for update daemon (DAEMON_LOG_LEVEL)
 * when the daemon log level is set to LOG_EXTENDED (2), log the queries used
   for feed update selection
This commit is contained in:
Andrew Dolgov
2024-06-18 21:47:05 +03:00
parent d7a6f74ae5
commit db3e67b3fe
4 changed files with 15 additions and 7 deletions

View File

@@ -121,6 +121,9 @@ class Config {
/** max concurrent update jobs forking update daemon starts */
const DAEMON_MAX_JOBS = "DAEMON_MAX_JOBS";
/** log level for update daemon */
const DAEMON_LOG_LEVEL = "DAEMON_LOG_LEVEL";
/** How long to wait for response when requesting feed from a site (seconds) */
const FEED_FETCH_TIMEOUT = "FEED_FETCH_TIMEOUT";
@@ -229,6 +232,7 @@ class Config {
Config::T_STRING ],
Config::DAEMON_MAX_CHILD_RUNTIME => [ 1800, Config::T_INT ],
Config::DAEMON_MAX_JOBS => [ 2, Config::T_INT ],
Config::DAEMON_LOG_LEVEL => [ Debug::LOG_NORMAL, Config::T_INT ],
Config::FEED_FETCH_TIMEOUT => [ 45, Config::T_INT ],
Config::FEED_FETCH_NO_CACHE_TIMEOUT => [ 15, Config::T_INT ],
Config::FILE_FETCH_TIMEOUT => [ 45, Config::T_INT ],

View File

@@ -148,7 +148,7 @@ class RSSUtils {
$updstart_thresh_qpart
$query_order $query_limit";
//print "$query\n";
Debug::log("base feed query: $query", Debug::LOG_EXTENDED);
$res = $pdo->query($query);
@@ -189,7 +189,7 @@ class RSSUtils {
$update_limit_qpart
ORDER BY f.id $query_limit";
//print "$user_query\n";
Debug::log("per-user feed query: $user_query", Debug::LOG_EXTENDED);
// since we have feed xml cached, we can deal with other feeds with the same url
$usth = $pdo->prepare($user_query);