mirror of
https://github.com/nchevsky/systemrescue-zfs.git
synced 2026-03-02 03:03:48 +01:00
Merge branch 'load-srm-wait-online' into 'main'
Wait until we are online before doing an HTTP/HTTPS load-srm. See merge request systemrescue/systemrescue-sources!206
This commit is contained in:
commit
b8a5beff52
|
|
@ -9,6 +9,10 @@ import os
|
|||
import sys
|
||||
import re
|
||||
import tempfile
|
||||
import functools
|
||||
|
||||
# flush stdout buffer after each print call: immediately show the user what is going on
|
||||
print = functools.partial(print, flush=True)
|
||||
|
||||
# pythons os.symlink bails when a file already exists, this function also handles overwrites
|
||||
def symlink_overwrite(target, link_file):
|
||||
|
|
@ -224,7 +228,7 @@ if 'sysconfig' in config and 'ca-trust' in config['sysconfig'] and config['sysco
|
|||
|
||||
if late_load_srm != "":
|
||||
print(f"====> Late-loading SystemRescueModule (SRM) ...")
|
||||
subprocess.run(["/usr/share/sysrescue/bin/load-srm", late_load_srm])
|
||||
subprocess.run(["/usr/share/sysrescue/bin/load-srm", late_load_srm], stdout=None, stderr=None)
|
||||
# the SRM could contain changes to systemd units -> let them take effect
|
||||
subprocess.run(["/usr/bin/systemctl", "daemon-reload"])
|
||||
# trigger start of multi-user.target: the SRM could have added something to its "Wants"
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ Wants=getty-pre.target
|
|||
Type=oneshot
|
||||
ExecStart=/etc/systemd/scripts/sysrescue-initialize.py
|
||||
RemainAfterExit=true
|
||||
StandardOutput=journal+console
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
|
|
|||
|
|
@ -243,6 +243,41 @@ curl_download()
|
|||
return 0
|
||||
}
|
||||
|
||||
# Wait until NetworkManager reports an active network connection.
#
# $1 - timeout in seconds; the connection state is polled 4 times per second.
#
# Always returns 0: when the timeout expires a warning is printed and the
# caller proceeds anyway, because the user may have configured the network
# outside of NetworkManager.
#
# Fix vs. original: the deprecated `$[ ... ]` arithmetic form and the external
# `expr` call were replaced with the standard `$(( ... ))` arithmetic
# expansion; the computed values are identical.
wait_online()
{
    # timeout in seconds
    local timeout=$1

    # 4 tests per second
    local tries=$((timeout * 4))
    local online=0

    while [[ $tries -gt 0 ]]; do
        if /usr/bin/nm-online --timeout=0 --quiet; then
            # we are online
            online=1
            break
        fi
        tries=$((tries - 1))
        if [[ $tries -eq 0 ]]; then
            # no unnecessary sleep+message at the end
            continue
        fi

        # print a message every 5 seconds (=20 tests) to not spam the console
        if [[ $((tries % 20)) -eq 0 ]]; then
            echo "Waiting for network connection ($((tries / 4))s of ${timeout}s left)..."
        fi

        sleep 0.25
    done

    if [[ $online -eq 0 ]]; then
        # the user could have circumvented NetworkManager
        echo "No network connection detected by NetworkManager, trying download anyway"
    fi
}
|
||||
|
||||
#################################
|
||||
# execution begins here
|
||||
|
||||
|
|
@ -251,6 +286,10 @@ parse_args "$@"
|
|||
[[ $VERBOSE -eq 1 ]] && echo "URL/path: $URL"
|
||||
|
||||
if [[ $URL_PROTO == "http" ]] || [[ $URL_PROTO == "https" ]]; then
|
||||
# wait until we have some kind of network connection before trying the download
|
||||
# waiting is important even if we try downloading anyway: the network may take some time to come up
|
||||
wait_online 30
|
||||
|
||||
curl_download
|
||||
# replace the URL parameter with the location we downloaded the file to
|
||||
URL="$TMPDIR/srm"
|
||||
|
|
|
|||
Loading…
Reference in a new issue