From 2e0e1fa5d66f01effe07d30e53e5cf558494796f Mon Sep 17 00:00:00 2001
From: Hiltjo Posthuma
Date: Sun, 30 Sep 2018 19:20:01 +0200
Subject: sfeed_update: add variable for max amount of feeds to update concurrently

This adds a variable for the maximum number of feeds to update
concurrently. A system or user may have fork resource limits or may
want to set up a job limit.

Thanks leot for the idea and feedback!
---
 sfeed_update | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/sfeed_update b/sfeed_update
index 0a557c2..798b69b 100755
--- a/sfeed_update
+++ b/sfeed_update
@@ -5,6 +5,10 @@
 # defaults
 sfeedpath="$HOME/.sfeed/feeds"
 
+# used for processing feeds concurrently: wait until ${maxjobs} amount of
+# feeds are finished at a time.
+maxjobs=8
+
 # load config (evaluate shellscript).
 # loadconfig(configfile)
 loadconfig() {
@@ -71,6 +75,11 @@ fetchfeed() {
 # fetch and parse feed.
 # feed(name, feedurl, [basesiteurl], [encoding])
 feed() {
+	# wait until ${maxjobs} are finished: throughput using this logic is
+	# non-optimal, but it is simple and portable.
+	test $((curjobs % maxjobs)) -eq 0 && wait
+	curjobs=$((curjobs + 1))
+
 	(name="$1"
 	filename="$(printf '%s' "$1" | tr '/' '_')"
 	feedurl="$2"
@@ -122,6 +131,8 @@ feeds() {
 	echo "See sfeedrc.example for an example." >&2
 }
 
+# job counter.
+curjobs=0
 # kill whole current process group on ^C (SIGINT).
 isinterrupted="0"
 # SIGTERM: signal to terminate parent.
--
cgit v1.2.3
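
The heart of the change is the batch-wait pattern in feed(): before every
${maxjobs}th background job is started, the script blocks on wait until the
whole previous batch has finished. Below is a minimal standalone sketch of
that pattern for context; it is not part of the patch, and fetch_one and
urls.txt are hypothetical stand-ins:

	#!/bin/sh
	# Sketch of the batch-wait pattern used by the patch (illustrative
	# only). fetch_one and urls.txt are hypothetical stand-ins, not
	# part of sfeed.
	maxjobs=8
	curjobs=0
	while read -r url; do
		# before starting job 0, 8, 16, ... wait for the previous batch.
		test $((curjobs % maxjobs)) -eq 0 && wait
		curjobs=$((curjobs + 1))
		fetch_one "$url" &
	done < urls.txt
	wait	# wait for the last, possibly partial, batch.

As the in-code comment says, throughput with this logic is non-optimal: each
batch advances at the pace of its slowest job. The trade-off is that it needs
nothing beyond POSIX sh arithmetic and wait, so it stays simple and portable.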
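
Because the defaults are assigned before loadconfig() evaluates the sfeedrc
shell script, a config file can simply reassign the variable to override the
default of 8. A minimal sketch of such an sfeedrc, assuming the usual
feeds()/feed convention from sfeedrc.example (the feed name and URL are
placeholders):

	# sfeedrc: lower the concurrency limit from the default of 8.
	maxjobs=4

	# list of feeds to fetch:
	feeds() {
		# feed <name> <feedurl>
		feed "example" "https://example.org/atom.xml"
	}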