author    Hiltjo Posthuma <hiltjo@codemadness.org>    2018-09-28 17:11:56 +0200
committer Hiltjo Posthuma <hiltjo@codemadness.org>    2018-09-30 19:22:17 +0200
commit    cc9f0d5549b21bb6254aede2ff479698183ea5e3 (patch)
tree      fa9d491f202c44fca744e80829953235dea680ee /sfeed_update
parent    5aa78eb161a89f3803cc6efa35e214dd2e8f5386 (diff)
sfeed_update: add filter(), order() support per feed + improvements
Pass the name parameter to the functions and add these to the pipeline. They
can be overridden in the config.

- add the ability to change the merge logic per feed.
- add the ability to filter lines and fields per feed.
- add the ability to order lines differently per feed.
- add filter example to README.
- code-style:
  - fetchfeed consistency in parameter order.
  - change [ x"" = x"" ] to [ "" = "" ]. Simplify some if statements.
  - wrap long line in fetchfeed().
  - use signal names for trap.
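As an illustrative sketch (not part of this commit): because filter(), order()
and merge() now receive the feed name as $1, a sfeedrc config can redefine them
and branch per feed. The feed names and URLs below are hypothetical; the field
layout is the TAB-separated sfeed(5) format, with the UNIX timestamp in field 1
and the title in field 2.

    # hypothetical sfeedrc sketch: per-feed filter() and order() overrides.
    # feed names ("comics", "planet") and URLs are made up for illustration.
    filter() {
        case "$1" in
        "comics")
            # drop items whose title (field 2) contains "sponsored".
            awk -F '\t' '$2 !~ /sponsored/' ;;
        *)
            cat ;;
        esac
    }

    order() {
        case "$1" in
        "planet")
            # oldest first for this feed (timestamp ascending).
            sort -t "$(printf '\t')" -k1n,1 ;;
        *)
            # default: newest first (timestamp descending).
            sort -t "$(printf '\t')" -k1rn,1 ;;
        esac
    }

    feeds() {
        feed "comics" "https://comics.example.org/atom.xml"
        feed "planet" "https://planet.example.org/feed.xml"
    }

Unlisted feeds fall through to the default branches, so the stock behaviour of
sfeed_update is kept for them.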
Diffstat (limited to 'sfeed_update')
-rwxr-xr-x  sfeed_update | 73
1 file changed, 42 insertions, 31 deletions
diff --git a/sfeed_update b/sfeed_update
index 8ac5b32..2b23d3d 100755
--- a/sfeed_update
+++ b/sfeed_update
@@ -9,7 +9,7 @@ sfeedpath="$HOME/.sfeed/feeds"
# loadconfig(configfile)
loadconfig() {
# allow to specify config via argv[1].
- if [ ! x"$1" = x"" ]; then
+ if [ "$1" != "" ]; then
# get absolute path of config file.
config=$(readlink -f "$1")
else
@@ -17,8 +17,7 @@ loadconfig() {
config="$HOME/.sfeed/sfeedrc"
fi
- # load config: config is loaded here to be able to override $sfeedpath
- # or functions.
+ # config is loaded here to be able to override $sfeedpath or functions.
if [ -r "${config}" ]; then
. "${config}"
else
@@ -28,30 +27,11 @@ loadconfig() {
fi
}
-# merge raw files.
-# merge(oldfile, newfile)
-merge() {
- # unique sort by id, title, link.
- # order by timestamp (desc).
- (sort -t ' ' -u -k6,6 -k2,2 -k3,3 "$1" "$2" 2>/dev/null) |
- sort -t ' ' -k1rn,1
-}
-
-# fetch a feed via HTTP/HTTPS etc.
-# fetchfeed(url, name, feedfile)
-fetchfeed() {
- if curl -L --max-redirs 0 -H 'User-Agent:' -f -s -S -m 15 -z "$3" "$1" 2>/dev/null; then
- printf " OK %s %s\n" "$(date +'%H:%M:%S')" "$2" >&2
- else
- printf "FAIL %s %s\n" "$(date +'%H:%M:%S')" "$2" >&2
- fi
-}
-
# convert encoding from one encoding to another.
# convertencoding(from, to)
convertencoding() {
# if from != to
- if [ ! "$1" = "" ] && [ ! "$2" = "" ] && [ ! "$1" = "$2" ]; then
+ if [ "$1" != "" ] && [ "$2" != "" ] && [ "$1" != "$2" ]; then
iconv -cs -f "$1" -t "$2" 2> /dev/null
else
# else no convert, just output
@@ -59,6 +39,35 @@ convertencoding() {
fi
}
+# merge raw files: unique sort by id, title, link.
+# merge(name, oldfile, newfile)
+merge() {
+ sort -t ' ' -u -k6,6 -k2,2 -k3,3 "$2" "$3" 2>/dev/null
+}
+
+# filter fields.
+# filter(name)
+filter() {
+ cat
+}
+
+# order by timestamp (descending).
+# order(name)
+order() {
+ sort -t ' ' -k1rn,1
+}
+
+# fetch a feed via HTTP/HTTPS etc.
+# fetchfeed(name, url, feedfile)
+fetchfeed() {
+ if curl -L --max-redirs 0 -H "User-Agent:" -f -s -S -m 15 \
+ -z "$3" "$2" 2>/dev/null; then
+ printf " OK %s %s\n" "$(date +'%H:%M:%S')" "$1" >&2
+ else
+ printf "FAIL %s %s\n" "$(date +'%H:%M:%S')" "$1" >&2
+ fi
+}
+
# fetch and parse feed.
# feed(name, feedurl, [basesiteurl], [encoding])
feed() {
@@ -72,14 +81,14 @@ feed() {
sfeedfile="${sfeedpath}/${filename}"
if [ ! "${encoding}" = "" ]; then
- fetchfeed "${feedurl}" "${name}" "${sfeedfile}" | \
+ fetchfeed "${name}" "${feedurl}" "${sfeedfile}" | \
convertencoding "${encoding}" "utf-8"
else # detect encoding.
tmpencfile="${tmpfeedfile}.enc"
- fetchfeed "${feedurl}" "${name}" "${sfeedfile}" > "${tmpencfile}"
+ fetchfeed "${name}" "${feedurl}" "${sfeedfile}" > "${tmpencfile}"
detectenc=$(sfeed_xmlenc < "${tmpencfile}")
convertencoding "${detectenc}" "utf-8" < "${tmpencfile}"
- fi | sfeed "${basesiteurl}" > "${tmpfeedfile}"
+ fi | sfeed "${basesiteurl}" | filter "${name}" > "${tmpfeedfile}"
# get new data and merge with old.
sfeedfilenew="${sfeedpath}/${filename}.new"
@@ -87,18 +96,20 @@ feed() {
if [ -s "${tmpfeedfile}" ]; then
# if file exists, merge
if [ -e "${sfeedfile}" ]; then
- merge "${sfeedfile}" "${tmpfeedfile}" > "${sfeedfilenew}"
+ merge "${name}" "${sfeedfile}" "${tmpfeedfile}" | \
+ order "${name}" > "${sfeedfilenew}"
# overwrite old file with updated file
mv "${sfeedfilenew}" "${sfeedfile}"
else
- merge "/dev/null" "${tmpfeedfile}" > "${sfeedfile}"
+ merge "${name}" "/dev/null" "${tmpfeedfile}" | \
+ order "${name}" > "${sfeedfile}"
fi
fi) &
}
cleanup() {
- # remove temporary files
+ # remove temporary files.
rm -rf "${sfeedtmpdir}"
}
@@ -114,9 +125,9 @@ feeds() {
# kill whole current process group on ^C (SIGINT).
isinterrupted="0"
# SIGTERM: signal to terminate parent.
-trap -- "interrupted" "15"
+trap -- "interrupted" "TERM"
# SIGINT: kill all running childs >:D
-trap -- "kill -TERM -$$" "2"
+trap -- "kill -TERM -$$" "INT"
# load config file.
loadconfig "$1"
# fetch feeds and store in temporary file.