#!/bin/sh
# update feeds, merge with old feeds.
# NOTE: assumes "sfeed_*" executables are in $PATH.

# defaults
sfeedpath="$HOME/.sfeed/feeds"

# used for processing feeds concurrently: after every ${maxjobs} feeds are
# started, wait until that batch is finished.
maxjobs=8
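
# maxjobs can be overridden from the sfeedrc config (sourced below), for
# example to lower concurrency on a slow connection:
#	maxjobs=4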

# load config (evaluate shellscript).
# loadconfig(configfile)
loadconfig() {
	# allow specifying the config via argv[1].
	if [ "$1" != "" ]; then
		# get absolute path of config file.
		config=$(readlink -f "$1")
	else
		# default config location.
		config="$HOME/.sfeed/sfeedrc"
	fi

	# config is loaded here to be able to override $sfeedpath or functions.
	if [ -r "${config}" ]; then
		. "${config}"
	else
		echo "Configuration file \"${config}\" does not exist or is not readable." >&2
		echo "See sfeedrc.example for an example." >&2
		exit 1
	fi
}
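
# A minimal sfeedrc, as a sketch (the feed names and URLs below are
# hypothetical examples, see sfeedrc.example for the real template):
#	sfeedpath="$HOME/.sfeed/feeds"
#	feeds() {
#		feed "news" "https://example.org/atom.xml"
#		feed "blog" "https://example.com/feed.xml" "https://example.com" "iso-8859-1"
#	}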

# convert encoding from one encoding to another.
# convertencoding(from, to)
convertencoding() {
	# if from != to
	if [ "$1" != "" ] && [ "$2" != "" ] && [ "$1" != "$2" ]; then
		iconv -cs -f "$1" -t "$2" 2> /dev/null
	else
		# else no conversion needed, pass the input through.
		cat
	fi
}
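
# example usage (the file name and input encoding are assumptions for
# illustration):
#	convertencoding "iso-8859-1" "utf-8" < file.xml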

# merge raw files: unique sort by id, title, link.
# merge(name, oldfile, newfile)
merge() {
	sort -t '	' -u -k6,6 -k2,2 -k3,3 "$2" "$3" 2>/dev/null
}

# filter fields.
# filter(name)
filter() {
	cat
}
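
# filter is meant to be overridden in the sfeedrc config; a sketch that
# drops items whose title (TAB-separated field 2) matches a pattern (the
# pattern is an example):
#	filter() {
#		awk -F '\t' 'tolower($2) !~ /sponsored/'
#	}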

# order by timestamp (descending).
# order(name)
order() {
	sort -t '	' -k1rn,1
}

# fetch a feed via HTTP/HTTPS etc.
# fetchfeed(name, url, feedfile)
fetchfeed() {
	if curl -L --max-redirs 0 -H "User-Agent:" -f -s -m 15 \
		-z "$3" "$2" 2>/dev/null; then
		printf "  OK %s %s\n" "$(date +'%H:%M:%S')" "$1" >&2
	else
		printf "FAIL %s %s\n" "$(date +'%H:%M:%S')" "$1" >&2
	fi
}
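
# fetchfeed can also be overridden in the config; a sketch using the same
# standard curl options with a longer timeout:
#	fetchfeed() {
#		curl -L --max-redirs 0 -H "User-Agent:" -f -s -m 60 \
#			-z "$3" "$2" 2>/dev/null
#	}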

# fetch and parse feed.
# feed(name, feedurl, [basesiteurl], [encoding])
feed() {
	# wait until ${maxjobs} are finished: throughput using this logic is
	# non-optimal, but it is simple and portable.
	[ $((curjobs % maxjobs)) -eq 0 ] && wait
	[ ${isinterrupted} -eq 1 ] && return
	curjobs=$((curjobs + 1))

	(name="$1"
	filename="$(printf '%s' "$1" | tr '/' '_')"
	feedurl="$2"
	basesiteurl="$3"
	tmpfeedfile="${sfeedtmpdir}/${filename}"
	tmpencfile=""
	encoding="$4"
	sfeedfile="${sfeedpath}/${filename}"

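	# NOTE: the output of the whole if/else block below is piped through
	# sfeed(1) to convert the feed to TSV and then through filter().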
	if [ "${encoding}" != "" ]; then
		fetchfeed "${name}" "${feedurl}" "${sfeedfile}" | \
			convertencoding "${encoding}" "utf-8"
	else # detect encoding.
		tmpencfile="${tmpfeedfile}.enc"
		fetchfeed "${name}" "${feedurl}" "${sfeedfile}" > "${tmpencfile}"
		detectenc=$(sfeed_xmlenc < "${tmpencfile}")
		convertencoding "${detectenc}" "utf-8" < "${tmpencfile}"
	fi | sfeed "${basesiteurl}" | filter "${name}" > "${tmpfeedfile}"

	# get new data and merge with old.
	sfeedfilenew="${sfeedpath}/${filename}.new"
	# new feed data is non-empty.
	if [ -s "${tmpfeedfile}" ]; then
		# if file exists, merge
		if [ -e "${sfeedfile}" ]; then
			merge "${name}" "${sfeedfile}" "${tmpfeedfile}" | \
				order "${name}" > "${sfeedfilenew}"

			# overwrite old file with updated file
			mv "${sfeedfilenew}" "${sfeedfile}"
		else
			merge "${name}" "/dev/null" "${tmpfeedfile}" | \
				order "${name}" > "${sfeedfile}"
		fi
	fi) &
}

cleanup() {
	# remove temporary files.
	rm -rf "${sfeedtmpdir}"
}

interrupted() {
	isinterrupted=1
}

feeds() {
	echo "Configuration file \"${config}\" is invalid or does not contain a \"feeds\" function." >&2
	echo "See sfeedrc.example for an example." >&2
}

# job counter.
curjobs=0
# set to 1 when a SIGTERM is received.
isinterrupted=0
# SIGTERM: mark as interrupted so no new feeds are started.
trap -- "interrupted" "TERM"
# SIGINT (^C): send SIGTERM to the whole process group to kill all running
# children.
trap -- "kill -TERM -$$" "INT"
# load config file.
loadconfig "$1"
# fetch feeds and store in temporary file.
sfeedtmpdir="$(mktemp -d '/tmp/sfeed_XXXXXX')"
# make sure path exists.
mkdir -p "${sfeedpath}"
# fetch feeds specified in config file.
feeds
# wait until all concurrently fetched feeds are finished.
wait
# cleanup temporary files etc.
cleanup
# when interrupted, exit with status 130 (128 + SIGINT's signal number 2).
[ ${isinterrupted} -eq 1 ] && exit 130
exit 0