Package: newsbeuter / 2.9-8

11-query-feed-tokenization.patch
Description: Fix age filter for query feeds not being parsed properly
Origin: upstream, https://github.com/akrennmair/newsbeuter/commit/9fe9a0deac4cd2d1183cec0bddda38257b021ef1
Bug: https://github.com/akrennmair/newsbeuter/issues/194

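For context: a query feed is configured in the urls file as a quoted "query:" URL, optionally followed by tags. The title and tags in this line are illustrative:

    "query:Recent:unread = \"yes\" and age between 0:7" tag1 "tag two"

Before this patch, the filter expression was truncated at the first colon inside it, so the range argument of the "between" operator (the ":7" above) was silently dropped.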
--- a/src/rss.cpp
+++ b/src/rss.cpp
@@ -473,13 +473,43 @@
 void rss_feed::set_rssurl(const std::string& u) {
 	rssurl_ = u;
 	if (rssurl_.substr(0,6) == "query:") {
-		std::vector<std::string> tokens = utils::tokenize_quoted(u, ":");
+		/* Query string looks like this:
+		 *
+		 * "query:Title:unread = \"yes\" and age between 0:7" tag1 "tag two"
+		 *
+		 * At this point, we're only interested in the first part enclosed in
+		 * the quotes. Thus, we first tokenize using space as delimiter... */
+		std::vector<std::string> tokens = utils::tokenize_quoted(u, " ");
+		// and then further split by colon, so as to extract title and query
+		tokens = utils::tokenize_quoted(u, ":");
+
 		if (tokens.size() < 3) {
 			throw std::string(_("too few arguments"));
 		}
-		LOG(LOG_DEBUG, "rss_feed::set_rssurl: query name = `%s' expr = `%s'", tokens[1].c_str(), tokens[2].c_str());
+
+		/* "Between" operator requires a range, which contains a colon. Since
+		 * we've been tokenizing by colon, we might've inadvertently split the
+		 * query itself. Let's reconstruct it! */
+		auto query = tokens[2];
+		for (auto it = tokens.begin() + 3; it != tokens.end(); ++it)
+		{
+			query += ":";
+			query += *it;
+		}
+		// Have to check if the result is a valid query, just in case
+		matcher m;
+		if (!m.parse(query)) {
+			throw utils::strprintf(
+			    _("`%s' is not a valid filter expression"), query.c_str());
+		}
+
+		LOG(LOG_DEBUG,
+		    "rss_feed::set_rssurl: query name = `%s' expr = `%s'",
+		    tokens[1].c_str(),
+		    query.c_str());
+
 		set_title(tokens[1]);
-		set_query(tokens[2]);
+		set_query(query);
 	}
 }
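
The heart of the fix is the rejoin loop: whatever the colon tokenizer split apart inside the filter expression is stitched back together. Below is a minimal standalone sketch of the old and the patched extraction, assuming a naive colon splitter in place of newsbeuter's utils::tokenize_quoted (the real helper also honours quoted sections); the matcher validation step is left out because matcher is newsbeuter-internal.

#include <iostream>
#include <string>
#include <vector>

// Naive stand-in for utils::tokenize_quoted(u, ":"); unlike the real
// helper, it does not treat quoted sections specially.
static std::vector<std::string> split_on(const std::string& s, char delim) {
    std::vector<std::string> tokens;
    std::string::size_type start = 0;
    std::string::size_type pos;
    while ((pos = s.find(delim, start)) != std::string::npos) {
        tokens.push_back(s.substr(start, pos - start));
        start = pos + 1;
    }
    tokens.push_back(s.substr(start));
    return tokens;
}

int main() {
    const std::string u = "query:Title:unread = \"yes\" and age between 0:7";
    std::vector<std::string> tokens = split_on(u, ':');

    // Old behaviour: tokens[2] alone is taken as the query, silently
    // dropping everything after the colon in "between 0:7".
    std::cout << "old: " << tokens[2] << '\n';

    // Patched behaviour: rejoin tokens[2..] with ":" to restore the
    // range argument of the "between" operator.
    std::string query = tokens[2];
    for (auto it = tokens.begin() + 3; it != tokens.end(); ++it) {
        query += ":";
        query += *it;
    }
    std::cout << "new: " << query << '\n';

    // Prints:
    //   old: unread = "yes" and age between 0
    //   new: unread = "yes" and age between 0:7
    return 0;
}

Validating the reconstructed string with matcher::parse, as the patch does, additionally turns a malformed query into an immediate, descriptive error when the feed is set up rather than a silently wrong result later.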