SOURCES/pidgin-2.10.7-CVE-2013-6479.patch

diff -up pidgin-2.10.7/libpurple/util.c.CVE-2013-6479 pidgin-2.10.7/libpurple/util.c
--- pidgin-2.10.7/libpurple/util.c.CVE-2013-6479	2013-02-11 04:16:53.000000000 -0500
+++ pidgin-2.10.7/libpurple/util.c	2014-01-28 19:09:20.896950189 -0500
@@ -33,6 +33,10 @@
 #include "prefs.h"
 #include "util.h"
 
+/* 512KiB Default value for maximum HTTP download size (when the client hasn't
+   specified a length) */
+#define DEFAULT_MAX_HTTP_DOWNLOAD (512 * 1024)
+
 struct _PurpleUtilFetchUrlData
 {
 	PurpleUtilFetchUrlCallback callback;
@@ -68,7 +72,7 @@ struct _PurpleUtilFetchUrlData
 	char *webdata;
 	gsize len;
 	unsigned long data_len;
-	gssize max_len;
+	gsize max_len;
 	gboolean chunked;
 	PurpleAccount *account;
 };
@@ -3239,24 +3243,26 @@ purple_strcasereplace(const char *string
 	return ret;
 }
 
-const char *
-purple_strcasestr(const char *haystack, const char *needle)
+/** TODO: Expose this when we can add API */
+static const char *
+purple_strcasestr_len(const char *haystack, gssize hlen, const char *needle, gssize nlen)
 {
-	size_t hlen, nlen;
 	const char *tmp, *ret;
 
 	g_return_val_if_fail(haystack != NULL, NULL);
 	g_return_val_if_fail(needle != NULL, NULL);
 
-	hlen = strlen(haystack);
-	nlen = strlen(needle);
+	if (hlen == -1)
+		hlen = strlen(haystack);
+	if (nlen == -1)
+		nlen = strlen(needle);
 	tmp = haystack,
 	ret = NULL;
 
 	g_return_val_if_fail(hlen > 0, NULL);
 	g_return_val_if_fail(nlen > 0, NULL);
 
-	while (*tmp && !ret) {
+	while (*tmp && !ret && (hlen - (tmp - haystack)) >= nlen) {
 		if (!g_ascii_strncasecmp(needle, tmp, nlen))
 			ret = tmp;
 		else
@@ -3266,6 +3272,12 @@ purple_strcasestr(const char *haystack,
 	return ret;
 }
 
+const char *
+purple_strcasestr(const char *haystack, const char *needle)
+{
+	return purple_strcasestr_len(haystack, -1, needle, -1);
+}
+
 char *
 purple_str_size_to_units(size_t size)
 {
@@ -3575,7 +3587,7 @@ static void ssl_url_fetch_connect_cb(gpo
 static void ssl_url_fetch_error_cb(PurpleSslConnection *ssl_connection, PurpleSslErrorType error, gpointer data);
 
 static gboolean
-parse_redirect(const char *data, size_t data_len,
+parse_redirect(const char *data, gsize data_len,
 			   PurpleUtilFetchUrlData *gfud)
 {
 	gchar *s;
@@ -3680,20 +3692,21 @@ parse_redirect(const char *data, size_t
 	return TRUE;
 }
 
+/* find the starting point of the content for the specified header and make
+ * sure that the content is safe to pass to sscanf */
 static const char *
-find_header_content(const char *data, size_t data_len, const char *header, size_t header_len)
+find_header_content(const char *data, gsize data_len, const char *header)
 {
 	const char *p = NULL;
 
-	if (header_len <= 0)
-		header_len = strlen(header);
+	gsize header_len = strlen(header);
 
-	/* Note: data is _not_ nul-terminated.  */
 	if (data_len > header_len) {
+		/* Check if the first header matches (data won't start with a \n") */
 		if (header[0] == '\n')
 			p = (g_ascii_strncasecmp(data, header + 1, header_len - 1) == 0) ? data : NULL;
 		if (!p)
-			p = purple_strcasestr(data, header);
+			p = purple_strcasestr_len(data, data_len, header, header_len);
 		if (p)
 			p += header_len;
 	}
@@ -3709,13 +3722,13 @@ find_header_content(const char *data, si
 	return NULL;
 }
 
-static size_t
-parse_content_len(const char *data, size_t data_len)
+static gsize 
+parse_content_len(const char *data, gsize data_len)
 {
-	size_t content_len = 0;
+	gsize content_len = 0;
 	const char *p = NULL;
 
-	p = find_header_content(data, data_len, "\nContent-Length: ", sizeof("\nContent-Length: ") - 1);
+	p = find_header_content(data, data_len, "\nContent-Length: ");
 	if (p) {
 		sscanf(p, "%" G_GSIZE_FORMAT, &content_len);
 		purple_debug_misc("util", "parsed %" G_GSIZE_FORMAT "\n", content_len);
@@ -3725,9 +3738,9 @@ parse_content_len(const char *data, size
 }
 
 static gboolean
-content_is_chunked(const char *data, size_t data_len)
+content_is_chunked(const char *data, gsize data_len)
 {
-	const char *p = find_header_content(data, data_len, "\nTransfer-Encoding: ", sizeof("\nTransfer-Encoding: ") - 1);
+	const char *p = find_header_content(data, data_len, "\nTransfer-Encoding: ");
 	if (p && g_ascii_strncasecmp(p, "chunked", 7) == 0)
 		return TRUE;
 
@@ -3810,7 +3823,7 @@ url_fetch_recv_cb(gpointer url_data, gin
 	while ((gfud->is_ssl && ((len = purple_ssl_read(gfud->ssl_connection, buf, sizeof(buf))) > 0)) ||
 			(!gfud->is_ssl && (len = read(source, buf, sizeof(buf))) > 0))
 	{
-		if(gfud->max_len != -1 && (gfud->len + len) > gfud->max_len) {
+		if((gfud->len + len) > gfud->max_len) {
 			purple_util_fetch_url_error(gfud, _("Error reading from %s: response too long (%d bytes limit)"),
 						    gfud->website.address, gfud->max_len);
 			return;
@@ -3838,9 +3851,8 @@ url_fetch_recv_cb(gpointer url_data, gin
 			/* See if we've reached the end of the headers yet */
 			end_of_headers = strstr(gfud->webdata, "\r\n\r\n");
 			if (end_of_headers) {
-				char *new_data;
 				guint header_len = (end_of_headers + 4 - gfud->webdata);
-				size_t content_len;
+				gsize content_len;
 
 				purple_debug_misc("util", "Response headers: '%.*s'\n",
 					header_len, gfud->webdata);
@@ -3860,15 +3872,36 @@ url_fetch_recv_cb(gpointer url_data, gin
 					content_len = 8192;
 				} else {
 					gfud->has_explicit_data_len = TRUE;
+					if (content_len > gfud->max_len) {
+						purple_debug_error("util",
+								"Overriding explicit Content-Length of %" G_GSIZE_FORMAT " with max of %" G_GSSIZE_FORMAT "\n",
+								content_len, gfud->max_len);
+						content_len = gfud->max_len;
+					}
 				}
 
 
 				/* If we're returning the headers too, we don't need to clean them out */
 				if (gfud->include_headers) {
+					char *new_data;
 					gfud->data_len = content_len + header_len;
-					gfud->webdata = g_realloc(gfud->webdata, gfud->data_len);
+					new_data = g_try_realloc(gfud->webdata, gfud->data_len);
+					if (new_data == NULL) {
+						purple_debug_error("util",
+								"Failed to allocate %" G_GSIZE_FORMAT " bytes: %s\n",
+								content_len, g_strerror(errno));
+						purple_util_fetch_url_error(gfud,
+								_("Unable to allocate enough memory to hold "
+								  "the contents from %s.  The web server may "
+								  "be trying something malicious."),
+								gfud->website.address);
+
+						return;
+					}
+					gfud->webdata = new_data;
 				} else {
-					size_t body_len = gfud->len - header_len;
+					char *new_data;
+					gsize body_len = gfud->len - header_len;
 
 					content_len = MAX(content_len, body_len);
 
@@ -4154,7 +4187,11 @@ purple_util_fetch_url_request_len_with_a
 	gfud->request = g_strdup(request);
 	gfud->include_headers = include_headers;
 	gfud->fd = -1;
-	gfud->max_len = max_len;
+	if (max_len <= 0) {
+		max_len = DEFAULT_MAX_HTTP_DOWNLOAD;
+		purple_debug_error("util", "Defaulting max download from %s to %" G_GSSIZE_FORMAT "\n", url, max_len);
+	}
+	gfud->max_len = (gsize) max_len;
 	gfud->account = account;
 
 	purple_url_parse(url, &gfud->website.address, &gfud->website.port,
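
For reference only (not part of the patch): a minimal standalone sketch of the bounded, case-insensitive substring search that the patch introduces as purple_strcasestr_len(). The function name strcasestr_len and the use of POSIX strncasecmp() in place of GLib's g_ascii_strncasecmp() are illustrative substitutions; as in the patch, passing -1 for a length means "compute it with strlen()".

/* Illustrative sketch, written against plain libc rather than GLib. */
#include <string.h>
#include <strings.h>     /* strncasecmp() */
#include <sys/types.h>   /* ssize_t */

static const char *
strcasestr_len(const char *haystack, ssize_t hlen,
               const char *needle, ssize_t nlen)
{
	const char *tmp;

	if (haystack == NULL || needle == NULL)
		return NULL;

	if (hlen == -1)
		hlen = (ssize_t) strlen(haystack);
	if (nlen == -1)
		nlen = (ssize_t) strlen(needle);

	if (hlen <= 0 || nlen <= 0)
		return NULL;

	/* Stop once fewer than nlen bytes remain, so the comparison never
	 * reads past the first hlen bytes of the haystack. */
	for (tmp = haystack; *tmp && (hlen - (tmp - haystack)) >= nlen; tmp++) {
		if (strncasecmp(needle, tmp, (size_t) nlen) == 0)
			return tmp;
	}

	return NULL;
}

Bounding the scan on hlen is what lets find_header_content() restrict the header search to the bytes actually received so far, which is why the patch can drop the old "data is _not_ nul-terminated" caveat and pass data_len straight through.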