From dba8a844be9d30cab3b707bf63f1f80e6ee2a967 Mon Sep 17 00:00:00 2001 From: CentOS Sources Date: Nov 03 2016 06:05:06 +0000 Subject: import wget-1.14-13.el7 --- diff --git a/SOURCES/wget-1.14-CVE-2016-4971.patch b/SOURCES/wget-1.14-CVE-2016-4971.patch new file mode 100644 index 0000000..ae65eec --- /dev/null +++ b/SOURCES/wget-1.14-CVE-2016-4971.patch @@ -0,0 +1,261 @@ +diff --git a/src/ftp.c b/src/ftp.c +index 2be2c76..345718f 100644 +--- a/src/ftp.c ++++ b/src/ftp.c +@@ -234,14 +234,15 @@ print_length (wgint size, wgint start, bool authoritative) + logputs (LOG_VERBOSE, !authoritative ? _(" (unauthoritative)\n") : "\n"); + } + +-static uerr_t ftp_get_listing (struct url *, ccon *, struct fileinfo **); ++static uerr_t ftp_get_listing (struct url *, struct url *, ccon *, struct fileinfo **); + + /* Retrieves a file with denoted parameters through opening an FTP + connection to the server. It always closes the data connection, + and closes the control connection in case of error. If warc_tmp + is non-NULL, the downloaded data will be written there as well. */ + static uerr_t +-getftp (struct url *u, wgint passed_expected_bytes, wgint *qtyread, ++getftp (struct url *u, struct url *original_url, ++ wgint passed_expected_bytes, wgint *qtyread, + wgint restval, ccon *con, int count, FILE *warc_tmp) + { + int csock, dtsock, local_sock, res; +@@ -944,7 +945,7 @@ Error in server response, closing control connection.\n")); + bool exists = false; + uerr_t res; + struct fileinfo *f; +- res = ftp_get_listing (u, con, &f); ++ res = ftp_get_listing (u, original_url, con, &f); + /* Set the DO_RETR command flag again, because it gets unset when + calling ftp_get_listing() and would otherwise cause an assertion + failure earlier on when this function gets repeatedly called +@@ -1392,7 +1393,8 @@ Error in server response, closing control connection.\n")); + This loop either gets commands from con, or (if ON_YOUR_OWN is + set), makes them up to retrieve the file given by the URL. */ + static uerr_t +-ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_file) ++ftp_loop_internal (struct url *u, struct url *original_url, struct fileinfo *f, ++ ccon *con, char **local_file) + { + int count, orig_lp; + wgint restval, len = 0, qtyread = 0; +@@ -1415,7 +1417,7 @@ ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_fi + else + { + /* URL-derived file. Consider "-O file" name. */ +- con->target = url_file_name (u, NULL); ++ con->target = url_file_name (opt.trustservernames || !original_url ? u : original_url, NULL); + if (!opt.output_document) + locf = con->target; + else +@@ -1524,7 +1526,7 @@ ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_fi + + /* If we are working on a WARC record, getftp should also write + to the warc_tmp file. */ +- err = getftp (u, len, &qtyread, restval, con, count, warc_tmp); ++ err = getftp (u, original_url, len, &qtyread, restval, con, count, warc_tmp); + + if (con->csock == -1) + con->st &= ~DONE_CWD; +@@ -1677,7 +1679,8 @@ Removing file due to --delete-after in ftp_loop_internal():\n")); + /* Return the directory listing in a reusable format. The directory + is specifed in u->dir. 
*/ + static uerr_t +-ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f) ++ftp_get_listing (struct url *u, struct url *original_url, ccon *con, ++ struct fileinfo **f) + { + uerr_t err; + char *uf; /* url file name */ +@@ -1698,7 +1701,7 @@ ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f) + + con->target = xstrdup (lf); + xfree (lf); +- err = ftp_loop_internal (u, NULL, con, NULL); ++ err = ftp_loop_internal (u, original_url, NULL, con, NULL); + lf = xstrdup (con->target); + xfree (con->target); + con->target = old_target; +@@ -1721,8 +1724,9 @@ ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f) + return err; + } + +-static uerr_t ftp_retrieve_dirs (struct url *, struct fileinfo *, ccon *); +-static uerr_t ftp_retrieve_glob (struct url *, ccon *, int); ++static uerr_t ftp_retrieve_dirs (struct url *, struct url *, ++ struct fileinfo *, ccon *); ++static uerr_t ftp_retrieve_glob (struct url *, struct url *, ccon *, int); + static struct fileinfo *delelement (struct fileinfo *, struct fileinfo **); + static void freefileinfo (struct fileinfo *f); + +@@ -1734,7 +1738,8 @@ static void freefileinfo (struct fileinfo *f); + If opt.recursive is set, after all files have been retrieved, + ftp_retrieve_dirs will be called to retrieve the directories. */ + static uerr_t +-ftp_retrieve_list (struct url *u, struct fileinfo *f, ccon *con) ++ftp_retrieve_list (struct url *u, struct url *original_url, ++ struct fileinfo *f, ccon *con) + { + static int depth = 0; + uerr_t err; +@@ -1893,7 +1898,9 @@ Already have correct symlink %s -> %s\n\n"), + else /* opt.retr_symlinks */ + { + if (dlthis) +- err = ftp_loop_internal (u, f, con, NULL); ++ { ++ err = ftp_loop_internal (u, original_url, f, con, NULL); ++ } + } /* opt.retr_symlinks */ + break; + case FT_DIRECTORY: +@@ -1904,7 +1911,9 @@ Already have correct symlink %s -> %s\n\n"), + case FT_PLAINFILE: + /* Call the retrieve loop. */ + if (dlthis) +- err = ftp_loop_internal (u, f, con, NULL); ++ { ++ err = ftp_loop_internal (u, original_url, f, con, NULL); ++ } + break; + case FT_UNKNOWN: + logprintf (LOG_NOTQUIET, _("%s: unknown/unsupported file type.\n"), +@@ -1969,7 +1978,7 @@ Already have correct symlink %s -> %s\n\n"), + /* We do not want to call ftp_retrieve_dirs here */ + if (opt.recursive && + !(opt.reclevel != INFINITE_RECURSION && depth >= opt.reclevel)) +- err = ftp_retrieve_dirs (u, orig, con); ++ err = ftp_retrieve_dirs (u, original_url, orig, con); + else if (opt.recursive) + DEBUGP ((_("Will not retrieve dirs since depth is %d (max %d).\n"), + depth, opt.reclevel)); +@@ -1982,7 +1991,8 @@ Already have correct symlink %s -> %s\n\n"), + ftp_retrieve_glob on each directory entry. The function knows + about excluded directories. */ + static uerr_t +-ftp_retrieve_dirs (struct url *u, struct fileinfo *f, ccon *con) ++ftp_retrieve_dirs (struct url *u, struct url *original_url, ++ struct fileinfo *f, ccon *con) + { + char *container = NULL; + int container_size = 0; +@@ -2032,7 +2042,7 @@ Not descending to %s as it is excluded/not-included.\n"), + odir = xstrdup (u->dir); /* because url_set_dir will free + u->dir. */ + url_set_dir (u, newdir); +- ftp_retrieve_glob (u, con, GLOB_GETALL); ++ ftp_retrieve_glob (u, original_url, con, GLOB_GETALL); + url_set_dir (u, odir); + xfree (odir); + +@@ -2091,14 +2101,15 @@ is_invalid_entry (struct fileinfo *f) + GLOB_GLOBALL, use globbing; if it's GLOB_GETALL, download the whole + directory. 
*/ + static uerr_t +-ftp_retrieve_glob (struct url *u, ccon *con, int action) ++ftp_retrieve_glob (struct url *u, struct url *original_url, ++ ccon *con, int action) + { + struct fileinfo *f, *start; + uerr_t res; + + con->cmd |= LEAVE_PENDING; + +- res = ftp_get_listing (u, con, &start); ++ res = ftp_get_listing (u, original_url, con, &start); + if (res != RETROK) + return res; + /* First: weed out that do not conform the global rules given in +@@ -2194,7 +2205,7 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action) + if (start) + { + /* Just get everything. */ +- res = ftp_retrieve_list (u, start, con); ++ res = ftp_retrieve_list (u, original_url, start, con); + } + else + { +@@ -2210,7 +2221,7 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action) + { + /* Let's try retrieving it anyway. */ + con->st |= ON_YOUR_OWN; +- res = ftp_loop_internal (u, NULL, con, NULL); ++ res = ftp_loop_internal (u, original_url, NULL, con, NULL); + return res; + } + +@@ -2230,8 +2241,8 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action) + of URL. Inherently, its capabilities are limited on what can be + encoded into a URL. */ + uerr_t +-ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy, +- bool recursive, bool glob) ++ftp_loop (struct url *u, struct url *original_url, char **local_file, int *dt, ++ struct url *proxy, bool recursive, bool glob) + { + ccon con; /* FTP connection */ + uerr_t res; +@@ -2252,16 +2263,17 @@ ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy, + if (!*u->file && !recursive) + { + struct fileinfo *f; +- res = ftp_get_listing (u, &con, &f); ++ res = ftp_get_listing (u, original_url, &con, &f); + + if (res == RETROK) + { + if (opt.htmlify && !opt.spider) + { ++ struct url *url_file = opt.trustservernames ? u : original_url; + char *filename = (opt.output_document + ? xstrdup (opt.output_document) + : (con.target ? xstrdup (con.target) +- : url_file_name (u, NULL))); ++ : url_file_name (url_file, NULL))); + res = ftp_index (filename, u, f); + if (res == FTPOK && opt.verbose) + { +@@ -2306,11 +2318,13 @@ ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy, + /* ftp_retrieve_glob is a catch-all function that gets called + if we need globbing, time-stamping, recursion or preserve + permissions. Its third argument is just what we really need. */ +- res = ftp_retrieve_glob (u, &con, ++ res = ftp_retrieve_glob (u, original_url, &con, + ispattern ? 
GLOB_GLOBALL : GLOB_GETONE); + } + else +- res = ftp_loop_internal (u, NULL, &con, local_file); ++ { ++ res = ftp_loop_internal (u, original_url, NULL, &con, local_file); ++ } + } + if (res == FTPOK) + res = RETROK; +diff --git a/src/ftp.h b/src/ftp.h +index be00d88..2abc9c0 100644 +--- a/src/ftp.h ++++ b/src/ftp.h +@@ -129,7 +129,8 @@ enum wget_ftp_fstatus + }; + + struct fileinfo *ftp_parse_ls (const char *, const enum stype); +-uerr_t ftp_loop (struct url *, char **, int *, struct url *, bool, bool); ++uerr_t ftp_loop (struct url *, struct url *, char **, int *, struct url *, ++ bool, bool); + + uerr_t ftp_index (const char *, struct url *, struct fileinfo *); + +diff --git a/src/retr.c b/src/retr.c +index 66624dc..21fad56 100644 +--- a/src/retr.c ++++ b/src/retr.c +@@ -794,7 +794,8 @@ retrieve_url (struct url * orig_parsed, const char *origurl, char **file, + if (redirection_count) + oldrec = glob = false; + +- result = ftp_loop (u, &local_file, dt, proxy_url, recursive, glob); ++ result = ftp_loop (u, orig_parsed, &local_file, dt, proxy_url, ++ recursive, glob); + recursive = oldrec; + + /* There is a possibility of having HTTP being redirected to diff --git a/SOURCES/wget-1.14-rh1147572.patch b/SOURCES/wget-1.14-rh1147572.patch new file mode 100644 index 0000000..f3785dd --- /dev/null +++ b/SOURCES/wget-1.14-rh1147572.patch @@ -0,0 +1,26 @@ +From 798f554773baf1adca376500ca120a992e6d7492 Mon Sep 17 00:00:00 2001 +From: Tim Ruehsen +Date: Tue, 28 Aug 2012 16:38:21 +0200 +Subject: [PATCH] remove -nv from --report-speed in doc/wget.texi + +--- + doc/wget.texi | 3 +-- + 2 files changed, 5 insertions(+), 2 deletions(-) + +diff --git a/doc/wget.texi b/doc/wget.texi +index 7efdc72..400debe 100644 +--- a/doc/wget.texi ++++ b/doc/wget.texi +@@ -479,8 +479,7 @@ Turn off verbose without being completely quiet (use @samp{-q} for + that), which means that error messages and basic information still get + printed. + +-@item -nv +-@itemx --report-speed=@var{type} ++@item --report-speed=@var{type} + Output bandwidth as @var{type}. The only accepted value is @samp{bits}. + + @cindex input-file +-- +1.9.3 + diff --git a/SOURCES/wget-1.14-rh1203384.patch b/SOURCES/wget-1.14-rh1203384.patch new file mode 100644 index 0000000..9f3ba42 --- /dev/null +++ b/SOURCES/wget-1.14-rh1203384.patch @@ -0,0 +1,30 @@ +From aed7d4163a9e2083d294a9471e1347ab13d6f2ab Mon Sep 17 00:00:00 2001 +From: Pavel Mateja +Date: Sat, 2 Nov 2013 11:27:58 +0100 +Subject: [PATCH] http: specify Host when CONNECT is used. + +--- + src/http.c | 7 +++---- + 2 files changed, 7 insertions(+), 4 deletions(-) + +diff --git a/src/http.c b/src/http.c +index dbfcdfb..8917fa5 100644 +--- a/src/http.c ++++ b/src/http.c +@@ -2013,10 +2013,9 @@ gethttp (struct url *u, struct http_stat *hs, int *dt, struct url *proxy, + the regular request below. */ + proxyauth = NULL; + } +- /* Examples in rfc2817 use the Host header in CONNECT +- requests. I don't see how that gains anything, given +- that the contents of Host would be exactly the same as +- the contents of CONNECT. 
*/
++      request_set_header (connreq, "Host",
++                          aprintf ("%s:%d", u->host, u->port),
++                          rel_value);
+
+       write_error = request_send (connreq, sock, 0);
+       request_free (connreq);
+--
+2.1.0
+
diff --git a/SOURCES/wget-1.14-support-non-ASCII-characters.patch b/SOURCES/wget-1.14-support-non-ASCII-characters.patch
new file mode 100644
index 0000000..7ffd5a4
--- /dev/null
+++ b/SOURCES/wget-1.14-support-non-ASCII-characters.patch
@@ -0,0 +1,154 @@
+From 0a33fa22c597234ab133f63127b4a5e00cf048b9 Mon Sep 17 00:00:00 2001
+From: Tomas Hozza
+Date: Mon, 20 Jun 2016 12:10:38 +0200
+Subject: [PATCH] Support non-ASCII characters
+
+Upstream commit 59b920874daa565a1323ffa1e756e80493190686
+
+Signed-off-by: Tomas Hozza
+---
+ src/url.c | 87 +++++++++++++++++++++++++++++++++++++++++++++++++--
+ tests/Test-ftp-iri.px | 4 +--
+ 2 files changed, 87 insertions(+), 4 deletions(-)
+
+diff --git a/src/url.c b/src/url.c
+index 6bca719..d0d9e27 100644
+--- a/src/url.c
++++ b/src/url.c
+@@ -42,6 +42,11 @@ as that of the covered work. */
+ #include "url.h"
+ #include "host.h" /* for is_valid_ipv6_address */
+
++#if HAVE_ICONV
++#include <iconv.h>
++#include <langinfo.h>
++#endif
++
+ #ifdef __VMS
+ #include "vms.h"
+ #endif /* def __VMS */
+@@ -1335,8 +1340,8 @@ UWC, C, C, C, C, C, C, C, /* NUL SOH STX ETX EOT ENQ ACK BEL */
+ 0, 0, 0, 0, 0, 0, 0, 0, /* p q r s t u v w */
+ 0, 0, 0, 0, W, 0, 0, C, /* x y z { | } ~ DEL */
+
+- C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, /* 128-143 */
+- C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, /* 144-159 */
++ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 128-143 */
++ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 144-159 */
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+
+@@ -1456,6 +1461,82 @@ append_uri_pathel (const char *b, const char *e, bool escaped,
+ TAIL_INCR (dest, outlen);
+ }
+
++static char *
++convert_fname (const char *fname)
++{
++ char *converted_fname = (char *)fname;
++#if HAVE_ICONV
++ const char *from_encoding = opt.encoding_remote;
++ const char *to_encoding = opt.locale;
++ iconv_t cd;
++ size_t len, done, inlen, outlen;
++ char *s;
++ const char *orig_fname = fname;;
++
++ /* Defaults for remote and local encodings. 
*/ ++ if (!from_encoding) ++ from_encoding = "UTF-8"; ++ if (!to_encoding) ++ to_encoding = nl_langinfo (CODESET); ++ ++ cd = iconv_open (to_encoding, from_encoding); ++ if (cd == (iconv_t)(-1)) ++ logprintf (LOG_VERBOSE, _("Conversion from %s to %s isn't supported\n"), ++ quote (from_encoding), quote (to_encoding)); ++ else ++ { ++ inlen = strlen (fname); ++ len = outlen = inlen * 2; ++ converted_fname = s = xmalloc (outlen + 1); ++ done = 0; ++ ++ for (;;) ++ { ++ if (iconv (cd, &fname, &inlen, &s, &outlen) != (size_t)(-1) ++ && iconv (cd, NULL, NULL, &s, &outlen) != (size_t)(-1)) ++ { ++ *(converted_fname + len - outlen - done) = '\0'; ++ iconv_close(cd); ++ DEBUGP (("Converted file name '%s' (%s) -> '%s' (%s)\n", ++ orig_fname, from_encoding, converted_fname, to_encoding)); ++ xfree (orig_fname); ++ return converted_fname; ++ } ++ ++ /* Incomplete or invalid multibyte sequence */ ++ if (errno == EINVAL || errno == EILSEQ) ++ { ++ logprintf (LOG_VERBOSE, ++ _("Incomplete or invalid multibyte sequence encountered\n")); ++ xfree (converted_fname); ++ converted_fname = (char *)orig_fname; ++ break; ++ } ++ else if (errno == E2BIG) /* Output buffer full */ ++ { ++ done = len; ++ len = outlen = done + inlen * 2; ++ converted_fname = xrealloc (converted_fname, outlen + 1); ++ s = converted_fname + done; ++ } ++ else /* Weird, we got an unspecified error */ ++ { ++ logprintf (LOG_VERBOSE, _("Unhandled errno %d\n"), errno); ++ xfree (converted_fname); ++ converted_fname = (char *)orig_fname; ++ break; ++ } ++ } ++ DEBUGP (("Failed to convert file name '%s' (%s) -> '?' (%s)\n", ++ orig_fname, from_encoding, to_encoding)); ++ } ++ ++ iconv_close(cd); ++#endif ++ ++ return converted_fname; ++} ++ + /* Append to DEST the directory structure that corresponds the + directory part of URL's path. For example, if the URL is + http://server/dir1/dir2/file, this appends "/dir1/dir2". +@@ -1582,6 +1663,8 @@ url_file_name (const struct url *u, char *replaced_filename) + + fname = fnres.base; + ++ fname = convert_fname (fname); ++ + /* Check the cases in which the unique extensions are not used: + 1) Clobbering is turned off (-nc). + 2) Retrieval with regetting. +diff --git a/tests/Test-ftp-iri.px b/tests/Test-ftp-iri.px +index a4b7fe1..24ac467 100755 +--- a/tests/Test-ftp-iri.px ++++ b/tests/Test-ftp-iri.px +@@ -26,12 +26,12 @@ my %urls = ( + }, + ); + +-my $cmdline = $WgetTest::WGETPATH . " --local-encoding=iso-8859-1 -S ftp://localhost:{{port}}/fran${ccedilla_l1}ais.txt"; ++my $cmdline = $WgetTest::WGETPATH . 
" --local-encoding=iso-8859-1 --remote-encoding=utf-8 -S ftp://localhost:{{port}}/fran${ccedilla_l1}ais.txt"; + + my $expected_error_code = 0; + + my %expected_downloaded_files = ( +- "fran${ccedilla_u8}ais.txt" => { ++ "fran${ccedilla_l1}ais.txt" => { + content => $francais, + }, + ); +-- +2.5.5 + diff --git a/SPECS/wget.spec b/SPECS/wget.spec index cc94424..49393b6 100644 --- a/SPECS/wget.spec +++ b/SPECS/wget.spec @@ -1,7 +1,7 @@ Summary: A utility for retrieving files using the HTTP or FTP protocols Name: wget Version: 1.14 -Release: 10%{?dist}.1 +Release: 13%{?dist} License: GPLv3+ Group: Applications/Internet Url: http://www.gnu.org/software/wget/ @@ -20,12 +20,22 @@ Patch10: wget-1.14-set_sock_to_-1_if_no_persistent_conn.patch Patch11: wget-1.14-document-backups.patch Patch12: wget-1.14-fix-backups-to-work-as-documented.patch Patch13: wget-1.14-CVE-2014-4877.patch +Patch14: wget-1.14-rh1203384.patch +Patch15: wget-1.14-rh1147572.patch +Patch16: wget-1.14-CVE-2016-4971.patch +# needed because fix for CVE-2016-4971 changes default behavior +# and the file is not saved in correct encoding. This caused the +# Test-ftp-iri-fallback test to fail. This additional change makes +# Test-ftp-iri-fallback test pass again. +Patch17: wget-1.14-support-non-ASCII-characters.patch Provides: webclient Provides: bundled(gnulib) Requires(post): /sbin/install-info Requires(preun): /sbin/install-info BuildRequires: openssl-devel, pkgconfig, texinfo, gettext, autoconf, libidn-devel, libuuid-devel, perl-podlators +# dependencies for the test suite +BuildRequires: perl-libwww-perl BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) %description @@ -52,6 +62,10 @@ support for Proxy servers, and configurability. %patch11 -p1 %patch12 -p1 %patch13 -p1 +%patch14 -p1 +%patch15 -p1 +%patch16 -p1 +%patch17 -p1 %build if pkg-config openssl ; then @@ -79,6 +93,9 @@ fi %clean rm -rf $RPM_BUILD_ROOT +%check +make check + %files -f %{name}.lang %defattr(-,root,root) %doc AUTHORS MAILING-LIST NEWS README COPYING doc/sample.wgetrc @@ -88,8 +105,17 @@ rm -rf $RPM_BUILD_ROOT %{_infodir}/* %changelog -* Fri Oct 24 2014 Tomas Hozza - 1.14-10.1 -- Fix CVE-2014-4877 wget: FTP symlink arbitrary filesystem access (#1156135) +* Mon Jun 20 2016 Tomas Hozza - 1.14-13 +- Fix CVE-2016-4971 (#1345778) +- Added support for non-ASCII URLs (Related: CVE-2016-4971) + +* Mon Mar 21 2016 Tomas Hozza - 1.14-12 +- Fix wget to include Host header on CONNECT as required by HTTP 1.1 (#1203384) +- Run internal test suite during build (#1295846) +- Fix -nv being documented as synonym for two options (#1147572) + +* Fri Oct 24 2014 Tomas Hozza - 1.14-11 +- Fix CVE-2014-4877 wget: FTP symlink arbitrary filesystem access (#1156136) * Fri Jan 24 2014 Daniel Mach - 1.14-10 - Mass rebuild 2014-01-24