diff --git a/SOURCES/squid-3.5.20-CVE-2019-12528.patch b/SOURCES/squid-3.5.20-CVE-2019-12528.patch
new file mode 100644
index 0000000..b8470af
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2019-12528.patch
@@ -0,0 +1,158 @@
+diff --git a/src/clients/FtpGateway.cc b/src/clients/FtpGateway.cc
+index 4f8319a..3a35784 100644
+--- a/src/clients/FtpGateway.cc
++++ b/src/clients/FtpGateway.cc
+@@ -543,8 +543,10 @@ ftpListParseParts(const char *buf, struct Ftp::GatewayFlags flags)
+ {
+     ftpListParts *p = NULL;
+     char *t = NULL;
+-    const char *ct = NULL;
+-    char *tokens[MAX_TOKENS];
++    struct FtpLineToken {
++        char *token = NULL; ///< token image copied from the received line
++        size_t pos = 0;  ///< token offset on the received line
++    } tokens[MAX_TOKENS];
+     int i;
+     int n_tokens;
+     static char tbuf[128];
+@@ -585,7 +587,8 @@ ftpListParseParts(const char *buf, struct Ftp::GatewayFlags flags)
+     }
+ 
+     for (t = strtok(xbuf, w_space); t && n_tokens < MAX_TOKENS; t = strtok(NULL, w_space)) {
+-        tokens[n_tokens] = xstrdup(t);
++        tokens[n_tokens].token = xstrdup(t);
++        tokens[n_tokens].pos = t - xbuf;
+         ++n_tokens;
+     }
+ 
+@@ -593,10 +596,10 @@ ftpListParseParts(const char *buf, struct Ftp::GatewayFlags flags)
+ 
+     /* locate the Month field */
+     for (i = 3; i < n_tokens - 2; ++i) {
+-        char *size = tokens[i - 1];
+-        char *month = tokens[i];
+-        char *day = tokens[i + 1];
+-        char *year = tokens[i + 2];
++        const char *size = tokens[i - 1].token;
++        char *month = tokens[i].token;
++        char *day = tokens[i + 1].token;
++        char *year = tokens[i + 2].token;
+ 
+         if (!is_month(month))
+             continue;
+@@ -610,30 +613,35 @@ ftpListParseParts(const char *buf, struct Ftp::GatewayFlags flags)
+         if (regexec(&scan_ftp_time, year, 0, NULL, 0) != 0) /* Yr | hh:mm */
+             continue;
+ 
+-        snprintf(tbuf, 128, "%s %2s %5s",
+-                 month, day, year);
++        const char *copyFrom = buf + tokens[i].pos;
+ 
+-        if (!strstr(buf, tbuf))
+-            snprintf(tbuf, 128, "%s %2s %-5s",
+-                     month, day, year);
++        // "MMM DD [ YYYY|hh:mm]" with at most two spaces between DD and YYYY
++        int dateSize = snprintf(tbuf, sizeof(tbuf), "%s %2s %5s", month, day, year);
++        bool isTypeA = (dateSize == 12) && (strncmp(copyFrom, tbuf, dateSize) == 0);
+ 
+-        char const *copyFrom = NULL;
++        // "MMM DD [YYYY|hh:mm]" with one space between DD and YYYY
++        dateSize = snprintf(tbuf, sizeof(tbuf), "%s %2s %-5s", month, day, year);
++        bool isTypeB = (dateSize == 12 || dateSize == 11) && (strncmp(copyFrom, tbuf, dateSize) == 0);
+ 
+-        if ((copyFrom = strstr(buf, tbuf))) {
+-            p->type = *tokens[0];
++        // TODO: replace isTypeA and isTypeB with a regex.
++        if (isTypeA || isTypeB) {
++            p->type = *tokens[0].token;
+             p->size = strtoll(size, NULL, 10);
++            const int finalDateSize = snprintf(tbuf, sizeof(tbuf), "%s %2s %5s", month, day, year);
++            assert(finalDateSize >= 0);
+             p->date = xstrdup(tbuf);
+ 
++            // point after tokens[i+2] :
++            copyFrom = buf + tokens[i + 2].pos + strlen(tokens[i + 2].token);
+             if (flags.skip_whitespace) {
+-                copyFrom += strlen(tbuf);
+-
+                 while (strchr(w_space, *copyFrom))
+                     ++copyFrom;
+             } else {
+                 /* XXX assumes a single space between date and filename
+                  * suggested by:  Nathan.Bailey@cc.monash.edu.au and
+                  * Mike Battersby <mike@starbug.bofh.asn.au> */
+-                copyFrom += strlen(tbuf) + 1;
++                if (strchr(w_space, *copyFrom))
++                    ++copyFrom;
+             }
+ 
+             p->name = xstrdup(copyFrom);
+@@ -651,45 +659,36 @@ ftpListParseParts(const char *buf, struct Ftp::GatewayFlags flags)
+ 
+     /* try it as a DOS listing, 04-05-70 09:33PM ... */
+     if (n_tokens > 3 &&
+-            regexec(&scan_ftp_dosdate, tokens[0], 0, NULL, 0) == 0 &&
+-            regexec(&scan_ftp_dostime, tokens[1], 0, NULL, 0) == 0) {
+-        if (!strcasecmp(tokens[2], "<dir>")) {
++            regexec(&scan_ftp_dosdate, tokens[0].token, 0, NULL, 0) == 0 &&
++            regexec(&scan_ftp_dostime, tokens[1].token, 0, NULL, 0) == 0) {
++        if (!strcasecmp(tokens[2].token, "<dir>")) {
+             p->type = 'd';
+         } else {
+             p->type = '-';
+-            p->size = strtoll(tokens[2], NULL, 10);
++            p->size = strtoll(tokens[2].token, NULL, 10);
+         }
+ 
+-        snprintf(tbuf, 128, "%s %s", tokens[0], tokens[1]);
++        snprintf(tbuf, sizeof(tbuf), "%s %s", tokens[0].token, tokens[1].token);
+         p->date = xstrdup(tbuf);
+ 
+         if (p->type == 'd') {
+-            /* Directory.. name begins with first printable after <dir> */
+-            ct = strstr(buf, tokens[2]);
+-            ct += strlen(tokens[2]);
+-
+-            while (xisspace(*ct))
+-                ++ct;
+-
+-            if (!*ct)
+-                ct = NULL;
++            // Directory.. name begins with first printable after <dir>
++            // Because of the "n_tokens > 3", the next printable after <dir>
++            // is stored at token[3]. No need for more checks here.
+         } else {
+-            /* A file. Name begins after size, with a space in between */
+-            snprintf(tbuf, 128, " %s %s", tokens[2], tokens[3]);
+-            ct = strstr(buf, tbuf);
+-
+-            if (ct) {
+-                ct += strlen(tokens[2]) + 2;
+-            }
++            // A file. Name begins after size, with a space in between.
++            // Also a space should exist before size.
++            // But there is no need to be very strict with spaces.
++            // The name is stored at token[3], take it from here.
+         }
+ 
+-        p->name = xstrdup(ct ? ct : tokens[3]);
++        p->name = xstrdup(tokens[3].token);
+         goto found;
+     }
+ 
+     /* Try EPLF format; carson@lehman.com */
+     if (buf[0] == '+') {
+-        ct = buf + 1;
++        const char *ct = buf + 1;
+         p->type = 0;
+ 
+         while (ct && *ct) {
+@@ -760,7 +759,7 @@ blank:
+ found:
+ 
+     for (i = 0; i < n_tokens; ++i)
+-        xfree(tokens[i]);
++        xfree(tokens[i].token);
+ 
+     if (!p->name)
+         ftpListPartsFree(&p);   /* cleanup */
diff --git a/SOURCES/squid-3.5.20-CVE-2020-15049.patch b/SOURCES/squid-3.5.20-CVE-2020-15049.patch
new file mode 100644
index 0000000..707b315
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2020-15049.patch
@@ -0,0 +1,531 @@
+From abdf3942a848b3de8c4fcdbccf15139b1ed0d9c2 Mon Sep 17 00:00:00 2001
+From: Lubos Uhliarik <luhliari@redhat.com>
+Date: Mon, 3 Aug 2020 16:48:15 +0200
+Subject: [PATCH] Fix for CVE-2020-15049
+
+---
+ src/HttpHeader.cc                    |  85 ++++++------
+ src/HttpHeaderTools.cc               |  27 ++++
+ src/HttpHeaderTools.h                |   8 +-
+ src/http/ContentLengthInterpreter.cc | 190 +++++++++++++++++++++++++++
+ src/http/ContentLengthInterpreter.h  |  66 ++++++++++
+ src/http/Makefile.am                 |   2 +
+ src/http/Makefile.in                 |   4 +-
+ 7 files changed, 337 insertions(+), 45 deletions(-)
+ create mode 100644 src/http/ContentLengthInterpreter.cc
+ create mode 100644 src/http/ContentLengthInterpreter.h
+
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index 7e8c77e..ef60c02 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -11,6 +11,7 @@
+ #include "squid.h"
+ #include "base64.h"
+ #include "globals.h"
++#include "http/ContentLengthInterpreter.h"
+ #include "HttpHdrCc.h"
+ #include "HttpHdrContRange.h"
+ #include "HttpHdrSc.h"
+@@ -588,7 +589,6 @@ int
+ HttpHeader::parse(const char *header_start, const char *header_end)
+ {
+     const char *field_ptr = header_start;
+-    HttpHeaderEntry *e, *e2;
+     int warnOnError = (Config.onoff.relaxed_header_parser <= 0 ? DBG_IMPORTANT : 2);
+ 
+     PROF_start(HttpHeaderParse);
+@@ -605,6 +605,7 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+         return reset();
+     }
+ 
++    Http::ContentLengthInterpreter clen(warnOnError);
+     /* common format headers are "<name>:[ws]<value>" lines delimited by <CRLF>.
+      * continuation lines start with a (single) space or tab */
+     while (field_ptr < header_end) {
+@@ -681,6 +682,7 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+             break;      /* terminating blank line */
+         }
+ 
++        HttpHeaderEntry *e;
+         if ((e = HttpHeaderEntry::parse(field_start, field_end)) == NULL) {
+             debugs(55, warnOnError, "WARNING: unparseable HTTP header field {" <<
+                    getStringPrefix(field_start, field_end) << "}");
+@@ -693,45 +695,19 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+             return reset();
+         }
+ 
+-        // XXX: RFC 7230 Section 3.3.3 item #4 requires sending a 502 error in
+-        // several cases that we do not yet cover. TODO: Rewrite to cover more.
+-        if (e->id == HDR_CONTENT_LENGTH && (e2 = findEntry(e->id)) != NULL) {
+-            if (e->value != e2->value) {
+-                int64_t l1, l2;
+-                debugs(55, warnOnError, "WARNING: found two conflicting content-length headers in {" <<
+-                       getStringPrefix(header_start, header_end) << "}");
+-
+-                if (!Config.onoff.relaxed_header_parser) {
+-                    delete e;
+-                    PROF_stop(HttpHeaderParse);
+-                    return reset();
+-                }
+ 
+-                if (!httpHeaderParseOffset(e->value.termedBuf(), &l1)) {
+-                    debugs(55, DBG_IMPORTANT, "WARNING: Unparseable content-length '" << e->value << "'");
+-                    delete e;
+-                    continue;
+-                } else if (!httpHeaderParseOffset(e2->value.termedBuf(), &l2)) {
+-                    debugs(55, DBG_IMPORTANT, "WARNING: Unparseable content-length '" << e2->value << "'");
+-                    delById(e2->id);
+-                } else {
+-                    if (l1 != l2)
+-                        conflictingContentLength_ = true;
+-                    delete e;
+-                    continue;
+-                }
+-            } else {
+-                debugs(55, warnOnError, "NOTICE: found double content-length header");
+-                delete e;
++        if (e->id == HDR_CONTENT_LENGTH && !clen.checkField(e->value)) {
++            delete e;
+ 
+-                if (Config.onoff.relaxed_header_parser)
+-                    continue;
++            if (Config.onoff.relaxed_header_parser)
++                continue; // clen has printed any necessary warnings
+ 
+-                PROF_stop(HttpHeaderParse);
+-                return reset();
+-            }
++            PROF_stop(HttpHeaderParse);
++            clean();
++            return 0;
+         }
+ 
++
+         if (e->id == HDR_OTHER && stringHasWhitespace(e->name.termedBuf())) {
+             debugs(55, warnOnError, "WARNING: found whitespace in HTTP header name {" <<
+                    getStringPrefix(field_start, field_end) << "}");
+@@ -746,6 +722,32 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+         addEntry(e);
+     }
+ 
++    if (clen.headerWideProblem) {
++        debugs(55, warnOnError, "WARNING: " << clen.headerWideProblem <<
++               " Content-Length field values in" <<
++               Raw("header", header_start, (size_t)(header_end - header_start)));
++    }
++
++    if (chunked()) {
++        // RFC 2616 section 4.4: ignore Content-Length with Transfer-Encoding
++        // RFC 7230 section 3.3.3 #3: Transfer-Encoding overwrites Content-Length
++        delById(HDR_CONTENT_LENGTH);
++
++        // and clen state becomes irrelevant
++    } else if (clen.sawBad) {
++        // ensure our callers do not accidentally see bad Content-Length values
++        delById(HDR_CONTENT_LENGTH);
++        conflictingContentLength_ = true; // TODO: Rename to badContentLength_.
++    } else if (clen.needsSanitizing) {
++        // RFC 7230 section 3.3.2: MUST either reject or ... [sanitize];
++        // ensure our callers see a clean Content-Length value or none at all
++        delById(HDR_CONTENT_LENGTH);
++        if (clen.sawGood) {
++            putInt64(HDR_CONTENT_LENGTH, clen.value);
++            debugs(55, 5, "sanitized Content-Length to be " << clen.value);
++        }
++    }
++
+     if (chunked()) {
+         // RFC 2616 section 4.4: ignore Content-Length with Transfer-Encoding
+         delById(HDR_CONTENT_LENGTH);
+@@ -1722,6 +1724,7 @@ HttpHeaderEntry::getInt() const
+     assert_eid (id);
+     assert (Headers[id].type == ftInt);
+     int val = -1;
++
+     int ok = httpHeaderParseInt(value.termedBuf(), &val);
+     httpHeaderNoteParsedEntry(id, value, !ok);
+     /* XXX: Should we check ok - ie
+@@ -1733,15 +1736,11 @@ HttpHeaderEntry::getInt() const
+ int64_t
+ HttpHeaderEntry::getInt64() const
+ {
+-    assert_eid (id);
+-    assert (Headers[id].type == ftInt64);
+     int64_t val = -1;
+-    int ok = httpHeaderParseOffset(value.termedBuf(), &val);
+-    httpHeaderNoteParsedEntry(id, value, !ok);
+-    /* XXX: Should we check ok - ie
+-     * return ok ? -1 : value;
+-     */
+-    return val;
++
++    const bool ok = httpHeaderParseOffset(value.termedBuf(), &val);
++    httpHeaderNoteParsedEntry(id, value, ok);
++    return val; // remains -1 if !ok (XXX: bad method API)
+ }
+ 
+ static void
+diff --git a/src/HttpHeaderTools.cc b/src/HttpHeaderTools.cc
+index d8c29d8..02087cd 100644
+--- a/src/HttpHeaderTools.cc
++++ b/src/HttpHeaderTools.cc
+@@ -188,6 +188,33 @@ httpHeaderParseInt(const char *start, int *value)
+     return 1;
+ }
+ 
++bool
++httpHeaderParseOffset(const char *start, int64_t *value, char **endPtr)
++{
++    char *end = nullptr;
++    errno = 0;
++
++    const int64_t res = strtoll(start, &end, 10);
++    if (errno && !res) {
++        debugs(66, 7, "failed to parse malformed offset in " << start);
++        return false;
++    }
++    if (errno == ERANGE && (res == LLONG_MIN || res == LLONG_MAX)) { // no overflow
++        debugs(66, 7, "failed to parse huge offset in " << start);
++        return false;
++    }
++    if (start == end) {
++        debugs(66, 7, "failed to parse empty offset");
++        return false;
++    }
++    *value = res;
++    if (endPtr)
++        *endPtr = end;
++    debugs(66, 7, "offset " << start << " parsed as " << res);
++    return true;
++}
++
++
+ int
+ httpHeaderParseOffset(const char *start, int64_t * value)
+ {
+diff --git a/src/HttpHeaderTools.h b/src/HttpHeaderTools.h
+index 509d940..2d97ad4 100644
+--- a/src/HttpHeaderTools.h
++++ b/src/HttpHeaderTools.h
+@@ -113,7 +113,13 @@ public:
+     bool quoted;
+ };
+ 
+-int httpHeaderParseOffset(const char *start, int64_t * off);
++/// A strtoll(10) wrapper that checks for strtoll() failures and other problems.
++/// XXX: This function is not fully compatible with some HTTP syntax rules.
++/// Just like strtoll(), allows whitespace prefix, a sign, and _any_ suffix.
++/// Requires at least one digit to be present.
++/// Sets "off" and "end" arguments if and only if no problems were found.
++/// \return true if and only if no problems were found.
++bool httpHeaderParseOffset(const char *start, int64_t *offPtr, char **endPtr = nullptr);
+ 
+ HttpHeaderFieldInfo *httpHeaderBuildFieldsInfo(const HttpHeaderFieldAttrs * attrs, int count);
+ void httpHeaderDestroyFieldsInfo(HttpHeaderFieldInfo * info, int count);
+diff --git a/src/http/ContentLengthInterpreter.cc b/src/http/ContentLengthInterpreter.cc
+new file mode 100644
+index 0000000..1d40f4a
+--- /dev/null
++++ b/src/http/ContentLengthInterpreter.cc
+@@ -0,0 +1,190 @@
++/*
++ * Copyright (C) 1996-2016 The Squid Software Foundation and contributors
++ *
++ * Squid software is distributed under GPLv2+ license and includes
++ * contributions from numerous individuals and organizations.
++ * Please see the COPYING and CONTRIBUTORS files for details.
++ */
++
++/* DEBUG: section 55    HTTP Header */
++
++#include "squid.h"
++#include "base/CharacterSet.h"
++#include "Debug.h"
++#include "http/ContentLengthInterpreter.h"
++#include "HttpHeaderTools.h"
++#include "SquidConfig.h"
++#include "SquidString.h"
++#include "StrList.h"
++
++Http::ContentLengthInterpreter::ContentLengthInterpreter(const int aDebugLevel):
++    value(-1),
++    headerWideProblem(nullptr),
++    debugLevel(aDebugLevel),
++    sawBad(false),
++    needsSanitizing(false),
++    sawGood(false)
++{
++}
++
++/// characters HTTP permits tolerant parsers to accept as delimiters
++static const CharacterSet &
++RelaxedDelimiterCharacters()
++{
++    // RFC 7230 section 3.5
++    // tolerant parser MAY accept any of SP, HTAB, VT (%x0B), FF (%x0C),
++    // or bare CR as whitespace between request-line fields
++    static const CharacterSet RelaxedDels =
++        (CharacterSet::SP +
++         CharacterSet::HTAB +
++         CharacterSet("VT,FF","\x0B\x0C") +
++         CharacterSet::CR).rename("relaxed-WSP");
++
++    return RelaxedDels;
++}
++
++const CharacterSet &
++Http::ContentLengthInterpreter::WhitespaceCharacters()
++{
++    return Config.onoff.relaxed_header_parser ?
++           RelaxedDelimiterCharacters() : CharacterSet::WSP;
++}
++
++const CharacterSet &
++Http::ContentLengthInterpreter::DelimiterCharacters()
++{
++    return Config.onoff.relaxed_header_parser ?
++           RelaxedDelimiterCharacters() : CharacterSet::SP;
++}
++
++/// checks whether all characters before the Content-Length number are allowed
++/// \returns the start of the digit sequence (or nil on errors)
++const char *
++Http::ContentLengthInterpreter::findDigits(const char *prefix, const char * const valueEnd) const
++{
++    // skip leading OWS in RFC 7230's `OWS field-value OWS`
++    const CharacterSet &whitespace = WhitespaceCharacters();
++    while (prefix < valueEnd) {
++        const auto ch = *prefix;
++        if (CharacterSet::DIGIT[ch])
++            return prefix; // common case: a pre-trimmed field value
++        if (!whitespace[ch])
++            return nullptr; // (trimmed) length does not start with a digit
++        ++prefix;
++    }
++    return nullptr; // empty or whitespace-only value
++}
++
++/// checks whether all characters after the Content-Length are allowed
++bool
++Http::ContentLengthInterpreter::goodSuffix(const char *suffix, const char * const end) const
++{
++    // optimize for the common case that does not need delimiters
++    if (suffix == end)
++        return true;
++
++    for (const CharacterSet &delimiters = DelimiterCharacters();
++            suffix < end; ++suffix) {
++        if (!delimiters[*suffix])
++            return false;
++    }
++    // needsSanitizing = true; // TODO: Always remove trailing whitespace?
++    return true; // including empty suffix
++}
++
++/// handles a single-token Content-Length value
++/// rawValue null-termination requirements are those of httpHeaderParseOffset()
++bool
++Http::ContentLengthInterpreter::checkValue(const char *rawValue, const int valueSize)
++{
++    Must(!sawBad);
++
++    const auto valueEnd = rawValue + valueSize;
++
++    const auto digits = findDigits(rawValue, valueEnd);
++    if (!digits) {
++        debugs(55, debugLevel, "WARNING: Leading garbage or empty value in" << Raw("Content-Length", rawValue, valueSize));
++        sawBad = true;
++        return false;
++    }
++
++    int64_t latestValue = -1;
++    char *suffix = nullptr;
++
++    if (!httpHeaderParseOffset(digits, &latestValue, &suffix)) {
++        debugs(55, DBG_IMPORTANT, "WARNING: Malformed" << Raw("Content-Length", rawValue, valueSize));
++        sawBad = true;
++        return false;
++    }
++
++    if (latestValue < 0) {
++        debugs(55, debugLevel, "WARNING: Negative" << Raw("Content-Length", rawValue, valueSize));
++        sawBad = true;
++        return false;
++    }
++
++    // check for garbage after the number
++    if (!goodSuffix(suffix, valueEnd)) {
++        debugs(55, debugLevel, "WARNING: Trailing garbage in" << Raw("Content-Length", rawValue, valueSize));
++        sawBad = true;
++        return false;
++    }
++
++    if (sawGood) {
++        /* we have found at least two, possibly identical values */
++
++        needsSanitizing = true; // replace identical values with a single value
++
++        const bool conflicting = value != latestValue;
++        if (conflicting)
++            headerWideProblem = "Conflicting"; // overwrite any lesser problem
++        else if (!headerWideProblem) // preserve a possibly worse problem
++            headerWideProblem = "Duplicate";
++
++        // with relaxed_header_parser, identical values are permitted
++        sawBad = !Config.onoff.relaxed_header_parser || conflicting;
++        return false; // conflicting or duplicate
++    }
++
++    sawGood = true;
++    value = latestValue;
++    return true;
++}
++
++/// handles Content-Length: a, b, c
++bool
++Http::ContentLengthInterpreter::checkList(const String &list)
++{
++    Must(!sawBad);
++
++    if (!Config.onoff.relaxed_header_parser) {
++        debugs(55, debugLevel, "WARNING: List-like" << Raw("Content-Length", list.rawBuf(), list.size()));
++        sawBad = true;
++        return false;
++    }
++
++    needsSanitizing = true; // remove extra commas (at least)
++
++    const char *pos = nullptr;
++    const char *item = nullptr;
++    int ilen = -1;
++    while (strListGetItem(&list, ',', &item, &ilen, &pos)) {
++        if (!checkValue(item, ilen) && sawBad)
++            break;
++        // keep going after a duplicate value to find conflicting ones
++    }
++    return false; // no need to keep this list field; it will be sanitized away
++}
++
++bool
++Http::ContentLengthInterpreter::checkField(const String &rawValue)
++{
++    if (sawBad)
++        return false; // one rotten apple is enough to spoil all of them
++
++    // TODO: Optimize by always parsing the first integer first.
++    return rawValue.pos(',') ?
++           checkList(rawValue) :
++           checkValue(rawValue.rawBuf(), rawValue.size());
++}
++
+diff --git a/src/http/ContentLengthInterpreter.h b/src/http/ContentLengthInterpreter.h
+new file mode 100644
+index 0000000..ba7080c
+--- /dev/null
++++ b/src/http/ContentLengthInterpreter.h
+@@ -0,0 +1,66 @@
++/*
++ * Copyright (C) 1996-2016 The Squid Software Foundation and contributors
++ *
++ * Squid software is distributed under GPLv2+ license and includes
++ * contributions from numerous individuals and organizations.
++ * Please see the COPYING and CONTRIBUTORS files for details.
++ */
++
++#ifndef SQUID_SRC_HTTP_CONTENTLENGTH_INTERPRETER_H
++#define SQUID_SRC_HTTP_CONTENTLENGTH_INTERPRETER_H
++
++class String;
++
++namespace Http
++{
++
++/// Finds the intended Content-Length value while parsing message-header fields.
++/// Deals with complications such as value lists and/or repeated fields.
++class ContentLengthInterpreter
++{
++public:
++    explicit ContentLengthInterpreter(const int aDebugLevel);
++
++    /// updates history based on the given message-header field
++    /// \return true iff the field should be added/remembered for future use
++    bool checkField(const String &field);
++
++    /// intended Content-Length value if sawGood is set and sawBad is not set
++    /// meaningless otherwise
++    int64_t value;
++
++    /* for debugging (declared here to minimize padding) */
++    const char *headerWideProblem; ///< worst header-wide problem found (or nil)
++    const int debugLevel; ///< debugging level for certain warnings
++
++    /// whether a malformed Content-Length value was present
++    bool sawBad;
++
++    /// whether all remembered fields should be removed
++    /// removed fields ought to be replaced with the intended value (if known)
++    /// irrelevant if sawBad is set
++    bool needsSanitizing;
++
++    /// whether a valid field value was present, possibly among problematic ones
++    /// irrelevant if sawBad is set
++    bool sawGood;
++
++    /// Whitespace between protocol elements in restricted contexts like
++    /// request line, status line, asctime-date, and credentials
++    /// Seen in RFCs as SP but may be "relaxed" by us.
++    /// See also: WhitespaceCharacters().
++    /// XXX: Misnamed and overused.
++    static const CharacterSet &DelimiterCharacters();
++
++    static const CharacterSet &WhitespaceCharacters();
++protected:
++    const char *findDigits(const char *prefix, const char *valueEnd) const;
++    bool goodSuffix(const char *suffix, const char * const end) const;
++    bool checkValue(const char *start, const int size);
++    bool checkList(const String &list);
++};
++
++} // namespace Http
++
++#endif /* SQUID_SRC_HTTP_CONTENTLENGTH_INTERPRETER_H */
++
+diff --git a/src/http/Makefile.am b/src/http/Makefile.am
+index 7887ef0..78b503e 100644
+--- a/src/http/Makefile.am
++++ b/src/http/Makefile.am
+@@ -11,6 +11,8 @@ include $(top_srcdir)/src/TestHeaders.am
+ noinst_LTLIBRARIES = libsquid-http.la
+ 
+ libsquid_http_la_SOURCES = \
++	ContentLengthInterpreter.cc \
++	ContentLengthInterpreter.h \
+ 	MethodType.cc \
+ 	MethodType.h \
+ 	ProtocolVersion.h \
+diff --git a/src/http/Makefile.in b/src/http/Makefile.in
+index f5b62fb..c7891ae 100644
+--- a/src/http/Makefile.in
++++ b/src/http/Makefile.in
+@@ -160,7 +160,7 @@ CONFIG_CLEAN_VPATH_FILES =
+ LTLIBRARIES = $(noinst_LTLIBRARIES)
+ libsquid_http_la_LIBADD =
+ am_libsquid_http_la_OBJECTS = MethodType.lo StatusCode.lo \
+-	StatusLine.lo
++	StatusLine.lo ContentLengthInterpreter.lo
+ libsquid_http_la_OBJECTS = $(am_libsquid_http_la_OBJECTS)
+ AM_V_lt = $(am__v_lt_@AM_V@)
+ am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
+@@ -694,6 +694,8 @@ COMPAT_LIB = $(top_builddir)/compat/libcompat-squid.la $(LIBPROFILER)
+ subst_perlshell = sed -e 's,[@]PERL[@],$(PERL),g' <$(srcdir)/$@.pl.in >$@ || ($(RM) -f $@ ; exit 1)
+ noinst_LTLIBRARIES = libsquid-http.la
+ libsquid_http_la_SOURCES = \
++	ContentLengthInterpreter.cc \
++	ContentLengthInterpreter.h \
+ 	MethodType.cc \
+ 	MethodType.h \
+ 	ProtocolVersion.h \
+-- 
+2.21.0
+
diff --git a/SOURCES/squid-3.5.20-CVE-2020-15810.patch b/SOURCES/squid-3.5.20-CVE-2020-15810.patch
new file mode 100644
index 0000000..b80108f
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2020-15810.patch
@@ -0,0 +1,52 @@
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index ef60c02..ce55a6f 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -12,6 +12,7 @@
+ #include "base64.h"
+ #include "globals.h"
+ #include "http/ContentLengthInterpreter.h"
++#include "base/CharacterSet.h"
+ #include "HttpHdrCc.h"
+ #include "HttpHdrContRange.h"
+ #include "HttpHdrSc.h"
+@@ -707,18 +708,6 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+             return 0;
+         }
+ 
+-
+-        if (e->id == HDR_OTHER && stringHasWhitespace(e->name.termedBuf())) {
+-            debugs(55, warnOnError, "WARNING: found whitespace in HTTP header name {" <<
+-                   getStringPrefix(field_start, field_end) << "}");
+-
+-            if (!Config.onoff.relaxed_header_parser) {
+-                delete e;
+-                PROF_stop(HttpHeaderParse);
+-                return reset();
+-            }
+-        }
+-
+         addEntry(e);
+     }
+ 
+@@ -1653,6 +1642,20 @@ HttpHeaderEntry::parse(const char *field_start, const char *field_end)
+             return NULL;
+     }
+ 
++    /* RFC 7230 section 3.2:
++     *
++     *  header-field   = field-name ":" OWS field-value OWS
++     *  field-name     = token
++     *  token          = 1*TCHAR
++     */
++    for (const char *pos = field_start; pos < (field_start+name_len); ++pos) {
++        if (!CharacterSet::TCHAR[*pos]) {
++            debugs(55, 2, "found header with invalid characters in " <<
++                   Raw("field-name", field_start, min(name_len,100)) << "...");
++            return nullptr;
++        }
++    }
++
+     /* now we know we can parse it */
+ 
+     debugs(55, 9, "parsing HttpHeaderEntry: near '" <<  getStringPrefix(field_start, field_end) << "'");
diff --git a/SOURCES/squid-3.5.20-CVE-2020-15811.patch b/SOURCES/squid-3.5.20-CVE-2020-15811.patch
new file mode 100644
index 0000000..445bebc
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2020-15811.patch
@@ -0,0 +1,170 @@
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index ce55a6f..6ce06f2 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -470,6 +470,7 @@ HttpHeader::operator =(const HttpHeader &other)
+         update(&other, NULL); // will update the mask as well
+         len = other.len;
+         conflictingContentLength_ = other.conflictingContentLength_;
++        teUnsupported_ = other.teUnsupported_;
+     }
+     return *this;
+ }
+@@ -519,6 +520,7 @@ HttpHeader::clean()
+     httpHeaderMaskInit(&mask, 0);
+     len = 0;
+     conflictingContentLength_ = false;
++    teUnsupported_ = false;
+     PROF_stop(HttpHeaderClean);
+ }
+ 
+@@ -717,12 +719,24 @@ HttpHeader::parse(const char *header_start, const char *header_end)
+                Raw("header", header_start, (size_t)(header_end - header_start)));
+     }
+ 
+-    if (chunked()) {
++
++
++    String rawTe;
++    if (getByIdIfPresent(HDR_TRANSFER_ENCODING, &rawTe)) {
+         // RFC 2616 section 4.4: ignore Content-Length with Transfer-Encoding
+         // RFC 7230 section 3.3.3 #3: Transfer-Encoding overwrites Content-Length
+         delById(HDR_CONTENT_LENGTH);
+-
+         // and clen state becomes irrelevant
++
++        if (rawTe == "chunked") {
++            ; // leave header present for chunked() method
++        } else if (rawTe == "identity") { // deprecated. no coding
++            delById(HDR_TRANSFER_ENCODING);
++        } else {
++            // This also rejects multiple encodings until we support them properly.
++            debugs(55, warnOnError, "WARNING: unsupported Transfer-Encoding used by client: " << rawTe);
++            teUnsupported_ = true;
++        }
+     } else if (clen.sawBad) {
+         // ensure our callers do not accidentally see bad Content-Length values
+         delById(HDR_CONTENT_LENGTH);
+@@ -1084,6 +1098,18 @@ HttpHeader::getStrOrList(http_hdr_type id) const
+     return String();
+ }
+ 
++bool
++HttpHeader::getByIdIfPresent(http_hdr_type id, String *result) const
++{
++    if (id == HDR_BAD_HDR)
++        return false;
++    if (!has(id))
++        return false;
++    if (result)
++        *result = getStrOrList(id);
++    return true;
++}
++
+ /*
+  * Returns the value of the specified header and/or an undefined String.
+  */
+diff --git a/src/HttpHeader.h b/src/HttpHeader.h
+index 836a26f..c49b105 100644
+--- a/src/HttpHeader.h
++++ b/src/HttpHeader.h
+@@ -239,6 +239,9 @@ public:
+     bool getByNameIfPresent(const char *name, String &value) const;
+     String getByNameListMember(const char *name, const char *member, const char separator) const;
+     String getListMember(http_hdr_type id, const char *member, const char separator) const;
++    /// returns true iff a [possibly empty] field identified by id is there
++    /// when returning true, also sets the `result` parameter (if it is not nil)
++    bool getByIdIfPresent(http_hdr_type id, String *result) const;
+     int has(http_hdr_type id) const;
+     void putInt(http_hdr_type id, int number);
+     void putInt64(http_hdr_type id, int64_t number);
+@@ -267,7 +270,13 @@ public:
+     int hasListMember(http_hdr_type id, const char *member, const char separator) const;
+     int hasByNameListMember(const char *name, const char *member, const char separator) const;
+     void removeHopByHopEntries();
+-    inline bool chunked() const; ///< whether message uses chunked Transfer-Encoding
++
++    /// whether the message uses chunked Transfer-Encoding
++    /// optimized implementation relies on us rejecting/removing other codings
++    bool chunked() const { return has(HDR_TRANSFER_ENCODING); }
++
++    /// whether message used an unsupported and/or invalid Transfer-Encoding
++    bool unsupportedTe() const { return teUnsupported_; }
+ 
+     /* protected, do not use these, use interface functions instead */
+     std::vector<HttpHeaderEntry *> entries;     /**< parsed fields in raw format */
+@@ -282,6 +291,9 @@ protected:
+ private:
+     HttpHeaderEntry *findLastEntry(http_hdr_type id) const;
+     bool conflictingContentLength_; ///< found different Content-Length fields
++    /// unsupported encoding, unnecessary syntax characters, and/or
++    /// invalid field-value found in Transfer-Encoding header
++    bool teUnsupported_ = false;
+ };
+ 
+ int httpHeaderParseQuotedString(const char *start, const int len, String *val);
+@@ -293,13 +305,6 @@ int httpHeaderHasByNameListMember(const HttpHeader * hdr, const char *name, cons
+ void httpHeaderUpdate(HttpHeader * old, const HttpHeader * fresh, const HttpHeaderMask * denied_mask);
+ void httpHeaderCalcMask(HttpHeaderMask * mask, http_hdr_type http_hdr_type_enums[], size_t count);
+ 
+-inline bool
+-HttpHeader::chunked() const
+-{
+-    return has(HDR_TRANSFER_ENCODING) &&
+-           hasListMember(HDR_TRANSFER_ENCODING, "chunked", ',');
+-}
+-
+ void httpHeaderInitModule(void);
+ void httpHeaderCleanModule(void);
+ 
+diff --git a/src/client_side.cc b/src/client_side.cc
+index 261abdf..6858eb4 100644
+--- a/src/client_side.cc
++++ b/src/client_side.cc
+@@ -2581,9 +2581,7 @@ clientProcessRequest(ConnStateData *conn, HttpParser *hp, ClientSocketContext *c
+     ClientHttpRequest *http = context->http;
+     HttpRequest::Pointer request;
+     bool notedUseOfBuffer = false;
+-    bool chunked = false;
+     bool mustReplyToOptions = false;
+-    bool unsupportedTe = false;
+     bool expectBody = false;
+ 
+     // temporary hack to avoid splitting this huge function with sensitive code
+@@ -2767,13 +2765,7 @@ clientProcessRequest(ConnStateData *conn, HttpParser *hp, ClientSocketContext *c
+     // TODO: this effectively obsoletes a lot of conn->FOO copying. That needs cleaning up later.
+     request->clientConnectionManager = conn;
+ 
+-    if (request->header.chunked()) {
+-        chunked = true;
+-    } else if (request->header.has(HDR_TRANSFER_ENCODING)) {
+-        const String te = request->header.getList(HDR_TRANSFER_ENCODING);
+-        // HTTP/1.1 requires chunking to be the last encoding if there is one
+-        unsupportedTe = te.size() && te != "identity";
+-    } // else implied identity coding
++    const auto unsupportedTe = request->header.unsupportedTe();
+ 
+     mustReplyToOptions = (method == Http::METHOD_OPTIONS) &&
+                          (request->header.getInt64(HDR_MAX_FORWARDS) == 0);
+@@ -2791,6 +2783,7 @@ clientProcessRequest(ConnStateData *conn, HttpParser *hp, ClientSocketContext *c
+         return;
+     }
+ 
++    const auto chunked = request->header.chunked();
+     if (!chunked && !clientIsContentLengthValid(request.getRaw())) {
+         clientStreamNode *node = context->getClientReplyContext();
+         clientReplyContext *repContext = dynamic_cast<clientReplyContext *>(node->data.getRaw());
+diff --git a/src/http.cc b/src/http.cc
+index 08531dc..f0fe648 100644
+--- a/src/http.cc
++++ b/src/http.cc
+@@ -1296,6 +1296,9 @@ HttpStateData::continueAfterParsingHeader()
+             } else if (vrep->header.conflictingContentLength()) {
+                 fwd->dontRetry(true);
+                 error = ERR_INVALID_RESP;
++            } else if (vrep->header.unsupportedTe()) {
++                fwd->dontRetry(true);
++                error = ERR_INVALID_RESP;
+             } else {
+                 return true; // done parsing, got reply, and no error
+             }
diff --git a/SOURCES/squid-3.5.20-CVE-2020-24606.patch b/SOURCES/squid-3.5.20-CVE-2020-24606.patch
new file mode 100644
index 0000000..ad74097
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2020-24606.patch
@@ -0,0 +1,20 @@
+diff --git a/src/peer_digest.cc b/src/peer_digest.cc
+index 1b81fe7..25a18e0 100644
+--- a/src/peer_digest.cc
++++ b/src/peer_digest.cc
+@@ -469,6 +469,15 @@ peerDigestHandleReply(void *data, StoreIOBuffer receivedData)
+ 
+     } while (cbdataReferenceValid(fetch) && prevstate != fetch->state && fetch->bufofs > 0);
+ 
++    // Check for EOF here, thus giving the parser one extra run. We could avoid this overhead by
++    // checking at the beginning of this function. However, in this case, we would have to require
++    // that the parser does not regard EOF as a special condition (it is true now but may change
++    // in the future).
++    if (!receivedData.length) { // EOF
++        peerDigestFetchAbort(fetch, fetch->buf, "premature end of digest reply");
++        return;
++    }
++
+     /* Update the copy offset */
+     fetch->offset += receivedData.length;
+ 
diff --git a/SOURCES/squid-3.5.20-CVE-2020-8449-and-8450.patch b/SOURCES/squid-3.5.20-CVE-2020-8449-and-8450.patch
new file mode 100644
index 0000000..645931c
--- /dev/null
+++ b/SOURCES/squid-3.5.20-CVE-2020-8449-and-8450.patch
@@ -0,0 +1,49 @@
+diff --git a/src/client_side.cc b/src/client_side.cc
+index 01760f3..261abdf 100644
+--- a/src/client_side.cc
++++ b/src/client_side.cc
+@@ -2018,6 +2018,23 @@ setLogUri(ClientHttpRequest * http, char const *uri, bool cleanUrl)
+     }
+ }
+ 
++static char *
++getHostHeader(const char *req_hdr)
++{
++    char *host = mime_get_header(req_hdr, "Host");
++    if (!host)
++        return NULL;
++
++    // check the header contents are valid
++    for(const char *c = host; *c != '\0'; ++c) {
++        // currently only used for pre-parse Host header, ensure valid domain[:port] or ip[:port]
++        static const CharacterSet hostChars = CharacterSet("host",":[].-_") + CharacterSet::ALPHA + CharacterSet::DIGIT;
++        if (!hostChars[*c])
++            return NULL; // error. line contains character not accepted in Host header
++    }
++    return host;
++}
++
+ static void
+ prepareAcceleratedURL(ConnStateData * conn, ClientHttpRequest *http, char *url, const char *req_hdr)
+ {
+@@ -2060,9 +2077,9 @@ prepareAcceleratedURL(ConnStateData * conn, ClientHttpRequest *http, char *url,
+ 
+     const bool switchedToHttps = conn->switchedToHttps();
+     const bool tryHostHeader = vhost || switchedToHttps;
+-    if (tryHostHeader && (host = mime_get_header(req_hdr, "Host")) != NULL) {
++    if (tryHostHeader && (host = getHostHeader(req_hdr)) != NULL && strlen(host) <= SQUIDHOSTNAMELEN) {
+         debugs(33, 5, "ACCEL VHOST REWRITE: vhost=" << host << " + vport=" << vport);
+-        char thost[256];
++        char thost[SQUIDHOSTNAMELEN + 6 /* ':' vport */];
+         if (vport > 0) {
+             thost[0] = '\0';
+             char *t = NULL;
+@@ -2119,7 +2136,7 @@ prepareTransparentURL(ConnStateData * conn, ClientHttpRequest *http, char *url,
+ 
+     /* BUG: Squid cannot deal with '*' URLs (RFC2616 5.1.2) */
+ 
+-    if ((host = mime_get_header(req_hdr, "Host")) != NULL) {
++    if ((host = getHostHeader(req_hdr)) != NULL) {
+         int url_sz = strlen(url) + 32 + Config.appendDomainLen +
+                      strlen(host);
+         http->uri = (char *)xcalloc(url_sz, 1);
diff --git a/SPECS/squid.spec b/SPECS/squid.spec
index 1dbe49b..19fcc56 100644
--- a/SPECS/squid.spec
+++ b/SPECS/squid.spec
@@ -4,7 +4,7 @@
 
 Name:     squid
 Version:  3.5.20
-Release:  15%{?dist}.1
+Release:  17%{?dist}.4
 Summary:  The Squid proxy caching server
 Epoch:    7
 # See CREDITS for breakdown of non GPLv2+ code
@@ -66,12 +66,24 @@ Patch500: squid-3.5.20-CVE-2019-13345.patch
 # https://bugzilla.redhat.com/show_bug.cgi?id=1582301
 Patch501: squid-3.5.20-CVE-2018-1000024.patch
 Patch502: squid-3.5.20-CVE-2018-1000027.patch
+Patch503: squid-3.5.20-CVE-2019-12525.patch
 # https://bugzilla.redhat.com/show_bug.cgi?id=1828361
-Patch503: squid-3.5.20-CVE-2020-11945.patch
+Patch504: squid-3.5.20-CVE-2020-11945.patch
 # https://bugzilla.redhat.com/show_bug.cgi?id=1828362
-Patch504: squid-3.5.20-CVE-2019-12519.patch
-# https://bugzilla.redhat.com/show_bug.cgi?id=1829772
-Patch505: squid-3.5.20-CVE-2019-12525.patch
+Patch505: squid-3.5.20-CVE-2019-12519.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1798540
+# https://bugzilla.redhat.com/show_bug.cgi?id=1798552
+Patch506: squid-3.5.20-CVE-2020-8449-and-8450.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1852550
+Patch507: squid-3.5.20-CVE-2020-15049.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1802517
+Patch508: squid-3.5.20-CVE-2019-12528.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1871705
+Patch509: squid-3.5.20-CVE-2020-24606.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1871700
+Patch510: squid-3.5.20-CVE-2020-15810.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1871702
+Patch511: squid-3.5.20-CVE-2020-15811.patch
 
 Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
 Requires: bash >= 2.0
@@ -162,9 +174,15 @@ migration and script which prepares squid for downgrade operation.
 %patch500 -p1 -b .CVE-2019-13345
 %patch501 -p1 -b .CVE-2018-1000024
 %patch502 -p1 -b .CVE-2018-1000027
-%patch503 -p1 -b .CVE-2020-11945
-%patch504 -p1 -b .CVE-2019-12519
-%patch505 -p1 -b .CVE-2019-12525
+%patch503 -p1 -b .CVE-2019-12525
+%patch504 -p1 -b .CVE-2020-11945
+%patch505 -p1 -b .CVE-2019-12519
+%patch506 -p1 -b .CVE-2020-8449-and-8450
+%patch507 -p1 -b .CVE-2020-15049
+%patch508 -p1 -b .CVE-2019-12528
+%patch509 -p1 -b .CVE-2020-24606
+%patch510 -p1 -b .CVE-2020-15810
+%patch511 -p1 -b .CVE-2020-15811
 
 # https://bugzilla.redhat.com/show_bug.cgi?id=1471140
 # Patch in the vendor documentation and used different location for documentation
@@ -394,12 +412,32 @@ fi
     chgrp squid /var/cache/samba/winbindd_privileged >/dev/null 2>&1 || :
 
 %changelog
-* Tue Apr 28 2020 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-15.1
-- Resolves: #1828359 - CVE-2020-11945 squid: improper access restriction upon
+* Fri Aug 28 2020 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-17.4
+- Resolves: #1872349 - CVE-2020-24606 squid: Improper Input Validation could
+  result in a DoS
+- Resolves: #1872327 - CVE-2020-15810 squid: HTTP Request Smuggling could
+  result in cache poisoning
+- Resolves: #1872342 - CVE-2020-15811 squid: HTTP Request Splitting could
+  result in cache poisoning
+
+* Fri Jul 31 2020 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-17.2
+- Resolves: #1802516 - CVE-2020-8449 squid: Improper input validation issues
+  in HTTP Request processing
+- Resolves: #1802515 - CVE-2020-8450 squid: Buffer overflow in a Squid acting
+  as reverse-proxy
+- Resolves: #1853129 - CVE-2020-15049 squid: request smuggling and poisoning
+  attack against the HTTP cache
+- Resolves: #1802517 - CVE-2019-12528 squid: Information Disclosure issue in
+  FTP Gateway
+
+* Tue Apr 28 2020 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-17
+- Resolves: #1828361 - CVE-2020-11945 squid: improper access restriction upon
   Digest Authentication nonce replay could lead to remote code execution
-- Resolves: #1828360 - CVE-2019-12519 squid: improper check for new member in
-  ESIExpression::Evaluate allows for stack buffer overflow
-- Resolves: #1829772 - CVE-2019-12525 squid: parsing of header
+- Resolves: #1828362 - CVE-2019-12519 squid: improper check for new member in
+  ESIExpression::Evaluate allows for stack buffer overflow
+
+* Fri Mar 27 2020 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-16
+- Resolves: #1738582 - CVE-2019-12525 squid: parsing of header 
   Proxy-Authentication leads to memory corruption
 
 * Thu Jul 25 2019 Lubos Uhliarik <luhliari@redhat.com> - 7:3.5.20-15