diff --git a/backport-CVE-2023-32001.patch b/backport-CVE-2023-32001.patch
deleted file mode 100644
index 8827596e1ebe30f9eddf3f1e7f1bcc7b395c7896..0000000000000000000000000000000000000000
--- a/backport-CVE-2023-32001.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 0c667188e0c6cda615a036b8a2b4125f2c404dde Mon Sep 17 00:00:00 2001
-From: SaltyMilk
-Date: Mon, 10 Jul 2023 21:43:28 +0200
-Subject: [PATCH] fopen: optimize
-
-Closes #11419
----
- lib/fopen.c | 12 ++++++------
- 1 file changed, 6 insertions(+), 6 deletions(-)
-
-diff --git a/lib/fopen.c b/lib/fopen.c
-index c9c9e3d6e..b6e3caddd 100644
---- a/lib/fopen.c
-+++ b/lib/fopen.c
-@@ -56,13 +56,13 @@ CURLcode Curl_fopen(struct Curl_easy *data, const char *filename,
-   int fd = -1;
-   *tempname = NULL;
-
--  if(stat(filename, &sb) == -1 || !S_ISREG(sb.st_mode)) {
--    /* a non-regular file, fallback to direct fopen() */
--    *fh = fopen(filename, FOPEN_WRITETEXT);
--    if(*fh)
--      return CURLE_OK;
-+  *fh = fopen(filename, FOPEN_WRITETEXT);
-+  if(!*fh)
-     goto fail;
--  }
-+  if(fstat(fileno(*fh), &sb) == -1 || !S_ISREG(sb.st_mode))
-+    return CURLE_OK;
-+  fclose(*fh);
-+  *fh = NULL;
-
-   result = Curl_rand_hex(data, randsuffix, sizeof(randsuffix));
-   if(result)
---
-2.33.0
-
diff --git a/backport-CVE-2023-38039.patch b/backport-CVE-2023-38039.patch
deleted file mode 100644
index 03a879b9b9d81f9b1d60d23b69bcc18c9c501036..0000000000000000000000000000000000000000
--- a/backport-CVE-2023-38039.patch
+++ /dev/null
@@ -1,212 +0,0 @@
-From 3ee79c1674fd6f99e8efca52cd7510e08b766770 Mon Sep 17 00:00:00 2001
-From: Daniel Stenberg
-Date: Wed, 2 Aug 2023 23:34:48 +0200
-Subject: [PATCH] http: return error when receiving too large header set
-
-To avoid abuse. The limit is set to 300 KB for the accumulated size of
-all received HTTP headers for a single response. Incomplete research
-suggests that Chrome uses a 256-300 KB limit, while Firefox allows up to
-1MB.
-
-Closes #11582
----
- lib/c-hyper.c | 12 +++++++-----
- lib/cf-h1-proxy.c | 4 +++-
- lib/http.c | 34 ++++++++++++++++++++++++++++++----
- lib/http.h | 9 +++++++++
- lib/pingpong.c | 4 +++-
- lib/urldata.h | 17 ++++++++---------
- 6 files changed, 60 insertions(+), 20 deletions(-)
-
-diff --git a/lib/c-hyper.c b/lib/c-hyper.c
-index c29983c0b24a6..0b9d9ab478e67 100644
---- a/lib/c-hyper.c
-+++ b/lib/c-hyper.c
-@@ -182,8 +182,11 @@ static int hyper_each_header(void *userdata,
-     }
-   }
-
--  data->info.header_size += (curl_off_t)len;
--  data->req.headerbytecount += (curl_off_t)len;
-+  result = Curl_bump_headersize(data, len, FALSE);
-+  if(result) {
-+    data->state.hresult = result;
-+    return HYPER_ITER_BREAK;
-+  }
-   return HYPER_ITER_CONTINUE;
- }
-
-@@ -313,9 +316,8 @@ static CURLcode status_line(struct Curl_easy *data,
-     if(result)
-       return result;
-   }
--  data->info.header_size += (curl_off_t)len;
--  data->req.headerbytecount += (curl_off_t)len;
--  return CURLE_OK;
-+  result = Curl_bump_headersize(data, len, FALSE);
-+  return result;
- }
-
- /*
-diff --git a/lib/cf-h1-proxy.c b/lib/cf-h1-proxy.c
-index c9b157c9bccc7..b1d8cb618b7d1 100644
---- a/lib/cf-h1-proxy.c
-+++ b/lib/cf-h1-proxy.c
-@@ -587,7 +587,9 @@ static CURLcode recv_CONNECT_resp(struct Curl_cfilter *cf,
-       return result;
-     }
-
--    data->info.header_size += (long)perline;
-+    result = Curl_bump_headersize(data, perline, TRUE);
-+    if(result)
-+      return result;
-
-     /* Newlines are CRLF, so the CR is ignored as the line isn't
-        really terminated until the LF comes. Treat a following CR
-diff --git a/lib/http.c b/lib/http.c
-index f7c71afd7d847..bc78ff97435c4 100644
---- a/lib/http.c
-+++ b/lib/http.c
-@@ -3920,6 +3920,29 @@ static CURLcode verify_header(struct Curl_easy *data)
-   return CURLE_OK;
- }
-
-+CURLcode Curl_bump_headersize(struct Curl_easy *data,
-+                              size_t delta,
-+                              bool connect_only)
-+{
-+  size_t bad = 0;
-+  if(delta < MAX_HTTP_RESP_HEADER_SIZE) {
-+    if(!connect_only)
-+      data->req.headerbytecount += (unsigned int)delta;
-+    data->info.header_size += (unsigned int)delta;
-+    if(data->info.header_size > MAX_HTTP_RESP_HEADER_SIZE)
-+      bad = data->info.header_size;
-+  }
-+  else
-+    bad = data->info.header_size + delta;
-+  if(bad) {
-+    failf(data, "Too large response headers: %zu > %zu",
-+          bad, MAX_HTTP_RESP_HEADER_SIZE);
-+    return CURLE_RECV_ERROR;
-+  }
-+  return CURLE_OK;
-+}
-+
-+
- /*
-  * Read any HTTP header lines from the server and pass them to the client app.
-  */
-@@ -4173,8 +4196,9 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
-       if(result)
-         return result;
-
--      data->info.header_size += (long)headerlen;
--      data->req.headerbytecount += (long)headerlen;
-+      result = Curl_bump_headersize(data, headerlen, FALSE);
-+      if(result)
-+        return result;
-
-       /*
-        * When all the headers have been parsed, see if we should give
-@@ -4496,8 +4520,10 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
-     if(result)
-       return result;
-
--    data->info.header_size += Curl_dyn_len(&data->state.headerb);
--    data->req.headerbytecount += Curl_dyn_len(&data->state.headerb);
-+    result = Curl_bump_headersize(data, Curl_dyn_len(&data->state.headerb),
-+                                  FALSE);
-+    if(result)
-+      return result;
-
-     Curl_dyn_reset(&data->state.headerb);
-   }
-diff --git a/lib/http.h b/lib/http.h
-index df3b4e38b8a88..4aeabc345938c 100644
---- a/lib/http.h
-+++ b/lib/http.h
-@@ -64,6 +64,10 @@ extern const struct Curl_handler Curl_handler_wss;
-
- struct dynhds;
-
-+CURLcode Curl_bump_headersize(struct Curl_easy *data,
-+                              size_t delta,
-+                              bool connect_only);
-+
- /* Header specific functions */
- bool Curl_compareheader(const char *headerline, /* line to check */
-                         const char *header, /* header keyword _with_ colon */
-@@ -183,6 +187,11 @@ CURLcode Curl_http_auth_act(struct Curl_easy *data);
- #define EXPECT_100_THRESHOLD (1024*1024)
- #endif
-
-+/* MAX_HTTP_RESP_HEADER_SIZE is the maximum size of all response headers
-+   combined that libcurl allows for a single HTTP response, any HTTP
-+   version. This count includes CONNECT response headers. */
-+#define MAX_HTTP_RESP_HEADER_SIZE (300*1024)
-+
- #endif /* CURL_DISABLE_HTTP */
-
- /****************************************************************************
-diff --git a/lib/pingpong.c b/lib/pingpong.c
-index f3f7cb93cb9b7..523bbec189fe6 100644
---- a/lib/pingpong.c
-+++ b/lib/pingpong.c
-@@ -341,7 +341,9 @@ CURLcode Curl_pp_readresp(struct Curl_easy *data,
-   ssize_t clipamount = 0;
-   bool restart = FALSE;
-
--  data->req.headerbytecount += (long)gotbytes;
-+  result = Curl_bump_headersize(data, gotbytes, FALSE);
-+  if(result)
-+    return result;
-
-   pp->nread_resp += gotbytes;
-   for(i = 0; i < gotbytes; ptr++, i++) {
-diff --git a/lib/urldata.h b/lib/urldata.h
-index e5446b6840f63..d21aa415dc94b 100644
---- a/lib/urldata.h
-+++ b/lib/urldata.h
-@@ -629,17 +629,16 @@ struct SingleRequest {
-   curl_off_t bytecount; /* total number of bytes read */
-   curl_off_t writebytecount; /* number of bytes written */
-
--  curl_off_t headerbytecount; /* only count received headers */
--  curl_off_t deductheadercount; /* this amount of bytes doesn't count when we
--                                   check if anything has been transferred at
--                                   the end of a connection. We use this
--                                   counter to make only a 100 reply (without a
--                                   following second response code) result in a
--                                   CURLE_GOT_NOTHING error code */
--
-   curl_off_t pendingheader; /* this many bytes left to send is actually
-                                header and not body */
-   struct curltime start; /* transfer started at this time */
-+  unsigned int headerbytecount; /* only count received headers */
-+  unsigned int deductheadercount; /* this amount of bytes doesn't count when
-+                                     we check if anything has been transferred
-+                                     at the end of a connection. We use this
-+                                     counter to make only a 100 reply (without
-+                                     a following second response code) result
-+                                     in a CURLE_GOT_NOTHING error code */
-   enum {
-     HEADER_NORMAL, /* no bad header at all */
-     HEADER_PARTHEADER, /* part of the chunk is a bad header, the rest
-@@ -1089,7 +1088,6 @@ struct PureInfo {
-   int httpversion; /* the http version number X.Y = X*10+Y */
-   time_t filetime; /* If requested, this is might get set. Set to -1 if the
-                       time was unretrievable. */
--  curl_off_t header_size; /* size of read header(s) in bytes */
-   curl_off_t request_size; /* the amount of bytes sent in the request(s) */
-   unsigned long proxyauthavail; /* what proxy auth types were announced */
-   unsigned long httpauthavail; /* what host auth types were announced */
-@@ -1097,6 +1095,7 @@ struct PureInfo {
-   char *contenttype; /* the content type of the object */
-   char *wouldredirect; /* URL this would've been redirected to if asked to */
-   curl_off_t retry_after; /* info from Retry-After: header */
-+  unsigned int header_size; /* size of read header(s) in bytes */
-
-   /* PureInfo members 'conn_primary_ip', 'conn_primary_port', 'conn_local_ip'
-      and, 'conn_local_port' are copied over from the connectdata struct in
-
diff --git a/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch b/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch
deleted file mode 100644
index 129e9cecbdac582793a39fecc9ae8338db407972..0000000000000000000000000000000000000000
--- a/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch
+++ /dev/null
@@ -1,112 +0,0 @@
-From 49e244318672c688097c1bf601a110005cd9a6a8 Mon Sep 17 00:00:00 2001
-From: Daniel Stenberg
-Date: Mon, 31 Jul 2023 10:07:35 +0200
-Subject: [PATCH] urlapi: make sure zoneid is also duplicated in curl_url_dup
-
-Add several curl_url_dup() tests to the general lib1560 test.
-
-Reported-by: Rutger Broekhoff
-Bug: https://curl.se/mail/lib-2023-07/0047.html
-Closes #11549
-
-Conflict: tests/libtest/lib1560.c for context adapt
-Reference: https://github.com/curl/curl/commit/49e244318672c688097c1bf601a110005cd9a6a8
----
- lib/urlapi.c | 1 +
- tests/libtest/lib1560.c | 67 +++++++++++++++++++++++++++++++++++++++++
- 2 files changed, 68 insertions(+)
-
-diff --git a/lib/urlapi.c b/lib/urlapi.c
-index cd423c335d88f..b1a126d548213 100644
---- a/lib/urlapi.c
-+++ b/lib/urlapi.c
-@@ -1385,6 +1385,7 @@ CURLU *curl_url_dup(const CURLU *in)
-     DUP(u, in, path);
-     DUP(u, in, query);
-     DUP(u, in, fragment);
-+    DUP(u, in, zoneid);
-     u->portnum = in->portnum;
-   }
-   return u;
-diff --git a/tests/libtest/lib1560.c b/tests/libtest/lib1560.c
-index 0eca0fda72d0b..ff03bec9391a4 100644
---- a/tests/libtest/lib1560.c
-+++ b/tests/libtest/lib1560.c
-@@ -1672,10 +1672,77 @@ static int huge(void)
-   return error;
- }
-
-+static int urldup(void)
-+{
-+  const char *url[] = {
-+    "http://"
-+    "user:pwd@"
-+    "[2a04:4e42:e00::347%25eth0]"
-+    ":80"
-+    "/path"
-+    "?query"
-+    "#fraggie",
-+    "https://example.com",
-+    "https://user@example.com",
-+    "https://user.pwd@example.com",
-+    "https://user.pwd@example.com:1234",
-+    "https://example.com:1234",
-+    "example.com:1234",
-+    "https://user.pwd@example.com:1234/path?query#frag",
-+    NULL
-+  };
-+  CURLU *copy = NULL;
-+  char *h_str = NULL, *copy_str = NULL;
-+  CURLU *h = curl_url();
-+  int i;
-+
-+  if(!h)
-+    goto err;
-+
-+  for(i = 0; url[i]; i++) {
-+    CURLUcode rc = curl_url_set(h, CURLUPART_URL, url[i],
-+                                CURLU_GUESS_SCHEME);
-+    if(rc)
-+      goto err;
-+    copy = curl_url_dup(h);
-+
-+    rc = curl_url_get(h, CURLUPART_URL, &h_str, 0);
-+    if(rc)
-+      goto err;
-+
-+    rc = curl_url_get(copy, CURLUPART_URL, &copy_str, 0);
-+    if(rc)
-+      goto err;
-+
-+    if(strcmp(h_str, copy_str)) {
-+      printf("Original: %s\nParsed: %s\nCopy: %s\n",
-+             url[i], h_str, copy_str);
-+      goto err;
-+    }
-+    curl_free(copy_str);
-+    curl_free(h_str);
-+    curl_url_cleanup(copy);
-+    copy_str = NULL;
-+    h_str = NULL;
-+    copy = NULL;
-+  }
-+  curl_url_cleanup(h);
-+  return 0;
-+err:
-+  curl_free(copy_str);
-+  curl_free(h_str);
-+  curl_url_cleanup(copy);
-+  curl_url_cleanup(h);
-+  return 1;
-+}
-+
- int test(char *URL)
- {
-   (void)URL; /* not used */
-
-+  if(urldup())
-+    return 11;
-+
-   if(get_url())
-     return 3;
-
diff --git a/backport-vtls-avoid-memory-leak-if-sha256-call-fails.patch b/backport-vtls-avoid-memory-leak-if-sha256-call-fails.patch
deleted file mode 100644
index bf475bc4ea4d3558393fcfdea8fbb54eaf46f29f..0000000000000000000000000000000000000000
--- a/backport-vtls-avoid-memory-leak-if-sha256-call-fails.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From a4a5e438ae533c9af5e97457ae424c9189545105 Mon Sep 17 00:00:00 2001
-From: Daniel Stenberg
-Date: Mon, 12 Jun 2023 14:10:37 +0200
-Subject: [PATCH] vtls: avoid memory leak if sha256 call fails
-
-... in the pinned public key handling function.
-
-Reported-by: lizhuang0630 on github
-Fixes #11306
-Closes #11307
-
-Conflict: NA
-Reference: https://github.com/curl/curl/commit/a4a5e438ae533c9af5e97457ae424c9189545105
----
- lib/vtls/vtls.c | 12 +++++-------
- 1 file changed, 5 insertions(+), 7 deletions(-)
-
-diff --git a/lib/vtls/vtls.c b/lib/vtls/vtls.c
-index a4ff7d61a6193..cdd3a4fdc1c14 100644
---- a/lib/vtls/vtls.c
-+++ b/lib/vtls/vtls.c
-@@ -907,14 +907,12 @@ CURLcode Curl_pin_peer_pubkey(struct Curl_easy *data,
-     if(!sha256sumdigest)
-       return CURLE_OUT_OF_MEMORY;
-     encode = Curl_ssl->sha256sum(pubkey, pubkeylen,
--                        sha256sumdigest, CURL_SHA256_DIGEST_LENGTH);
-+                                 sha256sumdigest, CURL_SHA256_DIGEST_LENGTH);
-
--    if(encode != CURLE_OK)
--      return encode;
--
--    encode = Curl_base64_encode((char *)sha256sumdigest,
--                                CURL_SHA256_DIGEST_LENGTH, &encoded,
--                                &encodedlen);
-+    if(!encode)
-+      encode = Curl_base64_encode((char *)sha256sumdigest,
-+                                  CURL_SHA256_DIGEST_LENGTH, &encoded,
-+                                  &encodedlen);
-     Curl_safefree(sha256sumdigest);
-
-     if(encode)
diff --git a/curl-8.1.2.tar.xz b/curl-8.1.2.tar.xz
deleted file mode 100644
index df364de2ee888cda1584c187a75759e0493bf689..0000000000000000000000000000000000000000
Binary files a/curl-8.1.2.tar.xz and /dev/null differ
diff --git a/curl-8.4.0.tar.xz b/curl-8.4.0.tar.xz
new file mode 100644
index 0000000000000000000000000000000000000000..5f555058a7ea9a7ab106443156019f4913a11dd4
Binary files /dev/null and b/curl-8.4.0.tar.xz differ
diff --git a/curl.spec b/curl.spec
index 93185f0c03f9493ad09ec88ae73a4d7cb64ae852..21edca45d321d17a7c9fc00401f95a0c2cbbdbbf 100644
--- a/curl.spec
+++ b/curl.spec
@@ -5,8 +5,8 @@
 %global _configure ../configure
 
 Name: curl
-Version: 8.1.2
-Release: 4
+Version: 8.4.0
+Release: 1
 Summary: Curl is used in command lines or scripts to transfer data
 License: curl
 URL: https://curl.se/
@@ -15,10 +15,6 @@
 Source: https://curl.se/download/curl-%{version}.tar.xz
 Patch1: backport-0101-curl-7.32.0-multilib.patch
 Patch2: backport-curl-7.84.0-test3026.patch
 Patch4: backport-curl-7.88.0-tests-warnings.patch
-Patch5: backport-CVE-2023-32001.patch
-Patch6: backport-vtls-avoid-memory-leak-if-sha256-call-fails.patch
-Patch7: backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch
-Patch8: backport-CVE-2023-38039.patch
 BuildRequires: automake brotli-devel coreutils gcc groff krb5-devel
 BuildRequires: libidn2-devel libnghttp2-devel libpsl-devel
@@ -203,6 +199,12 @@ rm -rf ${RPM_BUILD_ROOT}%{_libdir}/libcurl.la
 %{_mandir}/man3/*
 
 %changelog
+* Wed Oct 11 2023 Funda Wang - 8.4.0-1
+- Type:CVE
+- CVE:CVE-2023-38545, CVE-2023-38546
+- SUG:NA
+- DESC:Upgrade to 8.4.0 to fix CVE-2023-38545, CVE-2023-38546
+
 * Thu Sep 14 2023 gaihuiying - 8.1.2-4
 - Type:CVE
 - CVE:CVE-2023-38039