author     Brad King <brad.king@kitware.com>   2018-01-24 19:17:42 (GMT)
committer  Brad King <brad.king@kitware.com>   2018-01-24 19:18:16 (GMT)
commit     cd8e31a1bf7429514078c2923a1a9580113f9d4f (patch)
tree       6a0b8a01e3e0e52bb4b3abb66ce9e992614af6a7 /Utilities/cmcurl/lib/http.c
parent     e9c8ea75575afdb4e87b262641ee4071ef42b4c6 (diff)
parent     af9e654045f11028e50dac4781e297834129a749 (diff)
Merge branch 'upstream-curl' into update-curl
* upstream-curl: curl 2018-01-23 (d6c21c8e)
Diffstat (limited to 'Utilities/cmcurl/lib/http.c')
-rw-r--r--  Utilities/cmcurl/lib/http.c | 127
1 file changed, 43 insertions(+), 84 deletions(-)
diff --git a/Utilities/cmcurl/lib/http.c b/Utilities/cmcurl/lib/http.c
index 38227eb..a500767 100644
--- a/Utilities/cmcurl/lib/http.c
+++ b/Utilities/cmcurl/lib/http.c
@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
- * Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
+ * Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -73,7 +73,6 @@
#include "http_proxy.h"
#include "warnless.h"
#include "non-ascii.h"
-#include "conncache.h"
#include "pipeline.h"
#include "http2.h"
#include "connect.h"
@@ -715,7 +714,7 @@ Curl_http_output_auth(struct connectdata *conn,
if(!data->state.this_is_a_follow ||
conn->bits.netrc ||
!data->state.first_host ||
- data->set.http_disable_hostname_check_before_authentication ||
+ data->set.allow_auth_to_other_hosts ||
strcasecompare(data->state.first_host, conn->host.name)) {
result = output_auth_headers(conn, authhost, request, path, FALSE);
}
@@ -1637,6 +1636,14 @@ CURLcode Curl_add_custom_headers(struct connectdata *conn,
checkprefix("Transfer-Encoding:", headers->data))
/* HTTP/2 doesn't support chunked requests */
;
+ else if(checkprefix("Authorization:", headers->data) &&
+ /* be careful of sending this potentially sensitive header to
+ other hosts */
+ (data->state.this_is_a_follow &&
+ data->state.first_host &&
+ !data->set.allow_auth_to_other_hosts &&
+ !strcasecompare(data->state.first_host, conn->host.name)))
+ ;
else {
CURLcode result = Curl_add_bufferf(req_buffer, "%s\r\n",
headers->data);
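The added branch extends the same rule to application-supplied headers: a custom Authorization header passed in through CURLOPT_HTTPHEADER is now silently dropped when a followed redirect leaves the first host, unless CURLOPT_UNRESTRICTED_AUTH is enabled. A hedged sketch of the kind of handle this affects (the token and URL are placeholders):

  #include <curl/curl.h>

  static void fetch_with_token(void)
  {
    CURL *curl = curl_easy_init();
    struct curl_slist *hdrs = NULL;

    /* placeholder bearer token; with FOLLOWLOCATION enabled this header
       no longer follows the request to hosts other than api.example */
    hdrs = curl_slist_append(hdrs, "Authorization: Bearer 0123456789abcdef");

    if(curl) {
      curl_easy_setopt(curl, CURLOPT_URL, "https://api.example/resource");
      curl_easy_setopt(curl, CURLOPT_HTTPHEADER, hdrs);
      curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
      curl_easy_perform(curl);
      curl_easy_cleanup(curl);
    }
    curl_slist_free_all(hdrs);
  }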
@@ -3104,7 +3111,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
!(conn->handler->protocol & CURLPROTO_RTSP) &&
data->set.httpreq != HTTPREQ_HEAD) {
/* On HTTP 1.1, when connection is not to get closed, but no
- Content-Length nor Content-Encoding chunked have been
+ Content-Length nor Transfer-Encoding chunked have been
received, according to RFC2616 section 4.4 point 5, we
assume that the server will close the connection to
signal the end of the document. */
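The corrected comment is about response framing: when an HTTP/1.1 reply is not marked for close yet carries neither Content-Length nor Transfer-Encoding: chunked, RFC 2616 section 4.4 point 5 leaves connection close as the only end-of-body signal, so curl assumes the server will close the connection to terminate the document. An illustrative reply that has to be read until the connection ends:

  HTTP/1.1 200 OK
  Content-Type: text/plain

  ...body bytes until the server closes the connection...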
@@ -3387,12 +3394,14 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
}
}
else if(conn->handler->protocol & CURLPROTO_RTSP) {
+ char separator;
nc = sscanf(HEADER1,
- " RTSP/%d.%d %3d",
+ " RTSP/%1d.%1d%c%3d",
&rtspversion_major,
&conn->rtspversion,
+ &separator,
&k->httpcode);
- if(nc == 3) {
+ if((nc == 4) && (' ' == separator)) {
conn->rtspversion += 10 * rtspversion_major;
conn->httpversion = 11; /* For us, RTSP acts like HTTP 1.1 */
}
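The tightened format string limits each version number to a single digit and captures the byte after the version, so the parser can insist on a real space before the status code; the previous format would let a line such as "RTSP/1.15 200" smear extra digits into the minor version. A small standalone sketch of the same pattern, outside of curl:

  #include <stdio.h>

  static int parse_rtsp_status(const char *line)
  {
    int major = 0, minor = 0, code = 0;
    char separator = 0;
    int nc = sscanf(line, " RTSP/%1d.%1d%c%3d",
                    &major, &minor, &separator, &code);

    if((nc == 4) && (separator == ' '))
      return code;   /* "RTSP/1.0 200 OK" -> 200 */
    return -1;       /* "RTSP/1.15 200" is rejected */
  }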
@@ -3504,31 +3513,35 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
if(!k->ignorecl && !data->set.ignorecl &&
checkprefix("Content-Length:", k->p)) {
curl_off_t contentlength;
- if(!curlx_strtoofft(k->p + 15, NULL, 10, &contentlength)) {
+ CURLofft offt = curlx_strtoofft(k->p + 15, NULL, 10, &contentlength);
+
+ if(offt == CURL_OFFT_OK) {
if(data->set.max_filesize &&
contentlength > data->set.max_filesize) {
failf(data, "Maximum file size exceeded");
return CURLE_FILESIZE_EXCEEDED;
}
- if(contentlength >= 0) {
- k->size = contentlength;
- k->maxdownload = k->size;
- /* we set the progress download size already at this point
- just to make it easier for apps/callbacks to extract this
- info as soon as possible */
- Curl_pgrsSetDownloadSize(data, k->size);
- }
- else {
- /* Negative Content-Length is really odd, and we know it
- happens for example when older Apache servers send large
- files */
- streamclose(conn, "negative content-length");
- infof(data, "Negative content-length: %" CURL_FORMAT_CURL_OFF_T
- ", closing after transfer\n", contentlength);
+ k->size = contentlength;
+ k->maxdownload = k->size;
+ /* we set the progress download size already at this point
+ just to make it easier for apps/callbacks to extract this
+ info as soon as possible */
+ Curl_pgrsSetDownloadSize(data, k->size);
+ }
+ else if(offt == CURL_OFFT_FLOW) {
+ /* out of range */
+ if(data->set.max_filesize) {
+ failf(data, "Maximum file size exceeded");
+ return CURLE_FILESIZE_EXCEEDED;
}
+ streamclose(conn, "overflow content-length");
+ infof(data, "Overflow Content-Length: value!\n");
+ }
+ else {
+ /* negative or just rubbish - bad HTTP */
+ failf(data, "Invalid Content-Length: value");
+ return CURLE_WEIRD_SERVER_REPLY;
}
- else
- infof(data, "Illegal Content-Length: header\n");
}
/* check for Content-Type: header lines to get the MIME-type */
else if(checkprefix("Content-Type:", k->p)) {
@@ -3612,51 +3625,9 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
* of chunks, and a chunk-data set to zero signals the
* end-of-chunks. */
- char *start;
-
- /* Find the first non-space letter */
- start = k->p + 18;
-
- for(;;) {
- /* skip whitespaces and commas */
- while(*start && (ISSPACE(*start) || (*start == ',')))
- start++;
-
- if(checkprefix("chunked", start)) {
- k->chunk = TRUE; /* chunks coming our way */
-
- /* init our chunky engine */
- Curl_httpchunk_init(conn);
-
- start += 7;
- }
-
- if(k->auto_decoding)
- /* TODO: we only support the first mentioned compression for now */
- break;
-
- if(checkprefix("identity", start)) {
- k->auto_decoding = IDENTITY;
- start += 8;
- }
- else if(checkprefix("deflate", start)) {
- k->auto_decoding = DEFLATE;
- start += 7;
- }
- else if(checkprefix("gzip", start)) {
- k->auto_decoding = GZIP;
- start += 4;
- }
- else if(checkprefix("x-gzip", start)) {
- k->auto_decoding = GZIP;
- start += 6;
- }
- else
- /* unknown! */
- break;
-
- }
-
+ result = Curl_build_unencoding_stack(conn, k->p + 18, TRUE);
+ if(result)
+ return result;
}
else if(checkprefix("Content-Encoding:", k->p) &&
data->set.str[STRING_ENCODING]) {
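Both this hunk and the next replace the hand-rolled header scanning with a call to Curl_build_unencoding_stack, which walks the comma-separated token list and chains the matching decoders (chunked, gzip, deflate, identity) instead of recording only one. The helper itself is not part of this diff; a rough sketch of the kind of tokenizing it has to perform over a value such as "gzip, chunked" (illustrative only, not curl's implementation):

  #include <ctype.h>
  #include <stdio.h>

  /* print each token of a header value such as "gzip, chunked" */
  static void walk_encodings(const char *value)
  {
    const char *p = value;

    while(*p) {
      const char *start;

      while(*p && (isspace((unsigned char)*p) || *p == ','))
        p++;                       /* skip separators */
      start = p;
      while(*p && !isspace((unsigned char)*p) && *p != ',')
        p++;                       /* advance over one token */
      if(p > start)
        printf("encoding token: %.*s\n", (int)(p - start), start);
    }
  }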
@@ -3667,21 +3638,9 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
* 2616). zlib cannot handle compress. However, errors are
* handled further down when the response body is processed
*/
- char *start;
-
- /* Find the first non-space letter */
- start = k->p + 17;
- while(*start && ISSPACE(*start))
- start++;
-
- /* Record the content-encoding for later use */
- if(checkprefix("identity", start))
- k->auto_decoding = IDENTITY;
- else if(checkprefix("deflate", start))
- k->auto_decoding = DEFLATE;
- else if(checkprefix("gzip", start)
- || checkprefix("x-gzip", start))
- k->auto_decoding = GZIP;
+ result = Curl_build_unencoding_stack(conn, k->p + 17, FALSE);
+ if(result)
+ return result;
}
else if(checkprefix("Content-Range:", k->p)) {
/* Content-Range: bytes [num]-