- I introduced a maximum limit for received HTTP headers. It is controlled by
  the define CURL_MAX_HTTP_HEADER, which is exposed in the public header file
  so that users can fairly easily rebuild libcurl with a modified limit. The
  rationale for a fixed limit is that libcurl realloc()s a buffer until it can
  hold a full header, so that it can call the header callback with the entire
  header; that also risks getting libcurl into trouble if a server, by mistake
  or deliberately, sends a header that more or less never ends. The limit is
  set to 100K.
commit 8646cecb78
parent 867a0de670
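Because libcurl buffers until it holds a complete header before invoking the
header callback, an application can treat each callback invocation as one full
header line. A minimal sketch of such a consumer, using the public
CURLOPT_HEADERFUNCTION API (the URL is a placeholder):

  #include <stdio.h>
  #include <curl/curl.h>

  /* Invoked once per complete header line; the realloc()ed buffer described
     above is what guarantees each line arrives in one piece. */
  static size_t on_header(void *ptr, size_t size, size_t nmemb, void *userdata)
  {
    (void)userdata;
    fwrite(ptr, size, nmemb, stdout);
    return size * nmemb;      /* tell libcurl everything was consumed */
  }

  int main(void)
  {
    CURL *curl;
    curl_global_init(CURL_GLOBAL_ALL);
    curl = curl_easy_init();
    if(curl) {
      curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");
      curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, on_header);
      curl_easy_perform(curl);
      curl_easy_cleanup(curl);
    }
    curl_global_cleanup();
    return 0;
  }

With this commit, a single header larger than CURL_MAX_HTTP_HEADER makes the
transfer fail (with CURLE_OUT_OF_MEMORY) instead of the callback ever seeing
an unbounded line.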
CHANGES (10 lines added)

@@ -6,6 +6,16 @@
 Changelog
 
+Daniel Stenberg (27 Sep 2009)
+- I introduced a maximum limit for received HTTP headers. It is controlled by
+  the define CURL_MAX_HTTP_HEADER which is even exposed in the public header
+  file to allow for users to fairly easy rebuild libcurl with a modified
+  limit. The rationale for a fixed limit is that libcurl is realloc()ing a
+  buffer to be able to put a full header into it, so that it can call the
+  header callback with the entire header, but that also risk getting it into
+  trouble if a server by mistake or willingly sends a header that is more or
+  less without an end. The limit is set to 100K.
+
 Daniel Stenberg (26 Sep 2009)
 - John P. McCaskey posted a bug report that showed how libcurl did wrong when
   saving received cookies with no given path, if the path in the request had a
@@ -11,6 +11,7 @@ This release includes the following changes:
 
  o -T. is now for non-blocking uploading from stdin
  o SYST handling on FTP for OS/400 FTP server cases
+ o libcurl refuses to read a single HTTP header longer than 100K
 
 This release includes the following bugfixes:
@@ -178,6 +178,15 @@ typedef int (*curl_progress_callback)(void *clientp,
    time for those who feel adventurous. */
 #define CURL_MAX_WRITE_SIZE 16384
 #endif
 
+#ifndef CURL_MAX_HTTP_HEADER
+/* The only reason to have a max limit for this is to avoid the risk of a bad
+   server feeding libcurl with a never-ending header that will cause reallocs
+   infinitely */
+#define CURL_MAX_HTTP_HEADER (100*1024)
+#endif
+
+
 /* This is a magic return code for the write callback that, when returned,
    will signal libcurl to pause receiving on the current transfer. */
 #define CURL_WRITEFUNC_PAUSE 0x10000001
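Since the define is #ifndef-guarded and sits in the public header, rebuilding
libcurl with the macro set on the compiler command line changes the cap, and
applications can read the compile-time value; a small sketch (note it prints
the value from the installed headers, which matches the library's actual cap
only if library and headers were built with the same setting):

  #include <stdio.h>
  #include <curl/curl.h>

  int main(void)
  {
    /* the macro comes straight from curl/curl.h */
    printf("largest accepted HTTP header: %d bytes\n", CURL_MAX_HTTP_HEADER);
    return 0;
  }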
@@ -752,12 +752,22 @@ static CURLcode header_append(struct SessionHandle *data,
                               struct SingleRequest *k,
                               size_t length)
 {
   if(k->hbuflen + length >= data->state.headersize) {
     /* We enlarge the header buffer as it is too small */
     char *newbuff;
     size_t hbufp_index;
-    size_t newsize=CURLMAX((k->hbuflen+ length)*3/2,
-                           data->state.headersize*2);
+    size_t newsize;
+
+    if(k->hbuflen + length > CURL_MAX_HTTP_HEADER) {
+      /* The reason to have a max limit for this is to avoid the risk of a bad
+         server feeding libcurl with a never-ending header that will cause
+         reallocs infinitely */
+      failf (data, "Avoided giant realloc for header (max is %d)!",
+             CURL_MAX_HTTP_HEADER);
+      return CURLE_OUT_OF_MEMORY;
+    }
+
+    newsize=CURLMAX((k->hbuflen+ length)*3/2, data->state.headersize*2);
     hbufp_index = k->hbufp - data->state.headerbuff;
     newbuff = realloc(data->state.headerbuff, newsize);
     if(!newbuff) {
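The enlargement policy is capped geometric growth: take the larger of 1.5
times the needed size and double the current size, keeping repeated appends
amortized linear, but fail before the cap is crossed. A standalone sketch of
the same pattern, with hypothetical names (buf_append, MAX_HEADER, MAX2)
rather than libcurl's own code:

  #include <stdlib.h>
  #include <string.h>

  #define MAX_HEADER (100*1024)             /* mirrors CURL_MAX_HTTP_HEADER */
  #define MAX2(a,b) ((a) > (b) ? (a) : (b)) /* mirrors libcurl's CURLMAX */

  /* Append len bytes to a growable buffer, refusing to grow past
     MAX_HEADER. Returns 0 on success, -1 on failure. */
  static int buf_append(char **buf, size_t *used, size_t *room,
                        const char *data, size_t len)
  {
    if(*used + len >= *room) {
      char *p;
      size_t newroom;
      if(*used + len > MAX_HEADER)
        return -1;                          /* the never-ending-header guard */
      newroom = MAX2((*used + len) * 3 / 2, *room * 2);
      p = realloc(*buf, newroom);
      if(!p)
        return -1;
      *buf = p;
      *room = newroom;
    }
    memcpy(*buf + *used, data, len);
    *used += len;
    return 0;
  }

Mapping the oversized-header case to CURLE_OUT_OF_MEMORY reuses an existing
error code rather than introducing a new one for this condition.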
|
Loading…
x
Reference in New Issue
Block a user