path: root/src/uscxml/URL.cpp
author    Stefan Radomski <radomski@tk.informatik.tu-darmstadt.de>  2013-10-28 17:26:38 (GMT)
committer Stefan Radomski <radomski@tk.informatik.tu-darmstadt.de>  2013-10-28 17:26:38 (GMT)
commit    a68b6c1d31cb94675dd4dda0a2da11d8e83063c3 (patch)
tree      d96b56f209f2b65c703be4e611a7e3e6da899067 /src/uscxml/URL.cpp
parent    1be1af2d15375dbbf20cd07e85afdf3cee23c992 (diff)
Bug fixes (see details)
- No more 100-continue HTTP header
- Correctly delegate HTTP requests
- More elaborate expressions when communicating via HTTP
- Fixed off-by-one in JSCNodeSet
Diffstat (limited to 'src/uscxml/URL.cpp')
-rw-r--r--  src/uscxml/URL.cpp  12
1 file changed, 11 insertions, 1 deletion
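
The first item, suppressing the automatic "Expect: 100-continue" header, uses a documented libcurl convention: a header passed through CURLOPT_HTTPHEADER with a name, a colon, and no value tells libcurl to omit that header instead of sending it. The sketch below shows the idiom in isolation, assuming a plain POST; the URL and form body are placeholders and none of it is uscxml code.

// Minimal sketch: drop libcurl's "Expect: 100-continue" on a POST.
#include <curl/curl.h>

int main() {
    curl_global_init(CURL_GLOBAL_ALL);
    CURL* handle = curl_easy_init();

    curl_easy_setopt(handle, CURLOPT_URL, "http://localhost:8080/post"); // placeholder endpoint
    curl_easy_setopt(handle, CURLOPT_COPYPOSTFIELDS, "key=value");       // placeholder body

    struct curl_slist* headers = NULL;
    headers = curl_slist_append(headers, "Content-Type: application/x-www-form-urlencoded");
    // "Expect:" with no value makes libcurl omit the header entirely,
    // so the server answers the POST without a 100-continue round trip.
    headers = curl_slist_append(headers, "Expect:");
    curl_easy_setopt(handle, CURLOPT_HTTPHEADER, headers);

    CURLcode rc = curl_easy_perform(handle);

    curl_slist_free_all(headers);
    curl_easy_cleanup(handle);
    curl_global_cleanup();
    return rc == CURLE_OK ? 0 : 1;
}
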
diff --git a/src/uscxml/URL.cpp b/src/uscxml/URL.cpp
index 9721562..6ebd9a6 100644
--- a/src/uscxml/URL.cpp
+++ b/src/uscxml/URL.cpp
@@ -552,7 +552,7 @@ void URLFetcher::fetchURL(URL& url) {
// (curlError = curl_easy_setopt(handle, CURLOPT_NOSIGNAL, 1)) == CURLE_OK ||
// LOG(ERROR) << "Cannot set curl to ignore signals: " << curl_easy_strerror(curlError);
-
+
(curlError = curl_easy_setopt(handle, CURLOPT_WRITEDATA, url._impl.get())) == CURLE_OK ||
LOG(ERROR) << "Cannot register this as write userdata: " << curl_easy_strerror(curlError);
@@ -582,6 +582,12 @@ void URLFetcher::fetchURL(URL& url) {
(curlError = curl_easy_setopt(handle, CURLOPT_COPYPOSTFIELDS, url._impl->_outContent.c_str())) == CURLE_OK ||
LOG(ERROR) << "Cannot set post data " << url.asString() << ": " << curl_easy_strerror(curlError);
+ // Disable "Expect: 100-continue"
+// curl_slist* disallowed_headers = 0;
+// disallowed_headers = curl_slist_append(disallowed_headers, "Expect:");
+// (curlError = curl_easy_setopt(handle, CURLOPT_HTTPHEADER, disallowed_headers)) == CURLE_OK ||
+// LOG(ERROR) << "Cannot disable Expect 100 header: " << curl_easy_strerror(curlError);
+
struct curl_slist* headers = NULL;
std::map<std::string, std::string>::iterator paramIter = url._impl->_outHeader.begin();
while(paramIter != url._impl->_outHeader.end()) {
@@ -596,6 +602,10 @@ void URLFetcher::fetchURL(URL& url) {
curl_free(value);
paramIter++;
}
+
+ // Disable "Expect: 100-continue"
+ headers = curl_slist_append(headers, "Expect:");
+
(curlError = curl_easy_setopt(handle, CURLOPT_HTTPHEADER, headers)) == CURLE_OK ||
LOG(ERROR) << "Cannot headers for " << url.asString() << ": " << curl_easy_strerror(curlError);