about summary refs log tree commit diff
diff options
context:
space:
mode:
authorVincent Ambo <mail@tazj.in>2019-02-27T13·02+0100
committerVincent Ambo <github@tazj.in>2019-02-27T13·39+0100
commit67bfba446d42e047796c79046920caa5cdcd90f0 (patch)
tree0084c0e2372e752da9f4d96725c5fb201acd0110
parenta18dfc5a5e7380800b8dd3314a14a72cf2269340 (diff)
fix(lib): Support large body uploads with repeated read callbacks
When uploading larger amounts of data, cURL will call the read
callback incrementally multiple times to receive all the expected
data.

Previously if the size of the data to upload exceeded the size of the
initial buffer provided by cURL, the write (and thus the request)
would fail.

This changes the logic to write the data in chunks of a size that is
acceptable to cURL.
-rw-r--r--src/lib.rs36
-rw-r--r--src/tests.rs18
2 files changed, 42 insertions, 12 deletions
diff --git a/src/lib.rs b/src/lib.rs
index 76d4c68e8b90..b52ebc3ef09c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -77,7 +77,7 @@ extern crate curl;
 
 pub use curl::init;
 
-use curl::easy::{Auth, Easy, Form, List, ReadError};
+use curl::easy::{Auth, Easy, Form, List, Transfer, ReadError, WriteError};
 use std::collections::HashMap;
 use std::io::Write;
 use std::path::Path;
@@ -379,18 +379,10 @@ impl <'a> Request<'a> {
 
             // Write the payload if it exists:
             match self.body {
-                Body::Bytes { data, .. } => transfer.read_function(move |mut into| {
-                    into.write_all(data)
-                        .map(|_| data.len())
-                        .map_err(|_| ReadError::Abort)
-                })?,
+                Body::Bytes { data, .. } => chunked_read_function(&mut transfer, data)?,
 
                 #[cfg(feature = "json")]
-                Body::Json(json) => transfer.read_function(move |mut into| {
-                    into.write_all(&json)
-                        .map(|_| json.len())
-                        .map_err(|_| ReadError::Abort)
-                })?,
+                Body::Json(ref json) => chunked_read_function(&mut transfer, json)?,
 
                 // Do nothing if there is no body or if the body is a
                 // form.
@@ -426,7 +418,7 @@ impl <'a> Request<'a> {
                 let len = data.len();
                 body.write_all(data)
                     .map(|_| len)
-                    .map_err(|err| panic!("{:?}", err))
+                    .map_err(|_| WriteError::Pause)
             })?;
 
             transfer.perform()?;
@@ -440,6 +432,26 @@ impl <'a> Request<'a> {
     }
 }
 
+/// Provide a data chunk potentially larger than cURL's initial write
+/// buffer to the data reading callback by tracking the offset of
+/// already-written data.
+///
+/// As we manually set the expected upload size, cURL will call the
+/// read callback repeatedly until it has all the data it needs.
+fn chunked_read_function<'easy, 'data>(transfer: &mut Transfer<'easy, 'data>,
+                                       data: &'data [u8]) -> Result<(), curl::Error> {
+    let mut data = data;
+
+    transfer.read_function(move |mut into| {
+        let written = into.write(data)
+            .map_err(|_| ReadError::Abort)?;
+
+        data = &data[written..];
+
+        Ok(written)
+    })
+}
+
 impl <T> Response<T> {
     /// Check whether the status code of this HTTP response is a
     /// success (i.e. in the 200-299 range).
diff --git a/src/tests.rs b/src/tests.rs
index bc22b95fcac3..a2bf1e7f5eb3 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -114,6 +114,24 @@ fn test_basic_auth() {
     assert!(response.is_success(), "authorized request should succeed");
 }
 
+#[test]
+fn test_large_body() {
+    // By default cURL buffers seem to be 2^16 bytes in size. The test
+    // size is therefore 2^16+1.
+    const BODY_SIZE: usize = 65537;
+
+    let resp = Request::post("http://127.0.0.1:4662/post")
+        .body("application/octet-stream", &[0; BODY_SIZE])
+        .send().expect("sending request")
+        .as_json::<Value>().expect("JSON deserialisation");
+
+    // httpbin returns the uploaded data as a string in the `data`
+    // field.
+    let data = resp.body.get("data").unwrap().as_str().unwrap();
+
+    assert_eq!(BODY_SIZE, data.len(), "uploaded data length should be correct");
+}
+
 // Tests for various other features.
 
 #[test]