From 67bfba446d42e047796c79046920caa5cdcd90f0 Mon Sep 17 00:00:00 2001
From: Vincent Ambo
Date: Wed, 27 Feb 2019 14:02:49 +0100
Subject: fix(lib): Support large body uploads with repeated read callbacks

When uploading larger amounts of data, cURL will call the read
callback incrementally multiple times to receive all the expected
data. Previously, if the size of the data to upload exceeded the size
of the initial buffer provided by cURL, the write (and thus the
request) would fail.

This changes the logic to write the data in chunks of a size that is
acceptable to cURL.
---
 src/tests.rs | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

(limited to 'src/tests.rs')

diff --git a/src/tests.rs b/src/tests.rs
index bc22b95fcac3..a2bf1e7f5eb3 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -114,6 +114,24 @@ fn test_basic_auth() {
     assert!(response.is_success(), "authorized request should succeed");
 }
 
+#[test]
+fn test_large_body() {
+    // By default cURL buffers seem to be 2^16 bytes in size. The test
+    // size is therefore 2^16+1.
+    const BODY_SIZE: usize = 65537;
+
+    let resp = Request::post("http://127.0.0.1:4662/post")
+        .body("application/octet-stream", &[0; BODY_SIZE])
+        .send().expect("sending request")
+        .as_json::<Value>().expect("JSON deserialisation");
+
+    // httpbin returns the uploaded data as a string in the `data`
+    // field.
+    let data = resp.body.get("data").unwrap().as_str().unwrap();
+
+    assert_eq!(BODY_SIZE, data.len(), "uploaded data length should be correct");
+}
+
 // Tests for various other features.
 
 #[test]
--
cgit 1.4.1
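
The patch above only shows the regression test; the library change itself is not included in this view. For context, the sketch below illustrates the general technique the commit message describes: copying the upload body into cURL's buffer in chunks across repeated read-callback invocations. It is written against the Rust `curl` crate's `Easy::read_function` API; the function name `post_chunked` and the endpoint are invented for the example and are not taken from the patch.

// Illustrative sketch only, not part of the patch: feeding an upload body
// to cURL in chunks via the `curl` crate's read callback.
use std::cmp::min;

use curl::easy::Easy;

fn post_chunked(url: &str, body: Vec<u8>) -> Result<(), curl::Error> {
    let mut handle = Easy::new();
    handle.url(url)?;
    handle.post(true)?;
    // Announce the total upload size so cURL can set Content-Length.
    handle.post_field_size(body.len() as u64)?;

    let mut offset = 0;
    handle.read_function(move |into| {
        // cURL invokes this callback repeatedly, each time with a buffer of
        // limited size (typically 2^16 bytes). Copy at most `into.len()`
        // bytes per call; returning 0 signals that the body is exhausted.
        let n = min(into.len(), body.len() - offset);
        into[..n].copy_from_slice(&body[offset..offset + n]);
        offset += n;
        Ok(n)
    })?;

    handle.perform()
}

fn main() {
    // Example invocation against a local httpbin instance, mirroring the test.
    post_chunked("http://127.0.0.1:4662/post", vec![0; 65537]).expect("upload failed");
}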