[med-svn] [r-cran-crul] 02/04: New upstream version 0.2.0

Andreas Tille tille at debian.org
Sun Oct 1 21:04:03 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository r-cran-crul.

commit 4cafab2c26ddf49c72d7253b4228172432e09668
Author: Andreas Tille <tille at debian.org>
Date:   Sun Oct 1 22:58:44 2017 +0200

    New upstream version 0.2.0
---
 DESCRIPTION                      |  27 +++
 LICENSE                          |   2 +
 MD5                              |  48 +++++
 NAMESPACE                        |   7 +
 NEWS.md                          |  55 +++++
 R/body.R                         |  85 ++++++++
 R/client.R                       | 291 +++++++++++++++++++++++++
 R/crul-package.r                 |  21 ++
 R/curl-options.R                 |  21 ++
 R/curl_options.R                 |  13 ++
 R/fetch.R                        |  12 ++
 R/handle.R                       |  15 ++
 R/headers.R                      |  11 +
 R/http-headers.R                 |  35 +++
 R/make_url.R                     |  39 ++++
 R/post-requests.R                |  46 ++++
 R/query.R                        |  28 +++
 R/response.R                     | 155 ++++++++++++++
 R/use_agent.R                    |   8 +
 R/writing-options.R              |  18 ++
 R/zzz.R                          |   3 +
 README.md                        | 248 +++++++++++++++++++++
 build/vignette.rds               | Bin 0 -> 209 bytes
 debian/README.test               |   8 -
 debian/changelog                 |   5 -
 debian/compat                    |   1 -
 debian/control                   |  32 ---
 debian/copyright                 |  32 ---
 debian/docs                      |   3 -
 debian/rules                     |   5 -
 debian/source/format             |   1 -
 debian/tests/control             |   5 -
 debian/tests/run-unit-test       |  17 --
 debian/watch                     |   2 -
 inst/doc/crul_vignette.Rmd       | 281 ++++++++++++++++++++++++
 inst/doc/crul_vignette.html      | 450 +++++++++++++++++++++++++++++++++++++++
 inst/vign/crul_vignette.Rmd      | 156 ++++++++++++++
 inst/vign/crul_vignette.md       | 281 ++++++++++++++++++++++++
 man/HttpClient.Rd                |  95 +++++++++
 man/HttpResponse.Rd              |  64 ++++++
 man/crul-package.Rd              |  29 +++
 man/curl-options.Rd              |  27 +++
 man/handle.Rd                    |  25 +++
 man/http-headers.Rd              |  41 ++++
 man/post-requests.Rd             |  52 +++++
 man/writing-options.Rd           |  24 +++
 tests/test-all.R                 |   2 +
 tests/testthat/test-client.R     |  23 ++
 tests/testthat/test-delete.R     |  38 ++++
 tests/testthat/test-get.R        |  43 ++++
 tests/testthat/test-head.R       |  20 ++
 tests/testthat/test-headers.R    |  25 +++
 tests/testthat/test-patch.R      |  38 ++++
 tests/testthat/test-paths.R      |  47 ++++
 tests/testthat/test-post.R       |  38 ++++
 tests/testthat/test-put.R        |  38 ++++
 tests/testthat/test-query.R      |  35 +++
 tests/testthat/test-status.R     |  39 ++++
 tests/testthat/test-user-agent.R |  12 ++
 vignettes/crul_vignette.Rmd      | 281 ++++++++++++++++++++++++
 60 files changed, 3392 insertions(+), 111 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
new file mode 100644
index 0000000..902c44c
--- /dev/null
+++ b/DESCRIPTION
@@ -0,0 +1,27 @@
+Package: crul
+Title: HTTP Client
+Description: A simple HTTP client, with tools for making HTTP requests,
+    and mocking HTTP requests. The package is built on R6, and takes
+    inspiration from Ruby's 'faraday' gem (<https://rubygems.org/gems/faraday>).
+    The package name is a play on curl, the widely used command line tool
+    for HTTP, and this package is built on top of the R package 'curl', an
+    interface to 'libcurl' (<https://curl.haxx.se/libcurl>).
+Version: 0.2.0
+License: MIT + file LICENSE
+Authors at R: c(
+    person("Scott", "Chamberlain", role = c("aut", "cre"), 
+    email = "myrmecocystus at gmail.com")
+    )
+URL: https://github.com/ropensci/crul
+BugReports: https://github.com/ropensci/crul/issues
+Imports: curl (>= 2.2), R6 (>= 2.2.0), urltools (>= 1.6.0), httpcode
+        (>= 0.2.0), mime
+Suggests: testthat, fauxpas (>= 0.1.0), knitr, covr, jsonlite
+VignetteBuilder: knitr
+RoxygenNote: 5.0.1
+NeedsCompilation: no
+Packaged: 2017-01-03 15:18:41 UTC; sacmac
+Author: Scott Chamberlain [aut, cre]
+Maintainer: Scott Chamberlain <myrmecocystus at gmail.com>
+Repository: CRAN
+Date/Publication: 2017-01-03 16:27:27
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..37ee2c7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,2 @@
+YEAR: 2017
+COPYRIGHT HOLDER: Scott Chamberlain
diff --git a/MD5 b/MD5
new file mode 100644
index 0000000..90e85ae
--- /dev/null
+++ b/MD5
@@ -0,0 +1,48 @@
+a6f6fbc16cd71616d681a37cb3c912e9 *DESCRIPTION
+c5af52351472a750055a760a8924ce71 *LICENSE
+ccfd2548d37213f1c9fb4ed441c441e1 *NAMESPACE
+863807c45f19cb9c253d7a29dae2eabb *NEWS.md
+c6de1547ab52ff1cd67fb5c6aeeac61d *R/body.R
+01ee24174d618c7272c081739e07c1e7 *R/client.R
+c2c1b73d8f35cf3a5d89fbd07020e28c *R/crul-package.r
+5cb4fcba89003e2e8188b5c08a7a9f04 *R/curl-options.R
+ce31dc346613d071e58f07d49bdb14eb *R/curl_options.R
+8f916ca18e13c4b6c544f2c284b9fabf *R/fetch.R
+2dfad3475055103724c39b36f6ac1d2a *R/handle.R
+7fe1bb2e5890da2188c2f3099ef5ccc1 *R/headers.R
+0cd2754bc2668203d10a1431d2713c4e *R/http-headers.R
+9141860e68614c1ec7ff6b7381f5f808 *R/make_url.R
+3d4be1045a245a5ab31e88b3bec7fef4 *R/post-requests.R
+bbc44d0c12732851f73d1f556098317a *R/query.R
+e1c677d21d92379174d5d4e6e35895ab *R/response.R
+14c11771d130c3cc4ba9e9f475a2951d *R/use_agent.R
+c92e901a06260e4175002099a617aa60 *R/writing-options.R
+226c3a16c73ea8dd62a69524df930908 *R/zzz.R
+f4aca5d2f32af09d109570d33de13836 *README.md
+13d67e238649588f477b9dfd996b8269 *build/vignette.rds
+02bf1952e659c0bd4a564da66c12c9db *inst/doc/crul_vignette.Rmd
+3cbc48c791a977d09be812762d3959b7 *inst/doc/crul_vignette.html
+80d1d44e24d667bc689d3ae88f6ce364 *inst/vign/crul_vignette.Rmd
+02bf1952e659c0bd4a564da66c12c9db *inst/vign/crul_vignette.md
+a10ec0e1878c2413d815012af25f9ac7 *man/HttpClient.Rd
+20a2ec2ab50ab466c0a5df70e6e737cf *man/HttpResponse.Rd
+238334b1249de8c8a108cd7eeba6bd87 *man/crul-package.Rd
+17b7aa113d4e3f2bb27181a0f30aec86 *man/curl-options.Rd
+099cc621daf091046a95667588044cb3 *man/handle.Rd
+cb453c1382bb59334acf15f7565933bc *man/http-headers.Rd
+42bfec87af6ea9ee8c034a1945b55789 *man/post-requests.Rd
+65da7a25eb2fa7bceb0ac867acbe4034 *man/writing-options.Rd
+9d086d73e3d68be9f055a6103bf7be39 *tests/test-all.R
+18d7e9ae1cd3375c8c646e6af971fae0 *tests/testthat/test-client.R
+411b885d9cffa5d35c8c76640b33dace *tests/testthat/test-delete.R
+81c982a6d03502e5b48ce38d581e8fe8 *tests/testthat/test-get.R
+f703252d231c221048dbdd8e74db7a49 *tests/testthat/test-head.R
+7302796a88a8ed8326239c4cced4726a *tests/testthat/test-headers.R
+d97a38299ad7f3501b4bfc9aeab50310 *tests/testthat/test-patch.R
+1acaf78df767322f37911a1925f1eda1 *tests/testthat/test-paths.R
+8e092d174768345a088f0ff4388542e9 *tests/testthat/test-post.R
+2ee7f4b175d689c27227a8bb91bb2d6d *tests/testthat/test-put.R
+edd29a7dd644adb3389cf4dca2bd5f2e *tests/testthat/test-query.R
+ae1547b89f973f29f0d21fd526ccb7ce *tests/testthat/test-status.R
+b66e8ddf24d1ff5ffe66761e00d75a0e *tests/testthat/test-user-agent.R
+02bf1952e659c0bd4a564da66c12c9db *vignettes/crul_vignette.Rmd
diff --git a/NAMESPACE b/NAMESPACE
new file mode 100644
index 0000000..73a539e
--- /dev/null
+++ b/NAMESPACE
@@ -0,0 +1,7 @@
+# Generated by roxygen2: do not edit by hand
+
+export(HttpClient)
+export(HttpResponse)
+export(handle)
+import(curl)
+importFrom(R6,R6Class)
diff --git a/NEWS.md b/NEWS.md
new file mode 100644
index 0000000..cd6b519
--- /dev/null
+++ b/NEWS.md
@@ -0,0 +1,55 @@
+crul 0.2.0
+==========
+
+### MINOR IMPROVEMENTS
+
+* Created new manual files for various tasks to document
+usage better (#19)
+* URL encode paths - should fix any bugs where spaces between words 
+caused errors previously (#17)
+* URL encode query parameters - should fix any bugs where spaces between words 
+caused errors previously (#11)
+* request headers now passed correctly to response object (#13)
+* response headers now parsed to a list for easier access (#14)
+* Now supporting multiple query parameters of the same name, wasn't
+possible in last version (#15)
+
+
+crul 0.1.6
+==========
+
+### NEW FEATURES
+
+* Improved options for using curl options. Can manually add
+to list of curl options or pass in via `...`. And we 
+check that user doesn't pass in prohibited options 
+(`curl` package takes care of checking that options 
+are valid) (#5)
+* Incorporated `fauxpas` package for dealing with HTTP 
+conditions. It's a Suggest, so only used if installed (#6)
+* Added support for streaming via `curl::curl_fetch_stream`. 
+`stream` param defaults to `NULL` (thus ignored), or pass in a 
+function to use streaming. Only one of memory, streaming or 
+disk allowed. (#9)
+* Added support for writing responses to disk via `curl::curl_fetch_disk`. 
+`disk` param defaults to `NULL` (thus ignored), or pass in a 
+path to write to disk instead of using memory. Only one of memory, 
+streaming or disk allowed. (#12)
+
+### MINOR IMPROVEMENTS
+
+* Added missing `raise_for_status()` method on the 
+`HttpResponse` class (#10)
+
+### BUG FIXES
+
+* Was importing `httpcode` but wasn't using it in the package. 
+Now using the package in `HttpResponse`
+
+
+crul 0.1.0
+==========
+
+### NEW FEATURES
+
+* Released to CRAN.
diff --git a/R/body.R b/R/body.R
new file mode 100644
index 0000000..f5c198a
--- /dev/null
+++ b/R/body.R
@@ -0,0 +1,85 @@
# Build a Content-Type header entry from a type hint.
# A NULL hint yields NULL (no header). A hint beginning with "."
# is treated as a file extension and mapped to a MIME type.
make_type <- function(x) {
  if (is.null(x)) {
    return()
  }
  looks_like_extension <- substr(x, 1, 1) == "."
  if (looks_like_extension) {
    x <- mime::guess_type(x, empty = NULL)
  }
  list(`Content-Type` = x)
}
+
# adapted from https://github.com/hadley/httr
# Prepare a character or raw payload as curl POST options.
# Character input is collapsed with newlines and converted to raw.
# Returns list(opts = <curl options>, type = <Content-Type entry or NULL>).
raw_body <- function(body, type = NULL) {
  if (is.character(body)) {
    body <- charToRaw(paste(body, collapse = "\n"))
  }
  stopifnot(is.raw(body))
  curl_opts <- list(
    post = TRUE,
    postfieldsize = length(body),
    postfields = body
  )
  list(opts = curl_opts, type = make_type(type %||% ""))
}
+
# adapted from https://github.com/hadley/httr
# Prepare a request body for curl.
#
# body:   NULL, FALSE, character, raw, or a list (optionally with a
#         `files` element of the form list(path = ..., type = ...))
# encode: one of "raw", "form", "json", "multipart"
# type:   optional Content-Type override for character/raw bodies
#
# Returns a list containing some of: `opts` (curl options), `type`
# (a Content-Type header entry), `fields` (multipart form fields).
prep_body <- function(body, encode, type = NULL) {
  if (identical(body, FALSE)) {
    # explicit "no body" request
    return(list(opts = list(post = TRUE, nobody = TRUE)))
  }
  if (is.character(body) || is.raw(body)) {
    return(raw_body(body, type = type))
  }
  if ("files" %in% names(body)) {
    con <- file(body$files$path, "rb")
    size <- file.info(body$files$path)$size
    # FIX: wrap the curl options under `opts` and place `type` at the
    # top level, matching the structure of every other branch. The
    # caller reads x$opts / x$type / x$fields; previously these were
    # returned flat at the top level and were silently ignored.
    return(
      list(
        opts = list(
          post = TRUE,
          # stream the file to curl in chunks; close and NULL-out the
          # connection once the last (short) chunk has been read
          readfunction = function(nbytes, ...) {
            if (is.null(con)) return(raw())
            bin <- readBin(con, "raw", nbytes)
            if (length(bin) < nbytes) {
              close(con)
              con <<- NULL
            }
            bin
          },
          postfieldsize_large = size
        ),
        type = make_type(body$type)
      )
    )
  }
  if (is.null(body)) {
    # empty body: still a POST, zero-length payload
    return(raw_body(raw()))
  }
  if (!is.list(body)) {
    stop("Unknown type of `body`: must be NULL, FALSE, character, raw or list",
         call. = FALSE)
  }

  body <- ccp(body)  # drop NULL elements
  if (!encode %in% c('raw', 'form', 'json', 'multipart')) {
    stop("encode must be one of raw, form, json, or multipart", call. = FALSE)
  }

  if (encode == "raw") {
    raw_body(body)
  } else if (encode == "form") {
    raw_body(make_query(body), "application/x-www-form-urlencoded")
  } else if (encode == "json") {
    raw_body(jsonlite::toJSON(body, auto_unbox = TRUE), "application/json")
  } else if (encode == "multipart") {
    if (!all(has_name(body))) {
      stop("All components of body must be named", call. = FALSE)
    }
    list(
      opts = list(post = TRUE),
      fields = lapply(body, as.character)
    )
  }
}
diff --git a/R/client.R b/R/client.R
new file mode 100644
index 0000000..01f03a5
--- /dev/null
+++ b/R/client.R
@@ -0,0 +1,291 @@
+#' HTTP client
+#'
+#' @export
+#' @param url (character) A url. One of \code{url} or \code{handle} required.
+#' @param opts (list) curl options
+#' @param handle A handle
+#' @details
+#' \strong{Methods}
+#'   \describe{
+#'     \item{\code{get(path, query, disk, stream, ...)}}{
+#'       Make a GET request
+#'     }
+#'     \item{\code{post(path, query, body, disk, stream, ...)}}{
+#'       Make a POST request
+#'     }
+#'     \item{\code{put(path, query, body, disk, stream, ...)}}{
+#'       Make a PUT request
+#'     }
+#'     \item{\code{patch(path, query, body, disk, stream, ...)}}{
+#'       Make a PATCH request
+#'     }
+#'     \item{\code{delete(path, query, body, disk, stream, ...)}}{
+#'       Make a DELETE request
+#'     }
+#'     \item{\code{head(path, disk, stream, ...)}}{
+#'       Make a HEAD request
+#'     }
+#'   }
+#' @format NULL
+#' @usage NULL
+#' @details Possible parameters (not all are allowed in each HTTP verb):
+#' \itemize{
+#'  \item path - URL path, appended to the base URL
+#'  \item query - query terms, as a list
+#'  \item body - body as an R list
+#'  \item encode - one of form, multipart, json, or raw
+#'  \item disk - a path to write to. if NULL (default), memory used
+#'  \item stream - an R function to determine how to stream data. if
+#'  NULL (default), memory used
+#'  \item ... curl options, only those in the acceptable set from
+#'  \code{\link[curl]{curl_options}} except the following: httpget, httppost,
+#'  post, postfields, postfieldsize, and customrequest
+#' }
+#'
+#' @seealso \code{\link{post-requests}}, \code{\link{http-headers}},
+#' \code{\link{writing-options}}
+#'
+#' @examples
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#' x$url
+#' (res_get1 <- x$get('get'))
+#' res_get1$content
+#' res_get1$response_headers
+#' res_get1$parse()
+#'
+#' (res_get2 <- x$get('get', query = list(hello = "world")))
+#' res_get2$parse()
+#' library("jsonlite")
+#' jsonlite::fromJSON(res_get2$parse())
+#'
+#' # post request
+#' (res_post <- x$post('post', body = list(hello = "world")))
+#'
+#' ## empty body request
+#' x$post('post')
+#'
+#' # put request
+#' (res_put <- x$put('put'))
+#'
+#' # delete request
+#' (res_delete <- x$delete('delete'))
+#'
+#' # patch request
+#' (res_patch <- x$patch('patch'))
+#'
+#' # head request
+#' (res_head <- x$head())
+#'
+#' # query params are URL encoded for you, so DO NOT do it yourself
+#' ## if you url encode yourself, it gets double encoded, and that's bad
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#' res <- x$get("get", query = list(a = 'hello world'), verbose = TRUE)
+
HttpClient <- R6::R6Class(
  'HttpClient',
  public = list(
    url = NULL,       # base url, if supplied
    opts = list(),    # default curl options applied to every request
    headers = list(), # default request headers applied to every request
    handle = NULL,    # optional pre-built handle (see handle())

    # Pretty-print the connection: url, curl options, headers
    print = function(x, ...) {
      cat("<crul connection> ", sep = "\n")
      cat(paste0("  url: ", if (is.null(self$url)) self$handle$url else self$url), sep = "\n")
      cat("  options: ", sep = "\n")
      for (i in seq_along(self$opts)) {
        cat(sprintf("    %s: %s", names(self$opts)[i],
                    self$opts[[i]]), sep = "\n")
      }
      cat("  headers: ", sep = "\n")
      for (i in seq_along(self$headers)) {
        cat(sprintf("    %s: %s", names(self$headers)[i],
                    self$headers[[i]]), sep = "\n")
      }
      invisible(self)
    },

    # One of `url` or `handle` is required
    initialize = function(url, opts, headers, handle) {
      if (!missing(url)) self$url <- url
      if (!missing(opts)) self$opts <- opts
      if (!missing(headers)) self$headers <- headers
      if (!missing(handle)) self$handle <- handle
      if (is.null(self$url) && is.null(self$handle)) {
        stop("need one of url or handle", call. = FALSE)
      }
    },

    # GET request; `...` takes curl options (checked against the
    # prohibited set in curl_opts_check)
    get = function(path = NULL, query = list(), disk = NULL,
                   stream = NULL, ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, query)
      rr <- list(
        url = url,
        method = "get",
        options = list(
          httpget = TRUE,
          useragent = make_ua()
        ),
        headers = self$headers
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    },

    # POST request; `body` is prepared per `encode` by prep_body()
    post = function(path = NULL, query = list(), body = NULL, disk = NULL,
                    stream = NULL, encode = "multipart", ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, query)
      opts <- prep_body(body, encode)
      rr <- list(
        url = url,
        method = "post",
        options = as.list(c(
          opts$opts,
          useragent = make_ua()
        )),
        headers = c(self$headers, opts$type),
        fields = opts$fields
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    },

    # PUT request
    # NOTE(review): `encode` is accepted but currently unused here;
    # `body` is handed to curl as form fields — confirm intended
    put = function(path = NULL, query = list(), body = NULL, disk = NULL,
                   stream = NULL, encode = NULL, ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, query)
      opts <- list(customrequest = "PUT")
      if (is.null(body)) {
        # no body: send an explicit zero-length payload
        opts$postfields <- raw(0)
        opts$postfieldsize <- 0
      }
      rr <- list(
        url = url,
        method = "put",
        options = c(
          opts,
          useragent = make_ua()
        ),
        headers = self$headers,
        fields = body
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    },

    # PATCH request
    # NOTE(review): `encode` is accepted but currently unused here
    patch = function(path = NULL, query = list(), body = NULL, disk = NULL,
                     stream = NULL, encode = NULL, ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, query)
      opts <- list(customrequest = "PATCH")
      if (is.null(body)) {
        opts$postfields <- raw(0)
        opts$postfieldsize <- 0
      }
      rr <- list(
        url = url,
        method = "patch",
        options = c(
          opts,
          useragent = make_ua()
        ),
        headers = self$headers,
        fields = body
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    },

    # DELETE request
    # NOTE(review): `encode` is accepted but currently unused here
    delete = function(path = NULL, query = list(), body = NULL, disk = NULL,
                      stream = NULL, encode = NULL, ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, query)
      opts <- list(customrequest = "DELETE")
      if (is.null(body)) {
        opts$postfields <- raw(0)
        opts$postfieldsize <- 0
      }
      rr <- list(
        url = url,
        method = "delete",
        options = c(
          opts,
          useragent = make_ua()
        ),
        headers = self$headers,
        fields = body
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    },

    # HEAD request (no body is fetched, no query supported)
    head = function(path = NULL, disk = NULL, stream = NULL, ...) {
      curl_opts_check(...)
      url <- make_url(self$url, self$handle, path, NULL)
      opts <- list(customrequest = "HEAD", nobody = TRUE)
      rr <- list(
        url = url,
        method = "head",
        options = c(
          opts,
          useragent = make_ua()
        ),
        headers = self$headers
      )
      rr$options <- utils::modifyList(rr$options, c(self$opts, ...))
      rr$disk <- disk
      rr$stream <- stream
      private$make_request(rr)
    }
  ),

  private = list(
    request = NULL,

    # Configure the curl handle from the prepared request `opts`,
    # perform the fetch, and wrap the result in an HttpResponse
    make_request = function(opts) {
      # FIX: disk and stream are mutually exclusive. Previously this
      # stop() was nested inside xor(!is.null(disk), !is.null(stream)),
      # which is TRUE only when exactly one is non-NULL, so the
      # both-supplied error could never trigger.
      if (!is.null(opts$disk) && !is.null(opts$stream)) {
        stop("disk and stream can not be used together", call. = FALSE)
      }
      curl::handle_setopt(opts$url$handle, .list = opts$options)
      if (!is.null(opts$fields)) {
        curl::handle_setform(opts$url$handle, .list = opts$fields)
      }
      curl::handle_setheaders(opts$url$handle, .list = opts$headers)
      # reset the handle afterwards so per-request state does not leak
      # into subsequent requests on the same handle
      on.exit(curl::handle_reset(opts$url$handle), add = TRUE)
      resp <- crul_fetch(opts)

      HttpResponse$new(
        method = opts$method,
        url = resp$url,
        status_code = resp$status_code,
        request_headers = c(useragent = opts$options$useragent, opts$headers),
        response_headers = {
          headers_parse(curl::parse_headers(rawToChar(resp$headers)))
        },
        modified = resp$modified,
        times = resp$times,
        content = resp$content,
        handle = opts$url$handle,
        request = opts
      )
    }
  )
)
diff --git a/R/crul-package.r b/R/crul-package.r
new file mode 100644
index 0000000..b3a6bbd
--- /dev/null
+++ b/R/crul-package.r
@@ -0,0 +1,21 @@
+#' \strong{HTTP R client}
+#'
+#' @section Package API:
+#' \itemize{
+#'  \item \code{\link{HttpClient}} - create a connection client, set all
+#'  your http options, make http requests
+#'  \item \code{\link{HttpResponse}} - mostly for internal use, handles
+#'  http responses
+#' }
+#'
+#' @section HTTP conditions:
+#' We use \code{fauxpas} if you have it installed for handling HTTP
+#' conditions but if it's not installed we use \pkg{httpcode}
+#'
+#' @import curl
+#' @importFrom R6 R6Class
+#' @name crul-package
+#' @aliases crul
+#' @author Scott Chamberlain \email{myrmecocystus@@gmail.com}
+#' @docType package
+NULL
diff --git a/R/curl-options.R b/R/curl-options.R
new file mode 100644
index 0000000..95d5920
--- /dev/null
+++ b/R/curl-options.R
@@ -0,0 +1,21 @@
+#' curl options
+#'
+#' @name curl-options
+#' @examples
+#' # set curl options on client initialization
+#' (res <- HttpClient$new(
+#'   url = "https://httpbin.org",
+#'   opts = list(
+#'     verbose = TRUE,
+#'     useragent = "hello world"
+#'   )
+#' ))
+#' res$opts
+#' res$get('get')
+#'
+#' # or set curl options when performing HTTP operation
+#' (res <- HttpClient$new(url = "https://httpbin.org"))
+#' res$get('get', verbose = TRUE)
+#' \dontrun{res$get('get', stuff = "things")}
+#' \dontrun{res$get('get', httpget = TRUE)}
+NULL
diff --git a/R/curl_options.R b/R/curl_options.R
new file mode 100644
index 0000000..33b4071
--- /dev/null
+++ b/R/curl_options.R
@@ -0,0 +1,13 @@
# curl options that crul manages internally; users may not override
# these via `...` or client-level opts
nonacccurl <- c("httpget", "httppost", "post", "postfields",
                "postfieldsize", "customrequest")

# Validate user-supplied curl options against the prohibited set.
# FIX: the error now names the specific prohibited options that were
# actually passed; previously it printed the entire prohibited set
# regardless of which options the user supplied.
curl_opts_check <- function(...) {
  x <- list(...)
  bad <- intersect(names(x), nonacccurl)
  if (length(bad) > 0) {
    stop(
      paste0("the following curl options are not allowed:\n  ",
             paste(bad, collapse = ", ")),
      call. = FALSE
    )
  }
  invisible(NULL)
}
diff --git a/R/fetch.R b/R/fetch.R
new file mode 100644
index 0000000..f18de03
--- /dev/null
+++ b/R/fetch.R
@@ -0,0 +1,12 @@
# Execute a prepared request, dispatching on the requested sink:
# write-to-disk, streaming callback, or (the default) in-memory.
crul_fetch <- function(x) {
  if (!is.null(x$disk)) {
    # write response body to the given path
    curl::curl_fetch_disk(x$url$url, x$disk, handle = x$url$handle)
  } else if (!is.null(x$stream)) {
    # hand response chunks to the user's callback
    curl::curl_fetch_stream(x$url$url, x$stream, handle = x$url$handle)
  } else {
    # buffer the whole response in memory
    curl::curl_fetch_memory(x$url$url, handle = x$url$handle)
  }
}
diff --git a/R/handle.R b/R/handle.R
new file mode 100644
index 0000000..2f0068f
--- /dev/null
+++ b/R/handle.R
@@ -0,0 +1,15 @@
+#' Make a handle
+#'
+#' @export
+#' @param url (character) A url. required.
+#' @param ... options passed on to \code{\link[curl]{new_handle}}
+#' @examples
+#' handle("https://httpbin.org")
+#'
+#' # handles - pass in your own handle
+#' h <- handle("https://httpbin.org")
+#' (res <- HttpClient$new(handle = h))
+#' out <- res$get("get")
handle <- function(url, ...) {
  # pair the url with a fresh curl handle configured via `...`
  curl_handle <- curl::new_handle(...)
  list(url = url, handle = curl_handle)
}
diff --git a/R/headers.R b/R/headers.R
new file mode 100644
index 0000000..9584014
--- /dev/null
+++ b/R/headers.R
@@ -0,0 +1,11 @@
# Parse one raw response-header line. A status line (e.g.
# "HTTP/1.1 200 OK") becomes list(status = <line>); a "Name: value"
# line becomes a single-element list keyed by the lower-cased name.
head_parse <- function(z) {
  if (grepl("HTTP\\/", z)) {
    return(list(status = z))
  }
  m <- regmatches(z, regexec("^([^:]*):\\s*(.*)$", z))[[1]]
  out <- list(m[[3]])
  names(out) <- tolower(m[[2]])
  out
}

# Parse a character vector of header lines into one flat named list
headers_parse <- function(x) {
  do.call("c", lapply(x, head_parse))
}
diff --git a/R/http-headers.R b/R/http-headers.R
new file mode 100644
index 0000000..d5f8265
--- /dev/null
+++ b/R/http-headers.R
@@ -0,0 +1,35 @@
+#' Working with HTTP headers
+#'
+#' @name http-headers
+#' @examples
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#'
+#' # set headers
+#' (res <- HttpClient$new(
+#'   url = "https://httpbin.org",
+#'   opts = list(
+#'     verbose = TRUE
+#'   ),
+#'   headers = list(
+#'     a = "stuff",
+#'     b = "things"
+#'   )
+#' ))
+#' res$headers
+#' # reassign header value
+#' res$headers$a <- "that"
+#' # define new header
+#' res$headers$c <- "what"
+#' # request
+#' res$get('get')
+#'
+#' ## setting content-type via headers
+#' (res <- HttpClient$new(
+#'   url = "https://httpbin.org",
+#'   opts = list(
+#'     verbose = TRUE
+#'   ),
+#'   headers = list(`Content-Type` = "application/json")
+#' ))
+#' res$get('get')
+NULL
diff --git a/R/make_url.R b/R/make_url.R
new file mode 100644
index 0000000..0a47616
--- /dev/null
+++ b/R/make_url.R
@@ -0,0 +1,39 @@
# Resolve the final request URL and the curl handle to use.
# A user-supplied handle wins over `url` (its url is used); otherwise
# a fresh handle is created. `path` (if any) replaces the URL path,
# literal whitespace is percent-encoded, and `query` is appended as
# an encoded query string by add_query().
make_url <- function(url = NULL, handle = NULL, path, query) {
  if (is.null(handle)) {
    handle <- list(handle = curl::new_handle())
  } else {
    url <- handle$url
  }

  if (!is.null(path)) {
    urltools::path(url) <- path
  }

  url <- gsub("\\s", "%20", url)
  url <- add_query(query, url)

  list(url = url, handle = handle$handle)
}
+
# Append query parameters to a url.
# FIX: URL-encode parameter names as well as values; previously only
# values were encoded, so a name containing e.g. a space produced an
# invalid URL (make_query() already escapes names for form bodies).
# Duplicate names are supported, e.g. list(a = 5, a = 6) -> ?a=5&a=6.
#
# add_query(list(a = 5, a = 6), "https://httpbin.org")
# add_query(list(), "https://httpbin.org")
add_query <- function(x, url) {
  if (length(x) == 0) {
    return(url)
  }
  pairs <- vapply(seq_along(x), function(i) {
    paste(urltools::url_encode(names(x)[i]),
          urltools::url_encode(x[[i]]), sep = "=")
  }, character(1))
  paste0(url, "?", paste0(pairs, collapse = "&"))
}
diff --git a/R/post-requests.R b/R/post-requests.R
new file mode 100644
index 0000000..6a43f8a
--- /dev/null
+++ b/R/post-requests.R
@@ -0,0 +1,46 @@
+#' HTTP POST requests
+#'
+#' @name post-requests
+#' @examples
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#'
+#' # post request
+#' (res_post <- x$post('post', body = list(hello = "world")))
+#'
+#' ## empty body request
+#' x$post('post')
+#'
+#' ## form requests
+#' (cli <- HttpClient$new(
+#'   url = "http://apps.kew.org/wcsp/advsearch.do"
+#' ))
+#' cli$post(
+#'   encode = "form",
+#'   body = list(
+#'     page = 'advancedSearch',
+#'     genus = 'Gagea',
+#'     species = 'pratensis',
+#'     selectedLevel = 'cont'
+#'   )
+#' )
+#'
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#' res <- x$post("post",
+#'   encode = "json",
+#'   body = list(
+#'     genus = 'Gagea',
+#'     species = 'pratensis'
+#'   )
+#' )
+#' jsonlite::fromJSON(res$parse())
+#'
+#'
+#' # path <- file.path(Sys.getenv("R_DOC_DIR"), "html/logo.jpg")
+#' # (x <- HttpClient$new(url = "https://httpbin.org"))
+#' # x$post("post",
+#' #    body = list(
+#' #      files = list(path = path)
+#' #    )
+#' # )
+#'
+NULL
diff --git a/R/query.R b/R/query.R
new file mode 100644
index 0000000..c6aa3c7
--- /dev/null
+++ b/R/query.R
@@ -0,0 +1,28 @@
# adapted from https://github.com/hadley/httr
# URL-escape a value unless it is wrapped in I() (class "AsIs"),
# which signals the caller has already encoded it.
encode <- function(x) {
  if (inherits(x, "AsIs")) x else curl::curl_escape(x)
}
+
# adapted from https://github.com/hadley/httr
# Logical vector: which elements of x carry a non-empty, non-NA name?
has_name <- function(x) {
  nms <- names(x)
  if (is.null(nms)) {
    rep(FALSE, length(x))
  } else {
    !is.na(nms) & nms != ""
  }
}
+
# adapted from https://github.com/hadley/httr
# Serialize a named list into an application/x-www-form-urlencoded
# query string; NULL elements are dropped first. Empty input -> "".
make_query <- function(x) {
  if (length(x) == 0) {
    return("")
  }
  if (!all(has_name(x))) {
    stop("All components of query must be named", call. = FALSE)
  }
  stopifnot(is.list(x))
  x <- ccp(x)
  keys <- curl::curl_escape(names(x))
  vals <- vapply(x, encode, character(1))
  paste(paste0(keys, "=", vals), collapse = "&")
}
diff --git a/R/response.R b/R/response.R
new file mode 100644
index 0000000..e1bb5a0
--- /dev/null
+++ b/R/response.R
@@ -0,0 +1,155 @@
+#' Base response object
+#'
+#' @export
+#' @param url (character) A url
+#' @param opts (list) curl options
+#' @param handle A handle
+#' @details
+#' \strong{Methods}
+#'   \describe{
+#'     \item{\code{parse()}}{
+#'       Parse the raw response content to text
+#'     }
+#'     \item{\code{success()}}{
+#'       Was status code less than or equal to 201.
+#'       returns boolean
+#'     }
+#'     \item{\code{status_http()}}{
+#'       Get HTTP status code, message, and explanation
+#'     }
+#'     \item{\code{raise_for_status()}}{
+#'       Check HTTP status and stop with appropriate
+#'       HTTP error code and message if >= 300.
+#'       - If you have \code{fauxpas} installed we use that,
+#'       otherwise use \pkg{httpcode}
+#'     }
+#'   }
+#' @format NULL
+#' @usage NULL
+#' @examples
+#' x <- HttpResponse$new(method = "get", url = "https://httpbin.org")
+#' x$url
+#' x$method
+#'
+#' x <- HttpClient$new(url = 'https://httpbin.org')
+#' (res <- x$get('get'))
+#' res$request_headers
+#' res$response_headers
+#' res$parse()
+#' res$status_code
+#' res$status_http()
+#' res$status_http()$status_code
+#' res$status_http()$message
+#' res$status_http()$explanation
+#' res$success()
+#'
+#' x <- HttpClient$new(url = 'https://httpbin.org/status/404')
+#' (res <- x$get())
+#'  \dontrun{res$raise_for_status()}
+#'
+#' x <- HttpClient$new(url = 'https://httpbin.org/status/414')
+#' (res <- x$get())
+#'  \dontrun{res$raise_for_status()}
HttpResponse <- R6::R6Class(
  'HttpResponse',
  public = list(
    method = NULL,           # http verb used, e.g. "get"
    url = NULL,              # final request url
    opts = NULL,             # curl options, if supplied
    handle = NULL,           # the curl handle used
    status_code = NULL,      # numeric HTTP status code
    request_headers = NULL,  # headers sent with the request
    response_headers = NULL, # parsed response headers (named list)
    modified = NULL,         # Last-Modified time from curl
    times = NULL,            # curl timing info
    content = NULL,          # raw body, or a file path when disk= used
    request = NULL,          # the prepared request object

    # Pretty-print the response: url, headers, query params, status
    print = function(x, ...) {
      cat("<crul response> ", sep = "\n")
      cat(paste0("  url: ", self$url), sep = "\n")
      cat("  request_headers: ", sep = "\n")
      for (i in seq_along(self$request_headers)) {
        cat(sprintf("    %s: %s", names(self$request_headers)[i], self$request_headers[[i]]), sep = "\n")
      }
      cat("  response_headers: ", sep = "\n")
      for (i in seq_along(self$response_headers)) {
        cat(sprintf("    %s: %s", names(self$response_headers)[i], self$response_headers[[i]]), sep = "\n")
      }
      params <- parse_params(self$url)
      if (!is.null(params)) {
        cat("  params: ", sep = "\n")
        for (i in seq_along(params)) {
          # display "name: value" instead of "name=value"
          # FIX: dropped a stray 4th positional argument ("=") that
          # was previously passed to sub() and landed in ignore.case
          cat(paste0("    ", sub("=", ": ", params[[i]])), sep = "\n")
        }
      }
      if (!is.null(self$status_code)) cat(paste0("  status: ", self$status_code), sep = "\n")
      invisible(self)
    },

    initialize = function(method, url, opts, handle, status_code, request_headers,
                          response_headers, modified, times, content, request) {
      if (!missing(method)) self$method <- method
      if (!missing(url)) self$url <- url
      if (!missing(opts)) self$opts <- opts
      if (!missing(handle)) self$handle <- handle
      if (!missing(status_code)) self$status_code <- as.numeric(status_code)
      if (!missing(request_headers)) self$request_headers <- request_headers
      if (!missing(response_headers)) self$response_headers <- response_headers
      if (!missing(modified)) self$modified <- modified
      if (!missing(times)) self$times <- times
      if (!missing(content)) self$content <- content
      if (!missing(request)) self$request <- request
    },

    # Decode the raw body to UTF-8 text; `encoding` names the source
    # encoding (defaults to UTF-8 with a message, see guess_encoding)
    parse = function(encoding = NULL) {
      iconv(readBin(self$content, character()),
            from = guess_encoding(encoding),
            to = "UTF-8")
    },

    # TRUE when the status code indicates success
    # NOTE(review): only codes <= 201 count; 202-206 are also 2xx
    # success codes and currently return FALSE — confirm intended
    success = function() {
      self$status_code <= 201
    },

    # Status code plus message/explanation via httpcode
    status_http = function(verbose = FALSE) {
      httpcode::http_code(code = self$status_code, verbose = verbose)
    },

    # Error out on status >= 300, preferring fauxpas when installed
    raise_for_status = function() {
      if (self$status_code >= 300) {
        if (!requireNamespace("fauxpas", quietly = TRUE)) {
          x <- httpcode::http_code(code = self$status_code)
          stop(sprintf("%s (HTTP %s)", x$message, x$status_code), call. = FALSE)
        } else {
          fauxpas::http(self, behavior = "stop")
        }
      }
    }
  )
)
+
# Resolve the encoding to use when parsing response content.
# NULL means "not specified": fall back to UTF-8 with a message;
# otherwise the supplied name is validated by check_encoding().
guess_encoding <- function(encoding = NULL) {
  if (is.null(encoding)) {
    message("No encoding supplied: defaulting to UTF-8.")
    "UTF-8"
  } else {
    check_encoding(encoding)
  }
}

# Validate an encoding name against iconvlist(); unknown encodings
# fall back to UTF-8 with a message rather than erroring.
check_encoding <- function(x) {
  recognized <- tolower(x) %in% tolower(iconvlist())
  if (recognized) {
    return(x)
  }
  message("Invalid encoding ", x, ": defaulting to UTF-8.")
  "UTF-8"
}
+
# Split a URL's query string into "name=value" strings;
# returns NULL when the URL has no query component.
parse_params <- function(x) {
  qs <- urltools::parameters(x)
  if (is.na(qs)) {
    return(NULL)
  }
  strsplit(qs, "&")[[1]]
}
diff --git a/R/use_agent.R b/R/use_agent.R
new file mode 100644
index 0000000..99eb8ea
--- /dev/null
+++ b/R/use_agent.R
@@ -0,0 +1,8 @@
# Build the default User-Agent string, e.g.
# "libcurl/7.51.0 r-curl/2.3 crul/0.2.0", from the libcurl, r-curl and
# crul versions available at call time.
make_ua <- function() {
  parts <- c(
    libcurl = curl::curl_version()$version,
    `r-curl` = as.character(utils::packageVersion("curl")),
    crul = as.character(utils::packageVersion("crul"))
  )
  paste(paste0(names(parts), "/", parts), collapse = " ")
}
diff --git a/R/writing-options.R b/R/writing-options.R
new file mode 100644
index 0000000..e011907
--- /dev/null
+++ b/R/writing-options.R
@@ -0,0 +1,18 @@
+#' Writing data options
+#'
+#' @name writing-options
+#' @examples
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#'
+#' # write to disk
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#' f <- tempfile()
+#' res <- x$get(disk = f)
+#' res$content # when using write to disk, content is a path
+#' readLines(res$content)
+#'
+#' # streaming response
+#' (x <- HttpClient$new(url = "https://httpbin.org"))
+#' res <- x$get('stream/50', stream = function(x) cat(rawToChar(x)))
+#' res$content # when streaming, content is NULL
+NULL
diff --git a/R/zzz.R b/R/zzz.R
new file mode 100644
index 0000000..e4d2325
--- /dev/null
+++ b/R/zzz.R
@@ -0,0 +1,3 @@
# Null-coalescing operator: return `x` unless it is NULL, else `y`.
`%||%` <- function(x, y) {
  if (!is.null(x)) x else y
}
+
# Compact a list: drop the elements that are NULL, preserving names
# and the order of the remaining elements.
ccp <- function(x) {
  x[!vapply(x, is.null, logical(1))]
}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..188ce5d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,248 @@
+crul
+====
+
+
+
+[![Build Status](https://travis-ci.org/ropensci/crul.svg?branch=master)](https://travis-ci.org/ropensci/crul)
+[![codecov](https://codecov.io/gh/ropensci/crul/branch/master/graph/badge.svg)](https://codecov.io/gh/ropensci/crul)
+[![rstudio mirror downloads](http://cranlogs.r-pkg.org/badges/crul)](https://github.com/metacran/cranlogs.app)
+[![cran version](http://www.r-pkg.org/badges/version/crul)](https://cran.r-project.org/package=crul)
+
+An HTTP client, taking inspiration from Rubyland's [faraday](https://rubygems.org/gems/faraday).
+
+## Installation
+
+CRAN version
+
+
+```r
+install.packages("crul")
+```
+
+Dev version
+
+
+```r
+install.packages("devtools")
+devtools::install_github("ropensci/crul")
+```
+
+
+```r
+library("crul")
+```
+
+## the client
+
+`HttpClient` is where to start
+
+
+```r
+(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>     timeout: 1
+#>   headers: 
+#>     a: hello world
+```
+
+Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set
+
+
+```r
+x$opts
+#> $timeout
+#> [1] 1
+```
+
+
+```r
+x$headers
+#> $a
+#> [1] "hello world"
+```
+
+You can also pass in curl options when you make HTTP requests, see below
+for examples.
+
+## do some http
+
+The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.
+
+Here, we'll do a __GET__ request on the route `/get` on our base url
+`https://httpbin.org` (the full url is then `https://httpbin.org/get`)
+
+
+```r
+res <- x$get("get")
+```
+
+The response from an HTTP request is another R6 class `HttpResponse`, which
+has slots for the outputs of the request, and some functions to deal with
+the response:
+
+Status code
+
+
+```r
+res$status_code
+#> [1] 200
+```
+
+Status information
+
+
+```r
+res$status_http()
+#> <Status code: 200>
+#>   Message: OK
+#>   Explanation: Request fulfilled, document follows
+```
+
+The content
+
+
+```r
+res$content
+#>   [1] 7b 0a 20 20 22 61 72 67 73 22 3a 20 7b 7d 2c 20 0a 20 20 22 68 65 61
+#>  [24] 64 65 72 73 22 3a 20 7b 0a 20 20 20 20 22 41 22 3a 20 22 68 65 6c 6c
+#>  [47] 6f 20 77 6f 72 6c 64 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 22
+#>  [70] 3a 20 22 2a 2f 2a 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 2d 45
+#>  [93] 6e 63 6f 64 69 6e 67 22 3a 20 22 67 7a 69 70 2c 20 64 65 66 6c 61 74
+#> [116] 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 74 74 70 62
+#> [139] 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 2d 41 67 65
+#> [162] 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 31 2e 30 20 72 2d
+#> [185] 63 75 72 6c 2f 32 2e 33 20 63 72 75 6c 2f 30 2e 32 2e 30 22 0a 20 20
+#> [208] 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 37 31 2e 36 33 2e
+#> [231] 32 32 33 2e 31 31 33 22 2c 20 0a 20 20 22 75 72 6c 22 3a 20 22 68 74
+#> [254] 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 65 74 22 0a
+#> [277] 7d 0a
+```
+
+HTTP method
+
+
+```r
+res$method
+#> [1] "get"
+```
+
+Request headers
+
+
+```r
+res$request_headers
+#> $useragent
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> $a
+#> [1] "hello world"
+```
+
+Response headers
+
+
+```r
+res$response_headers
+#> $status
+#> [1] "HTTP/1.1 200 OK"
+#> 
+#> $server
+#> [1] "nginx"
+#> 
+#> $date
+#> [1] "Tue, 03 Jan 2017 05:51:33 GMT"
+#> 
+#> $`content-type`
+#> [1] "application/json"
+#> 
+#> $`content-length`
+#> [1] "278"
+#> 
+#> $connection
+#> [1] "keep-alive"
+#> 
+#> $`access-control-allow-origin`
+#> [1] "*"
+#> 
+#> $`access-control-allow-credentials`
+#> [1] "true"
+```
+
+And you can parse the content with `parse()`
+
+
+```r
+res$parse()
+#> No encoding supplied: defaulting to UTF-8.
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"*/*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.51.0 r-curl/2.3 crul/0.2.0\"\n  }, \n  \"origin\": \"71.63.223.113\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+jsonlite::fromJSON(res$parse())
+#> No encoding supplied: defaulting to UTF-8.
+#> $args
+#> named list()
+#> 
+#> $headers
+#> $headers$A
+#> [1] "hello world"
+#> 
+#> $headers$Accept
+#> [1] "*/*"
+#> 
+#> $headers$`Accept-Encoding`
+#> [1] "gzip, deflate"
+#> 
+#> $headers$Host
+#> [1] "httpbin.org"
+#> 
+#> $headers$`User-Agent`
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> 
+#> $origin
+#> [1] "71.63.223.113"
+#> 
+#> $url
+#> [1] "https://httpbin.org/get"
+```
+
+## curl options
+
+
+```r
+res <- HttpClient$new(url = "http://api.gbif.org/v1/occurrence/search")
+res$get(query = list(limit = 100), timeout_ms = 100)
+#> Error in curl::curl_fetch_memory(x$url$url, handle = x$url$handle) : 
+#>   Timeout was reached
+```
+
+## TO DO
+
+Add integration for:
+
+* [webmockr](https://github.com/ropensci/webmockr)
+* [vcr](https://github.com/ropensci/vcr) 
+
+for flexible and easy HTTP request caching
+
+## Meta
+
+* Please [report any issues or bugs](https://github.com/ropensci/crul/issues).
+* License: MIT
+* Get citation information for `crul` in R doing `citation(package = 'crul')`
+* Please note that this project is released with a [Contributor Code of Conduct](CONDUCT.md).
+By participating in this project you agree to abide by its terms.
+
+[![ropensci_footer](https://ropensci.org/public_images/github_footer.png)](https://ropensci.org)
diff --git a/build/vignette.rds b/build/vignette.rds
new file mode 100644
index 0000000..c57c5ff
Binary files /dev/null and b/build/vignette.rds differ
diff --git a/debian/README.test b/debian/README.test
deleted file mode 100644
index 53fb4d7..0000000
--- a/debian/README.test
+++ /dev/null
@@ -1,8 +0,0 @@
-Notes on how this package can be tested.
-────────────────────────────────────────
-
-This package can be tested by running the provided test:
-
-   sh ./run-unit-test
-
-in order to confirm its integrity.
diff --git a/debian/changelog b/debian/changelog
deleted file mode 100644
index 2cc3dd1..0000000
--- a/debian/changelog
+++ /dev/null
@@ -1,5 +0,0 @@
-r-cran-crul (0.2.0-1) unstable; urgency=medium
-
-  * Initial release (closes: #851567)
-
- -- Andreas Tille <tille at debian.org>  Mon, 16 Jan 2017 17:28:14 +0100
diff --git a/debian/compat b/debian/compat
deleted file mode 100644
index f599e28..0000000
--- a/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-10
diff --git a/debian/control b/debian/control
deleted file mode 100644
index 5440587..0000000
--- a/debian/control
+++ /dev/null
@@ -1,32 +0,0 @@
-Source: r-cran-crul
-Maintainer: Debian Med Packaging Team <debian-med-packaging at lists.alioth.debian.org>
-Uploaders: Andreas Tille <tille at debian.org>
-Section: gnu-r
-Priority: optional
-Build-Depends: debhelper (>= 10),
-               dh-r,
-               r-base-dev,
-               r-cran-curl (>= 2.2),
-               r-cran-r6 (>= 2.2.0),
-               r-cran-mime,
-               r-cran-httpcode,
-               r-cran-urltools
-Standards-Version: 3.9.8
-Vcs-Browser: https://anonscm.debian.org/viewvc/debian-med/trunk/packages/R/r-cran-crul/
-Vcs-Svn: svn://anonscm.debian.org/debian-med/trunk/packages/R/r-cran-crul/
-Homepage: https://cran.r-project.org/package=crul
-
-Package: r-cran-crul
-Architecture: all
-Depends: ${R:Depends},
-         ${shlibs:Depends},
-         ${misc:Depends}
-Recommends: ${R:Recommends}
-Suggests: ${R:Suggests}
-Description: simple HTTP Client for GNU R
- A simple HTTP client, with tools for making HTTP requests,
- and mocking HTTP requests. The package is built on R6, and takes
- inspiration from Ruby's 'faraday' gem (<https://rubygems.org/gems/faraday>).
- The package name is a play on curl, the widely used command line tool
- for HTTP, and this package is built on top of the R package 'curl', an
- interface to 'libcurl' (<https://curl.haxx.se/libcurl>).
diff --git a/debian/copyright b/debian/copyright
deleted file mode 100644
index 06ab093..0000000
--- a/debian/copyright
+++ /dev/null
@@ -1,32 +0,0 @@
-Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: crul
-Upstream-Contact: Scott Chamberlain <myrmecocystus at gmail.com>
-Source: https://cran.r-project.org/package=crul
-
-Files: *
-Copyright: 2015-2017 Scott Chamberlain
-License: MIT
-
-Files: debian/*
-Copyright: 2017 Andreas Tille <tille at debian.org>
-License: MIT
-
-License: MIT
- Permission is hereby granted, free of charge, to any person obtaining
- a copy of this software and associated documentation files (the
- "Software"), to deal in the Software without restriction, including
- without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so, subject to
- the following conditions:
- .
- The above copyright notice and this permission notice shall be
- included in all copies or substantial portions of the Software.
- .
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/debian/docs b/debian/docs
deleted file mode 100644
index 6466d39..0000000
--- a/debian/docs
+++ /dev/null
@@ -1,3 +0,0 @@
-debian/tests/run-unit-test
-debian/README.test
-tests
diff --git a/debian/rules b/debian/rules
deleted file mode 100755
index 529c38a..0000000
--- a/debian/rules
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/make -f
-
-%:
-	dh $@ --buildsystem R
-
diff --git a/debian/source/format b/debian/source/format
deleted file mode 100644
index 163aaf8..0000000
--- a/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (quilt)
diff --git a/debian/tests/control b/debian/tests/control
deleted file mode 100644
index d746f15..0000000
--- a/debian/tests/control
+++ /dev/null
@@ -1,5 +0,0 @@
-Tests: run-unit-test
-Depends: @, r-cran-testthat
-Restrictions: allow-stderr
-
-
diff --git a/debian/tests/run-unit-test b/debian/tests/run-unit-test
deleted file mode 100644
index ac92771..0000000
--- a/debian/tests/run-unit-test
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh -e
-
-pkgname=crul
-debname=r-cran-crul
-
-if [ "$ADTTMP" = "" ] ; then
-    ADTTMP=`mktemp -d /tmp/${debname}-test.XXXXXX`
-    trap "rm -rf $ADTTMP" 0 INT QUIT ABRT PIPE TERM
-fi
-cd $ADTTMP
-cp -a /usr/share/doc/$debname/tests/* $ADTTMP
-gunzip -r *
-for testfile in *.R; do
-    echo "BEGIN TEST $testfile"
-    LC_ALL=C R --no-save < $testfile
-done
-
diff --git a/debian/watch b/debian/watch
deleted file mode 100644
index ed1d27d..0000000
--- a/debian/watch
+++ /dev/null
@@ -1,2 +0,0 @@
-version=4
-https://cran.r-project.org/src/contrib/crul_([-\d.]*)\.tar\.gz
diff --git a/inst/doc/crul_vignette.Rmd b/inst/doc/crul_vignette.Rmd
new file mode 100644
index 0000000..0c31d74
--- /dev/null
+++ b/inst/doc/crul_vignette.Rmd
@@ -0,0 +1,281 @@
+<!--
+%\VignetteEngine{knitr::knitr}
+%\VignetteIndexEntry{crul introduction}
+%\VignetteEncoding{UTF-8}
+-->
+
+
+
+crul introduction
+=================
+
+`crul` is an HTTP client for R.
+
+## Install
+
+Stable CRAN version
+
+
+```r
+install.packages("crul")
+```
+
+Dev version
+
+
+```r
+devtools::install_github("ropensci/crul")
+```
+
+
+```r
+library("crul")
+```
+
+## the client
+
+`HttpClient` is where to start
+
+
+```r
+(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>     timeout: 1
+#>   headers: 
+#>     a: hello world
+```
+
+Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set
+
+
+```r
+x$opts
+#> $timeout
+#> [1] 1
+```
+
+
+```r
+x$headers
+#> $a
+#> [1] "hello world"
+```
+
+## do some http
+
+The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.
+
+Here, we'll do a __GET__ request on the route `/get` on our base url
+`https://httpbin.org` (the full url is then `https://httpbin.org/get`)
+
+
+```r
+res <- x$get("get")
+```
+
+The response from a http request is another R6 class `HttpResponse`, which
+has slots for the outputs of the request, and some functions to deal with
+the response:
+
+Status code
+
+
+```r
+res$status_code
+#> [1] 200
+```
+
+The content
+
+
+```r
+res$content
+#>   [1] 7b 0a 20 20 22 61 72 67 73 22 3a 20 7b 7d 2c 20 0a 20 20 22 68 65 61
+#>  [24] 64 65 72 73 22 3a 20 7b 0a 20 20 20 20 22 41 22 3a 20 22 68 65 6c 6c
+#>  [47] 6f 20 77 6f 72 6c 64 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 22
+#>  [70] 3a 20 22 2a 2f 2a 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 2d 45
+#>  [93] 6e 63 6f 64 69 6e 67 22 3a 20 22 67 7a 69 70 2c 20 64 65 66 6c 61 74
+#> [116] 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 74 74 70 62
+#> [139] 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 2d 41 67 65
+#> [162] 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 31 2e 30 20 72 2d
+#> [185] 63 75 72 6c 2f 32 2e 33 20 63 72 75 6c 2f 30 2e 32 2e 30 22 0a 20 20
+#> [208] 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 37 31 2e 36 33 2e
+#> [231] 32 32 33 2e 31 31 33 22 2c 20 0a 20 20 22 75 72 6c 22 3a 20 22 68 74
+#> [254] 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 65 74 22 0a
+#> [277] 7d 0a
+```
+
+HTTP method
+
+
+```r
+res$method
+#> [1] "get"
+```
+
+Request headers
+
+
+```r
+res$request_headers
+#> $useragent
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> $a
+#> [1] "hello world"
+```
+
+Response headers
+
+
+```r
+res$response_headers
+#> $status
+#> [1] "HTTP/1.1 200 OK"
+#> 
+#> $server
+#> [1] "nginx"
+#> 
+#> $date
+#> [1] "Tue, 03 Jan 2017 05:52:03 GMT"
+#> 
+#> $`content-type`
+#> [1] "application/json"
+#> 
+#> $`content-length`
+#> [1] "278"
+#> 
+#> $connection
+#> [1] "keep-alive"
+#> 
+#> $`access-control-allow-origin`
+#> [1] "*"
+#> 
+#> $`access-control-allow-credentials`
+#> [1] "true"
+```
+
+And you can parse the content with a provided function:
+
+
+```r
+res$parse()
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"*/*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.51.0 r-curl/2.3 crul/0.2.0\"\n  }, \n  \"origin\": \"71.63.223.113\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+jsonlite::fromJSON(res$parse())
+#> $args
+#> named list()
+#> 
+#> $headers
+#> $headers$A
+#> [1] "hello world"
+#> 
+#> $headers$Accept
+#> [1] "*/*"
+#> 
+#> $headers$`Accept-Encoding`
+#> [1] "gzip, deflate"
+#> 
+#> $headers$Host
+#> [1] "httpbin.org"
+#> 
+#> $headers$`User-Agent`
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> 
+#> $origin
+#> [1] "71.63.223.113"
+#> 
+#> $url
+#> [1] "https://httpbin.org/get"
+```
+
+With the `HttpClient` object, which holds any configuration stuff
+we set, we can make other HTTP verb requests. For example, a `POST`
+request:
+
+
+```r
+x$post(
+  path = "post", 
+  body = list(hello = "world")
+)
+#> <crul response> 
+#>   url: https://httpbin.org/post
+#>   request_headers: 
+#>     useragent: libcurl/7.51.0 r-curl/2.3 crul/0.2.0
+#>     a: hello world
+#>   response_headers: 
+#>     status: HTTP/1.1 200 OK
+#>     server: nginx
+#>     date: Tue, 03 Jan 2017 05:52:03 GMT
+#>     content-type: application/json
+#>     content-length: 491
+#>     connection: keep-alive
+#>     access-control-allow-origin: *
+#>     access-control-allow-credentials: true
+#>   status: 200
+```
+
+
+## write to disk
+
+
+```r
+x <- HttpClient$new(url = "https://httpbin.org")
+f <- tempfile()
+res <- x$get(disk = f)
+# when using write to disk, content is a path
+res$content 
+#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpoZ8Rrd/fileee7a81dea18"
+```
+
+Read lines
+
+
+```r
+readLines(res$content, n = 10)
+#>  [1] "<!DOCTYPE html>"                                                                           
+#>  [2] "<html>"                                                                                    
+#>  [3] "<head>"                                                                                    
+#>  [4] "  <meta http-equiv='content-type' value='text/html;charset=utf8'>"                         
+#>  [5] "  <meta name='generator' value='Ronn/v0.7.3 (http://github.com/rtomayko/ronn/tree/0.7.3)'>"
+#>  [6] "  <title>httpbin(1): HTTP Client Testing Service</title>"                                  
+#>  [7] "  <style type='text/css' media='all'>"                                                     
+#>  [8] "  /* style: man */"                                                                        
+#>  [9] "  body#manpage {margin:0}"                                                                 
+#> [10] "  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}"
+```
+
+## stream data
+
+
+```r
+(x <- HttpClient$new(url = "https://httpbin.org"))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>   headers:
+res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 0, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 1, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 2, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 3, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 4, "origin": "71.63.223.113"}
+# when streaming, content is NULL
+res$content 
+#> NULL
+```
diff --git a/inst/doc/crul_vignette.html b/inst/doc/crul_vignette.html
new file mode 100644
index 0000000..a26d3a7
--- /dev/null
+++ b/inst/doc/crul_vignette.html
@@ -0,0 +1,450 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
+
+<title>crul introduction</title>
+
+<script type="text/javascript">
+window.onload = function() {
+  var imgs = document.getElementsByTagName('img'), i, img;
+  for (i = 0; i < imgs.length; i++) {
+    img = imgs[i];
+    // center an image if it is the only element of its parent
+    if (img.parentElement.childElementCount === 1)
+      img.parentElement.style.textAlign = 'center';
+  }
+};
+</script>
+
+<!-- Styles for R syntax highlighter -->
+<style type="text/css">
+   pre .operator,
+   pre .paren {
+     color: rgb(104, 118, 135)
+   }
+
+   pre .literal {
+     color: #990073
+   }
+
+   pre .number {
+     color: #099;
+   }
+
+   pre .comment {
+     color: #998;
+     font-style: italic
+   }
+
+   pre .keyword {
+     color: #900;
+     font-weight: bold
+   }
+
+   pre .identifier {
+     color: rgb(0, 0, 0);
+   }
+
+   pre .string {
+     color: #d14;
+   }
+</style>
+
+<!-- R syntax highlighter -->
+<script type="text/javascript">
+var hljs=new function(){function m(p){return p.replace(/&/gm,"&").replace(/</gm,"<")}function f(r,q,p){return RegExp(q,"m"+(r.cI?"i":"")+(p?"g":""))}function b(r){for(var p=0;p<r.childNodes.length;p++){var q=r.childNodes[p];if(q.nodeName=="CODE"){return q}if(!(q.nodeType==3&&q.nodeValue.match(/\s+/))){break}}}function h(t,s){var p="";for(var r=0;r<t.childNodes.length;r++){if(t.childNodes[r].nodeType==3){var q=t.childNodes[r].nodeValue;if(s){q=q.replace(/\n/g,"")}p+=q}else{if(t.chi [...]
+hljs.initHighlightingOnLoad();
+</script>
+
+
+
+<style type="text/css">
+body, td {
+   font-family: sans-serif;
+   background-color: white;
+   font-size: 13px;
+}
+
+body {
+  max-width: 800px;
+  margin: auto;
+  padding: 1em;
+  line-height: 20px;
+}
+
+tt, code, pre {
+   font-family: 'DejaVu Sans Mono', 'Droid Sans Mono', 'Lucida Console', Consolas, Monaco, monospace;
+}
+
+h1 {
+   font-size:2.2em;
+}
+
+h2 {
+   font-size:1.8em;
+}
+
+h3 {
+   font-size:1.4em;
+}
+
+h4 {
+   font-size:1.0em;
+}
+
+h5 {
+   font-size:0.9em;
+}
+
+h6 {
+   font-size:0.8em;
+}
+
+a:visited {
+   color: rgb(50%, 0%, 50%);
+}
+
+pre, img {
+  max-width: 100%;
+}
+pre {
+  overflow-x: auto;
+}
+pre code {
+   display: block; padding: 0.5em;
+}
+
+code {
+  font-size: 92%;
+  border: 1px solid #ccc;
+}
+
+code[class] {
+  background-color: #F8F8F8;
+}
+
+table, td, th {
+  border: none;
+}
+
+blockquote {
+   color:#666666;
+   margin:0;
+   padding-left: 1em;
+   border-left: 0.5em #EEE solid;
+}
+
+hr {
+   height: 0px;
+   border-bottom: none;
+   border-top-width: thin;
+   border-top-style: dotted;
+   border-top-color: #999999;
+}
+
+ at media print {
+   * {
+      background: transparent !important;
+      color: black !important;
+      filter:none !important;
+      -ms-filter: none !important;
+   }
+
+   body {
+      font-size:12pt;
+      max-width:100%;
+   }
+
+   a, a:visited {
+      text-decoration: underline;
+   }
+
+   hr {
+      visibility: hidden;
+      page-break-before: always;
+   }
+
+   pre, blockquote {
+      padding-right: 1em;
+      page-break-inside: avoid;
+   }
+
+   tr, img {
+      page-break-inside: avoid;
+   }
+
+   img {
+      max-width: 100% !important;
+   }
+
+   @page :left {
+      margin: 15mm 20mm 15mm 10mm;
+   }
+
+   @page :right {
+      margin: 15mm 10mm 15mm 20mm;
+   }
+
+   p, h2, h3 {
+      orphans: 3; widows: 3;
+   }
+
+   h2, h3 {
+      page-break-after: avoid;
+   }
+}
+</style>
+
+
+
+</head>
+
+<body>
+<!--
+%\VignetteEngine{knitr::knitr}
+%\VignetteIndexEntry{crul introduction}
+%\VignetteEncoding{UTF-8}
+-->
+
+<h1>crul introduction</h1>
+
+<p><code>crul</code> is an HTTP client for R.</p>
+
+<h2>Install</h2>
+
+<p>Stable CRAN version</p>
+
+<pre><code class="r">install.packages("crul")
+</code></pre>
+
+<p>Dev version</p>
+
+<pre><code class="r">devtools::install_github("ropensci/crul")
+</code></pre>
+
+<pre><code class="r">library("crul")
+</code></pre>
+
+<h2>the client</h2>
+
+<p><code>HttpClient</code> is where to start</p>
+
+<pre><code class="r">(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>     timeout: 1
+#>   headers: 
+#>     a: hello world
+</code></pre>
+
+<p>Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set</p>
+
+<pre><code class="r">x$opts
+#> $timeout
+#> [1] 1
+</code></pre>
+
+<pre><code class="r">x$headers
+#> $a
+#> [1] "hello world"
+</code></pre>
+
+<h2>do some http</h2>
+
+<p>The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.</p>
+
+<p>Here, we'll do a <strong>GET</strong> request on the route <code>/get</code> on our base url
+<code>https://httpbin.org</code> (the full url is then <code>https://httpbin.org/get</code>)</p>
+
+<pre><code class="r">res <- x$get("get")
+</code></pre>
+
+<p>The response from a http request is another R6 class <code>HttpResponse</code>, which
+has slots for the outputs of the request, and some functions to deal with
+the response:</p>
+
+<p>Status code</p>
+
+<pre><code class="r">res$status_code
+#> [1] 200
+</code></pre>
+
+<p>The content</p>
+
+<pre><code class="r">res$content
+#>   [1] 7b 0a 20 20 22 61 72 67 73 22 3a 20 7b 7d 2c 20 0a 20 20 22 68 65 61
+#>  [24] 64 65 72 73 22 3a 20 7b 0a 20 20 20 20 22 41 22 3a 20 22 68 65 6c 6c
+#>  [47] 6f 20 77 6f 72 6c 64 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 22
+#>  [70] 3a 20 22 2a 2f 2a 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 2d 45
+#>  [93] 6e 63 6f 64 69 6e 67 22 3a 20 22 67 7a 69 70 2c 20 64 65 66 6c 61 74
+#> [116] 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 74 74 70 62
+#> [139] 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 2d 41 67 65
+#> [162] 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 31 2e 30 20 72 2d
+#> [185] 63 75 72 6c 2f 32 2e 33 20 63 72 75 6c 2f 30 2e 32 2e 30 22 0a 20 20
+#> [208] 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 37 31 2e 36 33 2e
+#> [231] 32 32 33 2e 31 31 33 22 2c 20 0a 20 20 22 75 72 6c 22 3a 20 22 68 74
+#> [254] 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 65 74 22 0a
+#> [277] 7d 0a
+</code></pre>
+
+<p>HTTP method</p>
+
+<pre><code class="r">res$method
+#> [1] "get"
+</code></pre>
+
+<p>Request headers</p>
+
+<pre><code class="r">res$request_headers
+#> $useragent
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> $a
+#> [1] "hello world"
+</code></pre>
+
+<p>Response headers</p>
+
+<pre><code class="r">res$response_headers
+#> $status
+#> [1] "HTTP/1.1 200 OK"
+#> 
+#> $server
+#> [1] "nginx"
+#> 
+#> $date
+#> [1] "Tue, 03 Jan 2017 05:52:03 GMT"
+#> 
+#> $`content-type`
+#> [1] "application/json"
+#> 
+#> $`content-length`
+#> [1] "278"
+#> 
+#> $connection
+#> [1] "keep-alive"
+#> 
+#> $`access-control-allow-origin`
+#> [1] "*"
+#> 
+#> $`access-control-allow-credentials`
+#> [1] "true"
+</code></pre>
+
+<p>And you can parse the content with a provided function:</p>
+
+<pre><code class="r">res$parse()
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"*/*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.51.0 r-curl/2.3 crul/0.2.0\"\n  }, \n  \"origin\": \"71.63.223.113\", \n  \"url\": \"https://httpbin.org/get\&quot [...]
+jsonlite::fromJSON(res$parse())
+#> $args
+#> named list()
+#> 
+#> $headers
+#> $headers$A
+#> [1] "hello world"
+#> 
+#> $headers$Accept
+#> [1] "*/*"
+#> 
+#> $headers$`Accept-Encoding`
+#> [1] "gzip, deflate"
+#> 
+#> $headers$Host
+#> [1] "httpbin.org"
+#> 
+#> $headers$`User-Agent`
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> 
+#> $origin
+#> [1] "71.63.223.113"
+#> 
+#> $url
+#> [1] "https://httpbin.org/get"
+</code></pre>
+
+<p>With the <code>HttpClient</code> object, which holds any configuration stuff
+we set, we can make other HTTP verb requests. For example, a <code>POST</code>
+request:</p>
+
+<pre><code class="r">x$post(
+  path = "post", 
+  body = list(hello = "world")
+)
+#> <crul response> 
+#>   url: https://httpbin.org/post
+#>   request_headers: 
+#>     useragent: libcurl/7.51.0 r-curl/2.3 crul/0.2.0
+#>     a: hello world
+#>   response_headers: 
+#>     status: HTTP/1.1 200 OK
+#>     server: nginx
+#>     date: Tue, 03 Jan 2017 05:52:03 GMT
+#>     content-type: application/json
+#>     content-length: 491
+#>     connection: keep-alive
+#>     access-control-allow-origin: *
+#>     access-control-allow-credentials: true
+#>   status: 200
+</code></pre>
+
+<h2>write to disk</h2>
+
+<pre><code class="r">x <- HttpClient$new(url = "https://httpbin.org")
+f <- tempfile()
+res <- x$get(disk = f)
+# when using write to disk, content is a path
+res$content 
+#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpoZ8Rrd/fileee7a81dea18"
+</code></pre>
+
+<p>Read lines</p>
+
+<pre><code class="r">readLines(res$content, n = 10)
+#>  [1] "<!DOCTYPE html>"                                                                           
+#>  [2] "<html>"                                                                                    
+#>  [3] "<head>"                                                                                    
+#>  [4] "  <meta http-equiv='content-type' value='text/html;charset=utf8'>"                         
+#>  [5] "  <meta name='generator' value='Ronn/v0.7.3 (http://github.com/rtomayko/ronn/tree/0.7.3)'>"
+#>  [6] "  <title>httpbin(1): HTTP Client Testing Service</title>"                                  
+#>  [7] "  <style type='text/css' media='all'>"                                                     
+#>  [8] "  /* style: man */"                                                                        
+#>  [9] "  body#manpage {margin:0}"                                                                 
+#> [10] "  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}"
+</code></pre>
+
+<h2>stream data</h2>
+
+<pre><code class="r">(x <- HttpClient$new(url = "https://httpbin.org"))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>   headers:
+res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 0, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 1, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 2, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 3, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 4, "origin": "71.63.223.113"}
+# when streaming, content is NULL
+res$content 
+#> NULL
+</code></pre>
+
+</body>
+
+</html>
diff --git a/inst/vign/crul_vignette.Rmd b/inst/vign/crul_vignette.Rmd
new file mode 100644
index 0000000..39ae112
--- /dev/null
+++ b/inst/vign/crul_vignette.Rmd
@@ -0,0 +1,156 @@
+<!--
+%\VignetteEngine{knitr::knitr}
+%\VignetteIndexEntry{crul introduction}
+%\VignetteEncoding{UTF-8}
+-->
+
+```{r echo=FALSE}
+knitr::opts_chunk$set(
+  comment = "#>",
+  collapse = TRUE,
+  warning = FALSE,
+  message = FALSE
+)
+```
+
+crul introduction
+=================
+
+`crul` is an HTTP client for R.
+
+## Install
+
+Stable CRAN version
+
+```{r eval=FALSE}
+install.packages("crul")
+```
+
+Dev version
+
+```{r eval=FALSE}
+devtools::install_github("ropensci/crul")
+```
+
+```{r}
+library("crul")
+```
+
+## the client
+
+`HttpClient` is where to start
+
+```{r}
+(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+```
+
+Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set
+
+```{r}
+x$opts
+```
+
+```{r}
+x$headers
+```
+
+## do some http
+
+The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.
+
+Here, we'll do a __GET__ request on the route `/get` on our base url
+`https://httpbin.org` (the full url is then `https://httpbin.org/get`)
+
+```{r}
+res <- x$get("get")
+```
+
+The response from an HTTP request is another R6 class `HttpResponse`, which
+has slots for the outputs of the request, and some functions to deal with
+the response:
+
+Status code
+
+```{r}
+res$status_code
+```
+
+The content
+
+```{r}
+res$content
+```
+
+HTTP method
+
+```{r}
+res$method
+```
+
+Request headers
+
+```{r}
+res$request_headers
+```
+
+Response headers
+
+```{r}
+res$response_headers
+```
+
+And you can parse the content with a provided function:
+
+```{r}
+res$parse()
+jsonlite::fromJSON(res$parse())
+```
+
+With the `HttpClient` object, which holds any configuration stuff
+we set, we can make other HTTP verb requests. For example, a `POST`
+request:
+
+```{r}
+x$post(
+  path = "post", 
+  body = list(hello = "world")
+)
+```
+
+
+## write to disk
+
+```{r}
+x <- HttpClient$new(url = "https://httpbin.org")
+f <- tempfile()
+res <- x$get(disk = f)
+# when using write to disk, content is a path
+res$content 
+```
+
+Read lines
+
+```{r}
+readLines(res$content, n = 10)
+```
+
+## stream data
+
+```{r}
+(x <- HttpClient$new(url = "https://httpbin.org"))
+res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
+# when streaming, content is NULL
+res$content 
+```
diff --git a/inst/vign/crul_vignette.md b/inst/vign/crul_vignette.md
new file mode 100644
index 0000000..0c31d74
--- /dev/null
+++ b/inst/vign/crul_vignette.md
@@ -0,0 +1,281 @@
+<!--
+%\VignetteEngine{knitr::knitr}
+%\VignetteIndexEntry{crul introduction}
+%\VignetteEncoding{UTF-8}
+-->
+
+
+
+crul introduction
+=================
+
+`crul` is an HTTP client for R.
+
+## Install
+
+Stable CRAN version
+
+
+```r
+install.packages("crul")
+```
+
+Dev version
+
+
+```r
+devtools::install_github("ropensci/crul")
+```
+
+
+```r
+library("crul")
+```
+
+## the client
+
+`HttpClient` is where to start
+
+
+```r
+(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>     timeout: 1
+#>   headers: 
+#>     a: hello world
+```
+
+Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set
+
+
+```r
+x$opts
+#> $timeout
+#> [1] 1
+```
+
+
+```r
+x$headers
+#> $a
+#> [1] "hello world"
+```
+
+## do some http
+
+The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.
+
+Here, we'll do a __GET__ request on the route `/get` on our base url
+`https://httpbin.org` (the full url is then `https://httpbin.org/get`)
+
+
+```r
+res <- x$get("get")
+```
+
+The response from an HTTP request is another R6 class `HttpResponse`, which
+has slots for the outputs of the request, and some functions to deal with
+the response:
+
+Status code
+
+
+```r
+res$status_code
+#> [1] 200
+```
+
+The content
+
+
+```r
+res$content
+#>   [1] 7b 0a 20 20 22 61 72 67 73 22 3a 20 7b 7d 2c 20 0a 20 20 22 68 65 61
+#>  [24] 64 65 72 73 22 3a 20 7b 0a 20 20 20 20 22 41 22 3a 20 22 68 65 6c 6c
+#>  [47] 6f 20 77 6f 72 6c 64 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 22
+#>  [70] 3a 20 22 2a 2f 2a 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 2d 45
+#>  [93] 6e 63 6f 64 69 6e 67 22 3a 20 22 67 7a 69 70 2c 20 64 65 66 6c 61 74
+#> [116] 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 74 74 70 62
+#> [139] 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 2d 41 67 65
+#> [162] 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 31 2e 30 20 72 2d
+#> [185] 63 75 72 6c 2f 32 2e 33 20 63 72 75 6c 2f 30 2e 32 2e 30 22 0a 20 20
+#> [208] 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 37 31 2e 36 33 2e
+#> [231] 32 32 33 2e 31 31 33 22 2c 20 0a 20 20 22 75 72 6c 22 3a 20 22 68 74
+#> [254] 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 65 74 22 0a
+#> [277] 7d 0a
+```
+
+HTTP method
+
+
+```r
+res$method
+#> [1] "get"
+```
+
+Request headers
+
+
+```r
+res$request_headers
+#> $useragent
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> $a
+#> [1] "hello world"
+```
+
+Response headers
+
+
+```r
+res$response_headers
+#> $status
+#> [1] "HTTP/1.1 200 OK"
+#> 
+#> $server
+#> [1] "nginx"
+#> 
+#> $date
+#> [1] "Tue, 03 Jan 2017 05:52:03 GMT"
+#> 
+#> $`content-type`
+#> [1] "application/json"
+#> 
+#> $`content-length`
+#> [1] "278"
+#> 
+#> $connection
+#> [1] "keep-alive"
+#> 
+#> $`access-control-allow-origin`
+#> [1] "*"
+#> 
+#> $`access-control-allow-credentials`
+#> [1] "true"
+```
+
+And you can parse the content with a provided function:
+
+
+```r
+res$parse()
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"*/*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.51.0 r-curl/2.3 crul/0.2.0\"\n  }, \n  \"origin\": \"71.63.223.113\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+jsonlite::fromJSON(res$parse())
+#> $args
+#> named list()
+#> 
+#> $headers
+#> $headers$A
+#> [1] "hello world"
+#> 
+#> $headers$Accept
+#> [1] "*/*"
+#> 
+#> $headers$`Accept-Encoding`
+#> [1] "gzip, deflate"
+#> 
+#> $headers$Host
+#> [1] "httpbin.org"
+#> 
+#> $headers$`User-Agent`
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> 
+#> $origin
+#> [1] "71.63.223.113"
+#> 
+#> $url
+#> [1] "https://httpbin.org/get"
+```
+
+With the `HttpClient` object, which holds any configuration stuff
+we set, we can make other HTTP verb requests. For example, a `POST`
+request:
+
+
+```r
+x$post(
+  path = "post", 
+  body = list(hello = "world")
+)
+#> <crul response> 
+#>   url: https://httpbin.org/post
+#>   request_headers: 
+#>     useragent: libcurl/7.51.0 r-curl/2.3 crul/0.2.0
+#>     a: hello world
+#>   response_headers: 
+#>     status: HTTP/1.1 200 OK
+#>     server: nginx
+#>     date: Tue, 03 Jan 2017 05:52:03 GMT
+#>     content-type: application/json
+#>     content-length: 491
+#>     connection: keep-alive
+#>     access-control-allow-origin: *
+#>     access-control-allow-credentials: true
+#>   status: 200
+```
+
+
+## write to disk
+
+
+```r
+x <- HttpClient$new(url = "https://httpbin.org")
+f <- tempfile()
+res <- x$get(disk = f)
+# when using write to disk, content is a path
+res$content 
+#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpoZ8Rrd/fileee7a81dea18"
+```
+
+Read lines
+
+
+```r
+readLines(res$content, n = 10)
+#>  [1] "<!DOCTYPE html>"                                                                           
+#>  [2] "<html>"                                                                                    
+#>  [3] "<head>"                                                                                    
+#>  [4] "  <meta http-equiv='content-type' value='text/html;charset=utf8'>"                         
+#>  [5] "  <meta name='generator' value='Ronn/v0.7.3 (http://github.com/rtomayko/ronn/tree/0.7.3)'>"
+#>  [6] "  <title>httpbin(1): HTTP Client Testing Service</title>"                                  
+#>  [7] "  <style type='text/css' media='all'>"                                                     
+#>  [8] "  /* style: man */"                                                                        
+#>  [9] "  body#manpage {margin:0}"                                                                 
+#> [10] "  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}"
+```
+
+## stream data
+
+
+```r
+(x <- HttpClient$new(url = "https://httpbin.org"))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>   headers:
+res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 0, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 1, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 2, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 3, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 4, "origin": "71.63.223.113"}
+# when streaming, content is NULL
+res$content 
+#> NULL
+```
diff --git a/man/HttpClient.Rd b/man/HttpClient.Rd
new file mode 100644
index 0000000..bf0d0bc
--- /dev/null
+++ b/man/HttpClient.Rd
@@ -0,0 +1,95 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/client.R
+\docType{data}
+\name{HttpClient}
+\alias{HttpClient}
+\title{HTTP client}
+\arguments{
+\item{url}{(character) A url. One of \code{url} or \code{handle} required.}
+
+\item{opts}{(list) curl options}
+
+\item{handle}{A handle}
+}
+\description{
+HTTP client
+}
+\details{
+\strong{Methods}
+  \describe{
+    \item{\code{get(path, query, disk, stream, ...)}}{
+      Make a GET request
+    }
+    \item{\code{post(path, query, body, disk, stream, ...)}}{
+      Make a POST request
+    }
+    \item{\code{put(path, query, body, disk, stream, ...)}}{
+      Make a PUT request
+    }
+    \item{\code{patch(path, query, body, disk, stream, ...)}}{
+      Make a PATCH request
+    }
+    \item{\code{delete(path, query, body, disk, stream, ...)}}{
+      Make a DELETE request
+    }
+    \item{\code{head(path, disk, stream, ...)}}{
+      Make a HEAD request
+    }
+  }
+
+Possible parameters (not all are allowed in each HTTP verb):
+\itemize{
+ \item path - URL path, appended to the base URL
+ \item query - query terms, as a list
+ \item body - body as an R list
+ \item encode - one of form, multipart, json, or raw
+ \item disk - a path to write to. if NULL (default), memory used
+ \item stream - an R function to determine how to stream data. if
+ NULL (default), memory used
+ \item ... curl options, only those in the acceptable set from
+ \code{\link[curl]{curl_options}} except the following: httpget, httppost,
+ post, postfields, postfieldsize, and customrequest
+}
+}
+\examples{
+(x <- HttpClient$new(url = "https://httpbin.org"))
+x$url
+(res_get1 <- x$get('get'))
+res_get1$content
+res_get1$response_headers
+res_get1$parse()
+
+(res_get2 <- x$get('get', query = list(hello = "world")))
+res_get2$parse()
+library("jsonlite")
+jsonlite::fromJSON(res_get2$parse())
+
+# post request
+(res_post <- x$post('post', body = list(hello = "world")))
+
+## empty body request
+x$post('post')
+
+# put request
+(res_put <- x$put('put'))
+
+# delete request
+(res_delete <- x$delete('delete'))
+
+# patch request
+(res_patch <- x$patch('patch'))
+
+# head request
+(res_head <- x$head())
+
+# query params are URL encoded for you, so DO NOT do it yourself
+## if you url encode yourself, it gets double encoded, and that's bad
+(x <- HttpClient$new(url = "https://httpbin.org"))
+res <- x$get("get", query = list(a = 'hello world'), verbose = TRUE)
+}
+\seealso{
+\code{\link{post-requests}}, \code{\link{http-headers}},
+\code{\link{writing-options}}
+}
+\keyword{datasets}
+
diff --git a/man/HttpResponse.Rd b/man/HttpResponse.Rd
new file mode 100644
index 0000000..9581d71
--- /dev/null
+++ b/man/HttpResponse.Rd
@@ -0,0 +1,64 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/response.R
+\docType{data}
+\name{HttpResponse}
+\alias{HttpResponse}
+\title{Base response object}
+\arguments{
+\item{url}{(character) A url}
+
+\item{opts}{(list) curl options}
+
+\item{handle}{A handle}
+}
+\description{
+Base response object
+}
+\details{
+\strong{Methods}
+  \describe{
+    \item{\code{parse()}}{
+      Parse the raw response content to text
+    }
+    \item{\code{success()}}{
+      Was status code less than or equal to 201.
+      returns boolean
+    }
+    \item{\code{status_http()}}{
+      Get HTTP status code, message, and explanation
+    }
+    \item{\code{raise_for_status()}}{
+      Check HTTP status and stop with appropriate
+      HTTP error code and message if >= 300.
+      - If you have \code{fauxpas} installed we use that,
+      otherwise use \pkg{httpcode}
+    }
+  }
+}
+\examples{
+x <- HttpResponse$new(method = "get", url = "https://httpbin.org")
+x$url
+x$method
+
+x <- HttpClient$new(url = 'https://httpbin.org')
+(res <- x$get('get'))
+res$request_headers
+res$response_headers
+res$parse()
+res$status_code
+res$status_http()
+res$status_http()$status_code
+res$status_http()$message
+res$status_http()$explanation
+res$success()
+
+x <- HttpClient$new(url = 'https://httpbin.org/status/404')
+(res <- x$get())
+ \dontrun{res$raise_for_status()}
+
+x <- HttpClient$new(url = 'https://httpbin.org/status/414')
+(res <- x$get())
+ \dontrun{res$raise_for_status()}
+}
+\keyword{datasets}
+
diff --git a/man/crul-package.Rd b/man/crul-package.Rd
new file mode 100644
index 0000000..8ebaff3
--- /dev/null
+++ b/man/crul-package.Rd
@@ -0,0 +1,29 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/crul-package.r
+\docType{package}
+\name{crul-package}
+\alias{crul}
+\alias{crul-package}
+\title{\strong{HTTP R client}}
+\description{
+\strong{HTTP R client}
+}
+\section{Package API}{
+
+\itemize{
+ \item \code{\link{HttpClient}} - create a connection client, set all
+ your http options, make http requests
+ \item \code{\link{HttpResponse}} - mostly for internal use, handles
+ http responses
+}
+}
+
+\section{HTTP conditions}{
+
+We use \code{fauxpas} if you have it installed for handling HTTP
+conditions but if it's not installed we use \pkg{httpcode}
+}
+\author{
+Scott Chamberlain \email{myrmecocystus at gmail.com}
+}
+
diff --git a/man/curl-options.Rd b/man/curl-options.Rd
new file mode 100644
index 0000000..b5a8cb3
--- /dev/null
+++ b/man/curl-options.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/curl-options.R
+\name{curl-options}
+\alias{curl-options}
+\title{curl options}
+\description{
+curl options
+}
+\examples{
+# set curl options on client initialization
+(res <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    verbose = TRUE,
+    useragent = "hello world"
+  )
+))
+res$opts
+res$get('get')
+
+# or set curl options when performing HTTP operation
+(res <- HttpClient$new(url = "https://httpbin.org"))
+res$get('get', verbose = TRUE)
+\dontrun{res$get('get', stuff = "things")}
+\dontrun{res$get('get', httpget = TRUE)}
+}
+
diff --git a/man/handle.Rd b/man/handle.Rd
new file mode 100644
index 0000000..8850b74
--- /dev/null
+++ b/man/handle.Rd
@@ -0,0 +1,25 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/handle.R
+\name{handle}
+\alias{handle}
+\title{Make a handle}
+\usage{
+handle(url, ...)
+}
+\arguments{
+\item{url}{(character) A url. required.}
+
+\item{...}{options passed on to \code{\link[curl]{new_handle}}}
+}
+\description{
+Make a handle
+}
+\examples{
+handle("https://httpbin.org")
+
+# handles - pass in your own handle
+h <- handle("https://httpbin.org")
+(res <- HttpClient$new(handle = h))
+out <- res$get("get")
+}
+
diff --git a/man/http-headers.Rd b/man/http-headers.Rd
new file mode 100644
index 0000000..84b8f73
--- /dev/null
+++ b/man/http-headers.Rd
@@ -0,0 +1,41 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/http-headers.R
+\name{http-headers}
+\alias{http-headers}
+\title{Working with HTTP headers}
+\description{
+Working with HTTP headers
+}
+\examples{
+(x <- HttpClient$new(url = "https://httpbin.org"))
+
+# set headers
+(res <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    verbose = TRUE
+  ),
+  headers = list(
+    a = "stuff",
+    b = "things"
+  )
+))
+res$headers
+# reassign header value
+res$headers$a <- "that"
+# define new header
+res$headers$c <- "what"
+# request
+res$get('get')
+
+## setting content-type via headers
+(res <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    verbose = TRUE
+  ),
+  headers = list(`Content-Type` = "application/json")
+))
+res$get('get')
+}
+
diff --git a/man/post-requests.Rd b/man/post-requests.Rd
new file mode 100644
index 0000000..7b85b9a
--- /dev/null
+++ b/man/post-requests.Rd
@@ -0,0 +1,52 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/post-requests.R
+\name{post-requests}
+\alias{post-requests}
+\title{HTTP POST requests}
+\description{
+HTTP POST requests
+}
+\examples{
+(x <- HttpClient$new(url = "https://httpbin.org"))
+
+# post request
+(res_post <- x$post('post', body = list(hello = "world")))
+
+## empty body request
+x$post('post')
+
+## form requests
+(cli <- HttpClient$new(
+  url = "http://apps.kew.org/wcsp/advsearch.do"
+))
+cli$post(
+  encode = "form",
+  body = list(
+    page = 'advancedSearch',
+    genus = 'Gagea',
+    species = 'pratensis',
+    selectedLevel = 'cont'
+  )
+)
+
+(x <- HttpClient$new(url = "https://httpbin.org"))
+res <- x$post("post",
+  encode = "json",
+  body = list(
+    genus = 'Gagea',
+    species = 'pratensis'
+  )
+)
+jsonlite::fromJSON(res$parse())
+
+
+# path <- file.path(Sys.getenv("R_DOC_DIR"), "html/logo.jpg")
+# (x <- HttpClient$new(url = "https://httpbin.org"))
+# x$post("post",
+#    body = list(
+#      files = list(path = path)
+#    )
+# )
+
+}
+
diff --git a/man/writing-options.Rd b/man/writing-options.Rd
new file mode 100644
index 0000000..6dee37c
--- /dev/null
+++ b/man/writing-options.Rd
@@ -0,0 +1,24 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/writing-options.R
+\name{writing-options}
+\alias{writing-options}
+\title{Writing data options}
+\description{
+Writing data options
+}
+\examples{
+(x <- HttpClient$new(url = "https://httpbin.org"))
+
+# write to disk
+(x <- HttpClient$new(url = "https://httpbin.org"))
+f <- tempfile()
+res <- x$get(disk = f)
+res$content # when using write to disk, content is a path
+readLines(res$content)
+
+# streaming response
+(x <- HttpClient$new(url = "https://httpbin.org"))
+res <- x$get('stream/50', stream = function(x) cat(rawToChar(x)))
+res$content # when streaming, content is NULL
+}
+
diff --git a/tests/test-all.R b/tests/test-all.R
new file mode 100644
index 0000000..7dc830f
--- /dev/null
+++ b/tests/test-all.R
@@ -0,0 +1,2 @@
+library("testthat")
+test_check("crul")
diff --git a/tests/testthat/test-client.R b/tests/testthat/test-client.R
new file mode 100644
index 0000000..05cfa59
--- /dev/null
+++ b/tests/testthat/test-client.R
@@ -0,0 +1,23 @@
+context("HttpClient")
+
+test_that("HttpClient works", {
+  skip_on_cran()
+
+  expect_is(HttpClient, "R6ClassGenerator")
+
+  aa <- HttpClient$new(url = "https://httpbin.org")
+
+  expect_is(aa, "HttpClient")
+  expect_null(aa$handle)
+  expect_length(aa$opts, 0)
+  expect_is(aa$url, "character")
+  expect_is(aa$.__enclos_env__$private$make_request, "function")
+  expect_is(aa$post, "function")
+  expect_is(aa$get, "function")
+})
+
+test_that("HttpClient fails well", {
+  skip_on_cran()
+
+  expect_error(HttpClient$new(), "need one of url or handle")
+})
diff --git a/tests/testthat/test-delete.R b/tests/testthat/test-delete.R
new file mode 100644
index 0000000..bb43839
--- /dev/null
+++ b/tests/testthat/test-delete.R
@@ -0,0 +1,38 @@
+context("request: delete")
+
+test_that("delete request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$delete("delete")
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "delete")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_null(aa$request$fields)
+})
+
+test_that("delete request with body", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$delete("delete", body = list(hello = "world"))
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "delete")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_named(aa$request$fields, "hello")
+  expect_equal(aa$request$fields[[1]], "world")
+})
diff --git a/tests/testthat/test-get.R b/tests/testthat/test-get.R
new file mode 100644
index 0000000..248b074
--- /dev/null
+++ b/tests/testthat/test-get.R
@@ -0,0 +1,43 @@
+context("request: get")
+
+test_that("get request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get("get")
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "get")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+})
+
+test_that("get request - query parameters", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  querya <- list(a = "Asdfadsf", hello = "world")
+  aa <- cli$get("get", query = querya)
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "get")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  library(urltools)
+  params <- unlist(lapply(
+    strsplit(urltools::url_parse(aa$request$url$url)$parameter, "&")[[1]],
+    function(x) {
+      tmp <- strsplit(x, "=")[[1]]
+      as.list(stats::setNames(tmp[2], tmp[1]))
+    }
+  ), FALSE)
+  expect_equal(params, querya)
+})
diff --git a/tests/testthat/test-head.R b/tests/testthat/test-head.R
new file mode 100644
index 0000000..1552dd5
--- /dev/null
+++ b/tests/testthat/test-head.R
@@ -0,0 +1,20 @@
+context("request: head")
+
+test_that("head request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://www.google.com")
+  aa <- cli$head()
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "head")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  # content is empty
+  expect_equal(aa$content, raw(0))
+})
diff --git a/tests/testthat/test-headers.R b/tests/testthat/test-headers.R
new file mode 100644
index 0000000..33b5b72
--- /dev/null
+++ b/tests/testthat/test-headers.R
@@ -0,0 +1,25 @@
+context("headers")
+
+test_that("headers work - just default headers", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get('get')
+
+  expect_is(aa, "HttpResponse")
+  expect_named(aa$request_headers, 'useragent')
+})
+
+test_that("headers work - user headers passed", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(
+    url = "https://httpbin.org",
+    headers = list(hello = "world")
+  )
+  bb <- cli$get('get')
+
+  expect_is(bb, "HttpResponse")
+  expect_named(bb$request_headers, c('useragent', 'hello'))
+  expect_true(any(grepl("Hello", names(jsonlite::fromJSON(bb$parse())$headers))))
+})
diff --git a/tests/testthat/test-patch.R b/tests/testthat/test-patch.R
new file mode 100644
index 0000000..d2ae0ce
--- /dev/null
+++ b/tests/testthat/test-patch.R
@@ -0,0 +1,38 @@
+context("request: patch")
+
+test_that("patch request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$patch("patch")
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "patch")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_null(aa$request$fields)
+})
+
+test_that("patch request with body", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$patch("patch", body = list(hello = "world"))
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "patch")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_named(aa$request$fields, "hello")
+  expect_equal(aa$request$fields[[1]], "world")
+})
diff --git a/tests/testthat/test-paths.R b/tests/testthat/test-paths.R
new file mode 100644
index 0000000..1e9d06b
--- /dev/null
+++ b/tests/testthat/test-paths.R
@@ -0,0 +1,47 @@
+context("paths")
+
+test_that("paths work", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get(path = 'get')
+
+  expect_is(aa, "HttpResponse")
+  urlsp <- strsplit(aa$url, "/")[[1]]
+  expect_equal(urlsp[length(urlsp)], "get")
+  expect_equal(aa$status_code, 200)
+})
+
+test_that("path - multiple route paths work", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://api.github.com")
+  bb <- cli$get('orgs/ropenscilabs')
+
+  expect_is(bb, "HttpResponse")
+  urlsp <- strsplit(bb$url, "/")[[1]]
+  expect_equal(urlsp[4:5], c('orgs', 'ropenscilabs'))
+  expect_equal(bb$status_code, 200)
+})
+
+test_that("path - paths don't work if paths already on URL", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://api.github.com/orgs")
+  bb <- cli$get('ropenscilabs')
+
+  expect_is(bb, "HttpResponse")
+  expect_equal(bb$status_code, 404)
+})
+
+test_that("path - work with routes that have spaces", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "http://www.marinespecies.org")
+  bb <- cli$get('rest/AphiaRecordsByName/Platanista gangetica')
+
+  expect_is(bb, "HttpResponse")
+  urlsp <- strsplit(bb$url, "/")[[1]]
+  expect_equal(urlsp[length(urlsp)], 'Platanista%20gangetica')
+  expect_equal(bb$status_code, 200)
+})
diff --git a/tests/testthat/test-post.R b/tests/testthat/test-post.R
new file mode 100644
index 0000000..3de5f48
--- /dev/null
+++ b/tests/testthat/test-post.R
@@ -0,0 +1,38 @@
+context("request: post")
+
+test_that("post request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$post("post")
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "post")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_null(aa$request$fields)
+})
+
+test_that("post request with body", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$post("post", body = list(hello = "world"))
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "post")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_named(aa$request$fields, "hello")
+  expect_equal(aa$request$fields[[1]], "world")
+})
diff --git a/tests/testthat/test-put.R b/tests/testthat/test-put.R
new file mode 100644
index 0000000..eb51ed9
--- /dev/null
+++ b/tests/testthat/test-put.R
@@ -0,0 +1,38 @@
+context("request: put")
+
+test_that("put request works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$put("put")
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "put")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_null(aa$request$fields)
+})
+
+test_that("put request with body", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$put("put", body = list(hello = "world"))
+
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$handle, 'curl_handle')
+  expect_is(aa$content, "raw")
+  expect_is(aa$method, "character")
+  expect_equal(aa$method, "put")
+  expect_is(aa$parse, "function")
+  expect_is(aa$parse(), "character")
+  expect_true(aa$success())
+
+  expect_named(aa$request$fields, "hello")
+  expect_equal(aa$request$fields[[1]], "world")
+})
diff --git a/tests/testthat/test-query.R b/tests/testthat/test-query.R
new file mode 100644
index 0000000..15418b9
--- /dev/null
+++ b/tests/testthat/test-query.R
@@ -0,0 +1,35 @@
+context("query")
+
+test_that("query works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get('get', query = list(hello = "world"))
+
+  expect_is(aa, "HttpResponse")
+  expect_match(aa$url, "hello")
+  expect_match(aa$url, "world")
+  expect_match(jsonlite::fromJSON(aa$parse())$url, "hello")
+  expect_match(jsonlite::fromJSON(aa$parse())$url, "world")
+})
+
+test_that("query - multiple params of same name work", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get('get', query = list(hello = 5, hello = 6))
+
+  expect_is(aa, "HttpResponse")
+  expect_equal(length(gregexpr("hello", aa$url)[[1]]), 2)
+  expect_equal(length(gregexpr("hello", jsonlite::fromJSON(aa$parse())$url)[[1]]), 2)
+})
+
+test_that("query - length 0 query list works", {
+  skip_on_cran()
+
+  cli <- HttpClient$new(url = "https://httpbin.org")
+  aa <- cli$get('get', query = list())
+
+  expect_is(aa, "HttpResponse")
+  expect_false(grepl("\\?", aa$url))
+})
diff --git a/tests/testthat/test-status.R b/tests/testthat/test-status.R
new file mode 100644
index 0000000..987317c
--- /dev/null
+++ b/tests/testthat/test-status.R
@@ -0,0 +1,39 @@
+context("request: status")
+
+test_that("HTTP status is as expected", {
+  skip_on_cran()
+
+  # httpbin's /status/{code} routes return the requested status code;
+  # each case checks status_code plus the reason phrase in the first
+  # response header (the status line)
+  cli <- HttpClient$new(url = "https://httpbin.org")
+
+  # im a teapot
+  aa <- cli$get("status/418")
+  expect_is(aa, "HttpResponse")
+  expect_is(aa$content, "raw")
+  expect_equal(aa$method, "get")
+  expect_equal(aa$status_code, 418)
+  expect_match(aa$response_headers[[1]], "I'M A TEAPOT")
+
+  # method not allowed
+  bb <- cli$get("status/405")
+  expect_is(bb, "HttpResponse")
+  expect_is(bb$content, "raw")
+  expect_equal(bb$method, "get")
+  expect_equal(bb$status_code, 405)
+  expect_match(bb$response_headers[[1]], "METHOD NOT ALLOWED")
+
+  # service unavailable
+  cc <- cli$get("status/503")
+  expect_is(cc, "HttpResponse")
+  expect_is(cc$content, "raw")
+  expect_equal(cc$method, "get")
+  expect_equal(cc$status_code, 503)
+  expect_match(cc$response_headers[[1]], "SERVICE UNAVAILABLE")
+
+  # Partial Content
+  dd <- cli$get("status/206")
+  expect_is(dd, "HttpResponse")
+  expect_is(dd$content, "raw")
+  expect_equal(dd$method, "get")
+  expect_equal(dd$status_code, 206)
+  expect_match(dd$response_headers[[1]], "PARTIAL CONTENT")
+})
diff --git a/tests/testthat/test-user-agent.R b/tests/testthat/test-user-agent.R
new file mode 100644
index 0000000..b00367d
--- /dev/null
+++ b/tests/testthat/test-user-agent.R
@@ -0,0 +1,12 @@
+context("user-agent")
+
+test_that("user-agent", {
+  skip_on_cran()
+
+  # make_ua() builds the default User-Agent string; it should name all
+  # three layers of the stack: libcurl, the r-curl binding, and crul
+  aa <- make_ua()
+
+  expect_is(aa, "character")
+  expect_match(aa, 'libcurl')
+  expect_match(aa, 'r-curl')
+  expect_match(aa, 'crul')
+})
diff --git a/vignettes/crul_vignette.Rmd b/vignettes/crul_vignette.Rmd
new file mode 100644
index 0000000..0c31d74
--- /dev/null
+++ b/vignettes/crul_vignette.Rmd
@@ -0,0 +1,281 @@
+<!--
+%\VignetteEngine{knitr::knitr}
+%\VignetteIndexEntry{crul introduction}
+%\VignetteEncoding{UTF-8}
+-->
+
+
+
+crul introduction
+=================
+
+`crul` is an HTTP client for R.
+
+## Install
+
+Stable CRAN version
+
+
+```r
+install.packages("crul")
+```
+
+Dev version
+
+
+```r
+devtools::install_github("ropensci/crul")
+```
+
+
+```r
+library("crul")
+```
+
+## the client
+
+`HttpClient` is where to start
+
+
+```r
+(x <- HttpClient$new(
+  url = "https://httpbin.org",
+  opts = list(
+    timeout = 1
+  ),
+  headers = list(
+    a = "hello world"
+  )
+))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>     timeout: 1
+#>   headers: 
+#>     a: hello world
+```
+
+Makes an R6 class that has all the bits and bobs you'd expect for doing HTTP
+requests. When it prints, it gives any defaults you've set. As you update
+the object you can see what's been set
+
+
+```r
+x$opts
+#> $timeout
+#> [1] 1
+```
+
+
+```r
+x$headers
+#> $a
+#> [1] "hello world"
+```
+
+## do some http
+
+The client object created above has http methods that you can call,
+and pass paths to, as well as query parameters, body values, and any other
+curl options.
+
+Here, we'll do a __GET__ request on the route `/get` on our base url
+`https://httpbin.org` (the full url is then `https://httpbin.org/get`)
+
+
+```r
+res <- x$get("get")
+```
+
+The response from a http request is another R6 class `HttpResponse`, which
+has slots for the outputs of the request, and some functions to deal with
+the response:
+
+Status code
+
+
+```r
+res$status_code
+#> [1] 200
+```
+
+The content
+
+
+```r
+res$content
+#>   [1] 7b 0a 20 20 22 61 72 67 73 22 3a 20 7b 7d 2c 20 0a 20 20 22 68 65 61
+#>  [24] 64 65 72 73 22 3a 20 7b 0a 20 20 20 20 22 41 22 3a 20 22 68 65 6c 6c
+#>  [47] 6f 20 77 6f 72 6c 64 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 22
+#>  [70] 3a 20 22 2a 2f 2a 22 2c 20 0a 20 20 20 20 22 41 63 63 65 70 74 2d 45
+#>  [93] 6e 63 6f 64 69 6e 67 22 3a 20 22 67 7a 69 70 2c 20 64 65 66 6c 61 74
+#> [116] 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 74 74 70 62
+#> [139] 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 2d 41 67 65
+#> [162] 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 31 2e 30 20 72 2d
+#> [185] 63 75 72 6c 2f 32 2e 33 20 63 72 75 6c 2f 30 2e 32 2e 30 22 0a 20 20
+#> [208] 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 37 31 2e 36 33 2e
+#> [231] 32 32 33 2e 31 31 33 22 2c 20 0a 20 20 22 75 72 6c 22 3a 20 22 68 74
+#> [254] 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 65 74 22 0a
+#> [277] 7d 0a
+```
+
+HTTP method
+
+
+```r
+res$method
+#> [1] "get"
+```
+
+Request headers
+
+
+```r
+res$request_headers
+#> $useragent
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> $a
+#> [1] "hello world"
+```
+
+Response headers
+
+
+```r
+res$response_headers
+#> $status
+#> [1] "HTTP/1.1 200 OK"
+#> 
+#> $server
+#> [1] "nginx"
+#> 
+#> $date
+#> [1] "Tue, 03 Jan 2017 05:52:03 GMT"
+#> 
+#> $`content-type`
+#> [1] "application/json"
+#> 
+#> $`content-length`
+#> [1] "278"
+#> 
+#> $connection
+#> [1] "keep-alive"
+#> 
+#> $`access-control-allow-origin`
+#> [1] "*"
+#> 
+#> $`access-control-allow-credentials`
+#> [1] "true"
+```
+
+And you can parse the content with a provided function:
+
+
+```r
+res$parse()
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"*/*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.51.0 r-curl/2.3 crul/0.2.0\"\n  }, \n  \"origin\": \"71.63.223.113\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+jsonlite::fromJSON(res$parse())
+#> $args
+#> named list()
+#> 
+#> $headers
+#> $headers$A
+#> [1] "hello world"
+#> 
+#> $headers$Accept
+#> [1] "*/*"
+#> 
+#> $headers$`Accept-Encoding`
+#> [1] "gzip, deflate"
+#> 
+#> $headers$Host
+#> [1] "httpbin.org"
+#> 
+#> $headers$`User-Agent`
+#> [1] "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"
+#> 
+#> 
+#> $origin
+#> [1] "71.63.223.113"
+#> 
+#> $url
+#> [1] "https://httpbin.org/get"
+```
+
+With the `HttpClient` object, which holds any configuration stuff
+we set, we can make other HTTP verb requests. For example, a `POST`
+request:
+
+
+```r
+x$post(
+  path = "post", 
+  body = list(hello = "world")
+)
+#> <crul response> 
+#>   url: https://httpbin.org/post
+#>   request_headers: 
+#>     useragent: libcurl/7.51.0 r-curl/2.3 crul/0.2.0
+#>     a: hello world
+#>   response_headers: 
+#>     status: HTTP/1.1 200 OK
+#>     server: nginx
+#>     date: Tue, 03 Jan 2017 05:52:03 GMT
+#>     content-type: application/json
+#>     content-length: 491
+#>     connection: keep-alive
+#>     access-control-allow-origin: *
+#>     access-control-allow-credentials: true
+#>   status: 200
+```
+
+
+## write to disk
+
+
+```r
+x <- HttpClient$new(url = "https://httpbin.org")
+f <- tempfile()
+res <- x$get(disk = f)
+# when using write to disk, content is a path
+res$content 
+#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpoZ8Rrd/fileee7a81dea18"
+```
+
+Read lines
+
+
+```r
+readLines(res$content, n = 10)
+#>  [1] "<!DOCTYPE html>"                                                                           
+#>  [2] "<html>"                                                                                    
+#>  [3] "<head>"                                                                                    
+#>  [4] "  <meta http-equiv='content-type' value='text/html;charset=utf8'>"                         
+#>  [5] "  <meta name='generator' value='Ronn/v0.7.3 (http://github.com/rtomayko/ronn/tree/0.7.3)'>"
+#>  [6] "  <title>httpbin(1): HTTP Client Testing Service</title>"                                  
+#>  [7] "  <style type='text/css' media='all'>"                                                     
+#>  [8] "  /* style: man */"                                                                        
+#>  [9] "  body#manpage {margin:0}"                                                                 
+#> [10] "  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}"
+```
+
+## stream data
+
+
+```r
+(x <- HttpClient$new(url = "https://httpbin.org"))
+#> <crul connection> 
+#>   url: https://httpbin.org
+#>   options: 
+#>   headers:
+res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 0, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 1, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 2, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 3, "origin": "71.63.223.113"}
+#> {"url": "https://httpbin.org/stream/5", "headers": {"Host": "httpbin.org", "Accept-Encoding": "gzip, deflate", "Accept": "*/*", "User-Agent": "libcurl/7.51.0 r-curl/2.3 crul/0.2.0"}, "args": {}, "id": 4, "origin": "71.63.223.113"}
+# when streaming, content is NULL
+res$content 
+#> NULL
+```

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/r-cran-crul.git



More information about the debian-med-commit mailing list