Binary files /tmp/tmpZxONLU/hvKDDgwWIT/r-cran-crul-0.4.0/build/vignette.rds and /tmp/tmpZxONLU/BvQaUVhTT5/r-cran-crul-0.5.0/build/vignette.rds differ diff -Nru r-cran-crul-0.4.0/debian/changelog r-cran-crul-0.5.0/debian/changelog --- r-cran-crul-0.4.0/debian/changelog 2017-12-04 16:41:36.000000000 +0000 +++ r-cran-crul-0.5.0/debian/changelog 2018-02-21 10:59:18.000000000 +0000 @@ -1,3 +1,12 @@ +r-cran-crul (0.5.0-1) unstable; urgency=medium + + * New upstream version + * Standards-Version: 4.1.3 + * debhelper 11 + * Versioned Build-Depends: r-cran-curl (>= 3.1) + + -- Andreas Tille Wed, 21 Feb 2018 11:59:18 +0100 + r-cran-crul (0.4.0-2) unstable; urgency=medium * Add r-cran-webmockr to test Depends diff -Nru r-cran-crul-0.4.0/debian/compat r-cran-crul-0.5.0/debian/compat --- r-cran-crul-0.4.0/debian/compat 2017-12-04 16:41:36.000000000 +0000 +++ r-cran-crul-0.5.0/debian/compat 2018-02-21 10:59:18.000000000 +0000 @@ -1 +1 @@ -10 +11 diff -Nru r-cran-crul-0.4.0/debian/control r-cran-crul-0.5.0/debian/control --- r-cran-crul-0.4.0/debian/control 2017-12-04 16:41:36.000000000 +0000 +++ r-cran-crul-0.5.0/debian/control 2018-02-21 10:59:18.000000000 +0000 @@ -3,15 +3,15 @@ Uploaders: Andreas Tille Section: gnu-r Priority: optional -Build-Depends: debhelper (>= 10), +Build-Depends: debhelper (>= 11~), dh-r, r-base-dev, - r-cran-curl, + r-cran-curl (>= 3.1), r-cran-r6, r-cran-mime, r-cran-httpcode, r-cran-urltools -Standards-Version: 4.1.1 +Standards-Version: 4.1.3 Vcs-Browser: https://anonscm.debian.org/cgit/debian-med/r-cran-crul.git Vcs-Git: https://anonscm.debian.org/git/debian-med/r-cran-crul.git Homepage: https://cran.r-project.org/package=crul diff -Nru r-cran-crul-0.4.0/DESCRIPTION r-cran-crul-0.5.0/DESCRIPTION --- r-cran-crul-0.4.0/DESCRIPTION 2017-10-02 22:41:14.000000000 +0000 +++ r-cran-crul-0.5.0/DESCRIPTION 2018-01-22 19:48:03.000000000 +0000 @@ -6,23 +6,28 @@ The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' (). 
-Version: 0.4.0 +Version: 0.5.0 License: MIT + file LICENSE Authors@R: c( person("Scott", "Chamberlain", role = c("aut", "cre"), - email = "myrmecocystus@gmail.com") + email = "myrmecocystus@gmail.com", + comment = c(ORCID = "0000-0003-1444-9135")) ) URL: https://github.com/ropensci/crul BugReports: https://github.com/ropensci/crul/issues -Imports: curl (>= 2.8.1), R6 (>= 2.2.0), urltools (>= 1.6.0), httpcode +Imports: curl (>= 3.1), R6 (>= 2.2.0), urltools (>= 1.6.0), httpcode (>= 0.2.0), mime Suggests: testthat, fauxpas (>= 0.1.0), webmockr (>= 0.1.0), knitr, jsonlite VignetteBuilder: knitr RoxygenNote: 6.0.1 +X-schema.org-applicationCategory: Web +X-schema.org-keywords: http, https, API, web-services, curl, download, + libcurl, async, mocking, caching +X-schema.org-isPartOf: https://ropensci.org NeedsCompilation: no -Packaged: 2017-10-02 19:34:46 UTC; sacmac -Author: Scott Chamberlain [aut, cre] +Packaged: 2018-01-22 19:11:17 UTC; sckott +Author: Scott Chamberlain [aut, cre] (0000-0003-1444-9135) Maintainer: Scott Chamberlain Repository: CRAN -Date/Publication: 2017-10-02 22:41:14 UTC +Date/Publication: 2018-01-22 19:48:03 UTC diff -Nru r-cran-crul-0.4.0/inst/doc/async.html r-cran-crul-0.5.0/inst/doc/async.html --- r-cran-crul-0.4.0/inst/doc/async.html 2017-10-02 19:34:41.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/async.html 2018-01-22 19:11:15.000000000 +0000 @@ -11,17 +11,202 @@ - +async.utf8 - + - - - - - - - + + + + + + + @@ -124,11 +311,11 @@ %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --> -

async with crul

Asynchronous requests with crul.

There are two interfaces to asynchronous requests in crul:

    1. Simple async: any number of URLs, all treated with the same curl options, headers, etc., and only one HTTP method type at a time.
    2. Varied request async: build any type of request and execute all asynchronously.
    @@ -136,9 +323,9 @@

    The second option is ideal when you want to set curl options/headers on each request and/or want to do different types of HTTP methods on each request.

    One thing to think about before using async is whether the data provider is okay with it. It’s possible that a data provider’s service may be brought down if you do too many async requests.

    library("crul")

    simple async

Build request object with 1 or more URLs

    (cc <- Async$new(
       urls = c(
         'https://httpbin.org/get?a=5',
    @@ -161,13 +348,13 @@
     #>     status: HTTP/1.1 200 OK
     #>     connection: keep-alive
     #>     server: meinheld/0.6.1
    -#>     date: Mon, 02 Oct 2017 19:21:08 GMT
    +#>     date: Fri, 19 Jan 2018 18:44:29 GMT
     #>     content-type: application/json
     #>     access-control-allow-origin: *
     #>     access-control-allow-credentials: true
     #>     x-powered-by: Flask
    -#>     x-processed-time: 0.00125598907471
    -#>     content-length: 349
    +#>     x-processed-time: 0.000792026519775
    +#>     content-length: 346
     #>     via: 1.1 vegur
     #>   params: 
     #>     a: 5
    @@ -181,13 +368,13 @@
     #>     status: HTTP/1.1 200 OK
     #>     connection: keep-alive
     #>     server: meinheld/0.6.1
    -#>     date: Mon, 02 Oct 2017 19:21:07 GMT
    +#>     date: Fri, 19 Jan 2018 18:44:29 GMT
     #>     content-type: application/json
     #>     access-control-allow-origin: *
     #>     access-control-allow-credentials: true
     #>     x-powered-by: Flask
    -#>     x-processed-time: 0.00107097625732
    -#>     content-length: 368
    +#>     x-processed-time: 0.00130796432495
    +#>     content-length: 365
     #>     via: 1.1 vegur
     #>   params: 
     #>     a: 5
    @@ -202,13 +389,13 @@
     #>     status: HTTP/1.1 200 OK
     #>     connection: keep-alive
     #>     server: meinheld/0.6.1
    -#>     date: Mon, 02 Oct 2017 19:21:08 GMT
    +#>     date: Fri, 19 Jan 2018 18:44:28 GMT
     #>     content-type: application/json
     #>     access-control-allow-origin: *
     #>     access-control-allow-credentials: true
     #>     x-powered-by: Flask
    -#>     x-processed-time: 0.000734090805054
    -#>     content-length: 33
    +#>     x-processed-time: 0.000822067260742
    +#>     content-length: 32
     #>     via: 1.1 vegur
     #>   status: 200

    You get back a list matching length of the number of input URLs

    @@ -218,19 +405,19 @@ res[[1]]$success() #> [1] TRUE res[[1]]$parse("UTF-8") -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" -

+#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n"

Or apply access/method calls across many results, e.g., parse all results

    lapply(res, function(z) z$parse("UTF-8"))
     #> [[1]]
    -#> [1] "{\n  \"args\": {\n    \"a\": \"5\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n  }, \n  \"origin\": \"157.130.179.86\", \n  \"url\": \"https://httpbin.org/get?a=5\"\n}\n"
    +#> [1] "{\n  \"args\": {\n    \"a\": \"5\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n  }, \n  \"origin\": \"50.22.155.214\", \n  \"url\": \"https://httpbin.org/get?a=5\"\n}\n"
     #> 
     #> [[2]]
    -#> [1] "{\n  \"args\": {\n    \"a\": \"5\", \n    \"b\": \"6\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n  }, \n  \"origin\": \"157.130.179.86\", \n  \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n"
    +#> [1] "{\n  \"args\": {\n    \"a\": \"5\", \n    \"b\": \"6\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n  }, \n  \"origin\": \"50.22.155.214\", \n  \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n"
     #> 
     #> [[3]]
    -#> [1] "{\n  \"origin\": \"157.130.179.86\"\n}\n"
+#> [1] "{\n \"origin\": \"50.22.155.214\"\n}\n"

varied request async

req1 <- HttpRequest$new(
   url = "https://httpbin.org/get?a=5",
@@ -267,8 +454,8 @@
 
res$request()

Parse all results

res$parse()
-#> [1] "{\n  \"args\": {\n    \"a\": \"5\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n  }, \n  \"origin\": \"157.130.179.86\", \n  \"url\": \"https://httpbin.org/get?a=5\"\n}\n"                                                                                                                                                                                                                                                                   
-#> [2] "{\n  \"args\": {\n    \"a\": \"5\", \n    \"b\": \"6\"\n  }, \n  \"data\": \"\", \n  \"files\": {}, \n  \"form\": {\n    \"a\": \"5\"\n  }, \n  \"headers\": {\n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Content-Length\": \"137\", \n    \"Content-Type\": \"multipart/form-data; boundary=------------------------9223144570b5d592\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n  }, \n  \"json\": null, \n  \"origin\": \"157.130.179.86\", \n  \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n"
+#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [2] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"137\", \n \"Content-Type\": \"multipart/form-data; boundary=------------------------14f323a90518346b\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"json\": null, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n"
lapply(res$parse(), jsonlite::prettify)
 #> [[1]]
 #> {
@@ -280,9 +467,9 @@
 #>         "Accept-Encoding": "gzip, deflate",
 #>         "Connection": "close",
 #>         "Host": "httpbin.org",
-#>         "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0"
+#>         "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"
 #>     },
-#>     "origin": "157.130.179.86",
+#>     "origin": "50.22.155.214",
 #>     "url": "https://httpbin.org/get?a=5"
 #> }
 #>  
@@ -305,20 +492,20 @@
 #>         "Accept-Encoding": "gzip, deflate",
 #>         "Connection": "close",
 #>         "Content-Length": "137",
-#>         "Content-Type": "multipart/form-data; boundary=------------------------9223144570b5d592",
+#>         "Content-Type": "multipart/form-data; boundary=------------------------14f323a90518346b",
 #>         "Host": "httpbin.org",
-#>         "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0"
+#>         "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"
 #>     },
 #>     "json": null,
-#>     "origin": "157.130.179.86",
+#>     "origin": "50.22.155.214",
 #>     "url": "https://httpbin.org/post?a=5&b=6"
 #> }
 #> 

Status codes

res$status_code()
 #> [1] 200 200
- + + diff -Nru r-cran-crul-0.4.0/inst/doc/async.Rmd r-cran-crul-0.5.0/inst/doc/async.Rmd --- r-cran-crul-0.4.0/inst/doc/async.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/async.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -13,19 +13,19 @@ There are two interfaces to asynchronous requests in `crul`: -1. Simple async: any number of URLs, all treated with the same curl options, +1. Simple async: any number of URLs, all treated with the same curl options, headers, etc., and only one HTTP method type at a time. 2. Varied request async: build any type of request and execute all asynchronously. -The first option takes less thinking, less work, and is good solution when you +The first option takes less thinking, less work, and is good solution when you just want to hit a bunch of URLs asynchronously. -The second option is ideal when you want to set curl options/headers on each +The second option is ideal when you want to set curl options/headers on each request and/or want to do different types of HTTP methods on each request. -One thing to think about before using async is whether the data provider is +One thing to think about before using async is whether the data provider is okay with it. It's possible that a data provider's service may be brought down -if you do too many async requests. +if you do too many async requests. ```r @@ -34,7 +34,7 @@ ## simple async -Build request objcect with 1 or more URLs +Build request object with 1 or more URLs @@ -67,13 +67,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:08 GMT +#> date: Fri, 19 Jan 2018 18:44:29 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.00125598907471 -#> content-length: 349 +#> x-processed-time: 0.000792026519775 +#> content-length: 346 #> via: 1.1 vegur #> params: #> a: 5 @@ -87,13 +87,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:07 GMT +#> date: Fri, 19 Jan 2018 18:44:29 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.00107097625732 -#> content-length: 368 +#> x-processed-time: 0.00130796432495 +#> content-length: 365 #> via: 1.1 vegur #> params: #> a: 5 @@ -108,13 +108,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:08 GMT +#> date: Fri, 19 Jan 2018 18:44:28 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.000734090805054 -#> content-length: 33 +#> x-processed-time: 0.000822067260742 +#> content-length: 32 #> via: 1.1 vegur #> status: 200 ``` @@ -130,22 +130,22 @@ res[[1]]$success() #> [1] TRUE res[[1]]$parse("UTF-8") -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", 
\n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" ``` -Or apply access/method calls aross many results, e.g., parse all results +Or apply access/method calls across many results, e.g., parse all results ```r lapply(res, function(z) z$parse("UTF-8")) #> [[1]] -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" #> #> [[2]] -#> [1] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n" #> #> [[3]] -#> [1] "{\n \"origin\": \"157.130.179.86\"\n}\n" +#> [1] "{\n \"origin\": \"50.22.155.214\"\n}\n" ``` ## varied request async @@ -197,8 +197,8 @@ ```r res$parse() -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" -#> [2] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"137\", \n \"Content-Type\": \"multipart/form-data; boundary=------------------------9223144570b5d592\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"json\": null, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": 
\"https://httpbin.org/get?a=5\"\n}\n" +#> [2] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"137\", \n \"Content-Type\": \"multipart/form-data; boundary=------------------------14f323a90518346b\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"json\": null, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n" ``` @@ -214,9 +214,9 @@ #> "Accept-Encoding": "gzip, deflate", #> "Connection": "close", #> "Host": "httpbin.org", -#> "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> }, -#> "origin": "157.130.179.86", +#> "origin": "50.22.155.214", #> "url": "https://httpbin.org/get?a=5" #> } #> @@ -239,12 +239,12 @@ #> "Accept-Encoding": "gzip, deflate", #> "Connection": "close", #> "Content-Length": "137", -#> "Content-Type": "multipart/form-data; boundary=------------------------9223144570b5d592", +#> "Content-Type": "multipart/form-data; boundary=------------------------14f323a90518346b", #> "Host": "httpbin.org", -#> "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> }, #> "json": null, -#> "origin": "157.130.179.86", +#> "origin": "50.22.155.214", #> "url": "https://httpbin.org/post?a=5&b=6" #> } #> diff -Nru r-cran-crul-0.4.0/inst/doc/crul_vignette.html r-cran-crul-0.5.0/inst/doc/crul_vignette.html --- r-cran-crul-0.4.0/inst/doc/crul_vignette.html 2017-10-02 19:34:45.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/crul_vignette.html 2018-01-22 19:11:16.000000000 +0000 @@ -11,17 +11,202 @@ - +crul_vignette.utf8 - + - - - - - - - + + + + + + + @@ -124,18 +311,18 @@ %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --> -
+

crul introduction

crul is an HTTP client for R.


Install

Stable CRAN version

install.packages("crul")

Dev version

devtools::install_github("ropensci/crul")
library("crul")
+

the client

HttpClient is where to start

(x <- HttpClient$new(
@@ -162,8 +349,8 @@
 
x$headers
 #> $a
 #> [1] "hello world"
+ +

do some http

The client object created above has HTTP methods you can call, passing a path along with query parameters, body values, and any other curl options.

Here, we'll do a GET request on the route /get of our base URL https://httpbin.org (the full URL is then https://httpbin.org/get):
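The request call itself is not visible in the changed hunks; it is presumably just the client's get method (a sketch):

```r
res <- x$get("get")  # GET https://httpbin.org/get
res$content          # raw bytes of the response body, as shown below
```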

@@ -185,18 +372,18 @@ #> [185] 63 6c 6f 73 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 #> [208] 74 74 70 62 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 #> [231] 2d 41 67 65 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 34 2e -#> [254] 30 20 72 2d 63 75 72 6c 2f 32 2e 38 2e 31 20 63 72 75 6c 2f 30 2e 34 -#> [277] 2e 30 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 -#> [300] 31 35 37 2e 31 33 30 2e 31 37 39 2e 38 36 22 2c 20 0a 20 20 22 75 72 -#> [323] 6c 22 3a 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 -#> [346] 67 2f 67 65 74 22 0a 7d 0a
+#> [254] 30 20 72 2d 63 75 72 6c 2f 33 2e 31 20 63 72 75 6c 2f 30 2e 35 2e 30 +#> [277] 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 35 30 +#> [300] 2e 32 32 2e 31 35 35 2e 32 31 34 22 2c 20 0a 20 20 22 75 72 6c 22 3a +#> [323] 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 +#> [346] 65 74 22 0a 7d 0a

HTTP method

res$method
 #> [1] "get"

Request headers

res$request_headers
 #> $`User-Agent`
-#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0"
+#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"
 #> 
 #> $`Accept-Encoding`
 #> [1] "gzip, deflate"
@@ -218,7 +405,7 @@
 #> [1] "meinheld/0.6.1"
 #> 
 #> $date
-#> [1] "Mon, 02 Oct 2017 19:21:12 GMT"
+#> [1] "Fri, 19 Jan 2018 18:44:35 GMT"
 #> 
 #> $`content-type`
 #> [1] "application/json"
@@ -233,16 +420,16 @@
 #> [1] "Flask"
 #> 
 #> $`x-processed-time`
-#> [1] "0.000802993774414"
+#> [1] "0.00126600265503"
 #> 
 #> $`content-length`
-#> [1] "354"
+#> [1] "351"
 #> 
 #> $via
 #> [1] "1.1 vegur"

And you can parse the content with a provided function:

res$parse()
-#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n  }, \n  \"origin\": \"157.130.179.86\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+#> [1] "{\n  \"args\": {}, \n  \"headers\": {\n    \"A\": \"hello world\", \n    \"Accept\": \"application/json, text/xml, application/xml, */*\", \n    \"Accept-Encoding\": \"gzip, deflate\", \n    \"Connection\": \"close\", \n    \"Host\": \"httpbin.org\", \n    \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n  }, \n  \"origin\": \"50.22.155.214\", \n  \"url\": \"https://httpbin.org/get\"\n}\n"
 jsonlite::fromJSON(res$parse())
 #> $args
 #> named list()
@@ -264,11 +451,11 @@
 #> [1] "httpbin.org"
 #> 
 #> $headers$`User-Agent`
-#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0"
+#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"
 #> 
 #> 
 #> $origin
-#> [1] "157.130.179.86"
+#> [1] "50.22.155.214"
 #> 
 #> $url
 #> [1] "https://httpbin.org/get"
@@ -277,15 +464,15 @@ path = "post", body = list(hello = "world") ) -

write to disk

x <- HttpClient$new(url = "https://httpbin.org")
 f <- tempfile()
 res <- x$get(disk = f)
 # when using write to disk, content is a path
 res$content 
-#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpQrTvWo/file146142af607c4"
+#> [1] "/var/folders/fc/n7g_vrvn0sx_st0p8lxb3ts40000gn/T//Rtmp2d65n7/file58e3155bfa5b"

Read lines

readLines(res$content, n = 10)
 #>  [1] "<!DOCTYPE html>"                                                                           
@@ -298,8 +485,8 @@
 #>  [8] "  /* style: man */"                                                                        
 #>  [9] "  body#manpage {margin:0}"                                                                 
 #> [10] "  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}"
+ +

stream data

(x <- HttpClient$new(url = "https://httpbin.org"))
 #> <crul connection> 
@@ -309,16 +496,16 @@
 #>   auth: 
 #>   headers:
 res <- x$get('stream/5', stream = function(x) cat(rawToChar(x)))
-#> {"id": 0, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"}
-#> {"id": 1, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"}
-#> {"id": 2, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"}
-#> {"id": 3, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"}
-#> {"id": 4, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"}
+#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 0, "url": "https://httpbin.org/stream/5", "args": {}}
+#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 1, "url": "https://httpbin.org/stream/5", "args": {}}
+#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 2, "url": "https://httpbin.org/stream/5", "args": {}}
+#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 3, "url": "https://httpbin.org/stream/5", "args": {}}
+#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 4, "url": "https://httpbin.org/stream/5", "args": {}}
 # when streaming, content is NULL
 res$content 
 #> NULL
- + + diff -Nru r-cran-crul-0.4.0/inst/doc/crul_vignette.Rmd r-cran-crul-0.5.0/inst/doc/crul_vignette.Rmd --- r-cran-crul-0.4.0/inst/doc/crul_vignette.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/crul_vignette.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -117,11 +117,11 @@ #> [185] 63 6c 6f 73 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 #> [208] 74 74 70 62 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 #> [231] 2d 41 67 65 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 34 2e -#> [254] 30 20 72 2d 63 75 72 6c 2f 32 2e 38 2e 31 20 63 72 75 6c 2f 30 2e 34 -#> [277] 2e 30 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 -#> [300] 31 35 37 2e 31 33 30 2e 31 37 39 2e 38 36 22 2c 20 0a 20 20 22 75 72 -#> [323] 6c 22 3a 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 -#> [346] 67 2f 67 65 74 22 0a 7d 0a +#> [254] 30 20 72 2d 63 75 72 6c 2f 33 2e 31 20 63 72 75 6c 2f 30 2e 35 2e 30 +#> [277] 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 35 30 +#> [300] 2e 32 32 2e 31 35 35 2e 32 31 34 22 2c 20 0a 20 20 22 75 72 6c 22 3a +#> [323] 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 +#> [346] 65 74 22 0a 7d 0a ``` HTTP method @@ -138,7 +138,7 @@ ```r res$request_headers #> $`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> $`Accept-Encoding` #> [1] "gzip, deflate" @@ -165,7 +165,7 @@ #> [1] "meinheld/0.6.1" #> #> $date -#> [1] "Mon, 02 Oct 2017 19:21:12 GMT" +#> [1] "Fri, 19 Jan 2018 18:44:35 GMT" #> #> $`content-type` #> [1] "application/json" @@ -180,10 +180,10 @@ #> [1] "Flask" #> #> $`x-processed-time` -#> [1] "0.000802993774414" +#> [1] "0.00126600265503" #> #> $`content-length` -#> [1] "354" +#> [1] "351" #> #> $via #> [1] "1.1 vegur" @@ -194,7 +194,7 @@ ```r res$parse() -#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get\"\n}\n" +#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get\"\n}\n" jsonlite::fromJSON(res$parse()) #> $args #> named list() @@ -216,11 +216,11 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get" @@ -248,7 +248,7 @@ res <- x$get(disk = f) # when using write to disk, content is a path res$content -#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpQrTvWo/file146142af607c4" +#> [1] "/var/folders/fc/n7g_vrvn0sx_st0p8lxb3ts40000gn/T//Rtmp2d65n7/file58e3155bfa5b" ``` Read lines @@ -280,11 +280,11 @@ #> auth: #> headers: res <- x$get('stream/5', stream = function(x) cat(rawToChar(x))) -#> {"id": 0, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, 
application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 1, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 2, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 3, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 4, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 0, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 1, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 2, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 3, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 4, "url": "https://httpbin.org/stream/5", "args": {}} # when streaming, content is NULL res$content #> NULL diff -Nru r-cran-crul-0.4.0/inst/doc/how-to-use-crul.html r-cran-crul-0.5.0/inst/doc/how-to-use-crul.html --- r-cran-crul-0.4.0/inst/doc/how-to-use-crul.html 2017-10-02 19:34:45.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/how-to-use-crul.html 2018-01-22 19:11:16.000000000 +0000 @@ -11,17 +11,202 @@ - +how-to-use-crul.utf8 - + - - - - - - - + + + + + + + @@ -124,14 +311,14 @@ %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --> -
+

How to use crul

The following aims to help you decide how to use crul in different scenarios.

First, crul is aimed a bit more at developers than at the casual user doing HTTP requests. That is, crul is probably a better fit for an R package developer, mainly because it makes heavy use of R6 - an interface very unlike the one in httr, but very similar to interacting with classes in Ruby/Python.

Second, the ability to mock HTTP requests is not yet available; we are working on it, and will update this vignette when that feature arrives.

Load the library

library("crul")

A simple HTTP request function

Most likely you’ll want to do a GET request - so let’s start with that - though the details are not much different for other HTTP verbs.

And in most cases you’ll likely not want to do asynchronous requests - though see below if you do.

@@ -169,17 +356,17 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get"

Now you can use the make_request function in your script or package.
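The make_request definition itself sits outside the changed hunks shown in this diff. A minimal version consistent with the surrounding text might look like this (the body is an assumption, built from crul's documented HttpClient and HttpResponse methods):

```r
make_request <- function(url) {
  con <- crul::HttpClient$new(url = url)
  res <- con$get()
  # raise an R error for any non-success HTTP status
  res$raise_for_status()
  jsonlite::fromJSON(res$parse("UTF-8"))
}

make_request("https://httpbin.org/get")
```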


More customized function

Once you get more familiar (or if you’re already familiar with HTTP) you may want to have more control, toggle more switches.

In the next function, we'll allow users to pass in curl options and use a custom HTTP status checker.

@@ -220,7 +407,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list() @@ -253,7 +440,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list() @@ -271,13 +458,13 @@
make_request2("https://api.crossref.org/works?rows=asdf")
 #> Error: HTTP (400) - Bad request syntax or unsupported method
 #>   Integer specified as asdf but must be a positive integer less than or equal to 1000.
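make_request2 is likewise defined outside the hunks shown. Given the fauxpas-style error above (fauxpas is in this package's Suggests), a sketch might be as follows (the function body and the status cutoff are assumptions):

```r
make_request2 <- function(url, ...) {
  con <- crul::HttpClient$new(url = url, opts = list(...))
  res <- con$get()
  # hand non-success responses to fauxpas for a descriptive HTTP error
  if (res$status_code > 201) fauxpas::http(res)
  jsonlite::fromJSON(res$parse("UTF-8"))
}
```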
+ +

Asynchronous requests

You may want to use asynchronous HTTP requests when any one HTTP request takes “too long”. This is of course all relative. You may be dealing with a server that responds very slowly, or other circumstances.

See the async with crul vignette for more details on asynchronous requests.
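As a quick taste of what that vignette covers (a sketch, reusing the httpbin.org endpoints from above):

```r
cc <- Async$new(urls = c(
  "https://httpbin.org/get",
  "https://httpbin.org/get?a=5"
))
res <- cc$get()
vapply(res, function(z) z$status_code, numeric(1))
```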

- + + diff -Nru r-cran-crul-0.4.0/inst/doc/how-to-use-crul.Rmd r-cran-crul-0.5.0/inst/doc/how-to-use-crul.Rmd --- r-cran-crul-0.4.0/inst/doc/how-to-use-crul.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/inst/doc/how-to-use-crul.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -80,11 +80,11 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get" @@ -144,7 +144,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list() @@ -189,7 +189,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list() diff -Nru r-cran-crul-0.4.0/LICENSE r-cran-crul-0.5.0/LICENSE --- r-cran-crul-0.4.0/LICENSE 2017-01-21 19:31:54.000000000 +0000 +++ r-cran-crul-0.5.0/LICENSE 2018-01-19 17:28:22.000000000 +0000 @@ -1,2 +1,2 @@ -YEAR: 2017 +YEAR: 2018 COPYRIGHT HOLDER: Scott Chamberlain diff -Nru r-cran-crul-0.4.0/man/Async.Rd r-cran-crul-0.5.0/man/Async.Rd --- r-cran-crul-0.4.0/man/Async.Rd 2017-10-02 17:40:18.000000000 +0000 +++ r-cran-crul-0.5.0/man/Async.Rd 2018-01-19 17:28:22.000000000 +0000 @@ -17,19 +17,19 @@ \details{ \strong{Methods} \describe{ -\item{\code{get(path, query, ...)}}{ +\item{\code{get(path, query, disk, stream, ...)}}{ make async GET requests for all URLs } -\item{\code{post(path, query, body, encode, ...)}}{ +\item{\code{post(path, query, body, encode, disk, stream, ...)}}{ make async POST requests for all URLs } -\item{\code{put(path, query, body, encode, ...)}}{ +\item{\code{put(path, query, body, encode, disk, stream, ...)}}{ make async PUT requests for all URLs } -\item{\code{patch(path, query, body, encode, ...)}}{ +\item{\code{patch(path, query, body, encode, disk, stream, ...)}}{ make async PATCH requests for all URLs } -\item{\code{delete(path, query, body, encode, ...)}}{ +\item{\code{delete(path, query, body, encode, disk, stream, ...)}}{ make async DELETE requests for all URLs } \item{\code{head(path, ...)}}{ diff -Nru r-cran-crul-0.4.0/man/auth.Rd r-cran-crul-0.5.0/man/auth.Rd --- r-cran-crul-0.4.0/man/auth.Rd 2017-05-17 05:45:02.000000000 +0000 +++ r-cran-crul-0.5.0/man/auth.Rd 2018-01-19 18:03:33.000000000 +0000 @@ -7,18 +7,24 @@ auth(user, pwd, auth = "basic") } \arguments{ -\item{user}{(character) username, optional} +\item{user}{(character) username, required. see Details.} -\item{pwd}{(character) password, optional} +\item{pwd}{(character) password, required. see Details.} \item{auth}{(character) authentication type, one of basic (default), -digest, digest_ie, gssnegotiate, ntlm, or any. optional} +digest, digest_ie, gssnegotiate, ntlm, or any. required} } \description{ Authentication } \details{ -Only supporting simple auth for now, OAuth later. +Only supporting simple auth for now, OAuth later maybe. + +For \code{user} and \code{pwd} you are required to pass in some value. +The value can be \code{NULL} to - which is equivalent to passing in an +empty string like \code{""} in \code{httr::authenticate}. You may want to pass +in \code{NULL} for both \code{user} and \code{pwd} for example if you are using +\code{gssnegotiate} auth type. See example below. 
} \examples{ auth(user = "foo", pwd = "bar", auth = "basic") @@ -41,4 +47,7 @@ auth = auth(user = "user", pwd = "passwd") )) res$auth + +# gssnegotiate auth +auth(NULL, NULL, "gssnegotiate") } diff -Nru r-cran-crul-0.4.0/man/crul-package.Rd r-cran-crul-0.5.0/man/crul-package.Rd --- r-cran-crul-0.4.0/man/crul-package.Rd 2017-05-21 23:25:57.000000000 +0000 +++ r-cran-crul-0.5.0/man/crul-package.Rd 2018-01-19 04:19:08.000000000 +0000 @@ -15,6 +15,7 @@ your http options, make http requests \item \code{\link[=HttpResponse]{HttpResponse()}} - mostly for internal use, handles http responses +\item \code{\link[=Paginator]{Paginator()}} - auto-paginate through requests \item \code{\link[=Async]{Async()}} - asynchronous requests \item \code{\link[=AsyncVaried]{AsyncVaried()}} - varied asynchronous requests \item \code{\link[=HttpRequest]{HttpRequest()}} - generate an HTTP request, mostly for diff -Nru r-cran-crul-0.4.0/man/curl-options.Rd r-cran-crul-0.5.0/man/curl-options.Rd --- r-cran-crul-0.4.0/man/curl-options.Rd 2017-06-13 20:27:45.000000000 +0000 +++ r-cran-crul-0.5.0/man/curl-options.Rd 2018-01-19 17:28:22.000000000 +0000 @@ -32,13 +32,12 @@ (res <- HttpClient$new(url = "https://httpbin.org")) res$get('get', verbose = TRUE) res$get('get', stuff = "things") -res$get('get', httpget = TRUE) # set a timeout (res <- HttpClient$new( url = "https://httpbin.org", opts = list(timeout_ms = 1) )) -res$get('get') +# res$get('get') } } diff -Nru r-cran-crul-0.4.0/man/HttpClient.Rd r-cran-crul-0.5.0/man/HttpClient.Rd --- r-cran-crul-0.4.0/man/HttpClient.Rd 2017-10-01 05:08:08.000000000 +0000 +++ r-cran-crul-0.5.0/man/HttpClient.Rd 2018-01-19 17:51:33.000000000 +0000 @@ -38,7 +38,7 @@ \item{\code{delete(path, query, body, disk, stream, ...)}}{ Make a DELETE request } -\item{\code{head(path, ...)}}{ +\item{\code{head(path, query, ...)}}{ Make a HEAD request } } @@ -46,7 +46,7 @@ Possible parameters (not all are allowed in each HTTP verb): \itemize{ \item path - URL path, appended to the base URL -\item query - query terms, as a list +\item query - query terms, as a named list \item body - body as an R list \item encode - one of form, multipart, json, or raw \item disk - a path to write to. if NULL (default), memory used. diff -Nru r-cran-crul-0.4.0/man/Paginator.Rd r-cran-crul-0.5.0/man/Paginator.Rd --- r-cran-crul-0.4.0/man/Paginator.Rd 1970-01-01 00:00:00.000000000 +0000 +++ r-cran-crul-0.5.0/man/Paginator.Rd 2018-01-19 04:19:08.000000000 +0000 @@ -0,0 +1,125 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/paginator.R +\docType{data} +\name{Paginator} +\alias{Paginator} +\title{Paginator client} +\arguments{ +\item{client}{an object of class \code{HttpClient}, from a call to \link{HttpClient}} + +\item{by}{(character) how to paginate. Only 'query_params' supported for +now. In the future will support 'link_headers' and 'cursor'. See Details.} + +\item{limit_param}{(character) the name of the limit parameter. +Default: limit} + +\item{offset_param}{(character) the name of the offset parameter. +Default: offset} + +\item{limit}{(numeric/integer) the maximum records wanted} + +\item{limit_chunk}{(numeric/integer) the number by which to chunk requests, +e.g., 10 would be be each request gets 10 records} +} +\value{ +a list, with objects of class \code{\link[=HttpResponse]{HttpResponse()}}. +Responses are returned in the order they are passed in. 
+} +\description{ +A client to help you paginate +} +\details{ +\strong{Methods} +\describe{ +\item{\code{get(path, query, ...)}}{ +make a paginated GET request +} +\item{\code{post(path, query, body, encode, ...)}}{ +make a paginated POST request +} +\item{\code{put(path, query, body, encode, ...)}}{ +make a paginated PUT request +} +\item{\code{patch(path, query, body, encode, ...)}}{ +make a paginated PATCH request +} +\item{\code{delete(path, query, body, encode, ...)}}{ +make a paginated DELETE request +} +\item{\code{head(path, ...)}}{ +make a paginated HEAD request - not sure if this makes any sense +or not yet +} +\item{\code{responses()}}{ +list responses +- returns: a list of \code{HttpResponse} objects, empty list before +requests made +} +\item{\code{parse(encoding = "UTF-8")}}{ +parse content +- returns: character vector, empty character vector before +requests made +} +\item{\code{status_code()}}{ +(integer) HTTP status codes +- returns: numeric vector, empty numeric vector before +requests made +} +\item{\code{status()}}{ +(list) HTTP status objects +- returns: a list of \code{http_code} objects, empty list before +requests made +} +\item{\code{content()}}{ +raw content +- returns: raw list, empty list before requests made +} +\item{\code{times()}}{ +curl request times +- returns: list of named numeric vectors, empty list before +requests made +} +} + +See \code{\link[=HttpClient]{HttpClient()}} for information on parameters. +} +\section{Methods to paginate}{ + + +Supported now: +\itemize{ +\item \code{query_params}: the most common way, so is the default. This method +involves setting how many records and what record to start at for each +request. We send these query parameters for you. +} + +Supported later: +\itemize{ +\item \code{link_headers}: link headers are URLS for the next/previous/last +request given in the response header from the server. This is relatively +uncommon, though is recommended by JSONAPI and is implemented by a +well known API (GitHub). +\item \code{cursor}: this works by a single string given back in each response, to +be passed in the subsequent response, and so on until no more records +remain. 
This is common in Solr +} +} + +\examples{ +\dontrun{ +(cli <- HttpClient$new(url = "http://api.crossref.org")) +cc <- Paginator$new(client = cli, limit_param = "rows", + offset_param = "offset", limit = 50, limit_chunk = 10) +cc +cc$get('works') +cc +cc$responses() +cc$status() +cc$status_code() +cc$times() +cc$content() +cc$parse() +lapply(cc$parse(), jsonlite::fromJSON) +} +} +\keyword{datasets} diff -Nru r-cran-crul-0.4.0/man/writing-options.Rd r-cran-crul-0.5.0/man/writing-options.Rd --- r-cran-crul-0.4.0/man/writing-options.Rd 2017-10-02 17:40:18.000000000 +0000 +++ r-cran-crul-0.5.0/man/writing-options.Rd 2018-01-19 17:28:22.000000000 +0000 @@ -13,9 +13,62 @@ res <- x$get("get", disk = f) res$content # when using write to disk, content is a path readLines(res$content) +close(file(f)) # streaming response (x <- HttpClient$new(url = "https://httpbin.org")) res <- x$get('stream/50', stream = function(x) cat(rawToChar(x))) res$content # when streaming, content is NULL + + +## Async +(cc <- Async$new( + urls = c( + 'https://httpbin.org/get?a=5', + 'https://httpbin.org/get?foo=bar', + 'https://httpbin.org/get?b=4', + 'https://httpbin.org/get?stuff=things', + 'https://httpbin.org/get?b=4&g=7&u=9&z=1' + ) +)) +files <- replicate(5, tempfile()) +(res <- cc$get(disk = files, verbose = TRUE)) +lapply(files, readLines) + +## Async varied +### disk +f <- tempfile() +g <- tempfile() +req1 <- HttpRequest$new(url = "https://httpbin.org/get")$get(disk = f) +req2 <- HttpRequest$new(url = "https://httpbin.org/post")$post(disk = g) +req3 <- HttpRequest$new(url = "https://httpbin.org/get")$get() +(out <- AsyncVaried$new(req1, req2, req3)) +out$request() +out$content() +readLines(f) +readLines(g) +close(file(f)) +close(file(g)) + +### stream - to console +fun <- function(x) cat(rawToChar(x)) +req1 <- HttpRequest$new(url = "https://httpbin.org/get" +)$get(query = list(foo = "bar"), stream = fun) +req2 <- HttpRequest$new(url = "https://httpbin.org/get" +)$get(query = list(hello = "world"), stream = fun) +(out <- AsyncVaried$new(req1, req2)) +out$request() +out$content() + +### stream - to an R object +lst <- c() +fun <- function(x) lst <<- c(lst, x) +req1 <- HttpRequest$new(url = "https://httpbin.org/get" +)$get(query = list(foo = "bar"), stream = fun) +req2 <- HttpRequest$new(url = "https://httpbin.org/get" +)$get(query = list(hello = "world"), stream = fun) +(out <- AsyncVaried$new(req1, req2)) +out$request() +lst +cat(rawToChar(lst)) } diff -Nru r-cran-crul-0.4.0/MD5 r-cran-crul-0.5.0/MD5 --- r-cran-crul-0.4.0/MD5 2017-10-02 22:41:14.000000000 +0000 +++ r-cran-crul-0.5.0/MD5 2018-01-22 19:48:03.000000000 +0000 @@ -1,15 +1,15 @@ -09b213a2f33af731a5d703be7099c634 *DESCRIPTION -c5af52351472a750055a760a8924ce71 *LICENSE -36f6a25cae65f5a67840c2251b17a3bf *NAMESPACE -00beeeae66de780a2352a7c627c6f11a *NEWS.md -038fea3e8221beba4df24bf3a82f2143 *R/async.R -58a23acf1db567e90b3423e90d044400 *R/asyncvaried.R -c672a32ee8516230026a87ce3eed2d82 *R/auth.R +2a71e670744f0d2241ae6d2c0cca71f2 *DESCRIPTION +1b96a74f7e95cf508e814ad0c04525e5 *LICENSE +5cb7c46192fe379010832f3c510fae83 *NAMESPACE +5c0caff585651bdb48879b2a06cc0a59 *NEWS.md +17f45eabb64846fcae949d547df6105a *R/async.R +c0d8936ddfcc73a4b11994a212e81363 *R/asyncvaried.R +7e2e2e4ed678e0b434a05780f94a9900 *R/auth.R 6d634ea85a8a2473a9fafb80cb30ebf0 *R/body.R -b1f23ce750e9c9edc8e0a3c43e1db868 *R/client.R +ed27d1fb53fe530eef0c695acb0780a7 *R/client.R 3c2ac6576b4ce8f80deba972d3ad7d30 *R/cookies.R -a2d0cf3c78290e92312fedc3ea371c7d *R/crul-package.r 
-f158cf1c763e34264ba3267a98aba3c1 *R/curl-options.R +462569ab191f20720a6b715efb7bd1ee *R/crul-package.r +b67d3dab457101e3ab60bd377103e637 *R/curl-options.R ce31dc346613d071e58f07d49bdb14eb *R/curl_options.R b077080e3625ecf58b23553f8aa33640 *R/delete-requests.R 8f916ca18e13c4b6c544f2c284b9fabf *R/fetch.R @@ -17,9 +17,10 @@ 7fe1bb2e5890da2188c2f3099ef5ccc1 *R/headers.R 0cd2754bc2668203d10a1431d2713c4e *R/http-headers.R 204e318df39fd88233fde810013323d7 *R/httprequest.R -3960d3d3d6e9e2e37d9bc787e91d3ad6 *R/make_url.R +125f41f9820f76c9d73260dad71e323e *R/make_url.R 12ad585abb34c0f016f71dc2957ba36f *R/mocking.R c24c9f7c882823a79d1d63a0d1d7d48c *R/onLoad.R +3a1fe427e2418eca97a66b46fc937321 *R/paginator.R b470a9d9f573d004bb9fa652075f1046 *R/post-requests.R 7660da3918e7f65f037f957f9f17b058 *R/proxies.R ff10b92e14e29e606593ff962f732b7a *R/query.R @@ -27,26 +28,27 @@ c757ba50136b5b24d9d295ea82a6d3dd *R/stubbed-response.R 4fadc12ffde03e7588d4f0317c8bc5a2 *R/upload.R 14c11771d130c3cc4ba9e9f475a2951d *R/use_agent.R -1e0326b2e481b1c05671663f3cc21ded *R/writing-options.R +b30e29b9c725cb68b14f5ef7d09937db *R/writing-options.R e287b0e8ecb69825eea1e9225018b382 *R/zzz.R -009c773bd60f29fe019ff0b2822f3733 *README.md -e8ff5788d3db029586ac1974815390c5 *build/vignette.rds -63d9ba202524c58d790fe0aa193dd090 *inst/doc/async.Rmd -8f35464cac1852f67d85b73add4b2fbc *inst/doc/async.html -39534f03007fde66f48bf029dd91e528 *inst/doc/crul_vignette.Rmd -4fd8a2b6584547ec33c1c8d0052326ad *inst/doc/crul_vignette.html -6d930e5a6565f6242e3c9a50f339c0a3 *inst/doc/how-to-use-crul.Rmd -48170834be72e5cc47604d2d745f5622 *inst/doc/how-to-use-crul.html -4ce0044dbf2746868781235dce58a1d0 *man/Async.Rd +11c5d171b4e44c56f6d88ab9bc0aa9ae *README.md +dccb2b4910be8be99717ea4f6da7afed *build/vignette.rds +a0091f5663820eccd3a3c552ebb6f89a *inst/doc/async.Rmd +12689dbf9975c4b9fac04d052eafcea8 *inst/doc/async.html +f050dbe4b850e003d7e079271cd8b6b5 *inst/doc/crul_vignette.Rmd +3aad5730df5b32059449601966961433 *inst/doc/crul_vignette.html +e2c14320ff0a25c345f689af24ce9f4e *inst/doc/how-to-use-crul.Rmd +fe486e22c564dfa5d79ebb05e00238f7 *inst/doc/how-to-use-crul.html +c39762814dbebf9c986df3332da8d864 *man/Async.Rd f7d25bb9f12306064e5869c0e2bc0cd2 *man/AsyncVaried.Rd -cf39fa58fd9e99c49d8f0251e557a590 *man/HttpClient.Rd +33488633f5a576cec7f8936398281553 *man/HttpClient.Rd deee4f151be7fd6dfd9821e23a283185 *man/HttpRequest.Rd 8ceb31c33528e8d7e6ce83b7e795ced6 *man/HttpResponse.Rd ed69669a250cc2b376ea1d8bedf74193 *man/HttpStubbedResponse.Rd -af2f8ff1a1d271c642ff558c0aab7ad9 *man/auth.Rd +31e8dc77b3f3777d01bf84036da23150 *man/Paginator.Rd +1ac0957667ce6773f18113a32c065caf *man/auth.Rd c6f5bad94a644a8f655df7207ff66f0a *man/cookies.Rd -1493a47d3b24e85b4a93a126945f8a45 *man/crul-package.Rd -158948dd9ddccddd9013c959df1a5a36 *man/curl-options.Rd +5ab6bc76ffcce350a04f617ca722a08c *man/crul-package.Rd +a003044c7f1fcdad82c5397c46af04df *man/curl-options.Rd 74f5415c416ad5d026bb49471c579452 *man/delete-requests.Rd 992a0e61803fff336f4ea5b01568ebbc *man/handle.Rd 5a2be8a76e37279e6a27e43af56e79cf *man/http-headers.Rd @@ -55,18 +57,19 @@ b8e980c5139ae71b16809c2f2235ec67 *man/proxies.Rd 2f451daaf7ac19f1b18a8174fcc583e3 *man/upload.Rd a1593980eb0f949136935f57df586863 *man/url_build.Rd -9d2a329869f402316b3bcb4bf68c59ee *man/writing-options.Rd +9f0547e0c0afab1c31b2bd0f54cebe20 *man/writing-options.Rd 9d086d73e3d68be9f055a6103bf7be39 *tests/test-all.R -ff17e5fe2399153e6a627fded9e7bff0 *tests/testthat/test-async.R -b69bc5f8b2a540e10094b75064274ffd 
*tests/testthat/test-asyncvaried.R +6233236f043a8d925ac7fe4582d8bea3 *tests/testthat/test-async.R +efabc555ca14b871828f179bb3a41206 *tests/testthat/test-asyncvaried.R bffa7e3d248142476657230ee4141457 *tests/testthat/test-auth.R c73212db04d129aa3f61d6f8cc8e967c *tests/testthat/test-client.R 80fad867615be2365a42f29c319d3b39 *tests/testthat/test-delete.R 81c982a6d03502e5b48ce38d581e8fe8 *tests/testthat/test-get.R 162d7395dce79723ab9902c42196aaef *tests/testthat/test-handle.R -f703252d231c221048dbdd8e74db7a49 *tests/testthat/test-head.R +67bb0aa730ac61d9d81df2bc3104f9d8 *tests/testthat/test-head.R eb8f01337fba708050e1cb857384b64a *tests/testthat/test-headers.R -af5e1e93f7903088d1db4a4bc6fb6565 *tests/testthat/test-mocking.R +514d79dc3dfaa8789fc0fab426c4035a *tests/testthat/test-mocking.R +145a4a5d8d712d0a46565637858b0026 *tests/testthat/test-paginator.R d97a38299ad7f3501b4bfc9aeab50310 *tests/testthat/test-patch.R 2781ddb04413e5703d4b932dd5f1a5e3 *tests/testthat/test-paths.R 31fba8c2d3fca0387ba7f4cbc4a6a257 *tests/testthat/test-post.R @@ -79,6 +82,6 @@ 6d6424b5f9549bb410a1e31ae9d99f67 *tests/testthat/test-url_build_parse.R b66e8ddf24d1ff5ffe66761e00d75a0e *tests/testthat/test-user-agent.R 11807caff7a89ebc264d38dbdaf2cac3 *tests/testthat/test-utils.R -63d9ba202524c58d790fe0aa193dd090 *vignettes/async.Rmd -39534f03007fde66f48bf029dd91e528 *vignettes/crul_vignette.Rmd -6d930e5a6565f6242e3c9a50f339c0a3 *vignettes/how-to-use-crul.Rmd +a0091f5663820eccd3a3c552ebb6f89a *vignettes/async.Rmd +f050dbe4b850e003d7e079271cd8b6b5 *vignettes/crul_vignette.Rmd +e2c14320ff0a25c345f689af24ce9f4e *vignettes/how-to-use-crul.Rmd diff -Nru r-cran-crul-0.4.0/NAMESPACE r-cran-crul-0.5.0/NAMESPACE --- r-cran-crul-0.4.0/NAMESPACE 2017-10-02 04:09:28.000000000 +0000 +++ r-cran-crul-0.5.0/NAMESPACE 2018-01-19 04:19:08.000000000 +0000 @@ -7,6 +7,7 @@ export(HttpRequest) export(HttpResponse) export(HttpStubbedResponse) +export(Paginator) export(auth) export(handle) export(mock) diff -Nru r-cran-crul-0.4.0/NEWS.md r-cran-crul-0.5.0/NEWS.md --- r-cran-crul-0.4.0/NEWS.md 2017-10-02 19:13:18.000000000 +0000 +++ r-cran-crul-0.5.0/NEWS.md 2018-01-19 18:38:51.000000000 +0000 @@ -1,3 +1,21 @@ +crul 0.5.0 +========== + +### NEW FEATURES + +* Gains a new R6 class `Paginator` to help users automatically paginate through multiple requests. It only supports query parameter based paginating for now. We'll add support later for other types including cursors (e.g., used in Solr servers), and for link headers (e.g., used in the GitHub API). Please get in touch if you find any problems with `Paginator`. (#56) +* Async classes `Async` and `Asyncvaried` gain ability to write to disk and stream data (to disk or elsewhere, e.g. R console or to an R object) (#46) thanks @artemklevtsov for the push to do this + +### MINOR IMPROVEMENTS + +* Improved documentation for `auth` to indicate that `user` and `pwd` are indeed required - and to further indicate that one can pass in `NULL` to those parameters (similar to an empty string `""` in `httr::authenticate`) when one e.g. may want to use `gssnegotiate` method (#43) +* Fixed query builder so that one can now protect query parameters by wrapping them in `I()` (#55) + +### BUG FIXES + +* Fixed bug in `head` requests with `HttpClient` when passing `query` parameter - it was failing previously. Added `query` parameter back. 
(#52) + + crul 0.4.0 ========== diff -Nru r-cran-crul-0.4.0/R/async.R r-cran-crul-0.5.0/R/async.R --- r-cran-crul-0.4.0/R/async.R 2017-10-02 19:09:47.000000000 +0000 +++ r-cran-crul-0.5.0/R/async.R 2018-01-19 17:28:22.000000000 +0000 @@ -8,19 +8,19 @@ #' @details #' **Methods** #' \describe{ -#' \item{`get(path, query, ...)`}{ +#' \item{`get(path, query, disk, stream, ...)`}{ #' make async GET requests for all URLs #' } -#' \item{`post(path, query, body, encode, ...)`}{ +#' \item{`post(path, query, body, encode, disk, stream, ...)`}{ #' make async POST requests for all URLs #' } -#' \item{`put(path, query, body, encode, ...)`}{ +#' \item{`put(path, query, body, encode, disk, stream, ...)`}{ #' make async PUT requests for all URLs #' } -#' \item{`patch(path, query, body, encode, ...)`}{ +#' \item{`patch(path, query, body, encode, disk, stream, ...)`}{ #' make async PATCH requests for all URLs #' } -#' \item{`delete(path, query, body, encode, ...)`}{ +#' \item{`delete(path, query, body, encode, disk, stream, ...)`}{ #' make async DELETE requests for all URLs #' } #' \item{`head(path, ...)`}{ @@ -73,28 +73,34 @@ self$urls <- urls }, - get = function(path = NULL, query = list(), ...) { - private$gen_interface(self$urls, "get", path, query, ...) + get = function(path = NULL, query = list(), disk = NULL, + stream = NULL, ...) { + private$gen_interface(self$urls, "get", path, query, + disk = disk, stream = stream, ...) }, post = function(path = NULL, query = list(), body = NULL, - encode = "multipart", ...) { - private$gen_interface(self$urls, "post", path, query, body, encode, ...) + encode = "multipart", disk = NULL, stream = NULL, ...) { + private$gen_interface(self$urls, "post", path, query, body, encode, + disk, stream, ...) }, put = function(path = NULL, query = list(), body = NULL, - encode = "multipart", ...) { - private$gen_interface(self$urls, "put", path, query, body, encode, ...) + encode = "multipart", disk = NULL, stream = NULL, ...) { + private$gen_interface(self$urls, "put", path, query, body, encode, + disk, stream, ...) }, patch = function(path = NULL, query = list(), body = NULL, - encode = "multipart", ...) { - private$gen_interface(self$urls, "patch", path, query, body, encode, ...) + encode = "multipart", disk = NULL, stream = NULL, ...) { + private$gen_interface(self$urls, "patch", path, query, body, encode, + disk, stream, ...) }, delete = function(path = NULL, query = list(), body = NULL, - encode = "multipart", ...) { - private$gen_interface(self$urls, "delete", path, query, body, encode, ...) + encode = "multipart", disk = NULL, stream = NULL, ...) { + private$gen_interface(self$urls, "delete", path, query, body, encode, + disk, stream, ...) }, head = function(path = NULL, ...) { @@ -103,20 +109,52 @@ ), private = list( - gen_interface = function(x, method, ...) { - tmp <- AsyncVaried$new( - .list = lapply(x, function(z) { + gen_interface = function(x, method, path, query = NULL, body = NULL, + encode = NULL, disk = NULL, stream = NULL, ...) 
{
+
+      if (!is.null(disk)) {
+        # one disk path is required per URL: with a single path every
+        # request would write to, and clobber, the same file
+        stopifnot(length(x) == length(disk))
+        reqs <- Map(function(z, m) {
+          switch(
+            method,
+            get = HttpRequest$new(url = z)$get(path = path, query = query,
+              disk = m, stream = stream, ...),
+            post = HttpRequest$new(url = z)$post(path = path, query = query,
+              body = body, encode = encode, disk = m, stream = stream,
+              ...),
+            put = HttpRequest$new(url = z)$put(path = path, query = query,
+              body = body, encode = encode, disk = m, stream = stream,
+              ...),
+            patch = HttpRequest$new(url = z)$patch(path = path, query = query,
+              body = body, encode = encode, disk = m, stream = stream,
+              ...),
+            delete = HttpRequest$new(url = z)$delete(path = path,
+              query = query, body = body, encode = encode, disk = m,
+              stream = stream, ...),
+            head = HttpRequest$new(url = z)$head(path = path, ...)
+          )
+        }, x, disk)
+      } else {
+        reqs <- lapply(x, function(z) {
           switch(
             method,
-            get = HttpRequest$new(url = z)$get(...),
-            post = HttpRequest$new(url = z)$post(...),
-            put = HttpRequest$new(url = z)$put(...),
-            patch = HttpRequest$new(url = z)$patch(...),
-            delete = HttpRequest$new(url = z)$delete(...),
-            head = HttpRequest$new(url = z)$head(...)
+            get = HttpRequest$new(url = z)$get(path = path, query = query,
+              disk = disk, stream = stream, ...),
+            post = HttpRequest$new(url = z)$post(path = path, query = query,
+              body = body, encode = encode, disk = disk, stream = stream, ...),
+            put = HttpRequest$new(url = z)$put(path = path, query = query,
+              body = body, encode = encode, disk = disk, stream = stream, ...),
+            patch = HttpRequest$new(url = z)$patch(path = path, query = query,
+              body = body, encode = encode, disk = disk, stream = stream, ...),
+            delete = HttpRequest$new(url = z)$delete(path = path, query = query,
+              body = body, encode = encode, disk = disk, stream = stream, ...),
+            head = HttpRequest$new(url = z)$head(path = path, ...)
           )
         })
-      )
+      }
+      tmp <- AsyncVaried$new(.list = reqs)
       tmp$request()
       tmp$responses()
     }
diff -Nru r-cran-crul-0.4.0/R/asyncvaried.R r-cran-crul-0.5.0/R/asyncvaried.R
--- r-cran-crul-0.4.0/R/asyncvaried.R 2017-10-02 19:10:16.000000000 +0000
+++ r-cran-crul-0.5.0/R/asyncvaried.R 2018-01-19 17:28:22.000000000 +0000
@@ -190,10 +190,33 @@
         curl::handle_setform(h, .list = w$fields)
       }
       curl::handle_setheaders(h, .list = w$headers)
-      curl::multi_add(handle = h,
-        done = function(res) multi_res[[i]] <<- res,
-        pool = crulpool
-      )
+
+      if (is.null(w$disk) && is.null(w$stream)) {
+        curl::multi_add(
+          handle = h,
+          done = function(res) multi_res[[i]] <<- res,
+          pool = crulpool
+        )
+      } else {
+        if (!is.null(w$disk) && is.null(w$stream)) {
+          stopifnot(inherits(w$disk, "character"))
+          ff <- file(w$disk, open = "wb")
+          curl::multi_add(
+            handle = h,
+            done = function(res) multi_res[[i]] <<- res,
+            data = ff,
+            pool = crulpool
+          )
+        } else if (is.null(w$disk) && !is.null(w$stream)) {
+          stopifnot(is.function(w$stream))
+          curl::multi_add(
+            handle = h,
+            done = function(res) multi_res[[i]] <<- res,
+            data = w$stream,
+            pool = crulpool
+          )
+        }
+      }
     }
     for (i in seq_along(reqs)) make_request(i)
diff -Nru r-cran-crul-0.4.0/R/auth.R r-cran-crul-0.5.0/R/auth.R
--- r-cran-crul-0.4.0/R/auth.R 2017-05-17 05:45:02.000000000 +0000
+++ r-cran-crul-0.5.0/R/auth.R 2018-01-19 18:03:25.000000000 +0000
@@ -1,12 +1,20 @@
 #' Authentication
 #'
 #' @export
-#' @param user (character) username, optional
-#' @param pwd (character) password, optional
+#' @param user (character) username, required. See Details.
+#' @param pwd (character) password, required. See Details.
 #' @param auth (character) authentication type, one of basic (default),
-#' digest, digest_ie, gssnegotiate, ntlm, or any. optional
+#' digest, digest_ie, gssnegotiate, ntlm, or any. required
+#'
 #' @details
-#' Only supporting simple auth for now, OAuth later.
+#' Only simple auth is supported for now; OAuth support may come later.
+#'
+#' For `user` and `pwd` you are required to pass in some value.
+#' The value can also be `NULL`, which is equivalent to passing in an
+#' empty string like `""` in `httr::authenticate`. You may want to pass
+#' in `NULL` for both `user` and `pwd`, for example, if you are using
+#' the `gssnegotiate` auth type. See the example below.
+#'
 #' @examples
 #' auth(user = "foo", pwd = "bar", auth = "basic")
 #' auth(user = "foo", pwd = "bar", auth = "digest")
@@ -28,6 +36,9 @@
 #'   auth = auth(user = "user", pwd = "passwd")
 #' ))
 #' res$auth
+#'
+#' # gssnegotiate auth
+#' auth(NULL, NULL, "gssnegotiate")
 auth <- function(user, pwd, auth = "basic") {
   structure(ccp(list(
     userpwd = make_up(user, pwd),
diff -Nru r-cran-crul-0.4.0/R/client.R r-cran-crul-0.5.0/R/client.R
--- r-cran-crul-0.4.0/R/client.R 2017-10-02 16:39:42.000000000 +0000
+++ r-cran-crul-0.5.0/R/client.R 2018-01-19 17:51:27.000000000 +0000
@@ -20,7 +20,7 @@
 #'   \item{`delete(path, query, body, disk, stream, ...)`}{
 #'     Make a DELETE request
 #'   }
-#'   \item{`head(path, ...)`}{
+#'   \item{`head(path, query, ...)`}{
 #'     Make a HEAD request
 #'   }
 #' }
@@ -30,7 +30,7 @@
 #' @details Possible parameters (not all are allowed in each HTTP verb):
 #' \itemize{
 #'  \item path - URL path, appended to the base URL
-#'  \item query - query terms, as a list
+#'  \item query - query terms, as a named list
 #'  \item body - body as an R list
 #'  \item encode - one of form, multipart, json, or raw
 #'  \item disk - a path to write to. if NULL (default), memory used.
@@ -204,9 +204,9 @@
       private$make_request(rr)
     },
 
-    head = function(path = NULL, ...) {
+    head = function(path = NULL, query = list(), ...) {
       curl_opts_check(...)
- url <- private$make_url(self$url, self$handle, path, NULL) + url <- private$make_url(self$url, self$handle, path, query) opts <- list(customrequest = "HEAD", nobody = TRUE) rr <- list( url = url, diff -Nru r-cran-crul-0.4.0/R/crul-package.r r-cran-crul-0.5.0/R/crul-package.r --- r-cran-crul-0.4.0/R/crul-package.r 2017-05-21 23:23:37.000000000 +0000 +++ r-cran-crul-0.5.0/R/crul-package.r 2018-01-19 04:19:08.000000000 +0000 @@ -6,6 +6,7 @@ #' your http options, make http requests #' \item [HttpResponse()] - mostly for internal use, handles #' http responses +#' \item [Paginator()] - auto-paginate through requests #' \item [Async()] - asynchronous requests #' \item [AsyncVaried()] - varied asynchronous requests #' \item [HttpRequest()] - generate an HTTP request, mostly for diff -Nru r-cran-crul-0.4.0/R/curl-options.R r-cran-crul-0.5.0/R/curl-options.R --- r-cran-crul-0.4.0/R/curl-options.R 2017-06-13 20:27:40.000000000 +0000 +++ r-cran-crul-0.5.0/R/curl-options.R 2018-01-19 17:28:22.000000000 +0000 @@ -26,13 +26,12 @@ #' (res <- HttpClient$new(url = "https://httpbin.org")) #' res$get('get', verbose = TRUE) #' res$get('get', stuff = "things") -#' res$get('get', httpget = TRUE) #' #' # set a timeout #' (res <- HttpClient$new( #' url = "https://httpbin.org", #' opts = list(timeout_ms = 1) #' )) -#' res$get('get') +#' # res$get('get') #' } NULL diff -Nru r-cran-crul-0.4.0/R/make_url.R r-cran-crul-0.5.0/R/make_url.R --- r-cran-crul-0.4.0/R/make_url.R 2017-09-27 22:35:27.000000000 +0000 +++ r-cran-crul-0.5.0/R/make_url.R 2018-01-19 04:19:08.000000000 +0000 @@ -21,7 +21,11 @@ if (length(x)) { quer <- list() for (i in seq_along(x)) { - quer[[i]] <- paste(names(x)[i], urltools::url_encode(x[[i]]), sep = "=") + if (!inherits(x[[i]], "AsIs")) { + x[[i]] <- urltools::url_encode(x[[i]]) + } + quer[[i]] <- paste(names(x)[i], x[[i]], sep = "=") + #quer[[i]] <- paste(names(x)[i], urltools::url_encode(x[[i]]), sep = "=") } parms <- paste0(quer, collapse = "&") paste0(url, "?", parms) diff -Nru r-cran-crul-0.4.0/R/paginator.R r-cran-crul-0.5.0/R/paginator.R --- r-cran-crul-0.4.0/R/paginator.R 1970-01-01 00:00:00.000000000 +0000 +++ r-cran-crul-0.5.0/R/paginator.R 2018-01-19 04:19:08.000000000 +0000 @@ -0,0 +1,268 @@ +#' Paginator client +#' +#' A client to help you paginate +#' +#' @export +#' @param client an object of class `HttpClient`, from a call to [HttpClient] +#' @param by (character) how to paginate. Only 'query_params' supported for +#' now. In the future will support 'link_headers' and 'cursor'. See Details. +#' @param limit_param (character) the name of the limit parameter. +#' Default: limit +#' @param offset_param (character) the name of the offset parameter. 
+#' Default: offset
+#' @param limit (numeric/integer) the maximum records wanted
+#' @param limit_chunk (numeric/integer) the number by which to chunk requests,
+#' e.g., 10 would mean each request gets 10 records
+#' @details
+#' **Methods**
+#' \describe{
+#'   \item{`get(path, query, ...)`}{
+#'     make a paginated GET request
+#'   }
+#'   \item{`post(path, query, body, encode, ...)`}{
+#'     make a paginated POST request
+#'   }
+#'   \item{`put(path, query, body, encode, ...)`}{
+#'     make a paginated PUT request
+#'   }
+#'   \item{`patch(path, query, body, encode, ...)`}{
+#'     make a paginated PATCH request
+#'   }
+#'   \item{`delete(path, query, body, encode, ...)`}{
+#'     make a paginated DELETE request
+#'   }
+#'   \item{`head(path, ...)`}{
+#'     make a paginated HEAD request - it is not clear yet whether
+#'     pagination makes sense for HEAD requests
+#'   }
+#'   \item{`responses()`}{
+#'     list responses
+#'     - returns: a list of `HttpResponse` objects, empty list before
+#'     requests made
+#'   }
+#'   \item{`parse(encoding = "UTF-8")`}{
+#'     parse content
+#'     - returns: character vector, empty character vector before
+#'     requests made
+#'   }
+#'   \item{`status_code()`}{
+#'     (integer) HTTP status codes
+#'     - returns: numeric vector, empty numeric vector before
+#'     requests made
+#'   }
+#'   \item{`status()`}{
+#'     (list) HTTP status objects
+#'     - returns: a list of `http_code` objects, empty list before
+#'     requests made
+#'   }
+#'   \item{`content()`}{
+#'     raw content
+#'     - returns: raw list, empty list before requests made
+#'   }
+#'   \item{`times()`}{
+#'     curl request times
+#'     - returns: list of named numeric vectors, empty list before
+#'     requests made
+#'   }
+#' }
+#'
+#' See [HttpClient()] for information on parameters.
+#'
+#' @format NULL
+#' @usage NULL
+#'
+#' @section Methods to paginate:
+#'
+#' Supported now:
+#'
+#' - `query_params`: the most common way, and so the default. This method
+#' involves setting how many records to return and which record to start at
+#' for each request. We send these query parameters for you.
+#'
+#' Supported later:
+#'
+#' - `link_headers`: link headers are URLs for the next/previous/last
+#' request given in the response header from the server. This is relatively
+#' uncommon, though it is recommended by JSONAPI and is implemented by a
+#' well-known API (GitHub).
+#' - `cursor`: this works by a single string given back in each response, to
+#' be passed in the subsequent request, and so on until no more records
+#' remain. This is common in Solr.
+#'
+#' @return a list, with objects of class [HttpResponse()].
+#' Responses are returned in the order they are passed in.
+#'
+#' @examples \dontrun{
+#' (cli <- HttpClient$new(url = "http://api.crossref.org"))
+#' cc <- Paginator$new(client = cli, limit_param = "rows",
+#'    offset_param = "offset", limit = 50, limit_chunk = 10)
+#' cc
+#' cc$get('works')
+#' cc
+#' cc$responses()
+#' cc$status()
+#' cc$status_code()
+#' cc$times()
+#' cc$content()
+#' cc$parse()
+#' lapply(cc$parse(), jsonlite::fromJSON)
+#' }
+Paginator <- R6::R6Class(
+  'Paginator',
+  public = list(
+    http_req = NULL,
+    by = "query_params",
+    limit_chunk = NULL,
+    limit_param = NULL,
+    offset_param = NULL,
+    limit = NULL,
+    req = NULL,
+
+    print = function(x, ...) {
+      cat("<crul paginator> ", sep = "\n")
+      cat(paste0(
+        "  base url: ",
+        if (is.null(self$http_req$url)) self$http_req$handle$url
+        else self$http_req$url),
+        sep = "\n")
+      cat(paste0("  by: ", self$by), sep = "\n")
+      cat(paste0("  limit_chunk: ", self$limit_chunk %||% ""), sep = "\n")
+      cat(paste0("  limit_param: ", self$limit_param %||% ""), sep = "\n")
+      cat(paste0("  offset_param: ", self$offset_param %||% ""), sep = "\n")
+      cat(paste0("  limit: ", self$limit %||% ""), sep = "\n")
+      cat(paste0("  status: ",
+        if (length(private$resps) == 0) {
+          "not run yet"
+        } else {
+          paste0(length(private$resps), " requests done")
+        }), sep = "\n")
+      invisible(self)
+    },
+
+    initialize = function(client, by = "query_params", limit_param,
+      offset_param, limit, limit_chunk) {
+      if (!inherits(client, "HttpClient")) {
+        stop("'client' has to be an object of class 'HttpClient'",
+          call. = FALSE)
+      }
+      self$http_req <- client
+      if (by != "query_params") {
+        stop("'by' has to be 'query_params' for now", call. = FALSE)
+      }
+      self$by <- by
+      if (!missing(limit_chunk)) self$limit_chunk <- limit_chunk
+      if (!missing(limit_param)) self$limit_param <- limit_param
+      if (!missing(offset_param)) self$offset_param <- offset_param
+      if (!missing(limit)) self$limit <- limit
+      if (self$by == "query_params") {
+        # offsets 0, limit_chunk, 2 * limit_chunk, ..., stopping short of limit
+        private$offset_iters <- c(0, seq(from = 0, to = self$limit,
+          by = self$limit_chunk)[-1])
+        private$offset_iters <-
+          private$offset_iters[-length(private$offset_iters)]
+        private$offset_args <- as.list(stats::setNames(private$offset_iters,
+          rep(self$offset_param, length(private$offset_iters))))
+      }
+    },
+
+    # HTTP verbs
+    get = function(path = NULL, query = list(), ...) {
+      private$page("get", path, query, ...)
+    },
+
+    post = function(path = NULL, query = list(), body = NULL,
+      encode = "multipart", ...) {
+      private$page("post", path, query, body, encode, ...)
+    },
+
+    put = function(path = NULL, query = list(), body = NULL,
+      encode = "multipart", ...) {
+      private$page("put", path, query, body, encode, ...)
+    },
+
+    patch = function(path = NULL, query = list(), body = NULL,
+      encode = "multipart", ...) {
+      private$page("patch", path, query, body, encode, ...)
+    },
+
+    delete = function(path = NULL, query = list(), body = NULL,
+      encode = "multipart", ...) {
+      private$page("delete", path, query, body, encode, ...)
+    },
+
+    head = function(path = NULL, ...) {
+      private$page("head", path, ...)
+    },
+
+    # functions to inspect output
+    responses = function() {
+      private$resps %||% list()
+    },
+
+    status_code = function() {
+      vapply(private$resps, function(z) z$status_code, 1)
+    },
+
+    status = function() {
+      lapply(private$resps, function(z) z$status_http())
+    },
+
+    parse = function(encoding = "UTF-8") {
+      vapply(private$resps, function(z) z$parse(encoding = encoding), "")
+    },
+
+    content = function() {
+      lapply(private$resps, function(z) z$content)
+    },
+
+    times = function() {
+      lapply(private$resps, function(z) z$times)
+    }
+  ),
+
+  private = list(
+    offset_iters = NULL,
+    offset_args = NULL,
+    resps = NULL,
+    page = function(method, path, query, body, encode, ...)
{ + tmp <- list() + for (i in seq_along(private$offset_iters)) { + off <- private$offset_args[i] + off[self$limit_param] <- self$limit_chunk + tmp[[i]] <- switch( + method, + get = self$http_req$get(path, query = ccp(c(query, off)), ...), + post = self$http_req$post(path, query = ccp(c(query, off)), + body = body, encode = encode, ...), + put = self$http_req$put(path, query = ccp(c(query, off)), + body = body, encode = encode, ...), + patch = self$http_req$patch(path, query = ccp(c(query, off)), + body = body, encode = encode, ...), + delete = self$http_req$delete(path, query = ccp(c(query, off)), + body = body, encode = encode, ...), + head = self$http_req$head(path, ...) + ) + } + private$resps <- tmp + message("OK\n") + } + ) +) + +# sttrim <- function(str) { +# gsub("^\\s+|\\s+$", "", str) +# } + +# parse_links <- function(w) { +# if (is.null(w)) { +# NULL +# } else { +# if (inherits(w, "character")) { +# links <- sttrim(strsplit(w, ",")[[1]]) +# lapply(links, each_link) +# } else { +# nms <- sapply(w, "[[", "name") +# tmp <- unlist(w[nms %in% "next"]) +# grep("http", tmp, value = TRUE) +# } +# } +# } + +# each_link <- function(z) { +# tmp <- sttrim(strsplit(z, ";")[[1]]) +# nm <- gsub("\"|(rel)|=", "", tmp[2]) +# url <- gsub("^<|>$", "", tmp[1]) +# list(name = nm, url = url) +# } diff -Nru r-cran-crul-0.4.0/R/writing-options.R r-cran-crul-0.5.0/R/writing-options.R --- r-cran-crul-0.4.0/R/writing-options.R 2017-10-02 17:33:07.000000000 +0000 +++ r-cran-crul-0.5.0/R/writing-options.R 2018-01-19 17:28:22.000000000 +0000 @@ -8,9 +8,62 @@ #' res <- x$get("get", disk = f) #' res$content # when using write to disk, content is a path #' readLines(res$content) +#' close(file(f)) #' #' # streaming response #' (x <- HttpClient$new(url = "https://httpbin.org")) #' res <- x$get('stream/50', stream = function(x) cat(rawToChar(x))) #' res$content # when streaming, content is NULL +#' +#' +#' ## Async +#' (cc <- Async$new( +#' urls = c( +#' 'https://httpbin.org/get?a=5', +#' 'https://httpbin.org/get?foo=bar', +#' 'https://httpbin.org/get?b=4', +#' 'https://httpbin.org/get?stuff=things', +#' 'https://httpbin.org/get?b=4&g=7&u=9&z=1' +#' ) +#' )) +#' files <- replicate(5, tempfile()) +#' (res <- cc$get(disk = files, verbose = TRUE)) +#' lapply(files, readLines) +#' +#' ## Async varied +#' ### disk +#' f <- tempfile() +#' g <- tempfile() +#' req1 <- HttpRequest$new(url = "https://httpbin.org/get")$get(disk = f) +#' req2 <- HttpRequest$new(url = "https://httpbin.org/post")$post(disk = g) +#' req3 <- HttpRequest$new(url = "https://httpbin.org/get")$get() +#' (out <- AsyncVaried$new(req1, req2, req3)) +#' out$request() +#' out$content() +#' readLines(f) +#' readLines(g) +#' close(file(f)) +#' close(file(g)) +#' +#' ### stream - to console +#' fun <- function(x) cat(rawToChar(x)) +#' req1 <- HttpRequest$new(url = "https://httpbin.org/get" +#' )$get(query = list(foo = "bar"), stream = fun) +#' req2 <- HttpRequest$new(url = "https://httpbin.org/get" +#' )$get(query = list(hello = "world"), stream = fun) +#' (out <- AsyncVaried$new(req1, req2)) +#' out$request() +#' out$content() +#' +#' ### stream - to an R object +#' lst <- c() +#' fun <- function(x) lst <<- c(lst, x) +#' req1 <- HttpRequest$new(url = "https://httpbin.org/get" +#' )$get(query = list(foo = "bar"), stream = fun) +#' req2 <- HttpRequest$new(url = "https://httpbin.org/get" +#' )$get(query = list(hello = "world"), stream = fun) +#' (out <- AsyncVaried$new(req1, req2)) +#' out$request() +#' lst +#' cat(rawToChar(lst)) NULL diff -Nru 
r-cran-crul-0.4.0/README.md r-cran-crul-0.5.0/README.md --- r-cran-crul-0.4.0/README.md 2017-10-02 19:33:01.000000000 +0000 +++ r-cran-crul-0.5.0/README.md 2018-01-19 18:42:30.000000000 +0000 @@ -16,6 +16,8 @@ * `HttpClient` - Main interface to making HTTP requests. Synchronous requests only. * `HttpResponse` - HTTP response object, used for all responses across the different clients. +* `Paginator` - Auto-paginate through requests - supports a subset of all possible +pagination scenarios - will fill out more scenarios soon * `Async` - Asynchronous HTTP requests - a simple interface for many URLS - whose interface is similar to `HttpClient` - all URLs are treated the same. * `AsyncVaried` - Asynchronous HTTP requests - accepts any number of `HttpRequest` @@ -26,11 +28,14 @@ * `auth()` - Simple authentication helper * `proxy()` - Proxy helper * `upload()` - File upload helper +* Writing to disk and streaming: available with both synchronous requests +as well as async requests. Mocking: `crul` now integrates with [webmockr](https://github.com/ropensci/webmockr) to mock -HTTP requests. +HTTP requests. Integration with [vcr](https://github.com/ropensci/vcr) to mock specifically +around tests is coming soon. ## Installation @@ -151,11 +156,11 @@ #> [185] 63 6c 6f 73 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 #> [208] 74 74 70 62 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 #> [231] 2d 41 67 65 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 34 2e -#> [254] 30 20 72 2d 63 75 72 6c 2f 32 2e 38 2e 31 20 63 72 75 6c 2f 30 2e 34 -#> [277] 2e 30 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 -#> [300] 31 35 37 2e 31 33 30 2e 31 37 39 2e 38 36 22 2c 20 0a 20 20 22 75 72 -#> [323] 6c 22 3a 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 -#> [346] 67 2f 67 65 74 22 0a 7d 0a +#> [254] 30 20 72 2d 63 75 72 6c 2f 33 2e 31 20 63 72 75 6c 2f 30 2e 35 2e 30 +#> [277] 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 35 30 +#> [300] 2e 32 32 2e 31 35 35 2e 32 31 34 22 2c 20 0a 20 20 22 75 72 6c 22 3a +#> [323] 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 +#> [346] 65 74 22 0a 7d 0a ``` HTTP method @@ -172,7 +177,7 @@ ```r res$request_headers #> $`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> $`Accept-Encoding` #> [1] "gzip, deflate" @@ -199,7 +204,7 @@ #> [1] "meinheld/0.6.1" #> #> $date -#> [1] "Mon, 02 Oct 2017 19:20:21 GMT" +#> [1] "Fri, 19 Jan 2018 18:42:29 GMT" #> #> $`content-type` #> [1] "application/json" @@ -214,10 +219,10 @@ #> [1] "Flask" #> #> $`x-processed-time` -#> [1] "0.000764131546021" +#> [1] "0.000702142715454" #> #> $`content-length` -#> [1] "354" +#> [1] "351" #> #> $via #> [1] "1.1 vegur" @@ -229,7 +234,7 @@ ```r res$parse() #> No encoding supplied: defaulting to UTF-8. 
-#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get\"\n}\n" +#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get\"\n}\n" jsonlite::fromJSON(res$parse()) #> No encoding supplied: defaulting to UTF-8. #> $args @@ -252,11 +257,11 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get" @@ -286,21 +291,8 @@ 'https://httpbin.org/get?foo=bar' ) )) -#> -#> urls: -#> https://httpbin.org/ -#> https://httpbin.org/get?a=5 -#> https://httpbin.org/get?foo=bar res <- cc$get() lapply(res, function(z) z$parse("UTF-8")) -#> [[1]] -#> [1] "\n\n\n \n \n httpbin(1): HTTP Client Testing Service\n \n \n\n\n\n\"Fork\n\n\n\n
\n

httpbin(1): HTTP Request & Response Service

\n

Freely hosted in HTTP, HTTPS, & EU flavors by Kenneth Reitz & Runscope.

\n\n

BONUSPOINTS

\n\n\n\n

ENDPOINTS

\n\n\n\n

DESCRIPTION

\n\n

Testing an HTTP Library can become difficult sometimes. RequestBin is fantastic for testing POST requests, but doesn't let you control the response. This exists to cover all kinds of HTTP scenarios. Additional endpoints are being considered.

\n\n

All endpoint responses are JSON-encoded.

\n\n

EXAMPLES

\n\n

$ curl http://httpbin.org/ip

\n\n
{\"origin\": \"24.127.96.129\"}\n
\n\n

$ curl http://httpbin.org/user-agent

\n\n
{\"user-agent\": \"curl/7.19.7 (universal-apple-darwin10.0) libcurl/7.19.7 OpenSSL/0.9.8l zlib/1.2.3\"}\n
\n\n

$ curl http://httpbin.org/get

\n\n
{\n   \"args\": {},\n   \"headers\": {\n      \"Accept\": \"*/*\",\n      \"Connection\": \"close\",\n      \"Content-Length\": \"\",\n      \"Content-Type\": \"\",\n      \"Host\": \"httpbin.org\",\n      \"User-Agent\": \"curl/7.19.7 (universal-apple-darwin10.0) libcurl/7.19.7 OpenSSL/0.9.8l zlib/1.2.3\"\n   },\n   \"origin\": \"24.127.96.129\",\n   \"url\": \"http://httpbin.org/get\"\n}\n
\n\n

$ curl -I http://httpbin.org/status/418

\n\n
HTTP/1.1 418 I'M A TEAPOT\nServer: nginx/0.7.67\nDate: Mon, 13 Jun 2011 04:25:38 GMT\nConnection: close\nx-more-info: http://tools.ietf.org/html/rfc2324\nContent-Length: 135\n
\n\n

$ curl https://httpbin.org/get?show_env=1

\n\n
{\n  \"headers\": {\n    \"Content-Length\": \"\",\n    \"Accept-Language\": \"en-US,en;q=0.8\",\n    \"Accept-Encoding\": \"gzip,deflate,sdch\",\n    \"X-Forwarded-Port\": \"443\",\n    \"X-Forwarded-For\": \"109.60.101.240\",\n    \"Host\": \"httpbin.org\",\n    \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\",\n    \"User-Agent\": \"Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.83 Safari/535.11\",\n    \"X-Request-Start\": \"1350053933441\",\n    \"Accept-Charset\": \"ISO-8859-1,utf-8;q=0.7,*;q=0.3\",\n    \"Connection\": \"keep-alive\",\n    \"X-Forwarded-Proto\": \"https\",\n    \"Cookie\": \"_gauges_unique_day=1; _gauges_unique_month=1; _gauges_unique_year=1; _gauges_unique=1; _gauges_unique_hour=1\",\n    \"Content-Type\": \"\"\n  },\n  \"args\": {\n    \"show_env\": \"1\"\n  },\n  \"origin\": \"109.60.101.240\",\n  \"url\": \"http://httpbin.org/get?show_env=1\"\n}\n
\n\n

Installing and running from PyPI

\n\n

You can install httpbin as a library from PyPI and run it as a WSGI app. For example, using Gunicorn:

\n\n
$ pip install httpbin\n$ gunicorn httpbin:app\n
\n\n\n

AUTHOR

\n\n

A Kenneth Reitz project.

\n

BTC: 1Me2iXTJ91FYZhrGvaGaRDCBtnZ4KdxCug

\n\n

SEE ALSO

\n\n

Hurl.it - Make HTTP requests.

\n

RequestBin - Inspect HTTP requests.

\n

http://python-requests.org

\n\n
\n\n\n \n\n\n\n\n\n\n" -#> -#> [[2]] -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" -#> -#> [[3]] -#> [1] "{\n \"args\": {\n \"foo\": \"bar\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?foo=bar\"\n}\n" ``` The `AsyncVaried` interface accepts any number of `HttpRequest` objects, which @@ -339,8 +331,8 @@ #> Message: OK #> Explanation: Request fulfilled, document follows out$parse() -#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Foo\": \"bar\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get\"\n}\n" -#> [2] "{\n \"args\": {}, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {}, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"0\", \n \"Content-Type\": \"application/x-www-form-urlencoded\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"json\": null, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/post\"\n}\n" +#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Foo\": \"bar\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get\"\n}\n" +#> [2] "{\n \"args\": {}, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {}, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"0\", \n \"Content-Type\": \"application/x-www-form-urlencoded\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"json\": null, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/post\"\n}\n" ``` ## TO DO @@ -358,5 +350,5 @@ * Please [report any issues or bugs](https://github.com/ropensci/crul/issues). * License: MIT * Get citation information for `crul` in R doing `citation(package = 'crul')` -* Please note that this project is released with a [Contributor Code of Conduct](CONDUCT.md). +* Please note that this project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. 
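The pagination arithmetic behind the new `Paginator` class (R/paginator.R above) can be checked by hand. A minimal sketch, using the same `limit = 50` and `limit_chunk = 10` as the man page example:

```r
# reproduce the offset computation from Paginator$new() (query_params mode)
limit <- 50
limit_chunk <- 10
offsets <- c(0, seq(from = 0, to = limit, by = limit_chunk)[-1])
offsets <- offsets[-length(offsets)]
offsets
#> [1]  0 10 20 30 40
```

Each offset is then paired with the limit parameter inside `page()`, so `cc$get('works')` in the example issues five requests - `works?offset=0&rows=10`, `works?offset=10&rows=10`, and so on - and `cc$responses()` returns the five responses in that order.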
diff -Nru r-cran-crul-0.4.0/tests/testthat/test-async.R r-cran-crul-0.5.0/tests/testthat/test-async.R --- r-cran-crul-0.4.0/tests/testthat/test-async.R 2017-10-01 16:53:54.000000000 +0000 +++ r-cran-crul-0.5.0/tests/testthat/test-async.R 2018-01-19 17:28:22.000000000 +0000 @@ -149,3 +149,53 @@ expect_match(out[[2]]$url, "b=6") expect_match(out[[3]]$url, "c=7") }) + +context("Async - disk") +test_that("Async - writing to disk works", { + skip_on_cran() + + cc <- Async$new( + urls = c( + 'https://httpbin.org/get?a=5', + 'https://httpbin.org/get?foo=bar', + 'https://httpbin.org/get?b=4', + 'https://httpbin.org/get?stuff=things', + 'https://httpbin.org/get?b=4&g=7&u=9&z=1' + ) + ) + files <- replicate(5, tempfile()) + res <- cc$get(disk = files) + out <- lapply(files, readLines) + + # cleanup + closeAllConnections() + + expect_is(res, "list") + expect_is(res[[1]], "HttpResponse") + expect_is(out, "list") + expect_is(out[[1]], "character") +}) + +context("Async - stream") +test_that("Async - streaming to disk works", { + skip_on_cran() + + bb <- Async$new(urls = c('https://httpbin.org/get?a=5', + 'https://httpbin.org/get?b=6', + 'https://httpbin.org/get?c=7')) + mylist <- c() + fun <- function(x) mylist <<- c(mylist, x) + out <- bb$get(stream = fun) + + expect_is(bb, "Async") + + expect_is(out[[1]], "HttpResponse") + + expect_identical(out[[1]]$content, raw(0)) + expect_identical(out[[2]]$content, raw(0)) + expect_identical(out[[3]]$content, raw(0)) + + expect_is(mylist, "raw") + expect_is(rawToChar(mylist), "character") + expect_match(rawToChar(mylist), "application/json") +}) diff -Nru r-cran-crul-0.4.0/tests/testthat/test-asyncvaried.R r-cran-crul-0.5.0/tests/testthat/test-asyncvaried.R --- r-cran-crul-0.4.0/tests/testthat/test-asyncvaried.R 2017-06-12 20:29:35.000000000 +0000 +++ r-cran-crul-0.5.0/tests/testthat/test-asyncvaried.R 2018-01-19 17:28:22.000000000 +0000 @@ -57,3 +57,63 @@ expect_match(out[[2]]$url, "b=6") expect_match(out[[3]]$url, "c=7") }) + + +context("AsyncVaried - disk") +test_that("AsyncVaried - writing to disk works", { + skip_on_cran() + + f <- tempfile() + g <- tempfile() + req1 <- HttpRequest$new(url = "https://httpbin.org/get")$get(disk = f) + req2 <- HttpRequest$new(url = "https://httpbin.org/post")$post(disk = g) + req3 <- HttpRequest$new(url = "https://httpbin.org/get")$get() + out <- AsyncVaried$new(req1, req2, req3) + out$request() + cont <- out$content() + lines_f <- readLines(f) + lines_g <- readLines(g) + + expect_is(out, "AsyncVaried") + + expect_is(cont, "list") + expect_is(cont[[1]], "raw") + expect_identical(cont[[1]], raw(0)) + expect_is(cont[[2]], "raw") + expect_identical(cont[[2]], raw(0)) + expect_is(cont[[3]], "raw") + expect_gt(length(cont[[3]]), 0) + + expect_is(lines_f, "character") + expect_gt(length(lines_f), 0) + + expect_is(lines_g, "character") + expect_gt(length(lines_g), 0) + + # cleanup + closeAllConnections() +}) + + +context("AsyncVaried - stream") +test_that("AsyncVaried - streaming to disk works", { + skip_on_cran() + + lst <- c() + fun <- function(x) lst <<- c(lst, x) + req1 <- HttpRequest$new(url = "https://httpbin.org/get" + )$get(query = list(foo = "bar"), stream = fun) + req2 <- HttpRequest$new(url = "https://httpbin.org/get" + )$get(query = list(hello = "world"), stream = fun) + out <- AsyncVaried$new(req1, req2) + suppressWarnings(out$request()) + + expect_is(out, "AsyncVaried") + + expect_identical(out$responses()[[1]]$content, raw(0)) + expect_identical(out$responses()[[2]]$content, raw(0)) + + expect_is(lst, "raw") + 
expect_is(rawToChar(lst), "character") + expect_match(rawToChar(lst), "application/json") +}) diff -Nru r-cran-crul-0.4.0/tests/testthat/test-head.R r-cran-crul-0.5.0/tests/testthat/test-head.R --- r-cran-crul-0.4.0/tests/testthat/test-head.R 2016-11-09 00:18:50.000000000 +0000 +++ r-cran-crul-0.5.0/tests/testthat/test-head.R 2018-01-19 17:48:55.000000000 +0000 @@ -18,3 +18,24 @@ # content is empty expect_equal(aa$content, raw(0)) }) + + +test_that("head - query passed to head doesn't fail", { + skip_on_cran() + + cli <- HttpClient$new(url = "https://www.google.com") + aa <- cli$head(query = list(foo = "bar")) + + expect_is(aa, "HttpResponse") + expect_is(aa$handle, 'curl_handle') + expect_is(aa$content, "raw") + expect_is(aa$method, "character") + expect_equal(aa$method, "head") + expect_is(aa$parse, "function") + expect_true(aa$success()) + expect_match(aa$request$url$url, "foo") + expect_match(aa$request$url$url, "bar") + + # content is empty + expect_equal(aa$content, raw(0)) +}) diff -Nru r-cran-crul-0.4.0/tests/testthat/test-mocking.R r-cran-crul-0.5.0/tests/testthat/test-mocking.R --- r-cran-crul-0.4.0/tests/testthat/test-mocking.R 2017-05-18 17:02:55.000000000 +0000 +++ r-cran-crul-0.5.0/tests/testthat/test-mocking.R 2018-01-19 04:19:08.000000000 +0000 @@ -38,7 +38,7 @@ expect_is(bb, "HttpResponse") expect_is(aa$content, "raw") - expect_null(bb$content) + expect_equal(length(bb$content), 0) expect_is(aa$times, "numeric") expect_null(bb$times) diff -Nru r-cran-crul-0.4.0/tests/testthat/test-paginator.R r-cran-crul-0.5.0/tests/testthat/test-paginator.R --- r-cran-crul-0.4.0/tests/testthat/test-paginator.R 1970-01-01 00:00:00.000000000 +0000 +++ r-cran-crul-0.5.0/tests/testthat/test-paginator.R 2018-01-19 04:19:08.000000000 +0000 @@ -0,0 +1,41 @@ +context("Paginator") + +cli <- HttpClient$new(url = "http://api.crossref.org") +aa <- Paginator$new(client = cli, by = "query_params", limit_param = "rows", + offset_param = "offset", limit = 50, limit_chunk = 10) + +test_that("Paginator works", { + skip_on_cran() + + expect_is(cli, "HttpClient") + expect_is(Paginator, "R6ClassGenerator") + + expect_is(aa, "Paginator") + expect_is(aa$.__enclos_env__$private$page, "function") + expect_is(aa$parse, "function") + expect_is(aa$content, "function") + expect_is(aa$responses, "function") + + # before requests + expect_equal(length(aa$content()), 0) + expect_equal(length(aa$status()), 0) + expect_equal(length(aa$status_code()), 0) + expect_equal(length(aa$times()), 0) + + # after requests + invisible(aa$get("works")) + expect_equal(length(aa$content()), 5) + expect_equal(length(aa$status()), 5) + expect_equal(length(aa$status_code()), 5) + expect_equal(length(aa$times()), 5) +}) + +test_that("Paginator fails well", { + skip_on_cran() + + expect_error(Paginator$new(), "argument \"client\" is missing") + expect_error(Paginator$new(cli), "'to' must be of length 1") + expect_error(Paginator$new(cli, 5), "'by' has to be 'query_params' for now") + expect_error(Paginator$new(5, "query_params"), + "'client' has to be an object of class 'HttpClient'") +}) diff -Nru r-cran-crul-0.4.0/vignettes/async.Rmd r-cran-crul-0.5.0/vignettes/async.Rmd --- r-cran-crul-0.4.0/vignettes/async.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/vignettes/async.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -13,19 +13,19 @@ There are two interfaces to asynchronous requests in `crul`: -1. Simple async: any number of URLs, all treated with the same curl options, +1. 
Simple async: any number of URLs, all treated with the same curl options, headers, etc., and only one HTTP method type at a time. 2. Varied request async: build any type of request and execute all asynchronously. -The first option takes less thinking, less work, and is good solution when you +The first option takes less thinking, less work, and is good solution when you just want to hit a bunch of URLs asynchronously. -The second option is ideal when you want to set curl options/headers on each +The second option is ideal when you want to set curl options/headers on each request and/or want to do different types of HTTP methods on each request. -One thing to think about before using async is whether the data provider is +One thing to think about before using async is whether the data provider is okay with it. It's possible that a data provider's service may be brought down -if you do too many async requests. +if you do too many async requests. ```r @@ -34,7 +34,7 @@ ## simple async -Build request objcect with 1 or more URLs +Build request object with 1 or more URLs @@ -67,13 +67,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:08 GMT +#> date: Fri, 19 Jan 2018 18:44:29 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.00125598907471 -#> content-length: 349 +#> x-processed-time: 0.000792026519775 +#> content-length: 346 #> via: 1.1 vegur #> params: #> a: 5 @@ -87,13 +87,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:07 GMT +#> date: Fri, 19 Jan 2018 18:44:29 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.00107097625732 -#> content-length: 368 +#> x-processed-time: 0.00130796432495 +#> content-length: 365 #> via: 1.1 vegur #> params: #> a: 5 @@ -108,13 +108,13 @@ #> status: HTTP/1.1 200 OK #> connection: keep-alive #> server: meinheld/0.6.1 -#> date: Mon, 02 Oct 2017 19:21:08 GMT +#> date: Fri, 19 Jan 2018 18:44:28 GMT #> content-type: application/json #> access-control-allow-origin: * #> access-control-allow-credentials: true #> x-powered-by: Flask -#> x-processed-time: 0.000734090805054 -#> content-length: 33 +#> x-processed-time: 0.000822067260742 +#> content-length: 32 #> via: 1.1 vegur #> status: 200 ``` @@ -130,22 +130,22 @@ res[[1]]$success() #> [1] TRUE res[[1]]$parse("UTF-8") -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" ``` -Or apply access/method calls aross many results, e.g., parse all results +Or apply access/method calls across many results, e.g., parse all results ```r lapply(res, function(z) 
z$parse("UTF-8")) #> [[1]] -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" #> #> [[2]] -#> [1] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5&b=6\"\n}\n" #> #> [[3]] -#> [1] "{\n \"origin\": \"157.130.179.86\"\n}\n" +#> [1] "{\n \"origin\": \"50.22.155.214\"\n}\n" ``` ## varied request async @@ -197,8 +197,8 @@ ```r res$parse() -#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" -#> [2] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"137\", \n \"Content-Type\": \"multipart/form-data; boundary=------------------------9223144570b5d592\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"json\": null, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n" +#> [1] "{\n \"args\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get?a=5\"\n}\n" +#> [2] "{\n \"args\": {\n \"a\": \"5\", \n \"b\": \"6\"\n }, \n \"data\": \"\", \n \"files\": {}, \n \"form\": {\n \"a\": \"5\"\n }, \n \"headers\": {\n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Content-Length\": \"137\", \n \"Content-Type\": 
\"multipart/form-data; boundary=------------------------14f323a90518346b\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"json\": null, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/post?a=5&b=6\"\n}\n" ``` @@ -214,9 +214,9 @@ #> "Accept-Encoding": "gzip, deflate", #> "Connection": "close", #> "Host": "httpbin.org", -#> "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> }, -#> "origin": "157.130.179.86", +#> "origin": "50.22.155.214", #> "url": "https://httpbin.org/get?a=5" #> } #> @@ -239,12 +239,12 @@ #> "Accept-Encoding": "gzip, deflate", #> "Connection": "close", #> "Content-Length": "137", -#> "Content-Type": "multipart/form-data; boundary=------------------------9223144570b5d592", +#> "Content-Type": "multipart/form-data; boundary=------------------------14f323a90518346b", #> "Host": "httpbin.org", -#> "User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> }, #> "json": null, -#> "origin": "157.130.179.86", +#> "origin": "50.22.155.214", #> "url": "https://httpbin.org/post?a=5&b=6" #> } #> diff -Nru r-cran-crul-0.4.0/vignettes/crul_vignette.Rmd r-cran-crul-0.5.0/vignettes/crul_vignette.Rmd --- r-cran-crul-0.4.0/vignettes/crul_vignette.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/vignettes/crul_vignette.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -117,11 +117,11 @@ #> [185] 63 6c 6f 73 65 22 2c 20 0a 20 20 20 20 22 48 6f 73 74 22 3a 20 22 68 #> [208] 74 74 70 62 69 6e 2e 6f 72 67 22 2c 20 0a 20 20 20 20 22 55 73 65 72 #> [231] 2d 41 67 65 6e 74 22 3a 20 22 6c 69 62 63 75 72 6c 2f 37 2e 35 34 2e -#> [254] 30 20 72 2d 63 75 72 6c 2f 32 2e 38 2e 31 20 63 72 75 6c 2f 30 2e 34 -#> [277] 2e 30 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 -#> [300] 31 35 37 2e 31 33 30 2e 31 37 39 2e 38 36 22 2c 20 0a 20 20 22 75 72 -#> [323] 6c 22 3a 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 -#> [346] 67 2f 67 65 74 22 0a 7d 0a +#> [254] 30 20 72 2d 63 75 72 6c 2f 33 2e 31 20 63 72 75 6c 2f 30 2e 35 2e 30 +#> [277] 22 0a 20 20 7d 2c 20 0a 20 20 22 6f 72 69 67 69 6e 22 3a 20 22 35 30 +#> [300] 2e 32 32 2e 31 35 35 2e 32 31 34 22 2c 20 0a 20 20 22 75 72 6c 22 3a +#> [323] 20 22 68 74 74 70 73 3a 2f 2f 68 74 74 70 62 69 6e 2e 6f 72 67 2f 67 +#> [346] 65 74 22 0a 7d 0a ``` HTTP method @@ -138,7 +138,7 @@ ```r res$request_headers #> $`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> $`Accept-Encoding` #> [1] "gzip, deflate" @@ -165,7 +165,7 @@ #> [1] "meinheld/0.6.1" #> #> $date -#> [1] "Mon, 02 Oct 2017 19:21:12 GMT" +#> [1] "Fri, 19 Jan 2018 18:44:35 GMT" #> #> $`content-type` #> [1] "application/json" @@ -180,10 +180,10 @@ #> [1] "Flask" #> #> $`x-processed-time` -#> [1] "0.000802993774414" +#> [1] "0.00126600265503" #> #> $`content-length` -#> [1] "354" +#> [1] "351" #> #> $via #> [1] "1.1 vegur" @@ -194,7 +194,7 @@ ```r res$parse() -#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n \"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0\"\n }, \n \"origin\": \"157.130.179.86\", \n \"url\": \"https://httpbin.org/get\"\n}\n" +#> [1] "{\n \"args\": {}, \n \"headers\": {\n \"A\": \"hello world\", \n 
\"Accept\": \"application/json, text/xml, application/xml, */*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"close\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"libcurl/7.54.0 r-curl/3.1 crul/0.5.0\"\n }, \n \"origin\": \"50.22.155.214\", \n \"url\": \"https://httpbin.org/get\"\n}\n" jsonlite::fromJSON(res$parse()) #> $args #> named list() @@ -216,11 +216,11 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get" @@ -248,7 +248,7 @@ res <- x$get(disk = f) # when using write to disk, content is a path res$content -#> [1] "/var/folders/gs/4khph0xs0436gmd2gdnwsg080000gn/T//RtmpQrTvWo/file146142af607c4" +#> [1] "/var/folders/fc/n7g_vrvn0sx_st0p8lxb3ts40000gn/T//Rtmp2d65n7/file58e3155bfa5b" ``` Read lines @@ -280,11 +280,11 @@ #> auth: #> headers: res <- x$get('stream/5', stream = function(x) cat(rawToChar(x))) -#> {"id": 0, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 1, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 2, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 3, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} -#> {"id": 4, "origin": "157.130.179.86", "args": {}, "headers": {"User-Agent": "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0", "Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Host": "httpbin.org"}, "url": "https://httpbin.org/stream/5"} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 0, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 1, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 2, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, 
application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 3, "url": "https://httpbin.org/stream/5", "args": {}} +#> {"headers": {"Accept": "application/json, text/xml, application/xml, */*", "Accept-Encoding": "gzip, deflate", "Host": "httpbin.org", "Connection": "close", "User-Agent": "libcurl/7.54.0 r-curl/3.1 crul/0.5.0"}, "origin": "50.22.155.214", "id": 4, "url": "https://httpbin.org/stream/5", "args": {}} # when streaming, content is NULL res$content #> NULL diff -Nru r-cran-crul-0.4.0/vignettes/how-to-use-crul.Rmd r-cran-crul-0.5.0/vignettes/how-to-use-crul.Rmd --- r-cran-crul-0.4.0/vignettes/how-to-use-crul.Rmd 2017-10-02 19:21:23.000000000 +0000 +++ r-cran-crul-0.5.0/vignettes/how-to-use-crul.Rmd 2018-01-19 18:44:43.000000000 +0000 @@ -80,11 +80,11 @@ #> [1] "httpbin.org" #> #> $headers$`User-Agent` -#> [1] "libcurl/7.54.0 r-curl/2.8.1 crul/0.4.0" +#> [1] "libcurl/7.54.0 r-curl/3.1 crul/0.5.0" #> #> #> $origin -#> [1] "157.130.179.86" +#> [1] "50.22.155.214" #> #> $url #> [1] "https://httpbin.org/get" @@ -144,7 +144,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list() @@ -189,7 +189,7 @@ #> named list() #> #> $message$`total-results` -#> [1] 91794003 +#> [1] 94347210 #> #> $message$items #> list()