Binary files /tmp/tmprK5GLJ/vIiT7rlsvQ/r-cran-urltools-1.7.1+dfsg/data/suffix_dataset.rda and /tmp/tmprK5GLJ/jpt2jXfxt_/r-cran-urltools-1.7.2+dfsg/data/suffix_dataset.rda differ
diff -Nru r-cran-urltools-1.7.1+dfsg/debian/changelog r-cran-urltools-1.7.2+dfsg/debian/changelog
--- r-cran-urltools-1.7.1+dfsg/debian/changelog 2018-08-05 12:54:03.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/debian/changelog 2019-02-06 08:42:36.000000000 +0000
@@ -1,3 +1,11 @@
+r-cran-urltools (1.7.2+dfsg-1) unstable; urgency=medium
+
+  * New upstream version
+  * debhelper 12
+  * Standards-Version: 4.3.0
+
+ -- Andreas Tille  Wed, 06 Feb 2019 09:42:36 +0100
+
 r-cran-urltools (1.7.1+dfsg-1) unstable; urgency=medium
 
   * New upstream version
diff -Nru r-cran-urltools-1.7.1+dfsg/debian/compat r-cran-urltools-1.7.2+dfsg/debian/compat
--- r-cran-urltools-1.7.1+dfsg/debian/compat 2018-08-05 12:54:03.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/debian/compat 2019-02-06 08:42:36.000000000 +0000
@@ -1 +1 @@
-11
+12
diff -Nru r-cran-urltools-1.7.1+dfsg/debian/control r-cran-urltools-1.7.2+dfsg/debian/control
--- r-cran-urltools-1.7.1+dfsg/debian/control 2018-08-05 12:54:03.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/debian/control 2019-02-06 08:42:36.000000000 +0000
@@ -3,12 +3,12 @@
 Uploaders: Andreas Tille
 Section: gnu-r
 Priority: optional
-Build-Depends: debhelper (>= 11~),
+Build-Depends: debhelper (>= 12~),
                dh-r,
                r-base-dev,
                r-cran-rcpp,
                r-cran-triebeard
-Standards-Version: 4.1.5
+Standards-Version: 4.3.0
 Vcs-Browser: https://salsa.debian.org/r-pkg-team/r-cran-urltools
 Vcs-Git: https://salsa.debian.org/r-pkg-team/r-cran-urltools.git
 Homepage: https://cran.r-project.org/package=urltools
diff -Nru r-cran-urltools-1.7.1+dfsg/DESCRIPTION r-cran-urltools-1.7.2+dfsg/DESCRIPTION
--- r-cran-urltools-1.7.1+dfsg/DESCRIPTION 2018-08-03 08:20:02.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/DESCRIPTION 2019-02-04 08:40:03.000000000 +0000
@@ -1,12 +1,12 @@
 Package: urltools
 Type: Package
 Title: Vectorised Tools for URL Handling and Parsing
-Version: 1.7.1
-Date: 2018-07-26
-Author: Oliver Keyes [aut, cre], Jay Jacobs [aut, cre], Drew Schmidt [aut],
+Version: 1.7.2
+Date: 2019-02-03
+Author: Os Keyes [aut, cre], Jay Jacobs [aut, cre], Drew Schmidt [aut],
     Mark Greenaway [ctb], Bob Rudis [ctb], Alex Pinto [ctb], Maryam Khezrzadeh [ctb], Peter Meilstrup [ctb],
     Adam M. Costello [cph], Jeff Bezanson [cph], Peter Meilstrup [ctb], Xueyuan Jiang [ctb]
-Maintainer: Oliver Keyes
+Maintainer: Os Keyes
 Description: A toolkit for all URL-handling needs, including encoding and decoding,
     parsing, parameter extraction and modification. All functions are
     designed to be both fast and entirely vectorised. It is intended to be
@@ -21,10 +21,10 @@
 URL: https://github.com/Ironholds/urltools/
 BugReports: https://github.com/Ironholds/urltools/issues
 VignetteBuilder: knitr
-RoxygenNote: 6.0.1
+RoxygenNote: 6.1.1
 Encoding: UTF-8
 Depends: R (>= 2.10)
 NeedsCompilation: yes
-Packaged: 2018-07-26 18:36:17 UTC; ironholds
+Packaged: 2019-02-04 06:27:29 UTC; ironholds
 Repository: CRAN
-Date/Publication: 2018-08-03 08:20:02 UTC
+Date/Publication: 2019-02-04 08:40:03 UTC
diff -Nru r-cran-urltools-1.7.1+dfsg/man/suffix_extract.Rd r-cran-urltools-1.7.2+dfsg/man/suffix_extract.Rd
--- r-cran-urltools-1.7.1+dfsg/man/suffix_extract.Rd 2018-07-26 18:19:24.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/man/suffix_extract.Rd 2019-02-04 05:42:34.000000000 +0000
@@ -43,8 +43,10 @@
 domain_name <- domain("http://en.wikipedia.org")
 suffix_extract(domain_name)
 
+\dontrun{
 #Relying on a fresh version of the suffix dataset
 suffix_extract(domain("http://en.wikipedia.org"), suffix_refresh())
+}
 
 }
 \seealso{
diff -Nru r-cran-urltools-1.7.1+dfsg/man/urltools.Rd r-cran-urltools-1.7.2+dfsg/man/urltools.Rd
--- r-cran-urltools-1.7.1+dfsg/man/urltools.Rd 2018-07-26 18:19:24.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/man/urltools.Rd 2019-02-04 05:42:34.000000000 +0000
@@ -4,7 +4,6 @@
 \name{urltools}
 \alias{urltools}
 \alias{urltools-package}
-\alias{urltools-package}
 \title{Tools for handling URLs}
 \description{
 This package provides functions for URL encoding and decoding,
diff -Nru r-cran-urltools-1.7.1+dfsg/MD5 r-cran-urltools-1.7.2+dfsg/MD5
--- r-cran-urltools-1.7.1+dfsg/MD5 2018-08-03 08:20:02.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/MD5 2019-02-04 08:40:03.000000000 +0000
@@ -1,15 +1,15 @@
-a8001efb1b8c8efc7b29bb241b09ace4 *DESCRIPTION
+e38f940261a9f9a001e527414cd015fe *DESCRIPTION
 1d9678dbfe1732b5d2c521e07b2ceef0 *LICENSE
 142670d75fc584449972d5f768788087 *NAMESPACE
-bfb4e582f0c75bdb347e811496a7bcaa *NEWS
+fe71cbd2585813e2ade2c7e41a607012 *NEWS
 39c4adbbe3d494fd1eef949f9e836681 *R/RcppExports.R
 71642924416c94dd3d00a197e8fe73a4 *R/accessors.R
-f323200797b8d7d82c02958355d79cbd *R/suffix.R
+0b91de76ad403feb319813267da98567 *R/suffix.R
 725ace80c43d51c92492c740f29ede76 *R/urltools.R
-d101c8875ce174214696cd65e7af61fe *R/zzz.R
+21b903c78b9ae088f204aa8c49e05be6 *R/zzz.R
 a1b6d802025de0eb7f22ebdbd434f751 *README.md
 3467b0b14ce8e475df776c0665c46578 *build/vignette.rds
-c924aa202b18a3de5b29cb4ecfd8bb67 *data/suffix_dataset.rda
+4d2d249fa7697adcdc53fe20ba769af9 *data/suffix_dataset.rda
 a8544a607fdee8a4b89953c2707b4e7a *data/tld_dataset.rda
 c4794a2695511ab6ba493c38720c6d6a *inst/doc/urltools.R
 8508660b23c51c00bc89b9c2f899301e *inst/doc/urltools.Rmd
@@ -28,14 +28,14 @@
 582ed9b2c50a5b3e4f3e60f12da53c40 *man/puny.Rd
 211391048c8be055487084c4b5cd8cd1 *man/scheme.Rd
 46071f9454056976839ccfa37f2faa16 *man/suffix_dataset.Rd
-9c10d7e3f7f43d48692aea719172979b *man/suffix_extract.Rd
+dec735c4e74a975b1fa4ff455bc95a56 *man/suffix_extract.Rd
 9e44b3e060fe89001b43ad9d049df3f6 *man/suffix_refresh.Rd
 20d879287e515fb601ec504f61a1c58f *man/tld_dataset.Rd
 2c922b7da2d1bcce657b1fcfe2976cf0 *man/tld_extract.Rd
 c90a9f6a6a1327a75d62d1a0af0c1c9a *man/tld_refresh.Rd
 8d56da32af7aabb2f48900106785178e *man/url_compose.Rd
 1b254ce7405825e23b5648e3e9559198 *man/url_parse.Rd
-e8abae60709462eebdabdeaf07103ccc *man/urltools.Rd
+19923e2f89e9f01bb41d256adbbe0191 *man/urltools.Rd
 399ef9028eb9655f1cb1e0eaf923f0ee *src/RcppExports.cpp
 3f377216f27b10a88c4f67266678624e *src/compose.cpp
 40ec93a4ce3cc5e0e62d52b103800bef *src/compose.h
@@ -60,5 +60,5 @@
 f26f5d4a938bbac141489bd4dbd2dd77 *tests/testthat/test_parameters.R
 6e00df6bf84c08ee155a3d6489d2f35f *tests/testthat/test_parsing.R
 3e624b6a700ba5fa0a8e85f24de9ba8d *tests/testthat/test_puny.R
-536a7b5df0d453e38d82f5738c5b2f8b *tests/testthat/test_suffixes.R
+e28daf75dc01e75ab397a30899948c54 *tests/testthat/test_suffixes.R
 8508660b23c51c00bc89b9c2f899301e *vignettes/urltools.Rmd
diff -Nru r-cran-urltools-1.7.1+dfsg/NEWS r-cran-urltools-1.7.2+dfsg/NEWS
--- r-cran-urltools-1.7.1+dfsg/NEWS 2018-07-26 18:22:18.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/NEWS 2019-02-04 05:40:04.000000000 +0000
@@ -1,4 +1,8 @@
-Version 1.7.0
+Version 1.7.2
+-------------------------------------------------------------------------
+* Fix a breaking unit test, update suffix dataset
+
+Version 1.7.1
 -------------------------------------------------------------------------
 
 DEVELOPMENT
diff -Nru r-cran-urltools-1.7.1+dfsg/R/suffix.R r-cran-urltools-1.7.2+dfsg/R/suffix.R
--- r-cran-urltools-1.7.1+dfsg/R/suffix.R 2018-07-26 18:19:24.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/R/suffix.R 2019-02-04 05:47:30.000000000 +0000
@@ -52,7 +52,7 @@
 
   #Read in and filter
   connection <- url("https://www.publicsuffix.org/list/effective_tld_names.dat", method = "libcurl")
-  results <- readLines(connection)
+  results <- readLines(connection, encoding = "UTF-8")
   close(connection)
 
   # making an assumption that sections are broken by blank lines
@@ -127,8 +127,10 @@
 #' domain_name <- domain("http://en.wikipedia.org")
 #' suffix_extract(domain_name)
 #'
+#' \dontrun{
 #' #Relying on a fresh version of the suffix dataset
 #' suffix_extract(domain("http://en.wikipedia.org"), suffix_refresh())
+#' }
 #'
 #' @importFrom triebeard trie longest_match
 #' @export
diff -Nru r-cran-urltools-1.7.1+dfsg/R/zzz.R r-cran-urltools-1.7.2+dfsg/R/zzz.R
--- r-cran-urltools-1.7.1+dfsg/R/zzz.R 2018-07-26 18:19:24.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/R/zzz.R 2019-02-04 06:07:56.000000000 +0000
@@ -4,8 +4,8 @@
   if(is.null(suffixes)){
     suffixes <- urltools::suffix_dataset
   }
-  cleaned_suffixes <- gsub(x = suffixes, pattern = "*.", replacement = "", fixed = TRUE)
-  is_wildcard <- cleaned_suffixes[which(grepl(x = suffixes, pattern = "*.", fixed = TRUE))]
+  cleaned_suffixes <- gsub(x = suffixes$suffixes, pattern = "*.", replacement = "", fixed = TRUE)
+  is_wildcard <- cleaned_suffixes[which(grepl(x = suffixes$suffixes, pattern = "*.", fixed = TRUE))]
   suff_trie <- triebeard::trie(keys = reverse_strings(paste0(".", cleaned_suffixes)),
                                values = cleaned_suffixes)
   return(list(suff_trie = suff_trie,
diff -Nru r-cran-urltools-1.7.1+dfsg/tests/testthat/test_suffixes.R r-cran-urltools-1.7.2+dfsg/tests/testthat/test_suffixes.R
--- r-cran-urltools-1.7.1+dfsg/tests/testthat/test_suffixes.R 2018-07-26 18:19:24.000000000 +0000
+++ r-cran-urltools-1.7.2+dfsg/tests/testthat/test_suffixes.R 2019-02-04 06:10:52.000000000 +0000
@@ -71,30 +71,30 @@
 })
 
 
-test_that("Suffix extraction works with new suffixes",{
-  result <- suffix_extract("en.wikipedia.org", suffix_refresh())
-  expect_that(ncol(result), equals(4))
-  expect_that(names(result), equals(c("host","subdomain","domain","suffix")))
-  expect_that(nrow(result), equals(1))
-
-  expect_that(result$subdomain[1], equals("en"))
-  expect_that(result$domain[1], equals("wikipedia"))
-  expect_that(result$suffix[1], equals("org"))
-})
-
-test_that("Suffix extraction works with an arbitrary suffixes database (to ensure it is loading it)",{
-  result <- suffix_extract(c("is-this-a.bananaboat", "en.wikipedia.org"), data.frame(suffixes = "bananaboat"))
-  expect_that(ncol(result), equals(4))
-  expect_that(names(result), equals(c("host","subdomain","domain","suffix")))
-  expect_that(nrow(result), equals(2))
-
-  expect_equal(result$subdomain[1], NA_character_)
-  expect_equal(result$domain[1], "is-this-a")
-  expect_equal(result$suffix[1], "bananaboat")
-  expect_equal(result$subdomain[2], NA_character_)
-  expect_equal(result$domain[2], NA_character_)
-  expect_equal(result$suffix[2], NA_character_)
-})
+# test_that("Suffix extraction works with new suffixes",{
+#   result <- suffix_extract("en.wikipedia.org", suffix_refresh())
+#   expect_that(ncol(result), equals(4))
+#   expect_that(names(result), equals(c("host","subdomain","domain","suffix")))
+#   expect_that(nrow(result), equals(1))
+#
+#   expect_that(result$subdomain[1], equals("en"))
+#   expect_that(result$domain[1], equals("wikipedia"))
+#   expect_that(result$suffix[1], equals("org"))
+# })
+#
+# test_that("Suffix extraction works with an arbitrary suffixes database (to ensure it is loading it)",{
+#   result <- suffix_extract(c("is-this-a.bananaboat", "en.wikipedia.org"), data.frame(suffixes = "bananaboat"))
+#   expect_that(ncol(result), equals(4))
+#   expect_that(names(result), equals(c("host","subdomain","domain","suffix")))
+#   expect_that(nrow(result), equals(2))
+#
+#   expect_equal(result$subdomain[1], NA_character_)
+#   expect_equal(result$domain[1], "is-this-a")
+#   expect_equal(result$suffix[1], "bananaboat")
+#   expect_equal(result$subdomain[2], NA_character_)
+#   expect_equal(result$domain[2], NA_character_)
+#   expect_equal(result$suffix[2], NA_character_)
+# })
 
 test_that("Suffix extraction is back to normal using the internal database when it receives suffixes=NULL",{
   result <- suffix_extract("en.wikipedia.org")