Skip to content

Consolidate fetch interfaces #99

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 42 commits into from
May 23, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
f2566ee
refactor: consolidate fetch interface
dshemetov May 6, 2023
9f3bb2e
docs: document
dshemetov May 6, 2023
c221815
bug: fix epidata error handling and epidatr vignette
dshemetov May 6, 2023
4547219
docs: document
dshemetov May 6, 2023
e94b3c9
bug: fix fetch tests
dshemetov May 6, 2023
56aac93
doc: document
dshemetov May 6, 2023
2e32cf8
bug: add magrittr to suggests
dshemetov May 6, 2023
93fc16b
bug: fix mutate call
dshemetov May 6, 2023
1c03257
feat: change global_base_url to non-proxy host
dshemetov May 11, 2023
670009c
test: Favor `expect_identical` over `expect_equal`; avoid `all_equal`
lcbrooks May 11, 2023
eb4e602
docs: Polish `get_auth_key()` roxygen
lcbrooks May 11, 2023
3939005
tests: Use `withr` in auth tests for better consistency, specificity
lcbrooks May 11, 2023
24eba74
tests: `usethis::use_testthat(edition = 3)` (enforce 3rd edition)
lcbrooks May 11, 2023
8daac67
docs: match style of multiple `@return` entries in `?epidata_call`
lcbrooks May 11, 2023
036f131
refactor: eliminate unused/redundant binding
lcbrooks May 12, 2023
b5d76b7
feat: improve `print.epidata_call` instructions
lcbrooks May 12, 2023
d28a133
docs: describe the two categories of classic $epidata formats
lcbrooks May 12, 2023
ad329f6
docs: fix delphi doc example
dshemetov May 12, 2023
7b89f30
bug: fix no results error code
dshemetov May 12, 2023
0195f11
docs: fix epidata results doc
dshemetov May 12, 2023
e320812
test: fix test comments
dshemetov May 12, 2023
da7199b
refactor: use factor for parsing categoricals
dshemetov May 12, 2023
186a531
refactor: fetch interface
dshemetov May 18, 2023
b1dcea7
style: styler
dshemetov May 18, 2023
4bccc1e
doc: docs
dshemetov May 18, 2023
d83ef0b
bug: fix tests
dshemetov May 18, 2023
53bca8e
docs: update many docstrings
dshemetov May 18, 2023
55ecb3f
refactor(covidcast_epidata): forward HTTP errors
dshemetov May 18, 2023
54e60b3
refactor(fetch): centralize fetch
dshemetov May 18, 2023
162d9ff
refactor(pkg): minor changes
dshemetov May 18, 2023
121fb56
docs: doc
dshemetov May 18, 2023
627eaa6
Merge branch 'dev' into ds/fetch
dshemetov May 18, 2023
f9c9b53
style: styler
dshemetov May 18, 2023
d6f5f89
pkg: set package version to 0.5.0 in constants.R
dshemetov May 18, 2023
9a4971c
refactor: get consistent naming for API request variables
dshemetov May 20, 2023
f06a114
doc: docs
dshemetov May 20, 2023
c151e2c
docs: Fix paragraph-filling that ignored `@param`
lcbrooks May 22, 2023
cf29057
fix: use regular `factor`s, not `ordered`, for `{geo,time}_type`
lcbrooks May 22, 2023
030a60b
docs(as_of): fix typo
lcbrooks May 22, 2023
51814e5
docs(fetch_csv): document new parameters
lcbrooks May 22, 2023
4b6f964
fix: raise warnings from API when able
lcbrooks May 22, 2023
9e97e72
docs: remove docs for replaced `method` parameter
lcbrooks May 22, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions R/covidcast.R
Original file line number Diff line number Diff line change
Expand Up @@ -108,25 +108,25 @@ print.covidcast_data_source <- function(source, ...) {
#' @export
covidcast_epidata <- function(base_url = global_base_url) {
url <- join_url(base_url, "covidcast/meta")
result <- do_request(url, list())
response <- do_request(url, list())
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

praise: These clarifications are great! When talking about these things, we should try to match this terminology, though we might also need to say "http response" or "epidata response" sometimes. I'm still going to mix up result and response for a while, though.


if (result$status_code != 200) {
if (response$status_code != 200) {
# 500, 429, 401 are possible
msg <- "fetch data from API"
if (httr::http_type(result) == "text/html") {
if (httr::http_type(response) == "text/html") {
# grab the error information out of the returned HTML document
msg <- paste(msg, ":", xml2::xml_text(xml2::xml_find_all(
xml2::read_html(content(result, "text")),
xml2::read_html(content(response, "text")),
"//p"
)))
}
httr::stop_for_status(result, task = msg)
httr::stop_for_status(response, task = msg)
}

content <- httr::content(result, "text", encoding = "UTF-8")
content_json <- jsonlite::fromJSON(content, simplifyVector = FALSE)
response_content <- httr::content(response, "text", encoding = "UTF-8")
response_content <- jsonlite::fromJSON(response_content, simplifyVector = FALSE)

sources <- do.call(c, lapply(content_json, parse_source, base_url = base_url))
sources <- do.call(c, lapply(response_content, parse_source, base_url = base_url))
class(sources) <- c("covidcast_data_source_list", class(sources))

all_signals <- do.call(c, lapply(sources, function(x) {
Expand Down
52 changes: 26 additions & 26 deletions R/epidatacall.R
Original file line number Diff line number Diff line change
Expand Up @@ -156,8 +156,8 @@ fetch_tbl <- function(epidata_call, fields = NULL, disable_date_parsing = FALSE)
)
}

content <- fetch_classic(epidata_call, fields, disable_data_frame_parsing = FALSE)
return(parse_data_frame(epidata_call, content, disable_date_parsing) %>% as_tibble())
response_content <- fetch_classic(epidata_call, fields, disable_data_frame_parsing = FALSE)
return(parse_data_frame(epidata_call, response_content, disable_date_parsing) %>% as_tibble())
}

#' Fetches the data, raises on epidata errors, and returns the results as a
Expand All @@ -182,15 +182,15 @@ fetch_classic <- function(epidata_call, fields = NULL, disable_data_frame_parsin
stopifnot(inherits(epidata_call, "epidata_call"))
stopifnot(is.null(fields) || is.character(fields))

result <- request_impl(epidata_call, "classic", fields)
content <- httr::content(result, as = "text", encoding = "UTF-8")
response <- request_impl(epidata_call, "classic", fields)
response_content <- httr::content(response, as = "text", encoding = "UTF-8")
response_content <- jsonlite::fromJSON(response_content, simplifyDataFrame = !disable_data_frame_parsing)

content <- jsonlite::fromJSON(content, simplifyDataFrame = !disable_data_frame_parsing)
# success is 1, no results is -2, truncated is 2, -1 is generic error
if (content$result != 1) {
rlang::abort(paste0("epidata error: ", content$message), "epidata_error")
if (response_content$result != 1) {
rlang::abort(paste0("epidata error: ", response_content$message), "epidata_error")
}
return(content$epidata)
return(response_content$epidata)
}

#' Fetches the data and returns the CSV text
Expand All @@ -214,12 +214,12 @@ fetch_csv <- function(epidata_call, fields = NULL, disable_date_parsing = FALSE,
)
}

result <- request_impl(epidata_call, "csv", fields)
content <- httr::content(result, "text", encoding = "UTF-8")
class(content) <- c("epidata_csv", class(content))
response <- request_impl(epidata_call, "csv", fields)
response_content <- httr::content(response, "text", encoding = "UTF-8")
class(response_content) <- c("epidata_csv", class(response_content))

if (disable_tibble_output) {
return(content)
return(response_content)
}

meta <- epidata_call$meta
Expand All @@ -234,28 +234,28 @@ fetch_csv <- function(epidata_call, fields = NULL, disable_date_parsing = FALSE,
}
}

tbl <- if (length(col_names) > 0) {
readr::read_csv(content, col_types = col_types)
csv_tibble <- if (length(col_names) > 0) {
readr::read_csv(response_content, col_types = col_types)
} else {
readr::read_csv(content)
readr::read_csv(response_content)
}

if (!disable_date_parsing) {
# parse weeks
columns <- colnames(tbl)
columns <- colnames(csv_tibble)
for (i in seq_len(length(meta))) {
info <- meta[[i]]
if (info$name %in% columns && info$type == "epiweek") {
tbl[[info$name]] <- parse_api_week(tbl[[info$name]])
csv_tibble[[info$name]] <- parse_api_week(csv_tibble[[info$name]])
}
}
}
tbl
csv_tibble
}

fetch_debug <- function(epidata_call, format_type = "classic", fields = NULL) {
result <- request_impl(epidata_call, format_type, fields)
content <- httr::content(result, "text", encoding = "UTF-8")
response <- request_impl(epidata_call, format_type, fields)
content <- httr::content(response, "text", encoding = "UTF-8")
content
}

Expand Down Expand Up @@ -309,22 +309,22 @@ request_impl <- function(epidata_call, format_type, fields = NULL) {

url <- full_url(epidata_call)
params <- request_arguments(epidata_call, format_type, fields)
result <- do_request(url, params)
response <- do_request(url, params)

if (result$status_code != 200) {
if (response$status_code != 200) {
# 500, 429, 401 are possible
msg <- "fetch data from API"
if (httr::http_type(result) == "text/html") {
if (httr::http_type(response) == "text/html") {
# grab the error information out of the returned HTML document
msg <- paste(msg, ":", xml2::xml_text(xml2::xml_find_all(
xml2::read_html(content(result, "text")),
xml2::read_html(content(response, "text")),
"//p"
)))
}
httr::stop_for_status(result, task = msg)
httr::stop_for_status(response, task = msg)
}

result
response
}

#' @export
Expand Down