
Commit be68df3

Merge pull request #1020 from cmu-delphi/main
Deploy Quidel Fixes to prod
2 parents 1ce90e5 + ba353e9 · commit be68df3

20 files changed: +1380 −425 lines

facebook/delphiFacebook/NAMESPACE

Lines changed: 1 addition & 0 deletions
@@ -63,6 +63,7 @@ importFrom(dplyr,across)
 importFrom(dplyr,all_of)
 importFrom(dplyr,anti_join)
 importFrom(dplyr,arrange)
+importFrom(dplyr,bind_cols)
 importFrom(dplyr,bind_rows)
 importFrom(dplyr,case_when)
 importFrom(dplyr,coalesce)
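The new NAMESPACE entry makes dplyr's bind_cols() available inside the package. For orientation, bind_cols() is the column-wise counterpart of bind_rows(): it pairs rows positionally and requires equal row counts. A minimal standalone sketch with toy data (not taken from this PR):

library(dplyr)

# Hypothetical toy inputs, not from the Delphi pipeline.
means <- data.frame(val = c(0.12, 0.34))
sizes <- data.frame(sample_size = c(150, 212))

# bind_cols() pairs rows positionally, so both inputs must have the same number
# of rows; the result carries the columns of both inputs.
combined <- bind_cols(means, sizes)
stopifnot(identical(names(combined), c("val", "sample_size")), nrow(combined) == 2)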

facebook/delphiFacebook/R/contingency_aggregate.R

Lines changed: 3 additions & 7 deletions
@@ -264,15 +264,10 @@ summarize_aggs <- function(df, crosswalk_data, aggregations, geo_level, params)
   }
 
   ## Find all unique groups and associated frequencies, saved in column `Freq`.
-  # Keep rows with missing values initially so that we get the correct column
-  # names. Explicitly drop groups with missing values in second step.
   unique_groups_counts <- as.data.frame(
     table(df[, group_vars, with=FALSE], exclude=NULL, dnn=group_vars),
     stringsAsFactors=FALSE
   )
-  unique_groups_counts <- unique_groups_counts[
-    complete.cases(unique_groups_counts[, group_vars]),
-  ]
 
   # Drop groups with less than threshold sample size.
   unique_groups_counts <- filter(unique_groups_counts, Freq >= params$num_filter)
@@ -327,9 +322,10 @@ summarize_aggs <- function(df, crosswalk_data, aggregations, geo_level, params)
   aggregation <- aggregations$id[row]
   group_vars <- aggregations$group_by[[row]]
   post_fn <- aggregations$post_fn[[row]]
-
+
+  # Keep only aggregations where the main value, `val`, is present.
   dfs_out[[aggregation]] <- dfs_out[[aggregation]][
-    rowSums(is.na(dfs_out[[aggregation]][, c("val", "sample_size", group_vars)])) == 0,
+    rowSums(is.na(dfs_out[[aggregation]][, c("val", "sample_size")])) == 0,
   ]
 
   dfs_out[[aggregation]] <- apply_privacy_censoring(dfs_out[[aggregation]], params)
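These two hunks change how missing values are handled: groups containing an NA grouping value are no longer dropped up front via complete.cases(), and the per-aggregation filter now requires only `val` and `sample_size` (not the grouping columns) to be non-missing. A minimal standalone sketch of both behaviors, using made-up data and plain data-frame subsetting in place of the package's data.table columns:

# Toy stand-in for survey responses; column names are illustrative only.
df <- data.frame(gender = c("F", "F", "M", NA),
                 age    = c("18-34", "18-34", "35-54", "35-54"))
group_vars <- c("gender", "age")

# With exclude = NULL, table() keeps NA as its own level, so groups with a
# missing grouping value still get a row and a Freq count.
unique_groups_counts <- as.data.frame(
  table(df[, group_vars], exclude = NULL, dnn = group_vars),
  stringsAsFactors = FALSE
)
stopifnot(any(is.na(unique_groups_counts$gender)))

# The per-aggregation filter now checks only the reported value and sample
# size, mirroring the new rowSums(is.na(...)) == 0 condition above.
out <- data.frame(val = c(0.2, NA, 0.5), sample_size = c(10, 12, NA))
out <- out[rowSums(is.na(out[, c("val", "sample_size")])) == 0, ]
stopifnot(nrow(out) == 1)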

facebook/delphiFacebook/R/contingency_calculate.R

Lines changed: 1 addition & 2 deletions
@@ -13,12 +13,11 @@
 #' be a non-integer
 #'
 #' @return a list of named means and other descriptive statistics
-compute_numeric <- function(response, weight, sample_size, total_represented)
+compute_household_binary <- function(response, weight, sample_size, total_represented)
 {
   response_mean <- compute_count_response(response, weight, sample_size)
   response_mean$sample_size <- sample_size
   response_mean$represented <- total_represented
-  response_mean$se <- NA_real_
 
   return(response_mean)
 }
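The renamed compute_household_binary() no longer overwrites `se` with NA_real_, so whatever standard error compute_count_response() produces is passed through. A hypothetical, self-contained sketch of that pattern; fake_count_response() and its output fields are invented stand-ins for illustration, not the package's internal helper:

# Invented stand-in for the package-internal compute_count_response(); its
# output shape here (val, se) is an assumption.
fake_count_response <- function(response, weight, sample_size) {
  val <- sum(response * weight) / sum(weight)  # weighted share of "yes" responses
  list(val = val, se = sqrt(val * (1 - val) / sample_size))
}

compute_household_binary_sketch <- function(response, weight, sample_size, total_represented) {
  response_mean <- fake_count_response(response, weight, sample_size)
  response_mean$sample_size <- sample_size
  response_mean$represented <- total_represented
  # No `se <- NA_real_` overwrite any more: the helper's standard error survives.
  return(response_mean)
}

str(compute_household_binary_sketch(c(1, 0, 1, 1), rep(0.25, 4), 4, 1200))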

facebook/delphiFacebook/R/contingency_indicators.R

Lines changed: 237 additions & 248 deletions
Large diffs are not rendered by default.
