Commit

Merge pull request #31 from gongcastro/test
Test

gongcastro authored Oct 11, 2023
2 parents ddc27e2 + 319a487 commit d3312bc
Showing 21 changed files with 375 additions and 402 deletions.
121 changes: 61 additions & 60 deletions DESCRIPTION
@@ -1,60 +1,61 @@
Package: bvq
Title: Barcelona Vocabulary Questionnaire Database and Helper Functions
Version: 0.4.0
Authors@R: c(
person(given = "Gonzalo", family = "Garcia-Castro",
email = "[email protected]",
role = c("cre", "aut"),
comment = c(ORCID = "0000-0002-8553-4209")),
person(given = "Daniela S. ", family = "Ávila-Varela",
email = "[email protected]",
role = "aut",
comment = c(ORCID = "0000-0002-3518-8117")),
person(given = "Nuria",
family = "Sebastian-Galles",
email = "[email protected]",
role = "ctb",
comment = c(ORCID = "0000-0001-6938-2498")))
Maintainer: Gonzalo Garcia-Castro <[email protected]>
Description: Download, clean, and process the Barcelona Vocabulary
Questionnaire (BVQ) data. BVQ is a vocabulary inventory developed for
assessing the vocabulary of Catalan-Spanish bilingual infants from the
Metropolitan Area of Barcelona (Spain). This
package includes functions to download the data from formr servers,
and return the processed data in multiple formats.
License: MIT + file LICENSE
URL: https://gongcastro.github.io/bvq/,
https://github.com/gongcastro/bvq/
BugReports: https://github.com/gongcastro/bvq/issues
Depends:
R (>= 3.5.0),
Imports:
cli (>= 3.6.1),
dplyr (>= 1.1.0),
formr (>= 0.9.1),
googlesheets4 (>= 1.0.0),
janitor (>= 2.2.0),
lifecycle (>= 1.0.3),
lubridate (>= 1.8.0),
magrittr (>= 2.0.3),
rlang (>= 1.1.1),
tibble (>= 3.2.1),
tidyr (>= 1.2.0)
Suggests:
knitr,
readxl,
rmarkdown,
roxygen2,
testthat (>= 3.0.0),
ggplot2 (>= 3.4.2),
withr
VignetteBuilder:
knitr
Remotes:
github::rubenarslan/formr
Config/testthat/edition: 3
Encoding: UTF-8
Language: en-US
LazyData: true
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.2.3
Package: bvq
Title: Barcelona Vocabulary Questionnaire Database and Helper Functions
Version: 0.4.0
Authors@R: c(
person(given = "Gonzalo", family = "Garcia-Castro",
email = "[email protected]",
role = c("cre", "aut"),
comment = c(ORCID = "0000-0002-8553-4209")),
person(given = "Daniela S. ", family = "Ávila-Varela",
email = "[email protected]",
role = "aut",
comment = c(ORCID = "0000-0002-3518-8117")),
person(given = "Nuria",
family = "Sebastian-Galles",
email = "[email protected]",
role = "ctb",
comment = c(ORCID = "0000-0001-6938-2498")))
Maintainer: Gonzalo Garcia-Castro <[email protected]>
Description: Download, clean, and process the Barcelona Vocabulary
Questionnaire (BVQ) data. BVQ is a vocabulary inventory developed for
assessing the vocabulary of Catalan-Spanish bilingual infants from the
Metropolitan Area of Barcelona (Spain). This
package includes functions to download the data from formr servers,
and return the processed data in multiple formats.
License: MIT + file LICENSE
URL: https://gongcastro.github.io/bvq/,
https://github.com/gongcastro/bvq/
BugReports: https://github.com/gongcastro/bvq/issues
Depends:
R (>= 3.5.0),
Imports:
cli (>= 3.6.1),
dplyr (>= 1.1.0),
formr (>= 0.9.1),
googlesheets4 (>= 1.0.0),
janitor (>= 2.2.0),
lifecycle (>= 1.0.3),
lubridate (>= 1.8.0),
magrittr (>= 2.0.3),
purrr (>= 1.0.1),
rlang (>= 1.1.1),
tibble (>= 3.2.1),
tidyr (>= 1.2.0)
Suggests:
knitr,
readxl,
rmarkdown,
roxygen2,
testthat (>= 3.0.0),
ggplot2 (>= 3.4.2),
withr
VignetteBuilder:
knitr
Remotes:
github::rubenarslan/formr
Config/testthat/edition: 3
Encoding: UTF-8
Language: en-US
LazyData: true
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.2.3
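For context, the workflow this DESCRIPTION describes can be sketched as follows; a minimal, hedged example based only on the function signatures visible in the diffs below (the call order is illustrative):

library(bvq)

# fetch raw participant records and questionnaire responses from the formr servers
participants <- bvq_participants()
responses <- bvq_responses(participants)

# per-administration metadata (age, duration, progress), as assembled in R/logs.R
logs <- bvq_logs(participants = participants, responses = responses)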
8 changes: 2 additions & 6 deletions NAMESPACE
@@ -26,36 +26,32 @@ importFrom(cli,cli_progress_step)
importFrom(cli,cli_progress_update)
importFrom(cli,qty)
importFrom(dplyr,across)
importFrom(dplyr,arrange)
importFrom(dplyr,case_when)
importFrom(dplyr,distinct)
importFrom(dplyr,filter)
importFrom(dplyr,if_any)
importFrom(dplyr,left_join)
importFrom(dplyr,mutate)
importFrom(dplyr,n)
importFrom(dplyr,select)
importFrom(dplyr,summarise)
importFrom(formr,formr_connect)
importFrom(formr,formr_raw_results)
importFrom(googlesheets4,gs4_auth)
importFrom(googlesheets4,gs4_has_token)
importFrom(googlesheets4,read_sheet)
importFrom(janitor,clean_names)
importFrom(janitor,make_clean_names)
importFrom(lifecycle,deprecated)
importFrom(lubridate,as_date)
importFrom(lubridate,as_datetime)
importFrom(lubridate,time_length)
importFrom(lubridate,today)
importFrom(magrittr,"%>%")
importFrom(purrr,map)
importFrom(purrr,set_names)
importFrom(rlang,":=")
importFrom(rlang,.data)
importFrom(rlang,as_label)
importFrom(rlang,as_name)
importFrom(rlang,enquo)
importFrom(rlang,enquos)
importFrom(stats,time)
importFrom(tibble,tibble)
importFrom(tidyr,pivot_longer)
importFrom(tidyr,pivot_wider)
2 changes: 2 additions & 0 deletions R/globals.R
@@ -16,6 +16,8 @@ utils::globalVariables(unique(c(
"date_birth",
"date_sent",
"date_test",
"x",
"time",
"link",
# bvq_responses:
"response_id",
6 changes: 3 additions & 3 deletions R/import.R
@@ -227,9 +227,9 @@ fix_logs_df <- function(raw, participants_tmp) {
# variables to correct types
logs[c("created", "ended")] <- lapply(logs[c("created", "ended")], as.POSIXct)
# remove if missing any critical variable
logs$code <- fix_response_id(ifelse(logs$response_id=="",
NA_character_,
logs$response_id))
logs$response_id <- fix_response_id(if_else(logs$response_id=="",
NA_character_,
gsub("BL", "", logs$response_id)))
logs <- logs[!is.na(logs$response_id) & !is.na(logs$ended) & !is.na(logs$session), ]
# fix codes known to be wrong
logs <- fix_code_raw(logs)
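For illustration, the new assignment above maps empty response IDs to NA and removes the "BL" marker before passing the result to fix_response_id(); a minimal sketch of the inner step with hypothetical IDs:

ids <- c("", "BL1234", "5678")   # hypothetical raw response IDs
dplyr::if_else(ids == "", NA_character_, gsub("BL", "", ids))
#> [1] NA     "1234" "5678"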
2 changes: 1 addition & 1 deletion R/logs.R
@@ -132,7 +132,7 @@ bvq_logs <- function(participants = bvq_participants(),
age = diff_in_time(date_finished, date_birth, "months"),
duration = diff_in_time(date_finished, date_started, "days")
) %>%
# compute participant's progreslos through the questionnaire
# compute participant's progress through the questionnaire
mutate(progress = complete_items / total_items,
completed = progress >= 0.95) %>%
# select relevant columns and reorder them
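The context above computes questionnaire progress as the share of completed items and flags an administration as complete at 95% or more; a worked sketch with hypothetical counts:

complete_items <- 140
total_items <- 150
progress <- complete_items / total_items   # 0.933...
completed <- progress >= 0.95              # FALSE: below the 95% threshold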
106 changes: 57 additions & 49 deletions R/norms.R
@@ -8,17 +8,13 @@
#'
#' @export bvq_norms
#'
#' @importFrom dplyr mutate
#' @importFrom dplyr filter
#' @importFrom dplyr select
#' @importFrom dplyr left_join
#' @importFrom dplyr summarise
#' @importFrom dplyr arrange
#' @import dplyr
#' @importFrom tidyr pivot_longer
#'
#' @param responses Responses data frame, as generated by [bvq::bvq_responses()].
#' @param participants Participants data frame, as generated by
#' [bvq::bvq_participants()]
#' @param ... <[`dynamic-dots`][rlang::dyn-dots]>. Unquote name of the variable(s) to group
#' @param ... <[`dynamic-dots`][rlang::dyn-dots]>. Unquoted name of the variable(s) to group
#' data into. Norms will be calculated by aggregating responses
within the groups that result from crossing the
#' variables provided in `...`. These variables can refer to item properties
Expand Down Expand Up @@ -82,10 +78,7 @@ bvq_norms <- function(participants = bvq_participants(),
...,
te = NULL,
item = NULL,
age = c(0, 100)) {

if (is.null(participants)) participants <- bvq_participants()
if (is.null(responses)) responses <- bvq_responses(participants)
age = c(0, Inf)) {

# collect ... into a character vector for `any_of`
dots_vctr <- as.character(match.call(expand.dots = FALSE)$`...`)
@@ -94,24 +87,29 @@

# retrieve participants and logs -------------------------------------------

logs_tmp <- bvq_logs(participants = participants, responses = responses)
cols.keep <- colnames(logs_tmp) %in% c("child_id", "time", "dominance", group_vars)
logs_tmp <- logs_tmp[, cols.keep]
logs_tmp$age <- floor(logs_tmp$age)
logs_tmp <- logs_tmp[logs_tmp$age >= min(age) & logs_tmp$age <= max(age), ]
logs_tmp <- bvq_logs(participants = participants,
responses = responses) %>%
select(child_id, time, dominance, any_of(group_vars)) %>%
mutate(age = floor(age)) %>%
dplyr::filter(between(age, min(.env$age), max(.env$age)))

cols.keep <- colnames(bvq::pool) %in% c("language", group_vars)
pool_tmp <- bvq::pool[, cols.keep]
pool_tmp <- select(bvq::pool, language, any_of(group_vars))

# check arguments ----------------------------------------------------------

if (is.null(item)) {

item <- unique(responses$item)

} else {

item_in_pool <- item %in% unique(pool_tmp$item)

if (!all(item_in_pool)) {
item_not_in_pool <- paste0(item[which(!item_in_pool)], collapse = ", ")
cli_abort("item(?s) `{item_not_in_pool}` {?does/do} not exist in pool")

item_not_in_pool <- paste0(item[which(!item_in_pool)],
collapse = ", ")
cli::cli_abort("item(?s) `{item_not_in_pool}` {?does/do} not exist in pool")
}
}

@@ -125,32 +123,27 @@

# compute norms ------------------------------------------------------------

responses_tmp <- responses[responses$item %in% item &
!is.na(responses$response), ]
responses_tmp$understands <- responses_tmp$response > 1
responses_tmp$produces <- responses_tmp$response > 2
responses_tmp <- responses_tmp[, colnames(responses_tmp)!="response"]
responses_tmp <- tidyr::pivot_longer(responses_tmp,
c(understands, produces),
names_to = "type",
values_to = "response")


norms <- merge(responses_tmp, logs_tmp)
norms <- merge(norms, pool_tmp, all.x = TRUE)

norms$item_dominance <- ifelse(norms$language==norms$dominance, "L1", "L2")
norms <- summarise(norms,
.sum = sum(response, na.rm = TRUE),
.n = n(),
.by = any_of(group_vars))
norms$.prop <- prop_adj(norms$.sum, norms$.n)
norms <- norms[order(norms$te,
norms$item,
norms$type,
norms$age,
decreasing = TRUE), , drop = FALSE]
norms <- tibble::as_tibble(norms)
responses_tmp <- responses %>%
filter(item %in% .env$item,
!is.na(response)) %>%
mutate(understands = response > 1,
produces = response > 2) %>%
select(-response) %>%
tidyr::pivot_longer(c(understands, produces),
names_to = "type",
values_to = "response")

suppressMessages({
norms <- list(responses_tmp, logs_tmp, pool_tmp) %>%
reduce(inner_join,
relationship = "many-to-many") %>%
mutate(item_dominance = if_else(language==dominance, "L1", "L2")) %>%
summarise(.sum = sum(response, na.rm = TRUE),
.n = n(),
.by = any_of(group_vars)) %>%
mutate(.prop = prop_adj(.sum, .n)) %>%
arrange(te, item, type, age)
})

return(norms)
}
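For illustration, the pivot above turns each raw response code into one comprehension row and one production row; a minimal sketch assuming the coding implied by the thresholds (codes above 1 indicate comprehension, codes above 2 also production):

library(dplyr)
library(tidyr)

tibble(item = "hypothetical_item", response = 3) %>%
  mutate(understands = response > 1,
         produces = response > 2) %>%
  select(-response) %>%
  pivot_longer(c(understands, produces),
               names_to = "type",
               values_to = "response")
#> # A tibble: 2 x 3
#>   item              type        response
#> 1 hypothetical_item understands TRUE
#> 2 hypothetical_item produces    TRUE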
@@ -172,32 +165,47 @@
#' @keywords internal
#'
check_arg_te <- function(te, item) { # nocov start

if (is.logical(te)) {

if (te) {
item <- pool$item[pool$te %in% pool$te[pool$item %in% item]]
return(item)
}

} else if (is.numeric(te)) {

te_in_pool <- te %in% unique(pool$te)
if (!all(te_in_pool)) {

te_not_in_pool <- te[which(!te_in_pool)]
cli_abort("te{?s} {te_not_in_pool} {?does/do} not exist in pool")
cli::cli_abort("te{?s} {te_not_in_pool} {?does/do} not exist in pool")

} else {

items_not_in_te <- item %in% pool$item[pool$te %in% te]

if (!all(items_not_in_te)) {

which_items_not_in_te <- item[which(!items_not_in_te)]
error_msg <- "Item{?s} '{which_items_not_in_te}' {?was/were} not included in `te`. \\
Its norms will not be returned. To compute the norms for all `items` \\
and their translations, set {.code te = TRUE}."
cli_alert_warning(error_msg)
cli::cli_alert_warning(error_msg)
}

item <- pool$item[pool$te %in% te]

return(item)
}

} else {

if (!is.null(te)) {
cli_abort("`te` must have class {.cls logical} or {.cls integer}")

cli::cli_abort("`te` must have class {.cls logical} or {.cls integer}")
}

}
return(item)
} # nocov end
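Putting the documented arguments together, a hedged usage sketch of bvq_norms(); the grouping variable (a column of bvq::pool) and the age range below are illustrative assumptions, not values taken from the commit:

participants <- bvq_participants()
responses <- bvq_responses(participants)

norms <- bvq_norms(participants = participants,
                   responses = responses,
                   language,          # grouping variable passed through `...`
                   te = TRUE,         # also include the items' translation equivalents
                   age = c(12, 36))   # restrict age at test, in months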

0 comments on commit d3312bc
