From 3e4626d5598db015be4a716cc9179beddfca92a3 Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 02:09:55 +0000
Subject: [PATCH 01/11] test this out

---
 .github/workflows/test-coverage.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
index 2c5bb50..2b55357 100644
--- a/.github/workflows/test-coverage.yaml
+++ b/.github/workflows/test-coverage.yaml
@@ -21,7 +21,7 @@ jobs:
         with:
           use-public-rspm: true
 
-      - uses: r-lib/actions/setup-r-dependencies@v2
+      - uses: r-lib/actions/setup-r-dependencies
         with:
           extra-packages: any::covr
           needs: coverage

From 35dec72872613261a36769128246ebaba6957785 Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 02:11:53 +0000
Subject: [PATCH 02/11] maybe?

---
 .github/workflows/test-coverage.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
index 2b55357..9d651cd 100644
--- a/.github/workflows/test-coverage.yaml
+++ b/.github/workflows/test-coverage.yaml
@@ -21,7 +21,7 @@ jobs:
         with:
           use-public-rspm: true
 
-      - uses: r-lib/actions/setup-r-dependencies
+      - uses: r-lib/actions/setup-r-dependencies@v1
        with:
           extra-packages: any::covr
           needs: coverage

From 3851de9779ecb2ddaaa2fbbf3c71b837e6b539e6 Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 02:16:00 +0000
Subject: [PATCH 03/11] Try flipping R version

---
 .github/workflows/test-coverage.yaml | 2 +-
 renv.lock                            | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
index 9d651cd..2c5bb50 100644
--- a/.github/workflows/test-coverage.yaml
+++ b/.github/workflows/test-coverage.yaml
@@ -21,7 +21,7 @@ jobs:
         with:
           use-public-rspm: true
 
-      - uses: r-lib/actions/setup-r-dependencies@v1
+      - uses: r-lib/actions/setup-r-dependencies@v2
         with:
           extra-packages: any::covr
           needs: coverage
diff --git a/renv.lock b/renv.lock
index 4f4f6d3..a2b276e 100644
--- a/renv.lock
+++ b/renv.lock
@@ -1,6 +1,6 @@
 {
   "R": {
-    "Version": "4.3.0",
+    "Version": "4.2.1",
     "Repositories": [
       {
         "Name": "CRAN",

From 90e9f3e6a63f865c812c6959a4ff8f033c4d2a8e Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 02:26:03 +0000
Subject: [PATCH 04/11] bypass lock file

---
 renv.lock => save_renv.lock | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename renv.lock => save_renv.lock (100%)

diff --git a/renv.lock b/save_renv.lock
similarity index 100%
rename from renv.lock
rename to save_renv.lock

From aba8d0ecdbeddf2b29e77b540f5f691e028ab4bd Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 02:33:15 +0000
Subject: [PATCH 05/11] might as well try this too.

---
 .Rprofile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.Rprofile b/.Rprofile
index 81b960f..13efc46 100644
--- a/.Rprofile
+++ b/.Rprofile
@@ -1 +1 @@
-source("renv/activate.R")
+# source("renv/activate.R")

From fa935147166e1bdc71a347bb5ddb20c0bb19296a Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 03:04:39 +0000
Subject: [PATCH 06/11] Several updates for a passing R CMD Check

---
 DESCRIPTION                  |  3 +-
 NAMESPACE                    |  1 +
 R/data.R                     | 20 ++++++--
 R/data_metadata.R            | 15 +++---
 R/dataset_metadata.R         |  2 +-
 R/file_metadata.R            | 11 ++--
 R/global.R                   |  6 ++-
 R/zzz.R                      |  1 +
 man/data_metadata.Rd         |  6 +--
 man/data_metadata_setters.Rd |  4 +-
 man/datasetjson-package.Rd   |  2 +-
 man/file_metadata.Rd         |  4 +-
 man/file_metadata_setters.Rd |  7 +--
 man/sas_date_formats.Rd      |  6 ++-
 man/sas_datetime_formats.Rd  |  6 ++-
 man/sas_time_formats.Rd      |  6 ++-
 man/schema_1_0_0.Rd          |  6 ++-
 scratch.R                    | 98 ------------------------------------
 18 files changed, 67 insertions(+), 137 deletions(-)
 delete mode 100644 scratch.R

diff --git a/DESCRIPTION b/DESCRIPTION
index e53c55d..61c09a1 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -13,7 +13,7 @@ Authors@R: c(
            email = "nmasel@its.jnj.com",
            role = "aut")
     )
-Description: R package for reading and writing CDISC Dataset JSON files
+Description: R package for reading and writing CDISC Dataset JSON files.
 License: Apache License (>= 2)
 URL: https://github.com/atorus-research/datasetjson
 BugReports: https://github.com/atorus-research/datasetjson/issues
@@ -25,6 +25,7 @@ RoxygenNote: 7.2.3
 Depends: R (>= 3.5)
 Imports:
     jsonlite (>= 1.8.7),
+    jsonvalidate (>= 1.3.2)
 Suggests:
     testthat (>= 2.1.0),
     knitr,
diff --git a/NAMESPACE b/NAMESPACE
index ff3d5f3..116383e 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -25,3 +25,4 @@ importFrom(jsonlite,fromJSON)
 importFrom(jsonlite,toJSON)
 importFrom(jsonvalidate,json_validate)
 importFrom(tools,file_path_sans_ext)
+importFrom(utils,tail)
diff --git a/R/data.R b/R/data.R
index 9d921a9..bc658f8 100644
--- a/R/data.R
+++ b/R/data.R
@@ -20,7 +20,10 @@
 #' Valid SAS(c) date formats pulled from
 #' https://documentation.sas.com/doc/en/vdmmlcdc/8.1/ds2pg/p0bz5detpfj01qn1kz2in7xymkdl.htm
 #'
-#' @format ## `sas_date_formats` A character vector with 45 elements
+#' @format ## `sas_date_formats`
+#' \describe{
+#' A character vector with 45 elements
+#' }
 "sas_date_formats"
 
 #' A List of valid SAS(c) datetime formats
@@ -28,7 +31,10 @@
 #' Valid SAS(c) datetime formats pulled from
 #' https://documentation.sas.com/doc/en/vdmmlcdc/8.1/ds2pg/p0bz5detpfj01qn1kz2in7xymkdl.htm
 #'
-#' @format ## `sas_datetime_formats` A character vector with 7 elements
+#' @format ## `sas_datetime_formats`
+#' \describe{
+#' A character vector with 7 elements
+#' }
 "sas_datetime_formats"
 
 #' A List of valid SAS(c) time formats
@@ -36,12 +42,18 @@
 #' Valid SAS(c) time formats pulled from
 #' https://documentation.sas.com/doc/en/vdmmlcdc/8.1/ds2pg/p0bz5detpfj01qn1kz2in7xymkdl.htm
 #'
-#' @format ## `sas_time_formats` A character vector with 4 elements
+#' @format ## `sas_time_formats`
+#' \describe{
+#' A character vector with 4 elements
+#' }
 "sas_time_formats"
 
 #' Dataset JSON Schema Version 1.0.0
 #'
 #' This object is a character vector holding the schema for Dataset JSON Version 1.0.0
 #'
-#' @format ## `schema_1_0_0` A character vector with 1 element
+#' @format ## `schema_1_0_0`
+#' \describe{
+#' A character vector with 1 element
+#' }
 "schema_1_0_0"
diff --git a/R/data_metadata.R b/R/data_metadata.R
index 979d36d..8486a29 100644
--- a/R/data_metadata.R
+++ b/R/data_metadata.R
@@ -1,8 +1,6 @@
 #' Create the data metadata container for a Dataset JSON object
 #'
-#' @param data_type Type of data being written. clinicalData for subject level
-#'   data, and referenceData for non-subject level data (i.e. TDMs, Associated
-#'   Persons)
+#' @param study Study OID value
 #' @param metadata_version Metadata version OID value
 #' @param metadata_ref Metadata reference (i.e. path to Define.xml)
 #'
@@ -43,8 +41,7 @@ data_metadata <- function(study = "NA", metadata_version = "NA", metadata_ref =
 #' This set of functions
 #' @param x data metadata or datasetjson object
 #' @param study Study OID value
-#'
-#' @param ...
+#' @param ... Additional parameters
 #'
 #' @return A datasetjson or data_metadata object
 #' @export
@@ -64,7 +61,7 @@ set_study_oid <- function(x, study, ...) {
 #' @family Data metadata setters
 #' @rdname data_metadata_setters
 #' @export
-set_study_oid.data_metadata <- function(x, study) {
+set_study_oid.data_metadata <- function(x, study, ...) {
   stopifnot_data_metadata(x)
   x[['studyOID']] <- study
   x
@@ -72,7 +69,7 @@ set_study_oid.data_metadata <- function(x, study) {
 #' @export
 #' @noRd
-set_study_oid.datasetjson <- function(x, study) {
+set_study_oid.datasetjson <- function(x, study, ...) {
   stopifnot_datasetjson(x)
   data_type <- get_data_type(x)
   x[[data_type]][['studyOID']] <- study
   x
@@ -89,7 +86,7 @@ set_metadata_version <- function(x, metadata_version, ...) {
 
 #' @export
 #' @noRd
-set_metadata_version.data_metadata <- function(x, metadata_version) {
+set_metadata_version.data_metadata <- function(x, metadata_version, ...) {
   stopifnot_data_metadata(x)
   x[['metaDataVersionOID']] <- metadata_version
   x
@@ -97,7 +94,7 @@ set_metadata_version.data_metadata <- function(x, metadata_version) {
 
 #' @export
 #' @noRd
-set_metadata_version.datasetjson <- function(x, metadata_version) {
+set_metadata_version.datasetjson <- function(x, metadata_version, ...) {
   stopifnot_datasetjson(x)
   data_type <- get_data_type(x)
   x[[data_type]][['metaDataVersionOID']] <- metadata_version
diff --git a/R/dataset_metadata.R b/R/dataset_metadata.R
index 750563f..cff3ea4 100644
--- a/R/dataset_metadata.R
+++ b/R/dataset_metadata.R
@@ -85,7 +85,7 @@ set_item_data <- function(x, .data, ...) {
 #' @noRd
 #' @export
 #' @method set_item_data dataset_metadata
-set_item_data.dataset_metadata <- function(x, .data) {
+set_item_data.dataset_metadata <- function(x, .data, ...) {
   stopifnot_dataset_metadata(x)
 
   if (!inherits(.data, "data.frame")) {
diff --git a/R/file_metadata.R b/R/file_metadata.R
index 1a16c11..12592be 100644
--- a/R/file_metadata.R
+++ b/R/file_metadata.R
@@ -15,7 +15,6 @@
 #' @examples
 #' # Create using parameters
 #' file_meta <- file_metadata(
-#'   "clinicalData",
 #'   originator = "Some Org",
 #'   sys = "source system",
 #'   sys_version = "1.0"
@@ -24,8 +23,7 @@
 #' # Set parameters after
 #' file_meta <- file_metadata()
 #'
-#' file_meta_updated <- set_data_type(file_meta, "referenceData")
-#' file_meta_updated <- set_file_oid(file_meta_updated, "/some/path")
+#' file_meta_updated <- set_file_oid(file_meta, "/some/path")
 #' file_meta_updated <- set_originator(file_meta_updated, "Some Org")
 #' file_meta_updated <- set_source_system(file_meta_updated, "source system", "1.0")
 file_metadata <- function(originator="NA", sys = "NA", sys_version = "NA", version = "1.0.0") {
@@ -76,7 +74,9 @@ get_datetime <- function() {
 #'   generated the Dataset-JSON file."
 #' @param file_oid fileOID parameter, defined as "A unique identifier for this
 #'   file."
-#' @param data_type
+#' @param data_type Type of data being written. clinicalData for subject level
+#'   data, and referenceData for non-subject level data (i.e. TDMs, Associated
+#'   Persons)
 #'
 #' @return datasetjson or file_metadata object
 #' @export
@@ -86,8 +86,7 @@ get_datetime <- function() {
 #' @examples
 #' file_meta <- file_metadata()
 #'
-#' file_meta_updated <- set_data_type(file_meta, "referenceData")
-#' file_meta_updated <- set_file_oid(file_meta_updated, "/some/path")
+#' file_meta_updated <- set_file_oid(file_meta, "/some/path")
 #' file_meta_updated <- set_originator(file_meta_updated, "Some Org")
 #' file_meta_updated <- set_source_system(file_meta_updated, "source system", "1.0")
 set_source_system <- function(x, sys, sys_version) {
diff --git a/R/global.R b/R/global.R
index 9a385f4..3bf88eb 100644
--- a/R/global.R
+++ b/R/global.R
@@ -1 +1,5 @@
-globalVariables()
+globalVariables(c(
+  "schema_1_0_0",
+  "sas_date_formats",
+  "sas_datetime_formats"
+))
diff --git a/R/zzz.R b/R/zzz.R
index 90ca1e7..bba701f 100644
--- a/R/zzz.R
+++ b/R/zzz.R
@@ -1,6 +1,7 @@
 #' @importFrom jsonvalidate json_validate
 #' @importFrom jsonlite fromJSON toJSON
 #' @importFrom tools file_path_sans_ext
+#' @importFrom utils tail
 NULL
 
 #' @keywords internal
diff --git a/man/data_metadata.Rd b/man/data_metadata.Rd
index 26b2f04..df34df7 100644
--- a/man/data_metadata.Rd
+++ b/man/data_metadata.Rd
@@ -7,13 +7,11 @@
 data_metadata(study = "NA", metadata_version = "NA", metadata_ref = "NA")
 }
 \arguments{
+\item{study}{Study OID value}
+
 \item{metadata_version}{Metadata version OID value}
 
 \item{metadata_ref}{Metadata reference (i.e. path to Define.xml)}
-
-\item{data_type}{Type of data being written. clinicalData for subject level
-data, and referenceData for non-subject level data (i.e. TDMs, Associated
-Persons)}
 }
 \value{
 data_metadata object
diff --git a/man/data_metadata_setters.Rd b/man/data_metadata_setters.Rd
index d70aab1..36e3ff1 100644
--- a/man/data_metadata_setters.Rd
+++ b/man/data_metadata_setters.Rd
@@ -9,7 +9,7 @@
 \usage{
 set_study_oid(x, study, ...)
 
-\method{set_study_oid}{data_metadata}(x, study)
+\method{set_study_oid}{data_metadata}(x, study, ...)
 
 set_metadata_version(x, metadata_version, ...)
 
@@ -20,7 +20,7 @@ set_metadata_ref(x, metadata_ref)
 
 \item{study}{Study OID value}
 
-\item{...}{}
+\item{...}{Additional parameters}
 
 \item{metadata_version}{Metadata version OID value}
 
diff --git a/man/datasetjson-package.Rd b/man/datasetjson-package.Rd
index 7bd2875..f85839f 100644
--- a/man/datasetjson-package.Rd
+++ b/man/datasetjson-package.Rd
@@ -6,7 +6,7 @@
 \alias{datasetjson-package}
 \title{datasetjson: Read and write CDISC Dataset JSON files}
 \description{
-R package for reading and writing CDISC Dataset JSON files
+R package for reading and writing CDISC Dataset JSON files.
} \seealso{ Useful links: diff --git a/man/file_metadata.Rd b/man/file_metadata.Rd index b0a77aa..92bc8bf 100644 --- a/man/file_metadata.Rd +++ b/man/file_metadata.Rd @@ -33,7 +33,6 @@ Create a file metadata object \examples{ # Create using parameters file_meta <- file_metadata( - "clinicalData", originator = "Some Org", sys = "source system", sys_version = "1.0" @@ -42,8 +41,7 @@ file_meta <- file_metadata( # Set parameters after file_meta <- file_metadata() -file_meta_updated <- set_data_type(file_meta, "referenceData") -file_meta_updated <- set_file_oid(file_meta_updated, "/some/path") +file_meta_updated <- set_file_oid(file_meta, "/some/path") file_meta_updated <- set_originator(file_meta_updated, "Some Org") file_meta_updated <- set_source_system(file_meta_updated, "source system", "1.0") } diff --git a/man/file_metadata_setters.Rd b/man/file_metadata_setters.Rd index fbf3329..49ba5f3 100644 --- a/man/file_metadata_setters.Rd +++ b/man/file_metadata_setters.Rd @@ -31,7 +31,9 @@ generated the Dataset-JSON file."} \item{file_oid}{fileOID parameter, defined as "A unique identifier for this file."} -\item{data_type}{} +\item{data_type}{Type of data being written. clinicalData for subject level +data, and referenceData for non-subject level data (i.e. TDMs, Associated +Persons)} } \value{ datasetjson or file_metadata object @@ -43,8 +45,7 @@ object. \examples{ file_meta <- file_metadata() -file_meta_updated <- set_data_type(file_meta, "referenceData") -file_meta_updated <- set_file_oid(file_meta_updated, "/some/path") +file_meta_updated <- set_file_oid(file_meta, "/some/path") file_meta_updated <- set_originator(file_meta_updated, "Some Org") file_meta_updated <- set_source_system(file_meta_updated, "source system", "1.0") } diff --git a/man/sas_date_formats.Rd b/man/sas_date_formats.Rd index 72d0977..ebf3449 100644 --- a/man/sas_date_formats.Rd +++ b/man/sas_date_formats.Rd @@ -5,7 +5,11 @@ \alias{sas_date_formats} \title{A List of valid SAS(c) date formats} \format{ -\subsection{\code{sas_date_formats} A character vector with 45 elements}{ +\subsection{\code{sas_date_formats}}{ + +\describe{ +A character vector with 45 elements +} } } \usage{ diff --git a/man/sas_datetime_formats.Rd b/man/sas_datetime_formats.Rd index 710db86..d8358a6 100644 --- a/man/sas_datetime_formats.Rd +++ b/man/sas_datetime_formats.Rd @@ -5,7 +5,11 @@ \alias{sas_datetime_formats} \title{A List of valid SAS(c) datetime formats} \format{ -\subsection{\code{sas_datetime_formats} A character vector with 7 elements}{ +\subsection{\code{sas_datetime_formats}}{ + +\describe{ +A character vector with 7 elements +} } } \usage{ diff --git a/man/sas_time_formats.Rd b/man/sas_time_formats.Rd index b212955..60588f4 100644 --- a/man/sas_time_formats.Rd +++ b/man/sas_time_formats.Rd @@ -5,7 +5,11 @@ \alias{sas_time_formats} \title{A List of valid SAS(c) time formats} \format{ -\subsection{\code{sas_time_formats} A character vector with 4 elements}{ +\subsection{\code{sas_time_formats}}{ + +\describe{ +A character vector with 4 elements +} } } \usage{ diff --git a/man/schema_1_0_0.Rd b/man/schema_1_0_0.Rd index a61f157..633c00b 100644 --- a/man/schema_1_0_0.Rd +++ b/man/schema_1_0_0.Rd @@ -5,7 +5,11 @@ \alias{schema_1_0_0} \title{Dataset JSON Schema Version 1.0.0} \format{ -\subsection{\code{schema_1_0_0} A character vector with 1 element}{ +\subsection{\code{schema_1_0_0}}{ + +\describe{ +A character vector with 1 element +} } } \usage{ diff --git a/scratch.R b/scratch.R deleted file mode 100644 index f7dba98..0000000 --- 
a/scratch.R +++ /dev/null @@ -1,98 +0,0 @@ -# Code provided by Tilo Blenk on 2022-11-28 - - -#' Read dataset-json file with data of one SDTM/ADaM domain into data frame -#' -#' Read dataset-json file with data of one SDTM/ADaM domain into data frame. -#' -#' @param path path to dataset-json file -#' @return data frame with data of dataset-json file -#' @export -#' @import jsonlite -#' @examples -#' \dontrun{ -#' dm <- read_dataset_json("path/to/dm.json") -#' } -read_dataset_json <- function(path) { - j <- fromJSON(path) - x <- names(j$clinicalData$itemGroupData) - - # re-create data frame with correct data types - # length is ignored at the moment but could be used for rounding - # of numerical values and padding of string values - d <- as.data.frame(j$clinicalData$itemGroupData[[x]]$itemData) - colnames(d) <- j$clinicalData$itemGroupData[[x]]$items$name - tt <- j$clinicalData$itemGroupData[[x]]$items$type - for (i in seq_along(tt)) { - if (tolower(tt[i]) %in% c("integer", "int")) { - d[,i] <- as.integer(d[,i]) - } else if (tolower(tt[i]) %in% c("float", "numeric", "num", "double", "double precision")) { - d[,i] <- as.double(d[,i]) - } - # everything not being integer, double, or numeric is considered as character - } - d[,-1] # get rid of ITEMGROUPDATASEQ column -} - - -#' Write data frame with data of one SDTM/ADaM domain into dataset-json file -#' -#' Write data frame with data of one SDTM/ADaM domain into dataset-json file. -#' -#' @param d data frame with data of one SDTM/ADaM domain -#' @param path path to dataset-json file to write data to -#' @export -#' @import jsonlite -#' @examples -#' \dontrun{ -#' write_dataset_json(dm, "path/to/dm.json") -#' } -write_dataset_json <- function(d, path) { - studyid <- d[1, "STUDYID", drop = TRUE] - domain <- d[1, "DOMAIN", drop = TRUE] - - # insert sequence number, ie ITEMGROUPDATASEQ, as first column - x <- colnames(d) - d$ITEMGROUPDATASEQ <- seq_len(nrow(d)) - d <- d[, c("ITEMGROUPDATASEQ", x)] - - # get metadata, in particular the data type, from the data themselves - m <- data.frame( - name = colnames(d), - type = unname(sapply(d, class)) - ) - - # assemble variable description as list for correct formatting with toJSON() - # the actual domain data can be formatted easily in the right way with - # toJSON(..., dataframe = "values", ...) 
but then the variable description - # need to be assembled as a list for correct formatting - l <- lapply(seq_len(nrow(m)), function(i) { - list( - OID = if (m$name[i] == "ITEMGROUPDATASEQ") m$name[i] - else paste("IT", m$name[i], sep = "."), - name = m$name[i], - # label ???, could come from define-xml - type = m$type[i]) - # length ???, could come from define-xml - }) - - # assemble data for later formatting to json with toJSON() - j <- list( - clinicalData = list( - studyOID = studyid, - metaDataVersionOID = "3.1.2", - itemGroupData = list() - ) - ) - # needs to be added in a separate step because of dynamic name IT.DOMAIN - j[["clinicalData"]][["itemGroupData"]][[paste("IT", domain, sep = ".")]] <- list( - records = nrow(d), - name = domain, - # label ???, could come from define-xml - items = l, - itemData = d - ) - - # convert data to json by toJSON() and write to file - cat(toJSON(j, dataframe = "values", na = "null", auto_unbox = TRUE, pretty = TRUE), "\n", file = path) -} From 6e6e3d7321470eb9dd9a6d776e59342af38735dc Mon Sep 17 00:00:00 2001 From: "mike.stackhouse" Date: Wed, 13 Sep 2023 12:10:18 +0000 Subject: [PATCH 07/11] remove placeholder functions --- R/dataset_metadata.R | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/R/dataset_metadata.R b/R/dataset_metadata.R index cff3ea4..1608d12 100644 --- a/R/dataset_metadata.R +++ b/R/dataset_metadata.R @@ -187,39 +187,3 @@ df_to_list_rows <- function(x) { y[!is.na(y)] }) } - -#' Apply JSON metadata to dataframe as attributes -#' -#' This function takes supplied metadata and applies it to a dataframe as -#' corresponding attributes -#' -#' @param .data A Dataframe -#' @param metadata A list containing Dataset JSON dataset object metadata -#' -#' @return dataframe -#' @examples -#' # TODO: -#' @noRd -apply_dataset_metadata <- function(.data, metadata) { - # TODO: Set records, name, and label to the dataframe as a whole - - # TODO: Set OID, name, label, type, length, and format, and keySequence as - # necessary to each variable - TRUE -} - -#' Gather Dataset JSON metadata from a dataframe which has Dataset JSON metadata -#' applied -#' -#' This function will gather the attributes from a data frame which has Dataset -#' JSON metadata applied. 
-#'
-#' @param .data A dataframe with Dataset JSON attributes applied
-#'
-#' @return A list of Dataset dataset object JSON metadata
-#' @noRd
-gather_dataset_metadata <- function(.data) {
-
-  # Retrieve the necessary metadata off of a data frame that pertains to a dataset JSON object
-  TRUE
-}

From c6cb587be6dad6d2cfd640415c6fe990b17c541a Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 12:10:30 +0000
Subject: [PATCH 08/11] Add unit tests for error generators

---
 tests/testthat/test-utils.R | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 tests/testthat/test-utils.R

diff --git a/tests/testthat/test-utils.R b/tests/testthat/test-utils.R
new file mode 100644
index 0000000..b34a12e
--- /dev/null
+++ b/tests/testthat/test-utils.R
@@ -0,0 +1,6 @@
+test_that("Type checker functions throw proper errors", {
+  expect_error(stopifnot_datasetjson(1, "Input must be a datasetjson object"))
+  expect_error(stopifnot_dataset_metadata(1, "Input must be a dataset_metadata object"))
+  expect_error(stopifnot_data_metadata(1, "Input must be a data_metadata object"))
+  expect_error(stopifnot_file_metadata(1, "Input must be a file_metadata object"))
+})

From 04ffed57cabfd5940168605b0629d0ee9b15178f Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 12:13:56 +0000
Subject: [PATCH 09/11] revert this back

---
 save_renv.lock => renv.lock | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename save_renv.lock => renv.lock (100%)

diff --git a/save_renv.lock b/renv.lock
similarity index 100%
rename from save_renv.lock
rename to renv.lock

From 77ababa9c4d55b5d9cbdd57b5bdb61ac58866517 Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 12:14:20 +0000
Subject: [PATCH 10/11] blank space

---
 tests/testthat/test-dataset_metadata.R | 2 --
 1 file changed, 2 deletions(-)

diff --git a/tests/testthat/test-dataset_metadata.R b/tests/testthat/test-dataset_metadata.R
index 43b0340..1e834af 100644
--- a/tests/testthat/test-dataset_metadata.R
+++ b/tests/testthat/test-dataset_metadata.R
@@ -62,5 +62,3 @@ test_that("dataset_metadata generates messages as expected", {
     ), ".data must be a data.frame"
   )
 })
-
-

From 8f02fb9065a22e9420fbff4be0b0e0251b06f18a Mon Sep 17 00:00:00 2001
From: "mike.stackhouse"
Date: Wed, 13 Sep 2023 12:25:44 +0000
Subject: [PATCH 11/11] Fix broken tests and make consistent error message

---
 R/utils.R                   | 2 +-
 tests/testthat/test-utils.R | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/R/utils.R b/R/utils.R
index e2bc797..65c5bac 100644
--- a/R/utils.R
+++ b/R/utils.R
@@ -24,7 +24,7 @@ stopifnot_data_metadata <- function(x) {
 
 stopifnot_file_metadata <- function(x) {
   if (!(inherits(x, "datasetjson") | inherits(x, "file_metadata"))) {
-    stop("x must be a datasetjson object or file_metadata object", call.=FALSE)
+    stop("Input must be a datasetjson object or file_metadata object", call.=FALSE)
   }
 }
 
diff --git a/tests/testthat/test-utils.R b/tests/testthat/test-utils.R
index b34a12e..3c55aab 100644
--- a/tests/testthat/test-utils.R
+++ b/tests/testthat/test-utils.R
@@ -1,6 +1,6 @@
 test_that("Type checker functions throw proper errors", {
-  expect_error(stopifnot_datasetjson(1, "Input must be a datasetjson object"))
-  expect_error(stopifnot_dataset_metadata(1, "Input must be a dataset_metadata object"))
-  expect_error(stopifnot_data_metadata(1, "Input must be a data_metadata object"))
-  expect_error(stopifnot_file_metadata(1, "Input must be a file_metadata object"))
+  expect_error(stopifnot_datasetjson(1), "Input must be a datasetjson object")
+  expect_error(stopifnot_dataset_metadata(1), "Input must be a dataset_metadata object")
+  expect_error(stopifnot_data_metadata(1), "Input must be a data_metadata object")
+  expect_error(stopifnot_file_metadata(1), "Input must be a datasetjson object or file_metadata object")
 })