PR for Base Unit Tests and CI (#51)
* Added basic tests

* Fixed minor typo; fixed tests

* Added pandoc to gh actions

* Still trying pandoc

* Still trying pandoc

* Still trying pandoc (typo)

* Added latex

* Debugging missing extra packages

* Moved lfe install to test script

* Moved lfe install to test script (debugging)

* Added testthat

* Added haven for some reason

* Gave up and added lfe to recommended packages (for test)
mcaceresb authored Jan 23, 2024
1 parent bd629c1 commit 56e62db
Showing 7 changed files with 250 additions and 4 deletions.
59 changes: 59 additions & 0 deletions .github/workflows/tests.yml
@@ -0,0 +1,59 @@
name: Unit and build tests

on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
workflow_dispatch:
branches: [ master ]

jobs:
tests:
runs-on: ${{ matrix.config.os }}
name: ${{ matrix.config.os }} (${{ matrix.config.r }})
strategy:
matrix:
config:
- {os: macos-latest, r: 'release'}
- {os: ubuntu-latest, r: 'release'}

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: Set up R
uses: r-lib/actions/setup-r@v2
with:
r-version: ${{ matrix.config.r }}
use-public-rspm: true

- name: Install dependencies
uses: r-lib/actions/setup-r-dependencies@v2
with:
extra-packages: |
any::devtools
any::knitr
any::formatR
any::testthat
any::haven
any::lfe
needs: |
devtools
knitr
rmarkdown
testthat
haven
lfe
- name: Setup pandoc
uses: r-lib/actions/setup-pandoc@v2

- name: Setup LaTeX
uses: r-lib/actions/setup-tinytex@v2

- name: Check build
run: |
devtools::document()
devtools::check()
shell: Rscript {0}
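
Note (not part of the commit): the check step above runs through Rscript inside the workflow; a minimal sketch of reproducing the same verification locally, assuming devtools is installed and the working directory is the package root:

# Reproduce the CI check step locally (sketch; assumes devtools is installed
# and the current directory is the HonestDiD package root).
devtools::document()   # regenerate Rd files from the roxygen comments
devtools::check()      # build the package, run R CMD check, and execute tests/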
3 changes: 3 additions & 0 deletions DESCRIPTION
@@ -23,6 +23,9 @@ Imports:
TruncatedNormal (>= 1.0)
Suggests:
knitr,
testthat,
haven,
lfe,
rmarkdown
Authors@R:
c(person(given = "Ashesh",
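
Note (not part of the commit): the new Suggests entries are only needed to run the test suite and build the vignette. A sketch of installing the suggested packages named in the DESCRIPTION diff above, assuming a CRAN mirror is configured:

# Install the suggested packages listed above (sketch; assumes a CRAN mirror is set).
install.packages(c("knitr", "testthat", "haven", "lfe", "rmarkdown"))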
4 changes: 2 additions & 2 deletions R/deltarmm.R
@@ -31,7 +31,7 @@

# Create a vector to extract the max first dif, which corresponds with the first dif for period s, or minus this if max_positive == FALSE
v_max_dif <- base::matrix(0, nrow = 1, ncol = numPrePeriods + numPostPeriods + 1)
v_max_dif[(numPrePeriods+s):(numPrePeriods+1+s)] <- c(-1,1)
v_max_dif[(numPrePeriods+s):(numPrePeriods+1+s)] <- c(-1,1)

if (max_positive == FALSE){
v_max_dif <- -v_max_dif
@@ -310,7 +310,7 @@ computeConditionalCS_DeltaRMM <- function(betahat, sigma, numPrePeriods, numPost
postPeriodMomentsOnly = postPeriodMomentsOnly, monotonicityDirection = monotonicityDirection,
gridPoints = gridPoints, grid.ub = grid.ub, grid.lb = grid.lb)
CIs_RMM_plus_allS[,s_i] = CI_s_plus$accept

# Compute CI for s, (-) and bind it to all CI's for (-)
CI_s_minus = .computeConditionalCS_DeltaRMM_fixedS(s = s_indices[s_i], max_positive = FALSE, Mbar = Mbar,
betahat = betahat, sigma = sigma, numPrePeriods = numPrePeriods,
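
Note (not part of the commit): a standalone sketch of the selection vector built in the hunk above, following the displayed code; the period counts and s are illustrative values only.

# Rebuild v_max_dif exactly as in the displayed code, with toy dimensions
# (numPrePeriods = 3, numPostPeriods = 2, s = 0 are illustrative, not from the package data).
numPrePeriods  <- 3
numPostPeriods <- 2
s              <- 0
max_positive   <- TRUE
v_max_dif <- base::matrix(0, nrow = 1, ncol = numPrePeriods + numPostPeriods + 1)
v_max_dif[(numPrePeriods + s):(numPrePeriods + 1 + s)] <- c(-1, 1)
if (max_positive == FALSE) v_max_dif <- -v_max_dif
v_max_dif  # 1 x 6 matrix: 0 0 -1 1 0 0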
8 changes: 8 additions & 0 deletions R/honest_did.R
@@ -2,6 +2,8 @@
#'
#' @description a function to compute a sensitivity analysis
#' using the approach of Rambachan and Roth (2021)
#'
#' @param ... Parameters to pass to the relevant method.
honest_did <- function(...) UseMethod("honest_did")

#' @title honest_did.AGGTEobj
@@ -10,6 +12,7 @@ honest_did <- function(...) UseMethod("honest_did")
#' using the approach of Rambachan and Roth (2021) when
#' the event study is estimated using the `did` package
#'
#' @param es Result from aggte (object of class AGGTEobj).
#' @param e event time to compute the sensitivity analysis for.
#' The default value is `e=0` corresponding to the "on impact"
#' effect of participating in the treatment.
@@ -18,6 +21,11 @@ honest_did <- function(...) UseMethod("honest_did")
#' in pre-treatment periods) or "relative_magnitude" (which
#' conducts a sensitivity analysis based on the relative magnitudes
#' of deviations from parallel trends in pre-treatment periods).
#' @param gridPoints Number of grid points used for the underlying test
#' inversion. Default equals 100. User may wish to change the number of grid
#' points for computational reasons.
#' @param ... Parameters to pass to `createSensitivityResults` or
#' `createSensitivityResults_relativeMagnitudes`.
#' @inheritParams HonestDiD::createSensitivityResults
#' @inheritParams HonestDiD::createSensitivityResults_relativeMagnitudes
honest_did.AGGTEobj <- function(es,
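
Note (not part of the commit): a usage sketch based only on the parameters documented in the roxygen entries above; the did-package calls and the data are placeholders, not part of this change.

# Sketch only: assumes an event study has been estimated with the did package.
# library(did); library(HonestDiD)
# cs  <- att_gt(yname = "y", tname = "period", idname = "id",
#               gname = "first_treated", data = mydata)   # placeholder data
# es  <- aggte(cs, type = "dynamic")                      # object of class AGGTEobj
# Sensitivity analysis for the on-impact effect (e = 0); extra arguments are
# forwarded to createSensitivityResults_relativeMagnitudes().
# sens <- honest_did(es, e = 0, type = "relative_magnitude",
#                    Mbarvec = seq(from = 0.5, to = 2, by = 0.5))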
2 changes: 1 addition & 1 deletion R/sensitivityresults.R
@@ -732,7 +732,7 @@ constructOriginalCS <- function(betahat, sigma,
createEventStudyPlot <- function(betahat, stdErrors = NULL, sigma = NULL,
numPrePeriods, numPostPeriods, alpha = 0.05,
timeVec, referencePeriod,
useRelativeEventTime = F) {
useRelativeEventTime = FALSE) {
if (base::is.null(stdErrors) & base::is.null(sigma)) {
base::stop("User must specify either vector of standard errors or vcv matrix!")
} else if (base::is.null(stdErrors) & !is.null(sigma)) {
2 changes: 1 addition & 1 deletion man/createEventStudyPlot.Rd
@@ -9,7 +9,7 @@ Constructs event study plot using the estimated event study coefficients and sta
}
\usage{
createEventStudyPlot(betahat, stdErrors = NULL, sigma = NULL,
numPrePeriods, numPostPeriods, alpha, timeVec,
numPrePeriods, numPostPeriods, alpha = 0.05, timeVec,
referencePeriod, useRelativeEventTime = FALSE)
}
%- maybe also 'usage' for other objects documented here.
176 changes: 176 additions & 0 deletions tests/test_base.R
@@ -0,0 +1,176 @@
# remove.packages("HonestDiD")
# install.packages(".", repos=NULL, type="source")
# testthat::test_dir("tests")

library(lfe)
library(haven)
library(testthat)
library(HonestDiD)
data(BCdata_EventStudy)
data(LWdata_EventStudy)

BC_numPrePeriods <- length(BCdata_EventStudy$prePeriodIndices)
BC_numPostPeriods <- length(BCdata_EventStudy$postPeriodIndices)
BC_l_vec <- basisVector(index = 1, size = BC_numPostPeriods)
BC_l_vec <- cbind(c(1, 0, 0, 0))

test_that("HonestDiD base run with no errors", {
BC_DeltaSDNB_RobustResults <-
createSensitivityResults(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
method = "FLCI",
Mvec = seq(from=0, to=0.3, by=0.1))

BC_DeltaSDNB_RobustResultsConditional <-
createSensitivityResults(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
method = "Conditional",
Mvec = seq(from=0, to=0.3, by=0.1))

BC_DeltaSDNB_RobustResultsCF <-
createSensitivityResults(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
method = "C-F",
Mvec = seq(from=0, to=0.3, by=0.1))

BC_DeltaSDNB_RobustResultsCLF <-
createSensitivityResults(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
method = "C-LF",
Mvec = seq(from=0, to=0.3, by=0.1))

BC_OriginalResults <-
constructOriginalCS(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec)

BC_DeltaSDNB_SensitivityPlot <-
createSensitivityPlot(robustResults = BC_DeltaSDNB_RobustResults,
originalResults = BC_OriginalResults)

expect_silent(BC_DeltaSDNB_RobustResults)
expect_silent(BC_DeltaSDNB_RobustResultsConditional)
expect_silent(BC_DeltaSDNB_RobustResultsCF)
expect_silent(BC_DeltaSDNB_RobustResultsCLF)
expect_silent(BC_OriginalResults)
expect_silent(BC_DeltaSDNB_SensitivityPlot)
})

test_that("HonestDiD options run with no errors", {
LWdata_RawData = haven::read_dta(system.file("extdata", "LWdata_RawData.dta", package = "HonestDiD"))
sum(LWdata_RawData$nobs)

# Estimate event study using lfe package
EmpFemale.EventStudy = lfe::felm(emp ~
rtESV13 + rtESV14 + rtESV15 +
rtESV16 + rtESV17 + rtESV18 +
rtESV19 + rtESV110 + rtESV111 + # End Pre-periods
rtESV113 + rtESV114 + rtESV115 +
rtESV116 + rtESV117 + rtESV118 +
rtESV119 + rtESV120 + rtESV121 +
rtESV122 + rtESV123 + rtESV124 +
rtESV125 + rtESV126 + rtESV127 +
rtESV128 + rtESV129 + rtESV130 +
rtESV131 + rtESV132 + rtESV133 +
rtESV134 + rtESV135 + # End post-periods
yearsfcor + yearsflr + aveitc + fscontrol +
asian + black + hispanic + other |
factor(PUS_SURVEY_YEAR)*factor(BIRTHYEAR) +
factor(PUS_SURVEY_YEAR) + factor(BIRTHSTATE) |
0 | BIRTHSTATE,
data = LWdata_RawData,
weights = LWdata_RawData$nobs)
summary(EmpFemale.EventStudy)

coefIndex = which(grepl(x = dimnames(EmpFemale.EventStudy$coefficients)[[1]], pattern = "rtESV"))
betahat = EmpFemale.EventStudy$beta[coefIndex, ]

# Extract estimated variance-covariance matrix of event study coefficients
sigma = EmpFemale.EventStudy$clustervcv[coefIndex, coefIndex]

# Construct vector of event times and the scalar reference period
timeVec = c(seq(from = -11, to = -3, by = 1), seq(from = -1, to = 21, by = 1))
referencePeriod <- -2
postPeriodIndices <- which(timeVec > -2)
prePeriodIndices <- which(timeVec < -2)
LW_numPrePeriods <- length(prePeriodIndices)
LW_numPostPeriods <- length(postPeriodIndices)
LW_l_vec <- basisVector(index = 1, size = LW_numPostPeriods)

for( method in c("C-F", "C-LF", "Conditional", "FLCI") ) {
for( monotonicityDirection in c("increasing", "decreasing") ) {
for ( biasDirection in c("positive", "negative") ) {
LW_DeltaSDNB_RobustResults <-
createSensitivityResults(betahat = betahat,
sigma = sigma,
numPrePeriods = LW_numPrePeriods,
numPostPeriods = LW_numPostPeriods,
l_vec = LW_l_vec,
method = method,
monotonicityDirection = monotonicityDirection,
biasDirection = biasDirection,
Mvec = seq(from=0, to=0.3, by=0.1))
print(c(method, monotonicityDirection, biasDirection, LW_DeltaSDNB_RobustResults))
expect_silent(LW_DeltaSDNB_RobustResults)
}
}
}

for ( method in c(NULL, "C-LF", "Conditional") ) {
for ( monotonicityDirection in c("increasing", "decreasing", NULL) ) {
for ( bound in c("deviation from parallel trends", "deviation from linear trend") ) {
BC_DeltaRM_RobustResults <-
createSensitivityResults_relativeMagnitudes(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
gridPoints = 100,
grid.ub = 1,
grid.lb = -1,
bound = bound,
method = method,
monotonicityDirection = monotonicityDirection,
Mbarvec = seq(from=0, to=1, by=0.5))
print(c(method, monotonicityDirection, bound, BC_DeltaRM_RobustResults))
expect_silent(BC_DeltaRM_RobustResults)
}
}
}

for ( method in c(NULL, "C-LF", "Conditional") ) {
for ( biasDirection in c("positive", "negative", NULL) ) {
for ( bound in c("deviation from parallel trends", "deviation from linear trend") ) {
BC_DeltaRM_RobustResults <-
createSensitivityResults_relativeMagnitudes(betahat = BCdata_EventStudy$betahat,
sigma = BCdata_EventStudy$sigma,
numPrePeriods = BC_numPrePeriods,
numPostPeriods = BC_numPostPeriods,
l_vec = BC_l_vec,
gridPoints = 100,
grid.ub = 1,
grid.lb = -1,
bound = bound,
method = method,
biasDirection = biasDirection,
Mbarvec = seq(from=0, to=1, by=0.5))
print(c(method, biasDirection, bound, BC_DeltaRM_RobustResults))
expect_silent(BC_DeltaRM_RobustResults)
}
}
}
})
