... fix store args + minor bugs + add AIPTW slow test ...
bozenne committed Oct 17, 2024
1 parent 8ee359b commit 27c4726
Showing 9 changed files with 163 additions and 109 deletions.
17 changes: 9 additions & 8 deletions R/ate-iid.R
@@ -3,9 +3,9 @@
## Author: Brice Ozenne
## Created: apr 5 2018 (17:01)
## Version:
- ## Last-Updated: Oct 14 2024 (10:01)
+ ## Last-Updated: Oct 17 2024 (11:39)
## By: Brice Ozenne
- ## Update #: 1352
+ ## Update #: 1360
##----------------------------------------------------------------------
##
### Commentary:
@@ -47,6 +47,7 @@ iidATE <- function(estimator,
n.obs,
n.times,
product.limit,
+ store,
...){

## ** prepare output
@@ -145,7 +146,7 @@
attr(factor, "factor") <- lapply(1:n.contrasts, function(iC){cbind(-iW.IPTW[,iC]*iW.IPCW2[,iTime]*Y.tau[,iTime])})
## browser()
term.censoring <- attr(predictRisk(object.censor, newdata = mydata, times = c(0,time.jumpC)[index.obsSINDEXjumpC[,iTime]+1],
- diag = TRUE, product.limit = product.limit, average.iid = factor),"average.iid")
+ diag = TRUE, product.limit = product.limit, average.iid = factor, store = store),"average.iid")

for(iC in 1:n.contrasts){ ## iGrid <- 1
## - because predictRisk outputs the risk instead of the survival
@@ -185,7 +186,7 @@
}

term.intF1_tau <- attr(predictRisk(object.event, newdata = mydata, times = times, cause = cause,
- average.iid = factor, product.limit = product.limit),"average.iid")
+ average.iid = factor, product.limit = product.limit, store = store),"average.iid")

for(iC in 1:n.contrasts){ ## iC <- 1
iid.AIPTW[[iC]] <- iid.AIPTW[[iC]] + term.intF1_tau[[iC]]
@@ -198,7 +199,7 @@
})

integrand.F1t <- attr(predictRisk(object.event, newdata = mydata, times = time.jumpC, cause = cause,
- average.iid = factor, product.limit = product.limit), "average.iid")
+ average.iid = factor, product.limit = product.limit, store = store), "average.iid")

for(iC in 1:n.contrasts){ ## iC <- 1
iid.AIPTW[[iC]] <- iid.AIPTW[[iC]] + subsetIndex(rowCumSum(integrand.F1t[[iC]]),
@@ -232,7 +233,7 @@
return(-colMultiply_cpp(ls.F1tau_F1t_dM_SSG[[iTau]]*beforeEvent.jumpC, scale = iW.IPTW[,iC]))
})
integrand.St <- attr(predictRisk(object.event, type = "survival", newdata = mydata, times = time.jumpC-tol, cause = cause,
- average.iid = factor, product.limit = product.limit), "average.iid")
+ average.iid = factor, product.limit = product.limit, store = store), "average.iid")
for(iGrid in 1:n.grid){ ## iGrid <- 1
iTau <- grid[iGrid,"tau"]
iC <- grid[iGrid,"contrast"]
@@ -254,7 +255,7 @@
})

integrand.G1 <- predictCox(object.censor, newdata = mydata, times = time.jumpC - tol,
- average.iid = factor)$survival.average.iid
+ average.iid = factor, store = store)$survival.average.iid

## ## integral censoring martingale
factor <- TRUE
@@ -264,7 +265,7 @@
return(-colMultiply_cpp(ls.F1tau_F1t_SG[[iTau]]*beforeEvent.jumpC, scale = iW.IPTW[,iC]))
})
integrand.G2 <- predictCox(object.censor, newdata = mydata, times = time.jumpC, type = "hazard",
- average.iid = factor)$hazard.average.iid
+ average.iid = factor, store = store)$hazard.average.iid

for(iGrid in 1:n.grid){ ## iGrid <- 1
iTau <- grid[iGrid,"tau"]
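A minimal sketch of the call pattern that iidATE() now uses, with hypothetical data and average.iid = TRUE standing in for the internal factor weights (the data, model, and argument values below are illustrative and assume a riskRegression build that includes this commit):

library(riskRegression)
library(survival)
set.seed(10)
d0 <- sampleData(100, outcome = "survival")
## working model for the censoring mechanism, playing the role of object.censor in iidATE()
fit.cens0 <- coxph(Surv(time, event == 0) ~ X2, data = d0, x = TRUE, y = TRUE)
## average influence function of the censoring survival, with the iid
## decomposition kept in the memory-sparing ("minimal") representation
iidG <- predictCox(fit.cens0, newdata = d0, times = sort(unique(d0$time)),
                   average.iid = TRUE, store = c(iid = "minimal"))$survival.average.iid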
17 changes: 9 additions & 8 deletions R/ate-pointEstimate.R
@@ -3,9 +3,9 @@
## Author: Brice Ozenne
## Created: jun 27 2019 (10:43)
## Version:
- ## Last-Updated: Oct 16 2024 (12:31)
+ ## Last-Updated: Oct 17 2024 (11:44)
## By: Brice Ozenne
- ## Update #: 1028
+ ## Update #: 1041
##----------------------------------------------------------------------
##
### Commentary:
@@ -40,6 +40,7 @@ ATE_TI <- function(object.event,
data.index,
method.iid,
product.limit,
+ store,
...){

tol <- 1e-12 ## difference in jump time must be above tol
@@ -142,7 +143,7 @@
if(attr(estimator,"IPCW")){
iPred <- lapply(1:n.times, function(iT){ ## iT <- 1
1-predictRisk(object.censor, newdata = mydata, times = c(0,time.jumpC)[index.obsSINDEXjumpC[,iT]+1],
- diag = TRUE, product.limit = product.limit, iid = (method.iid==2)*return.iid.nuisance)
+ diag = TRUE, product.limit = product.limit, iid = (method.iid==2)*return.iid.nuisance, store = store)
})

G.T_tau <- do.call(cbind,iPred)
@@ -199,7 +200,7 @@
}
outRisk <- predictRisk(object.event, newdata = data.i, times = times,
average.iid = factor, cause = cause,
- product.limit = product.limit)
+ product.limit = product.limit, store = store)

F1.ctf.tau[[iC]][ls.index.strata[[iC]],] <- outRisk
if(return.iid.nuisance){
@@ -225,7 +226,7 @@

## absolute risk at event times and jump times of the censoring process
predTempo <- predictRisk(object.event, newdata = mydata, times = c(times, time.jumpC), cause = cause, product.limit = product.limit,
- iid = (method.iid==2)*return.iid.nuisance)
+ iid = (method.iid==2)*return.iid.nuisance, store = store)
F1.tau <- predTempo[,1:n.times,drop=FALSE]
F1.jump <- predTempo[,n.times + (1:index.lastjumpC),drop=FALSE]
if((method.iid==2)*return.iid.nuisance){
@@ -234,7 +235,7 @@

## survival at all jump of the censoring process
S.jump <- predictRisk(object.event, type = "survival", newdata = mydata, times = time.jumpC-tol, product.limit = product.limit,
- iid = (method.iid==2)*return.iid.nuisance)
+ iid = (method.iid==2)*return.iid.nuisance, store = store)
if((method.iid==2)*return.iid.nuisance){
out$store$iid.nuisance.survival <- attr(S.jump,"iid")
attr(S.jump,"iid") <- NULL
@@ -243,14 +244,14 @@
## martingale for the censoring process
## at all times of jump of the censoring process
G.jump <- 1-predictRisk(object.censor, newdata = mydata, times = if(index.lastjumpC>1){c(0,time.jumpC[1:(index.lastjumpC-1)])}else{0},
- product.limit = product.limit, iid = (method.iid==2)*return.iid.nuisance)
+ product.limit = product.limit, iid = (method.iid==2)*return.iid.nuisance, store = store)

if(return.iid.nuisance && (method.iid==2)){
out$store$iid.nuisance.censoring <- -attr(G.jump,"iid")
attr(G.jump,"iid") <- NULL
}
dN.jump <- do.call(cbind,lapply(time.jumpC, function(iJump){(mydata[[eventVar.time]] == iJump)*(mydata[[eventVar.status]] == level.censoring)}))
- dLambda.jump <- predictCox(object.censor, newdata = mydata, times = time.jumpC, type = "hazard", iid = (method.iid==2)*return.iid.nuisance)
+ dLambda.jump <- predictCox(object.censor, newdata = mydata, times = time.jumpC, type = "hazard", iid = (method.iid==2)*return.iid.nuisance, store = store)
if((method.iid==2)*return.iid.nuisance){
out$store$iid.nuisance.martingale <- dLambda.jump$hazard.iid
}
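A sketch of the IPCW building block that ATE_TI() evaluates at subject-specific times, which now also receives `store`. It reuses the hypothetical d0 and fit.cens0 from the sketch after the R/ate-iid.R diff; the horizon of 5 is arbitrary and the time indexing is simplified relative to the jump-time bookkeeping in the actual code:

## censoring survival evaluated just before min(T, 5), one value per subject
G.Ttau <- 1 - predictRisk(fit.cens0, newdata = d0, times = pmin(d0$time, 5),
                          diag = TRUE, product.limit = FALSE,
                          store = c(iid = "minimal"))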
15 changes: 8 additions & 7 deletions R/ate.R
@@ -3,9 +3,9 @@
## author: Thomas Alexander Gerds
## created: Oct 23 2016 (08:53)
## Version:
- ## last-updated: Oct 16 2024 (12:48)
+ ## last-updated: Oct 17 2024 (11:30)
## By: Brice Ozenne
- ## Update #: 2453
+ ## Update #: 2482
#----------------------------------------------------------------------
##
### Commentary:
@@ -603,14 +603,15 @@ ate <- function(event,
censorVar.status = censorVar.status,
return.iid.nuisance = return.iid.nuisance,
data.index = data.index,
- method.iid = method.iid),
- dots)
+ method.iid = method.iid,
+ store = store),
+ dots[names(dots) %in% "store" == FALSE])
if (attr(estimator,"TD")){
args.pointEstimate <- c(args.pointEstimate,list(formula=formula))
}
## note: system.time() seems to slow down the execution of the function, this is why Sys.time is used instead.
tps1 <- Sys.time()

pointEstimate <- do.call(fct.pointEstimate, args.pointEstimate)

tps2 <- Sys.time()
@@ -1139,14 +1140,14 @@ ate_initArgs <- function(object.event,
stop("Incorrect names for argument \'store\': should \"data\" and \"iid\". \n",
"For instance store = c(data = \"full\", iid = \"full\") or store = c(data = \"minimal\", iid = \"minimal\").\n")
}
if("data" %in% names(store)){
if("data" %in% names(store) && !is.null(store[["data"]])){
if(store[["data"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(data = \"full\") or store = c(data = \"minimal\").\n")
}
store.data <- store[["data"]]
}
if("iid" %in% names(store)){
if("iid" %in% names(store) && !is.null(store[["iid"]])){
if(store[["iid"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(iid = \"full\") or store = c(iid = \"minimal\").\n")
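An end-to-end sketch of the user-level call that exercises the fixed argument passing: `store` is now forwarded explicitly to ATE_TI()/iidATE() and filtered out of `dots`, so it is matched only once. The data, model choices, and argument values below are hypothetical and assume a riskRegression build that includes this commit:

library(riskRegression)
library(survival)
library(prodlim)
set.seed(10)
d <- sampleData(200, outcome = "competing.risks")
fit.event <- CSC(Hist(time, event) ~ X1 + X2, data = d)           ## outcome model
fit.treat <- glm(X1 ~ X2 + X6, data = d, family = binomial)       ## treatment model
fit.cens  <- coxph(Surv(time, event == 0) ~ X2, data = d,         ## censoring model
                   x = TRUE, y = TRUE)
e.ate <- ate(event = fit.event, treatment = fit.treat, censor = fit.cens,
             data = d, times = 5, cause = 1, se = TRUE, iid = TRUE,
             store = c(data = "full", iid = "minimal"))
summary(e.ate)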
27 changes: 18 additions & 9 deletions R/confint.ate.R
@@ -3,9 +3,9 @@
## Author: Brice Ozenne
## Created: maj 23 2018 (14:08)
## Version:
- ## Last-Updated: Oct 16 2024 (12:42)
+ ## Last-Updated: Oct 17 2024 (12:22)
## By: Brice Ozenne
- ## Update #: 1012
+ ## Update #: 1020
##----------------------------------------------------------------------
##
### Commentary:
@@ -413,16 +413,25 @@ confintBoot.ate <- function(object, estimator, out, seed){
out$ratioRisk[iEstimator, c("p.value") := boot.p[indexRatio], on = "estimator"]
}
}

## ** export
col.meanRisk <- c("estimator","time","treatment","estimate","estimate.boot","se","lower","upper")
col.diffRisk <- c("estimator","time","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper")
col.ratioRisk <- c("estimator","time","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper")

if(attr(object$estimator,"TD")){
setcolorder(out$meanRisk, neworder = c("estimator","time","landmark","treatment","estimate","estimate.boot","se","lower","upper"))
setcolorder(out$diffRisk, neworder = c("estimator","time","landmark","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper","p.value"))
setcolorder(out$ratioRisk, neworder = c("estimator","time","landmark","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper","p.value"))
}else{
setcolorder(out$meanRisk, neworder = c("estimator","time","treatment","estimate","estimate.boot","se","lower","upper"))
setcolorder(out$diffRisk, neworder = c("estimator","time","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper","p.value"))
setcolorder(out$ratioRisk, neworder = c("estimator","time","A","B","estimate.A","estimate.B","estimate","estimate.boot","se","lower","upper","p.value"))
+ col.meanRisk <- c(col.meanRisk[1:2], "landmark", col.meanRisk[-(1:2)])
+ col.diffRisk <- c(col.diffRisk[1:2], "landmark", col.diffRisk[-(1:2)])
+ col.ratioRisk <- c(col.ratioRisk[1:2], "landmark", col.ratioRisk[-(1:2)])
}
+ if(p.value){
+ col.diffRisk <- c(col.diffRisk, "p.value")
+ col.ratioRisk <- c(col.ratioRisk, "p.value")
+ }
+ setcolorder(out$meanRisk, neworder = col.meanRisk)
+ setcolorder(out$diffRisk, neworder = col.diffRisk)
+ setcolorder(out$ratioRisk, neworder = col.ratioRisk)

data.table::setattr(out$meanRisk, name = "vcov", value = vcov.meanRisk)
data.table::setattr(out$diffRisk, name = "vcov", value = vcov.diffRisk)
data.table::setattr(out$ratioRisk, name = "vcov", value = vcov.ratioRisk)
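The bootstrap tables are now assembled from col.meanRisk/col.diffRisk/col.ratioRisk, with the landmark column inserted only for time-dependent estimators and p.value appended only when it was computed. A small hypothetical bootstrap run, reusing the objects from the sketch after the R/ate.R diff (B kept small for speed):

e.boot <- ate(event = fit.event, treatment = fit.treat, censor = fit.cens,
              data = d, times = 5, cause = 1, B = 50, seed = 1)
e.boot$diffRisk
## expected column order: estimator, time, A, B, estimate.A, estimate.B,
## estimate, estimate.boot, se, lower, upper (plus p.value when available)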
22 changes: 11 additions & 11 deletions R/model.tables.ate.R
@@ -3,9 +3,9 @@
## Author: Brice Ozenne
## Created: Oct 16 2024 (11:48)
## Version:
- ## Last-Updated: Oct 17 2024 (09:15)
+ ## Last-Updated: Oct 17 2024 (12:16)
## By: Brice Ozenne
- ## Update #: 11
+ ## Update #: 18
##----------------------------------------------------------------------
##
### Commentary:
@@ -67,11 +67,6 @@ model.tables.ate <- function(x, contrasts = NULL, times = NULL, estimator = NULL
type <- match.arg(type, c("meanRisk","diffRisk","ratioRisk"))
}

- dots <- list(...)
- if(length(dots)>0){
- stop("Unknown argument(s) \'",paste(names(dots),collapse="\' \'"),"\'. \n")
- }

if(x$inference$se == FALSE){
stop("Cannot evaluate the uncertainty about the estimates when the standard error has not been stored. \n",
"Set argument \'se\' to TRUE when calling the ate function \n")
@@ -102,18 +97,23 @@
if(is.factor(object.reduce$ratioRisk$B)){
object.reduce$ratioRisk$B <- droplevels(object.reduce$ratioRisk$B)
}

- object.reduce$iid <- list(lapply(object.reduce$iid[[estimator]][contrasts], function(iIID){iIID[,x$eval.times %in% times, drop = FALSE]}))
- names(object.reduce$iid) <- estimator
+ if(x$inference$iid){
+ object.reduce$iid <- list(lapply(object.reduce$iid[[estimator]][contrasts], function(iIID){iIID[,x$eval.times %in% times, drop = FALSE]}))
+ names(object.reduce$iid) <- estimator
+ }else if(x$inference$bootstrap){
+ object.reduce$boot$t0 <- x$boot$t0[c(subset.meanRisk,NROW(x$meanRisk)+subset.diffRisk,NROW(x$meanRisk)+NROW(x$diffRisk)+subset.ratioRisk)]
+ object.reduce$boot$t <- x$boot$t[,c(subset.meanRisk,NROW(x$meanRisk)+subset.diffRisk,NROW(x$meanRisk)+NROW(x$diffRisk)+subset.ratioRisk),drop=FALSE]
+ }
object.reduce$estimator <- estimator ## side effect: drop attributes but they are not used by confintIID.ate
attr(object.reduce$estimator,"TD") <- attr(x$estimator,"TD")
object.reduce$eval.times <- times
object.reduce$contrasts <- contrasts
object.reduce$allContrasts <- utils::combn(contrasts, m = 2)
object.reduce$inference.contrasts <- contrasts
object.reduce$inference.allContrasts <- utils::combn(contrasts, m = 2)

## *** call confint
- out <- stats::confint(object.reduce)[[type]]
+ out <- stats::confint(object.reduce, ...)[[type]]

## *** export
return(out)
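model.tables() no longer errors on extra arguments but forwards them to confint(), and it now branches on x$inference$iid versus x$inference$bootstrap. A sketch reusing e.ate and e.boot from the previous examples; the `level` argument is an assumption about the confint.ate interface, not something stated in this commit:

model.tables(e.ate, type = "diffRisk", level = 0.9)    ## iid-based uncertainty
model.tables(e.boot, type = "meanRisk")                ## bootstrap-based uncertainty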
4 changes: 2 additions & 2 deletions R/predict.CauseSpecificCox.R
@@ -297,14 +297,14 @@ predict.CauseSpecificCox <- function(object,
stop("Incorrect names for argument \'store\': should \"data\" and \"iid\". \n",
"For instance store = c(data = \"full\", iid = \"full\") or store = c(data = \"minimal\", iid = \"minimal\").\n")
}
if("data" %in% names(store)){
if("data" %in% names(store) && !is.null(store[["data"]])){
if(store[["data"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(data = \"full\") or store = c(data = \"minimal\").\n")
}
store.data <- store[["data"]]
}
if("iid" %in% names(store)){
if("iid" %in% names(store) && !is.null(store[["iid"]])){
if(store[["iid"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(iid = \"full\") or store = c(iid = \"minimal\").\n")
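The store check now skips NULL entries, which matters when store is forwarded internally as a list with only one element filled in. A minimal sketch of the case the NULL guard tolerates, reusing fit.event and d from the R/ate.R sketch:

predict(fit.event, newdata = d, times = 5, cause = 1, se = TRUE,
        store = list(data = NULL, iid = "minimal"))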
4 changes: 2 additions & 2 deletions R/predictCox.R
@@ -396,14 +396,14 @@ predictCox <- function(object,
stop("Incorrect names for argument \'store\': should \"data\" and \"iid\". \n",
"For instance store = c(data = \"full\", iid = \"full\") or store = c(data = \"minimal\", iid = \"minimal\").\n")
}
if("data" %in% names(store)){
if("data" %in% names(store) && !is.null(store[["data"]])){
if(store[["data"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(data = \"full\") or store = c(data = \"minimal\").\n")
}
store.data <- store[["data"]]
}
if("iid" %in% names(store)){
if("iid" %in% names(store) && !is.null(store[["iid"]])){
if(store[["iid"]] %in% c("minimal","full") == FALSE){
stop("Element in argument \'store\' should take value \'minimal\' or \'full\'.\n",
"For instance store = c(iid = \"full\") or store = c(iid = \"minimal\").\n")
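The same NULL guard applies in predictCox(). A self-contained sketch with a plain Cox model (hypothetical data, assuming a build that includes this commit):

library(riskRegression)
library(survival)
d2 <- sampleData(100, outcome = "survival")
fit.cox <- coxph(Surv(time, event) ~ X1 + X6, data = d2, x = TRUE, y = TRUE)
predictCox(fit.cox, newdata = d2, times = c(2, 5), se = TRUE, iid = TRUE,
           store = list(data = NULL, iid = "minimal"))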