Skip to content

Commit

Permalink
moving the application of the cosinor analyses inside g.analyse.avday.…
Browse files Browse the repository at this point in the history
…R to a separate function to ease re-using it in g.part5 #708
  • Loading branch information
vincentvanhees committed Feb 14, 2023
1 parent b96c5e9 commit 91b592d
Show file tree
Hide file tree
Showing 4 changed files with 189 additions and 69 deletions.
75 changes: 75 additions & 0 deletions R/applyCosinorAnalyses.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
applyCosinorAnalyses = function(ts, qcheck, midnightsi, shortEpoch, longEpoch) {
  # Prepare a time series for (extended) cosinor analysis and run it.
  # Wrapper around cosinorAnalyses: handles DST duplications/gaps, masks
  # invalid (non-wear) epochs, optionally aggregates to 1-minute resolution,
  # and log-transforms the signal before fitting.
  #
  # ts:         data.frame with a "timestamp" column and one metric column
  # qcheck:     per-epoch indicator, 1 = invalid, 0 = valid
  # midnightsi: indices of midnights at longEpoch resolution
  # shortEpoch: epoch length (seconds) of ts/qcheck (windowsizes[1])
  # longEpoch:  epoch length (seconds) of midnightsi (windowsizes[2])
  ws2 = longEpoch
  ws3 = shortEpoch
  # Re-derive the metric over the entire time series, dropping duplicated
  # rows (clock moved backward at a DST transition)
  keep = !duplicated(ts)
  qcheck = qcheck[keep]
  metricColumn = which(colnames(ts) %in% "timestamp" == FALSE)
  Xi = ts[keep, metricColumn]
  epochsPerHour = 3600 / ws3
  longEpochsPerDay = (1440 * 60) / ws2 # 96 when longEpoch is 900 seconds
  dstgap = which(diff(midnightsi) != longEpochsPerDay)
  if (length(dstgap) > 0) {
    # Clock moved forward at a DST transition: pad the missing hour with NAs
    gaplocation = ((midnightsi[dstgap[1]] * ws2) / ws3) + (2 * epochsPerHour)
    Xi = c(Xi[1:gaplocation], rep(NA, epochsPerHour), Xi[(gaplocation + 1):length(Xi)])
    qcheck = c(qcheck[1:gaplocation], rep(NA, epochsPerHour), qcheck[(gaplocation + 1):length(qcheck)])
  }

  # Set non-wear (invalid) epochs to missing so the cosinor model is fitted
  # on real data only; the price is that F_pseudo cannot be extracted.
  firstvalid = 1
  invalid = which(qcheck == 1)
  if (length(invalid) > 0) {
    Xi[invalid] = NA
    # Skip an invalid start of the recording (if any) so that the
    # 24-hour blocks start from the first valid value
    firstvalid = which(qcheck == 0)[1]
    if (!is.na(firstvalid) && firstvalid != 1) {
      Xi = Xi[firstvalid:length(Xi)]
    }
  }
  # Only attempt cosinor analyses with more than 24 hours of valid data
  if (sum(!is.na(Xi)) > (1440 * (60 / ws3))) {
    midnightsi_ws3 = (midnightsi - 1) * (ws2 / ws3)
    firstMidnight = midnightsi_ws3[which(midnightsi_ws3 >= firstvalid - 1)[1]]
    timeOffsetHours = (firstMidnight - (firstvalid - 1)) / epochsPerHour
    if (ws3 < 60) {
      # Sub-minute epochs: aggregate to 1-minute resolution by taking the
      # mean per minute, keeping minutes without any observation as NA
      minuteIndex = rep(seq_len(length(Xi)), each = 60 / ws3)
      XT = data.frame(Xi = Xi, time = minuteIndex[1:length(Xi)])
      meanIgnoringNA = function(values) {
        if (length(values) > 0) {
          if (any(!is.na(values))) {
            return(mean(values, na.rm = TRUE))
          }
        }
        return(NA)
      }
      XT = aggregate(x = XT, by = list(XT$time), FUN = meanIgnoringNA)
      # Defensive: map any NaN produced by the aggregation back to NA
      nanIndices = which(is.nan(XT$Xi))
      if (length(nanIndices) > 0) {
        XT$Xi[nanIndices] = NA
      }
      # Convert to milli-g and log-transform to be more robust against
      # peaks in the data
      observed = !is.na(XT$Xi)
      XT$Xi[observed] = log((XT$Xi[observed] * 1000) + 1)
      Xi = XT$Xi
      epochsize = 60
    } else {
      epochsize = ws3
    }
    cosinor_coef = cosinorAnalyses(Xi = Xi, epochsize = epochsize, timeOffsetHours = timeOffsetHours)
    cosinor_coef$timeOffsetHours = timeOffsetHours
  } else {
    # Not enough valid data: return an empty result (NULL)
    cosinor_coef = c()
  }
  return(cosinor_coef)
}
141 changes: 72 additions & 69 deletions R/g.analyse.avday.R
Original file line number Diff line number Diff line change
Expand Up @@ -120,75 +120,78 @@ g.analyse.avday = function(doquan, averageday, M, IMP, t_TWDI, quantiletype,
#----------------------------------
# (Extended) Cosinor analysis
if (params_247[["cosinor"]] == TRUE) {
# Re-derive Xi but this time include entire time series
# Here, we ignore duplicated values (when clock moves backward due to DST)
handleDST = !duplicated(IMP$metashort)
qcheck = qcheck[handleDST]
Xi = IMP$metashort[handleDST, acc.metric]
Nlong_epochs_day = (1440 * 60) / ws2 # this is 96 by default
dstgap = which(diff(midnightsi) != Nlong_epochs_day)
if (length(dstgap) > 0) {
# Time moved forward due to DST
gaplocation = ((midnightsi[dstgap[1]] * ws2) / ws3) + (2 * (3600/ws3))
# Insert NA values
Xi = c(Xi[1:gaplocation], rep(NA, 3600/ws3), Xi[(gaplocation + 1):length(Xi)])
qcheck = c(qcheck[1:gaplocation], rep(NA, 3600/ws3), qcheck[(gaplocation + 1):length(qcheck)])
}

# Xi = log((Xi * 1000) + 1) # log transformed to be more robust against peaks in the data
# set non-wear to missing values, because for Cosinor fit
# it seems more logical to only fit with real data
# this comes at the price of not being able to extract F_pseudo
firstvalid = 1
if (length(which(qcheck == 1)) > 0) {
is.na(Xi[which(qcheck == 1)]) = TRUE
# ignore invalid start of recording (if applicable)
# such that 24 hour blocks start from first valid value
firstvalid = which(qcheck == 0)[1]
if (is.na(firstvalid) == FALSE) {
if (firstvalid != 1) {
Xi = Xi[firstvalid:length(Xi)]
}
}
}
if (length(which(is.na(Xi) == FALSE)) > (1440 * (60/ws3))) { # Only attempt cosinor analyses if there is more than 24 hours of data
midnightsi_ws3 = (midnightsi - 1) * (ws2 / ws3)
timeOffsetHours = (midnightsi_ws3[which(midnightsi_ws3 >= firstvalid - 1)[1]] - (firstvalid - 1)) / (3600 / ws3)
if (ws3 < 60) {
# If epochsize < 1 minute then aggregate to 1 minute by taking maximum value
# but keep NA values
XTtime = rep(1:length(Xi), each = 60 / ws3)
XT = data.frame(Xi = Xi, time = XTtime[1:length(Xi)])
custommean = function(x) {
y = NA
if (length(x) > 0) {
if (length(which(is.na(x) == FALSE) ) > 0) {
y = mean(x, na.rm = TRUE)
}
}
return(y)
}
XT = aggregate(x = XT, by = list(XT$time), FUN = custommean)
if (length(which(is.nan(XT$Xi) == TRUE)) > 0) {
is.na(XT$Xi[which(is.nan(XT$Xi) == TRUE)]) = TRUE
}
# experimental: clip all peaks above Xth percentile?
# Q9 = quantile(x = XT$Xi, probs = 0.75, na.rm = TRUE)
# XT$Xi[which(XT$Xi >= Q9)] = Q9

# log transform of data in millig
notna = !is.na(XT$Xi)
XT$Xi[notna] = log((XT$Xi[notna]*1000) + 1)
Xi = XT$Xi
epochsize = 60
} else {
epochsize = ws3
}
cosinor_coef = cosinorAnalyses(Xi = Xi, epochsize = epochsize, timeOffsetHours = timeOffsetHours)
cosinor_coef$timeOffsetHours = timeOffsetHours
} else {
cosinor_coef = c()
}
cosinor_coef = applyCosinorAnalyses(ts = IMP$metashort[, c("timestamp", acc.metric)],
qcheck = qcheck,
midnightsi, shortEpoch = ws3, longEpoch = ws2)
# # Re-derive Xi but this time include entire time series
# # Here, we ignore duplicated values (when clock moves backward due to DST)
# handleDST = !duplicated(IMP$metashort)
# qcheck = qcheck[handleDST]
# Xi = IMP$metashort[handleDST, acc.metric]
# Nlong_epochs_day = (1440 * 60) / ws2 # this is 96 by default
# dstgap = which(diff(midnightsi) != Nlong_epochs_day)
# if (length(dstgap) > 0) {
# # Time moved forward due to DST
# gaplocation = ((midnightsi[dstgap[1]] * ws2) / ws3) + (2 * (3600/ws3))
# # Insert NA values
# Xi = c(Xi[1:gaplocation], rep(NA, 3600/ws3), Xi[(gaplocation + 1):length(Xi)])
# qcheck = c(qcheck[1:gaplocation], rep(NA, 3600/ws3), qcheck[(gaplocation + 1):length(qcheck)])
# }
#
# # Xi = log((Xi * 1000) + 1) # log transformed to be more robust against peaks in the data
# # set non-wear to missing values, because for Cosinor fit
# # it seems more logical to only fit with real data
# # this comes at the price of not being able to extract F_pseudo
# firstvalid = 1
# if (length(which(qcheck == 1)) > 0) {
# is.na(Xi[which(qcheck == 1)]) = TRUE
# # ignore invalid start of recording (if applicable)
# # such that 24 hour blocks start from first valid value
# firstvalid = which(qcheck == 0)[1]
# if (is.na(firstvalid) == FALSE) {
# if (firstvalid != 1) {
# Xi = Xi[firstvalid:length(Xi)]
# }
# }
# }
# if (length(which(is.na(Xi) == FALSE)) > (1440 * (60/ws3))) { # Only attempt cosinor analyses if there is more than 24 hours of data
# midnightsi_ws3 = (midnightsi - 1) * (ws2 / ws3)
# timeOffsetHours = (midnightsi_ws3[which(midnightsi_ws3 >= firstvalid - 1)[1]] - (firstvalid - 1)) / (3600 / ws3)
# if (ws3 < 60) {
# # If epochsize < 1 minute then aggregate to 1 minute by taking maximum value
# # but keep NA values
# XTtime = rep(1:length(Xi), each = 60 / ws3)
# XT = data.frame(Xi = Xi, time = XTtime[1:length(Xi)])
# custommean = function(x) {
# y = NA
# if (length(x) > 0) {
# if (length(which(is.na(x) == FALSE) ) > 0) {
# y = mean(x, na.rm = TRUE)
# }
# }
# return(y)
# }
# XT = aggregate(x = XT, by = list(XT$time), FUN = custommean)
# if (length(which(is.nan(XT$Xi) == TRUE)) > 0) {
# is.na(XT$Xi[which(is.nan(XT$Xi) == TRUE)]) = TRUE
# }
# # experimental: clip all peaks above Xth percentile?
# # Q9 = quantile(x = XT$Xi, probs = 0.75, na.rm = TRUE)
# # XT$Xi[which(XT$Xi >= Q9)] = Q9
#
# # log transform of data in millig
# notna = !is.na(XT$Xi)
# XT$Xi[notna] = log((XT$Xi[notna]*1000) + 1)
# Xi = XT$Xi
# epochsize = 60
# } else {
# epochsize = ws3
# }
# cosinor_coef = cosinorAnalyses(Xi = Xi, epochsize = epochsize, timeOffsetHours = timeOffsetHours)
# cosinor_coef$timeOffsetHours = timeOffsetHours
# } else {
# cosinor_coef = c()
# }
} else {
cosinor_coef = c()
}
Expand Down
8 changes: 8 additions & 0 deletions R/g.part5.R
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,14 @@ g.part5 = function(datadir = c(), metadatadir = c(), f0=c(), f1=c(),
}
}
ts$window = 0
#===============================================================
# Cosinor analyses based on only the data used for GGIR part5





#===============================================================
for (TRLi in params_phyact[["threshold.lig"]]) {
for (TRMi in params_phyact[["threshold.mod"]]) {
for (TRVi in params_phyact[["threshold.vig"]]) {
Expand Down
34 changes: 34 additions & 0 deletions man/applyCosinorAnalyses.Rd
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
\name{applyCosinorAnalyses}
\alias{applyCosinorAnalyses}
\title{
Apply Cosinor Analyses to time series
}
\description{
Wrapper function around \link{cosinorAnalyses} that first prepares the time series
before applying the cosinor analyses
}
\usage{
applyCosinorAnalyses(ts, qcheck, midnightsi, shortEpoch, longEpoch)
}
\arguments{
\item{ts}{
Data.frame with timestamps and acceleration metric.
}
\item{qcheck}{
Vector of equal length as number of rows in ts with value 1 for invalid
timestamps, 0 otherwise.
}
\item{midnightsi}{
Indices for midnights in the time series
}
\item{shortEpoch}{
First element of argument windowsizes, the resolution of the ts and qcheck values.
}
\item{longEpoch}{
Second element of argument windowsizes, the resolution of the midnightsi
values in seconds.
}
}
\author{
Vincent T van Hees <v.vanhees@accelting.com>
}

0 comments on commit 91b592d

Please sign in to comment.