chore: More information in query log prefix #632

Open · wants to merge 1 commit into main
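This PR replaces the bare method-name log prefix (for example "eventsBySlices") with a prefix that also carries the entity type and the slice range, and drops the now-redundant minSlice/maxSlice arguments from the individual debug statements. A minimal sketch of the new prefix in Scala, using hypothetical example values (entity type ShoppingCart, slices 512-767); only the prefix format itself comes from the diff below:

    // Hypothetical example values; only the prefix format comes from this PR.
    val entityType = "ShoppingCart"
    val minSlice = 512
    val maxSlice = 767
    val logPrefix = s"[$entityType] eventsBySlices [$minSlice-$maxSlice]: "
    // A debug line built with this prefix then renders along the lines of:
    // [ShoppingCart] eventsBySlices [512-767]: next query [3], between time [...]. Found [100] rows in previous query.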
@@ -273,11 +273,9 @@ import org.slf4j.Logger
 
     if (state.queryCount != 0 && log.isDebugEnabled())
       log.debug(
-        "{} next query [{}] from slices [{} - {}], between time [{} - {}]. Found [{}] rows in previous query.",
+        "{} next query [{}], between time [{} - {}]. Found [{}] rows in previous query.",
         logPrefix,
         state.queryCount,
-        minSlice,
-        maxSlice,
         fromTimestamp,
         toTimestamp,
         state.rowCount)
@@ -300,11 +298,9 @@ import org.slf4j.Logger
     } else {
       if (log.isDebugEnabled)
         log.debug(
-          "{} query [{}] from slices [{} - {}] completed. Found [{}] rows in previous query.",
+          "{} query [{}] completed. Found [{}] rows in previous query.",
           logPrefix,
           state.queryCount,
-          minSlice,
-          maxSlice,
           state.rowCount)
 
       state -> None
@@ -319,13 +315,7 @@ import org.slf4j.Logger
       .futureSource[Envelope, NotUsed] {
         currentTimestamp.map { currentTime =>
           if (log.isDebugEnabled())
-            log.debug(
-              "{} query slices [{} - {}], from time [{}] until now [{}].",
-              logPrefix,
-              minSlice,
-              maxSlice,
-              initialOffset.timestamp,
-              currentTime)
+            log.debug("{} query, from time [{}] until now [{}].", logPrefix, initialOffset.timestamp, currentTime)
 
           ContinuousQuery[QueryState, Envelope](
             initialState = QueryState.empty.copy(latest = initialOffset),
@@ -348,12 +338,7 @@ import org.slf4j.Logger
     val initialOffset = toTimestampOffset(offset)
 
     if (log.isDebugEnabled())
-      log.debug(
-        "Starting {} query from slices [{} - {}], from time [{}].",
-        logPrefix,
-        minSlice,
-        maxSlice,
-        initialOffset.timestamp)
+      log.debug("{} starting query from time [{}].", logPrefix, initialOffset.timestamp)
 
     def nextOffset(state: QueryState, envelope: Envelope): QueryState = {
       if (EnvelopeOrigin.isHeartbeatEvent(envelope))
@@ -408,13 +393,7 @@ import org.slf4j.Logger
 
       if (log.isDebugEnabled)
         delay.foreach { d =>
-          log.debug(
-            "{} query [{}] from slices [{} - {}] delay next [{}] ms.",
-            logPrefix,
-            state.queryCount,
-            minSlice,
-            maxSlice,
-            d.toMillis)
+          log.debug("{} query [{}] delay next [{}] ms.", logPrefix, state.queryCount, d.toMillis)
         }
 
       delay
@@ -517,12 +496,10 @@ import org.slf4j.Logger
           else
             ""
         log.debug(
-          "{} next query [{}]{} from slices [{} - {}], between time [{} - {}]. {}",
+          "{} next query [{}]{}, between time [{} - {}]. {}",
           logPrefix,
           newState.queryCount,
           backtrackingInfo,
-          minSlice,
-          maxSlice,
           fromTimestamp,
           toTimestamp.getOrElse("None"),
           if (newIdleCount >= 3) s"Idle in [$newIdleCount] queries."
@@ -617,12 +594,10 @@ import org.slf4j.Logger
         if (log.isDebugEnabled) {
           val sum = counts.iterator.map { case Bucket(_, count) => count }.sum
           log.debug(
-            "{} retrieved [{}] event count buckets, with a total of [{}], from slices [{} - {}], from time [{}]",
+            "{} retrieved [{}] event count buckets, with a total of [{}], from time [{}]",
            logPrefix,
            counts.size,
            sum,
-            minSlice,
-            maxSlice,
            fromTimestamp)
         }
         newState
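All of the call sites above follow the same SLF4J pattern: an isDebugEnabled guard around a parameterized log.debug call, with logPrefix bound to the leading "{}" placeholder. A self-contained sketch of that pattern, assuming hypothetical names (LogPrefixSketch and logNextQuery are illustrations, not part of the PR):

    import org.slf4j.LoggerFactory

    object LogPrefixSketch {
      private val log = LoggerFactory.getLogger(getClass)

      // The guard skips argument construction entirely when debug logging is off;
      // the "{}" placeholders are substituted by SLF4J rather than by string concatenation.
      def logNextQuery(logPrefix: String, queryCount: Int, rowCount: Int): Unit =
        if (log.isDebugEnabled)
          log.debug("{} next query [{}]. Found [{}] rows in previous query.", logPrefix, queryCount, rowCount)
    }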
@@ -270,7 +270,12 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
       maxSlice: Int,
       offset: Offset): Source[EventEnvelope[Event], NotUsed] = {
     bySlice(entityType, minSlice)
-      .currentBySlices("currentEventsBySlices", entityType, minSlice, maxSlice, offset)
+      .currentBySlices(
+        s"[$entityType] currentEventsBySlices [$minSlice-$maxSlice]: ",
+        entityType,
+        minSlice,
+        maxSlice,
+        offset)
   }
 
   /**
@@ -312,7 +317,12 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
       maxSlice: Int,
       offset: Offset): Source[EventEnvelope[Event], NotUsed] = {
     val dbSource =
-      bySlice[Event](entityType, minSlice).liveBySlices("eventsBySlices", entityType, minSlice, maxSlice, offset)
+      bySlice[Event](entityType, minSlice).liveBySlices(
+        s"[$entityType] eventsBySlices [$minSlice-$maxSlice]: ",
+        entityType,
+        minSlice,
+        maxSlice,
+        offset)
     if (settings.journalPublishEvents) {
       val pubSubSource = eventsBySlicesPubSubSource[Event](entityType, minSlice, maxSlice)
       mergeDbAndPubSubSources(dbSource, pubSubSource)
@@ -345,7 +355,12 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
 
     val snapshotSource =
       snapshotsBySlice[Snapshot, Event](entityType, minSlice, transformSnapshot)
-        .currentBySlices("currentSnapshotsBySlices", entityType, minSlice, maxSlice, offset)
+        .currentBySlices(
+          s"[$entityType] currentSnapshotsBySlices [$minSlice-$maxSlice]: ",
+          entityType,
+          minSlice,
+          maxSlice,
+          offset)
 
     Source.fromGraph(
       new StartingFromSnapshotStage[Event](
@@ -368,7 +383,7 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
           snapshotOffsets.size)
 
         bySlice(entityType, minSlice).currentBySlices(
-          "currentEventsBySlices",
+          s"[$entityType] currentEventsBySlices [$minSlice-$maxSlice]: ",
           entityType,
           minSlice,
           maxSlice,
@@ -402,7 +417,12 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
 
     val snapshotSource =
       snapshotsBySlice[Snapshot, Event](entityType, minSlice, transformSnapshot)
-        .currentBySlices("snapshotsBySlices", entityType, minSlice, maxSlice, offset)
+        .currentBySlices(
+          s"[$entityType] snapshotsBySlices [$minSlice-$maxSlice]: ",
+          entityType,
+          minSlice,
+          maxSlice,
+          offset)
 
     Source.fromGraph(
       new StartingFromSnapshotStage[Event](
@@ -426,7 +446,7 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
 
     val dbSource =
       bySlice[Event](entityType, minSlice).liveBySlices(
-        "eventsBySlices",
+        s"[$entityType] eventsBySlices [$minSlice-$maxSlice]: ",
         entityType,
         minSlice,
         maxSlice,
@@ -697,11 +717,18 @@ final class R2dbcReadJournal(system: ExtendedActorSystem, config: Config, cfgPat
 
   // EventTimestampQuery
   override def timestampOf(persistenceId: String, sequenceNr: Long): Future[Option[Instant]] = {
-    queryDao.timestampOfEvent(persistenceId, sequenceNr)
+    val result = queryDao.timestampOfEvent(persistenceId, sequenceNr)
+    if (log.isDebugEnabled) {
+      result.foreach { t =>
+        log.debug("[{}] timestampOf seqNr [{}] is [{}]", persistenceId, sequenceNr, t)
+      }
+    }
+    result
   }
 
   //LoadEventQuery
   override def loadEnvelope[Event](persistenceId: String, sequenceNr: Long): Future[EventEnvelope[Event]] = {
+    log.debug("[{}] loadEnvelope seqNr [{}]", persistenceId, sequenceNr)
     queryDao
       .loadEvent(persistenceId, sequenceNr, includePayload = true)
       .map {
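The timestampOf change logs the looked-up timestamp on completion of the Future rather than eagerly before the call. Future.foreach runs its callback only on successful completion and returns Unit, so the original Future can be handed back to the caller unchanged. A minimal sketch of that shape, with made-up names rather than the project's code:

    import scala.concurrent.{ ExecutionContext, Future }

    // Hypothetical helper: attach side-effect logging to a Future without altering it.
    def withCompletionLog[T](fut: Future[T])(logIt: T => Unit)(implicit ec: ExecutionContext): Future[T] = {
      fut.foreach(logIt) // fires only on success, returns Unit
      fut                // the caller still gets the original Future
    }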
@@ -307,14 +307,24 @@ class R2dbcDurableStateStore[A](system: ExtendedActorSystem, config: Config, cfg
       minSlice: Int,
       maxSlice: Int,
       offset: Offset): Source[DurableStateChange[A], NotUsed] =
-    bySlice.currentBySlices("currentChangesBySlices", entityType, minSlice, maxSlice, offset)
+    bySlice.currentBySlices(
+      s"[$entityType] currentChangesBySlices [$minSlice-$maxSlice]: ",
+      entityType,
+      minSlice,
+      maxSlice,
+      offset)
 
   override def changesBySlices(
       entityType: String,
       minSlice: Int,
       maxSlice: Int,
       offset: Offset): Source[DurableStateChange[A], NotUsed] =
-    bySlice.liveBySlices("changesBySlices", entityType, minSlice, maxSlice, offset)
+    bySlice.liveBySlices(
+      s"[$entityType] changesBySlices [$minSlice-$maxSlice]: ",
+      entityType,
+      minSlice,
+      maxSlice,
+      offset)
 
   /**
    * Note: If you have configured `custom-table` this query will look in both the default table and the custom tables.