Skip to content

Commit

Permalink
Update estimated latency metric in tsoStream instead of tsoDispatcher
Browse files Browse the repository at this point in the history
Signed-off-by: MyonKeminta <MyonKeminta@users.noreply.github.com>
  • Loading branch information
MyonKeminta committed Sep 25, 2024
1 parent 0bfe9a6 commit af1c3d2
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 1 deletion.
1 change: 0 additions & 1 deletion client/tso_dispatcher.go
Original file line number Diff line number Diff line change
Expand Up @@ -353,7 +353,6 @@ tsoBatchLoop:
// continue collecting.
if td.isConcurrentRPCEnabled() {
estimatedLatency := stream.EstimatedRPCLatency()
estimateTSOLatencyGauge.WithLabelValues(streamURL).Set(estimatedLatency.Seconds())
goalBatchTime := estimatedLatency / time.Duration(td.rpcConcurrency)

failpoint.Inject("tsoDispatcherConcurrentModeAssertDelayDuration", func(val failpoint.Value) {
Expand Down
2 changes: 2 additions & 0 deletions client/tso_stream.go
Original file line number Diff line number Diff line change
Expand Up @@ -386,6 +386,8 @@ func (s *tsoStream) recvLoop(ctx context.Context) {
currentSample := math.Log(float64(latency.Microseconds()))
filteredValue := filter.update(sampleTime, currentSample)
s.estimatedLatencyMicros.Store(uint64(math.Exp(filteredValue)))
// `filteredValue` is the filtered log-latency (log of microseconds); take
// `math.Exp` to recover microseconds, then scale to seconds for the metric.
estimateTSOLatencyGauge.WithLabelValues(s.streamID).Set(math.Exp(filteredValue) * 1e-6)
}

recvLoop:
Expand Down

0 comments on commit af1c3d2

Please sign in to comment.