Skip to content

Commit

Permalink
Fix: clickhousesystemtablesreceiver: fix scraper waiting forever after ch downtime (#407)
Browse files Browse the repository at this point in the history

fixes #406
  • Loading branch information
raj-k-singh authored Sep 20, 2024
1 parent 587debb commit 8969d16
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 1 deletion.
7 changes: 6 additions & 1 deletion receiver/clickhousesystemtablesreceiver/receiver.go
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,12 @@ func (r *systemTablesReceiver) scrapeQueryLogIfReady(ctx context.Context) (
// For example, this can happen if this was the first successful scrape
// after several failed attempts and subsequent waits for r.ScrapeIntervalSeconds
nextScrapeMinServerTs := r.nextScrapeIntervalStartTs + r.scrapeIntervalSeconds + r.scrapeDelaySeconds
nextWaitSeconds := max(0, nextScrapeMinServerTs-serverTsNow)

nextWaitSeconds := uint32(0)
if nextScrapeMinServerTs > serverTsNow {
// Do the subtraction only if it will not lead to an overflow/wrap around
nextWaitSeconds = nextScrapeMinServerTs - serverTsNow
}

return nextWaitSeconds, nil
}
Expand Down
10 changes: 10 additions & 0 deletions receiver/clickhousesystemtablesreceiver/receiver_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -127,4 +127,14 @@ func TestReceiver(t *testing.T) {
et, exists := lr.Attributes().Get("event_time")
require.True(exists)
require.Equal(et.Str(), testQlEventTime.Format(time.RFC3339))

// should scrape again immediately if scrape is too far behind the server ts
// for example: this can happen if clickhouse goes down for some time
mockQuerrier.tsNow += 10 * testScrapeIntervalSeconds
testQl4 := makeTestQueryLog("host-4", time.Now(), "test query 4")
mockQuerrier.nextScrapeResult = []QueryLog{testQl4}

waitSeconds, err = testReceiver.scrapeQueryLogIfReady(context.Background())
require.Nil(err)
require.Equal(uint32(0), waitSeconds)
}

0 comments on commit 8969d16

Please sign in to comment.