
Commit

Metric cleanup (#50)
* Metrics cleanup, removed unnecessary meters, renamed label variables

* Fix test
adam-fowler authored Jan 9, 2025
1 parent 7ec9be1 commit 1fb1585
Showing 3 changed files with 21 additions and 29 deletions.
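
In short: the shared metricsLabel constant becomes counterLabel, dedicated timerLabel and queuedTimerLabel constants replace string interpolation, the discarded-job Meter becomes a Counter, and the per-completion Meter increment is dropped. As a rough sketch of the swift-metrics calls a finished job produces after this change (the job name and status value below are illustrative placeholders, not taken from the diff):

import Dispatch
import Metrics

// Illustrative placeholders; the real values come from the job being run.
let jobName = "example-job"
let status = "completed"  // one of JobMetricsHelper.JobStatus's raw values
let startTime = DispatchTime.now().uptimeNanoseconds

// ... job executes ...

// Execution time, now keyed by timerLabel ("swift.jobs.duration").
Timer(
    label: "swift.jobs.duration",
    dimensions: [("name", jobName), ("status", status)],
    preferredDisplayUnit: .seconds
).recordNanoseconds(DispatchTime.now().uptimeNanoseconds - startTime)

// Job count by name and status, now keyed by counterLabel ("swift.jobs").
Counter(
    label: "swift.jobs",
    dimensions: [("name", jobName), ("status", status)]
).increment()

The gauge-like Meter under "swift.jobs.meter" (queued/processing jobs) keeps its label and is not shown here.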
Sources/Jobs/JobMetricsHelper.swift (12 additions, 20 deletions)
@@ -16,12 +16,16 @@ import Metrics
 
 /// OTEL labels and dimensions
 internal enum JobMetricsHelper {
-    /// Metrics label
-    static let metricsLabel: String = "swift.jobs"
-    /// Meter label for Processsing, Queued, Failed and Completed
+    /// Counter label
+    static let counterLabel: String = "swift.jobs"
+    /// Job duration timer label
+    static let timerLabel: String = "swift.jobs.duration"
+    /// Job queued timer label
+    static let queuedTimerLabel: String = "swift.jobs.queued.duration"
+    /// Meter label for Processsing, Queued
     static let meterLabel: String = "swift.jobs.meter"
-    /// Meter label for discarded jobs
-    static let discardedMeter: String = "swift.jobs.discarded"
+    /// Counter label for discarded jobs
+    static let discardedCounter: String = "swift.jobs.discarded"
     /// Used for the histogram which can be useful to see by job status
     enum JobStatus: String, Codable, Sendable {
         case queued
@@ -46,21 +50,9 @@ internal enum JobMetricsHelper {
         error: Error? = nil,
         retrying: Bool = false
     ) {
-        // This meter can be used to display total job
-        // Or decrement processing vector in Prometheus UI or Grafana
-        // with something like count(swif_jobs_meter{status="processing"}
-        // unless on(jobID) (swif_jobs_meter{status="queued"})
-        // or (swif_jobs_meter{status="completed")) or vector(0)
-        Meter(
-            label: JobMetricsHelper.meterLabel,
-            dimensions: [
-                ("status", JobMetricsHelper.JobStatus.completed.rawValue)
-            ]
-        ).increment()
-
         if retrying {
             Counter(
-                label: Self.metricsLabel,
+                label: Self.counterLabel,
                 dimensions: [("name", name), ("status", JobStatus.retried.rawValue)]
             ).increment()
             return
@@ -84,14 +76,14 @@ internal enum JobMetricsHelper {
 
         // Calculate job execution time
         Timer(
-            label: "\(Self.metricsLabel).duration",
+            label: Self.timerLabel,
             dimensions: dimensions,
             preferredDisplayUnit: .seconds
         ).recordNanoseconds(DispatchTime.now().uptimeNanoseconds - startTime)
 
         // Increment job counter base on status
         Counter(
-            label: Self.metricsLabel,
+            label: Self.counterLabel,
             dimensions: dimensions
         ).increment()
     }
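
A note on the Meter to Counter switch for discarded jobs: in swift-metrics a Meter is a gauge-style instrument whose value can move up and down, while a Counter only ever accumulates, which is the better fit for a running total of discards. A minimal sketch of the renamed metric (the reason value is one of the two emitted by JobQueueHandler below):

import Metrics

// Monotonic count of discarded jobs, tagged with why they were dropped.
// Label matches the new JobMetricsHelper.discardedCounter ("swift.jobs.discarded").
Counter(
    label: "swift.jobs.discarded",
    dimensions: [("reason", "DECODE_FAILED")]
).increment()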
Sources/Jobs/JobQueueHandler.swift (5 additions, 5 deletions)
@@ -99,8 +99,8 @@ final class JobQueueHandler<Queue: JobQueueDriver>: Sendable {
         } catch let error as JobQueueError where error == .unrecognisedJobId {
             logger.debug("Failed to find Job with ID while decoding")
             try await self.queue.failed(jobId: queuedJob.id, error: error)
-            Meter(
-                label: JobMetricsHelper.discardedMeter,
+            Counter(
+                label: JobMetricsHelper.discardedCounter,
                 dimensions: [
                     ("reason", "INVALID_JOB_ID")
                 ]
@@ -109,8 +109,8 @@ final class JobQueueHandler<Queue: JobQueueDriver>: Sendable {
         } catch {
             logger.debug("Job failed to decode")
             try await self.queue.failed(jobId: queuedJob.id, error: JobQueueError.decodeJobFailed)
-            Meter(
-                label: JobMetricsHelper.discardedMeter,
+            Counter(
+                label: JobMetricsHelper.discardedCounter,
                 dimensions: [
                     ("reason", "DECODE_FAILED")
                 ]
@@ -122,7 +122,7 @@ final class JobQueueHandler<Queue: JobQueueDriver>: Sendable {
         // Calculate wait time from queued to processing
         let jobQueuedDuration = Date.now.timeIntervalSince(job.queuedAt)
         Timer(
-            label: "\(JobMetricsHelper.metricsLabel).queued.duration",
+            label: JobMetricsHelper.queuedTimerLabel,
             dimensions: [("name", job.name)],
             preferredDisplayUnit: .seconds
         ).recordSeconds(jobQueuedDuration)
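
The queued-time measurement above now uses the dedicated queuedTimerLabel rather than interpolating the old metricsLabel. The recorded value is a plain TimeInterval in seconds, which recordSeconds accepts directly; a standalone sketch with a made-up enqueue time (the job name is illustrative):

import Foundation
import Metrics

// Illustrative: pretend the job sat in the queue for roughly 2.5 seconds.
let queuedAt = Date.now.addingTimeInterval(-2.5)
let jobQueuedDuration = Date.now.timeIntervalSince(queuedAt)

// Same call shape as in JobQueueHandler, under "swift.jobs.queued.duration".
Timer(
    label: "swift.jobs.queued.duration",
    dimensions: [("name", "example-job")],
    preferredDisplayUnit: .seconds
).recordSeconds(jobQueuedDuration)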
Tests/JobsTests/MetricsTests.swift (4 additions, 4 deletions)
@@ -346,10 +346,10 @@ final class MetricsTests: XCTestCase {
             XCTAssertEqual($0, "test")
         }
 
-        let queuedMeter = try XCTUnwrap(Self.testMetrics.meters.withLockedValue { $0 }["swift.jobs.discarded"] as? TestMeter)
-        XCTAssertEqual(queuedMeter.dimensions.count, 1)
-        XCTAssertEqual(queuedMeter.dimensions[0].0, "reason")
-        XCTAssertEqual(queuedMeter.dimensions[0].1, "DECODE_FAILED")
+        let discardedCounter = try XCTUnwrap(Self.testMetrics.counters.withLockedValue { $0 }["swift.jobs.discarded"] as? TestCounter)
+        XCTAssertEqual(discardedCounter.dimensions.count, 1)
+        XCTAssertEqual(discardedCounter.dimensions[0].0, "reason")
+        XCTAssertEqual(discardedCounter.dimensions[0].1, "DECODE_FAILED")
     }
 
     func testErrorRetryAndThenSucceed() async throws {
