module Gitlab
  module Metrics
    # Class that sends certain metrics to InfluxDB at a specific interval.
    #
    # This class is used to gather statistics that can't be directly associated
    # with a transaction such as system memory usage, garbage collection
    # statistics, etc.
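    #
    # Example (illustrative only; the interval argument here is arbitrary):
    #
    #     Gitlab::Metrics::Sampler.new(15).start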
    class Sampler
      # interval - The sampling interval in seconds.
      def initialize(interval = Metrics.settings[:sample_interval])
        interval_half = interval.to_f / 2

        @interval       = interval
        @interval_steps = (-interval_half..interval_half).step(0.1).to_a
        @last_step      = nil

        @metrics = []

        @last_minor_gc = Delta.new(GC.stat[:minor_gc_count])
        @last_major_gc = Delta.new(GC.stat[:major_gc_count])

        if Gitlab::Metrics.mri?
          require 'allocations'

          Allocations.start
        end
      end

      def start
        Thread.new do
          Thread.current.abort_on_exception = true

          loop do
            sleep(sleep_interval)

            sample
          end
        end
      end

      def sample
        sample_memory_usage
        sample_file_descriptors
        sample_objects
        sample_gc

        flush
      ensure
        GC::Profiler.clear

        @metrics.clear
      end

      def flush
        Metrics.submit_metrics(@metrics.map(&:to_hash))
      end

      def sample_memory_usage
        add_metric('memory_usage', value: System.memory_usage)
      end

      def sample_file_descriptors
        add_metric('file_descriptors', value: System.file_descriptor_count)
      end

      if Metrics.mri?
        def sample_objects
          sample = Allocations.to_hash
          counts = sample.each_with_object({}) do |(klass, count), hash|
            name = klass.name

            next unless name

            hash[name] = count
          end

          # Symbols aren't allocated so we'll need to add those manually.
          counts['Symbol'] = Symbol.all_symbols.length

          counts.each do |name, count|
            add_metric('object_counts', { count: count }, type: name)
          end
        end
      else
        def sample_objects
        end
      end

      def sample_gc
        time  = GC::Profiler.total_time * 1000.0
        stats = GC.stat.merge(total_time: time)

        # We want the difference of GC runs compared to the last sample, not the
        # total amount since the process started.
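        #
        # For example (illustrative): if the previous sample saw a total of 10
        # minor GC runs and GC.stat now reports 12, a value of 2 is submitted.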
        stats[:minor_gc_count] =
          @last_minor_gc.compared_with(stats[:minor_gc_count])

        stats[:major_gc_count] =
          @last_major_gc.compared_with(stats[:major_gc_count])

        stats[:count] = stats[:minor_gc_count] + stats[:major_gc_count]

        add_metric('gc_statistics', stats)
      end

      def add_metric(series, values, tags = {})
        prefix = sidekiq? ? 'sidekiq_' : 'rails_'

        @metrics << Metric.new("#{prefix}#{series}", values, tags)
      end

      def sidekiq?
        Sidekiq.server?
      end

      # Returns the sleep interval with a random adjustment.
      #
      # The random adjustment is put in place to ensure we:
      #
      # 1. Don't generate samples at the exact same interval every time (thus
      #    potentially missing anything that happens in between samples).
      # 2. Don't sample data at the same interval two times in a row.
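      #
      # For example (illustrative), with an interval of 15 seconds the steps
      # range from -7.5 to +7.5 in increments of 0.1, so consecutive samples
      # are taken anywhere between roughly 7.5 and 22.5 seconds apart, never
      # at the same offset twice in a row.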
      def sleep_interval
        while step = @interval_steps.sample
          if step != @last_step
            @last_step = step

            return @interval + @last_step
          end
        end
      end
    end
  end
end