#!/usr/bin/env python
#
# Run whisper-resize on all *.wsp files under the top directory to set their
# storage size and aggregations to match the current values configured in
# storage-schemas.conf and storage-aggregation.conf
#
# Usage:
#   export GRAPHITE_ROOT=....                       # or
#   export GRAPHITE_STORAGE_DIR=/var/lib/graphite   # Debian or Ubuntu
#   carbon-multi-resize [UPDATE_BASE]
#
# where UPDATE_BASE is a subdirectory of GRAPHITE_STORAGE_DIR, eg
# "/var/lib/graphite/whisper/collected/device1".  If omitted, all of
# GRAPHITE_STORAGE_DIR will be processed.
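#
# For example (paths here are illustrative only):
#   export GRAPHITE_STORAGE_DIR=/var/lib/graphite
#   carbon-multi-resize /var/lib/graphite/whisper/collected/device1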
#
# *.wsp files that already have storage space matching what is
# configured in storage-schemas.conf will be automatically skipped.
#
# Environment variables recognised, to match Debian /etc/carbon
# settings:
# - GRAPHITE_ROOT
# - GRAPHITE_CONF_DIR
# - GRAPHITE_STORAGE_DIR
# - GRAPHITE_BIN_DIR
#
# The program will attempt to automatically locate unspecified values,
# either relative to GRAPHITE_ROOT, or in Debian/Ubuntu packaged default
# locations if those exist.
#
# Copyright viralshah (https://github.com/viralshah), 2013.
# Copyright TheVirtual Ltd (http://www.thevirtual.co.nz/), 2015.
#
# Originally written by viralshah (https://github.com/viralshah), 2013
# Updated by Ewen McNeill <[email protected]>, 2015-06-30, for Debian layout
#---------------------------------------------------------------------------
#
import os
from os.path import abspath, dirname, exists, join, normpath
import re
import subprocess
import sys
from carbon.conf import OrderedConfigParser
from carbon.util import pickle
import carbon.exceptions
import whisper
# Permit path overrides from environment
#   GRAPHITE_ROOT        - Root directory of the graphite installation.
#                          Defaults to ../
#   GRAPHITE_CONF_DIR    - Configuration directory (where this file lives).
#                          Defaults to $GRAPHITE_ROOT/conf/
#   GRAPHITE_STORAGE_DIR - Storage directory for whisper/rrd/log/pid files.
#                          Defaults to $GRAPHITE_ROOT/storage/
#   GRAPHITE_BIN_DIR     - Directory with whisper-resize
#                          Defaults to $GRAPHITE_ROOT/bin/
#
GRAPHITE_ROOT = os.environ.get('GRAPHITE_ROOT')
GRAPHITE_STORAGE_DIR = os.environ.get('GRAPHITE_STORAGE_DIR')
GRAPHITE_CONF_DIR = os.environ.get('GRAPHITE_CONF_DIR')
GRAPHITE_BIN_DIR = os.environ.get('GRAPHITE_BIN_DIR')
# Base for updates, from command line (optional, defaults to LOCAL_DATA_DIR
# below if not set)
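# ((sys.argv + [None])[1] evaluates to sys.argv[1] when an argument was
# supplied, and to None otherwise)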
UPDATE_BASE = (sys.argv + [None])[1]
# Defaults, relative to GRAPHITE_ROOT
if GRAPHITE_ROOT and not GRAPHITE_STORAGE_DIR:
    GRAPHITE_STORAGE_DIR = join(GRAPHITE_ROOT, 'storage')
if GRAPHITE_ROOT and not GRAPHITE_CONF_DIR:
    GRAPHITE_CONF_DIR = join(GRAPHITE_ROOT, 'conf')
if GRAPHITE_ROOT and not GRAPHITE_BIN_DIR:
    GRAPHITE_BIN_DIR = join(GRAPHITE_ROOT, 'bin')
# Debian specific overrides, used if the graphite default ones are missing
# and these ones exist (ie, looks like a Debian/Ubuntu package layout)
DEBIAN_GRAPHITE_BIN_DIR = '/usr/bin'
DEBIAN_GRAPHITE_CONF_DIR = '/etc/carbon'
DEBIAN_GRAPHITE_STORAGE_DIR = '/var/lib/graphite'
DEBIAN_WHISPER_RESIZE = 'whisper-resize'
if exists(DEBIAN_GRAPHITE_BIN_DIR) and (not GRAPHITE_BIN_DIR or
                                        not exists(GRAPHITE_BIN_DIR)):
    GRAPHITE_BIN_DIR = DEBIAN_GRAPHITE_BIN_DIR

if exists(DEBIAN_GRAPHITE_CONF_DIR) and (not GRAPHITE_CONF_DIR or
                                         not exists(GRAPHITE_CONF_DIR)):
    GRAPHITE_CONF_DIR = DEBIAN_GRAPHITE_CONF_DIR

if exists(DEBIAN_GRAPHITE_STORAGE_DIR) and (not GRAPHITE_STORAGE_DIR or
                                            not exists(GRAPHITE_STORAGE_DIR)):
    GRAPHITE_STORAGE_DIR = DEBIAN_GRAPHITE_STORAGE_DIR
# Validate that we actually have paths to everything
if not (GRAPHITE_STORAGE_DIR and GRAPHITE_CONF_DIR and GRAPHITE_BIN_DIR):
    print("Unable to find graphite directories - set one or more of:")
    print("  GRAPHITE_ROOT")
    print("  GRAPHITE_BIN_DIR")
    print("  GRAPHITE_CONF_DIR")
    print("  GRAPHITE_STORAGE_DIR")
    print("(defaults are relative to GRAPHITE_ROOT)")
    sys.exit(1)
# Derived paths to specific things
WHITELISTS_DIR = join(GRAPHITE_STORAGE_DIR, 'lists')
LOCAL_DATA_DIR = join(GRAPHITE_STORAGE_DIR, 'whisper')
WHISPER_RESIZE = join(GRAPHITE_BIN_DIR, 'whisper-resize.py')
if exists(join(GRAPHITE_BIN_DIR, DEBIAN_WHISPER_RESIZE)) and \
        not exists(WHISPER_RESIZE):
    WHISPER_RESIZE = join(GRAPHITE_BIN_DIR, DEBIAN_WHISPER_RESIZE)
STORAGE_SCHEMAS_CONFIG = join(GRAPHITE_CONF_DIR, 'storage-schemas.conf')
STORAGE_AGGREGATION_CONFIG = join(GRAPHITE_CONF_DIR, 'storage-aggregation.conf')
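# On a Debian/Ubuntu package layout these typically resolve to the following
# (illustrative, assuming the Debian defaults above):
#   WHISPER_RESIZE             = /usr/bin/whisper-resize
#   STORAGE_SCHEMAS_CONFIG     = /etc/carbon/storage-schemas.conf
#   STORAGE_AGGREGATION_CONFIG = /etc/carbon/storage-aggregation.conf
#   LOCAL_DATA_DIR             = /var/lib/graphite/whisper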
# Default to processing all local data, but otherwise ensure that
# path names are likely to appear to be within LOCAL_DATA_DIR
if UPDATE_BASE:
    UPDATE_BASE = os.path.abspath(UPDATE_BASE)
else:
    UPDATE_BASE = os.path.abspath(LOCAL_DATA_DIR)

# Hacky test that UPDATE_BASE is inside LOCAL_DATA_DIR
if normpath(LOCAL_DATA_DIR) not in UPDATE_BASE:
    print("UPDATE_BASE must be a subdirectory of LOCAL_DATA_DIR")
    print("(otherwise storage schemas selection will fail, and all")
    print(" files will end up with the default storage schema)")
    sys.exit(1)
#---------------------------------------------------------------------------
class Schema:
    def test(self, metric):
        raise NotImplementedError()

    def matches(self, metric):
        return bool(self.test(metric))


class DefaultSchema(Schema):
    def __init__(self, name, archives):
        self.name = name
        self.archives = archives

    def test(self, metric):
        return True


class PatternSchema(Schema):
    def __init__(self, name, pattern, archives):
        self.name = name
        self.pattern = pattern
        self.regex = re.compile(pattern)
        self.archives = archives

    def test(self, metric):
        return self.regex.search(metric)


class ListSchema(Schema):
    def __init__(self, name, listName, archives):
        self.name = name
        self.listName = listName
        self.archives = archives
        self.path = join(WHITELISTS_DIR, listName)

        if exists(self.path):
            self.mtime = os.stat(self.path).st_mtime
            fh = open(self.path, 'rb')
            self.members = pickle.load(fh)
            fh.close()
        else:
            self.mtime = 0
            self.members = frozenset()

    def test(self, metric):
        if exists(self.path):
            current_mtime = os.stat(self.path).st_mtime
            if current_mtime > self.mtime:
                self.mtime = current_mtime
                fh = open(self.path, 'rb')
                self.members = pickle.load(fh)
                fh.close()
        return metric in self.members


class Archive:
    def __init__(self, secondsPerPoint, points):
        self.secondsPerPoint = int(secondsPerPoint)
        self.points = int(points)

    def __str__(self):
        return "Archive = (Seconds per point: %d, Datapoints to save: %d)" % (
            self.secondsPerPoint, self.points)

    def getTuple(self):
        return (self.secondsPerPoint, self.points)

    @staticmethod
    def fromString(retentionDef):
        (secondsPerPoint, points) = whisper.parseRetentionDef(retentionDef)
        return Archive(secondsPerPoint, points)
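
# Example storage-schemas.conf section of the kind loadStorageSchemas() below
# parses (section name and values are illustrative only):
#
#   [collectd]
#   pattern = ^collectd\.
#   retentions = 60s:7d,300s:30d
#
# Each retention definition ("60s:7d") is parsed by whisper.parseRetentionDef()
# into a (secondsPerPoint, points) tuple, e.g. (60, 10080).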
def loadStorageSchemas():
    schemaList = []
    config = OrderedConfigParser()
    config.read(STORAGE_SCHEMAS_CONFIG)

    for section in config.sections():
        options = dict(config.items(section))
        matchAll = options.get('match-all')
        pattern = options.get('pattern')

        retentions = options['retentions'].split(',')
        archives = [Archive.fromString(s) for s in retentions]

        if matchAll:
            mySchema = DefaultSchema(section, archives)
        elif pattern:
            mySchema = PatternSchema(section, pattern, archives)
        else:
            print "Section '%s' has no pattern or match-all, skipping" % section
            continue

        archiveList = [a.getTuple() for a in archives]

        try:
            whisper.validateArchiveList(archiveList)
            schemaList.append(mySchema)
        except whisper.InvalidConfiguration, e:
            print "Invalid schemas found in %s: %s" % (section, e)
            schemaList.append(defaultSchema)

    return schemaList
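
# Example storage-aggregation.conf section of the kind loadAggregationSchemas()
# below parses (values are illustrative only):
#
#   [min]
#   pattern = \.min$
#   xFilesFactor = 0.1
#   aggregationMethod = min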
def loadAggregationSchemas():
    # NOTE: This abuses the Schema classes above, and should probably be refactored.
    schemaList = []
    config = OrderedConfigParser()

    try:
        config.read(STORAGE_AGGREGATION_CONFIG)
    except IOError:
        print "%s not found, ignoring." % STORAGE_AGGREGATION_CONFIG
    except carbon.exceptions.CarbonConfigException:
        print "%s not found, ignoring." % STORAGE_AGGREGATION_CONFIG

    for section in config.sections():
        options = dict(config.items(section))
        matchAll = options.get('match-all')
        pattern = options.get('pattern')

        xFilesFactor = options.get('xfilesfactor')
        aggregationMethod = options.get('aggregationmethod')

        try:
            if xFilesFactor is not None:
                xFilesFactor = float(xFilesFactor)
                assert 0 <= xFilesFactor <= 1
            if aggregationMethod is not None:
                assert aggregationMethod in whisper.aggregationMethods
        except:
            print "Invalid schemas found in %s." % section
            continue

        archives = (xFilesFactor, aggregationMethod)

        if matchAll:
            mySchema = DefaultSchema(section, archives)
        elif pattern:
            mySchema = PatternSchema(section, pattern, archives)
        else:
            print "Section '%s' has no pattern or match-all, skipping" % section
            continue

        schemaList.append(mySchema)

    schemaList.append(defaultAggregation)
    return schemaList
defaultArchive = Archive(60, 60 * 24 * 7) #default retention for unclassified data (7 days of minutely data)
defaultSchema = DefaultSchema('default', [defaultArchive])
defaultAggregation = DefaultSchema('default', (None, None))
print "Loading storage-schemas configuration from: '%s'" % STORAGE_SCHEMAS_CONFIG
schemas = loadStorageSchemas()
print "Loading storage-aggregation configuration from: '%s'" % STORAGE_AGGREGATION_CONFIG
agg_schemas = loadAggregationSchemas()
#print schemas
#print agg_schemas
def get_archive_config(metric):
    archiveConfig = None
    xFilesFactor, aggregationMethod = None, None

    for schema in schemas:
        if schema.matches(metric):
            #print 'new metric %s matched schema %s' % (metric, schema.name)
            archiveConfig = [archive.getTuple() for archive in schema.archives]
            break

    for schema in agg_schemas:
        if schema.matches(metric):
            #print 'new metric %s matched aggregation schema %s' % (metric, schema.name)
            xFilesFactor, aggregationMethod = schema.archives
            break

    if not archiveConfig:
        raise Exception("No storage schema matched the metric '%s', check your"
                        " storage-schemas.conf file." % metric)

    return (archiveConfig, xFilesFactor, aggregationMethod)
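
# For instance, get_archive_config('collected.device1.load') might return
# ([(60, 10080), (300, 8640)], 0.5, 'average'); the actual values depend
# entirely on the local configuration files (example is illustrative only).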
def diff_file_conf(metric, filepath):
    """
    Returns True if the existing whisper file has parameters different from
    those in the configuration files
    """
    (archiveConfig, xFilesFactor, aggregationMethod) = get_archive_config(metric)
    info = whisper.info(filepath)

    if info['xFilesFactor'] != xFilesFactor or info['aggregationMethod'] != aggregationMethod:
        #print "{0} {1}".format(info['aggregationMethod'], aggregationMethod)
        #print "{0} {1}".format(info['xFilesFactor'], xFilesFactor)
        return True

    # A different number of archives also requires a resize
    if len(info['archives']) != len(archiveConfig):
        return True

    for (archivefile, archiveconf) in zip(info['archives'], archiveConfig):
        (secondsPerPoint, points) = archiveconf
        #print "{0} {1}".format(archivefile['secondsPerPoint'], secondsPerPoint)
        #print "{0} {1}".format(archivefile['points'], points)
        if archivefile['secondsPerPoint'] != secondsPerPoint or archivefile['points'] != points:
            return True

    return False
print("Processing data in %s" % UPDATE_BASE)
wsp_regex = re.compile('\.wsp$')
root_dir_regex = re.compile('^' + LOCAL_DATA_DIR + os.sep)
dir_sep_regex = re.compile(os.sep)
for root, dirs, files in os.walk(UPDATE_BASE):
for filename in [f for f in files if wsp_regex.search(f)]:
filepath = join(root, filename)
metric = dir_sep_regex.sub('.', wsp_regex.sub('',
root_dir_regex.sub('', filepath)))
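        # e.g. /var/lib/graphite/whisper/collected/device1/load.wsp becomes
        # the metric name "collected.device1.load" (path illustrative only)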
print "Processing {0}".format(filepath)
if diff_file_conf(metric, filepath):
#there is a difference and we need to resize the whisper file
(archiveConfig, xFilesFactor, aggregationMethod) = get_archive_config(metric)
command_args = [WHISPER_RESIZE, filepath]
for (secondsPerPoint, points) in archiveConfig:
command_args.append("{0}:{1}".format(secondsPerPoint, points))
command_args.append('--nobackup')
if aggregationMethod:
command_args.append('--aggregationMethod={0}'.format(aggregationMethod))
if xFilesFactor is not None:
command_args.append('--xFilesFactor={0}'.format(xFilesFactor))
#print ' '.join(command_args)
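            # The resulting command looks something like (illustrative only):
            #   /usr/bin/whisper-resize /var/lib/graphite/whisper/collected/device1/load.wsp \
            #       60:10080 300:8640 --nobackup --aggregationMethod=average \
            #       --xFilesFactor=0.5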
            subprocess.check_output(command_args)