## analyzeDBR.config
[general]
namespace = "DBR"
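## "namespace" appears to be the CloudWatch namespace that all metrics defined
## below are published under (an assumption based on the field name; its exact
## use is defined by the analyzeDBR tool, not in this file).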
[ri]
enableRIanalysis = true
enableRITotalUtilization = true # Set this to true to get a total RI percentage utilization value.
riPercentageThreshold = 5 # Ignore unused RIs where the percentage of under-use is lower than this value
riTotalThreshold = 5 # Ignore unused RIs where the total number of RIs (per instance type) is below this value
cwNameTotal = "riTotalUtilization"
cwName = "riUnderUtilization"
cwDimension = "instance"
cwDimensionTotal = "total"
cwType = "Percent"
sql = """
SELECT distinct
rtrim(split_part(split_part(split_part(itemdescription, 'hourly fee per ', 2), ',' ,1), '(', 1)) AS platform,
split_part(usagetype, ':', 2) AS instance,
substr(usagestartdate, 1, 13) AS date,
availabilityzone AS az,
count(*) AS hours
FROM dbr.autodbr_**DATE**
WHERE productname = 'Amazon Elastic Compute Cloud'
AND operation like '%RunInstances%'
AND usagetype like '%Usage%'
AND reservedinstance = 'Y'
AND split_part(usagetype, ':', 2) is not NULL
AND length(availabilityzone) > 1
AND length(usagestartdate) > 1
AND cast(usagestartdate as timestamp) > now() - interval '72' hour
GROUP BY
rtrim(split_part(split_part(split_part(itemdescription, 'hourly fee per ', 2), ',' ,1), '(', 1)),
split_part(usagetype, ':', 2),
substr(usagestartdate, 1, 13),
availabilityzone
"""
[ri.ignore] ## Ignore unused RIs listed in this map/hash
"t2.micro" = 1
"m1.small" = 1 # This has to be ignored as RI usage in DBR file for this instance type is not accurate
[[metrics]]
## Count of instance purchase types (RI, Spot, On-Demand) per hour
enabled = true
type = "dimension-per-row"
cwName = "InstancePurchaseType"
cwDimension = "type"
cwType = "Count"
sql = """
SELECT distinct
substr(split_part(usagetype, ':', 1), strpos(split_part(usagetype, ':', 1), '-') + 1, 10) AS dimension,
substr(usagestartdate, 1, 13) AS date,
count(*) AS value
FROM dbr.autodbr_**DATE**
WHERE productname = 'Amazon Elastic Compute Cloud'
AND operation like '%RunInstances%'
AND usagetype like '%Usage%'
AND cast(usagestartdate as timestamp) > now() - interval '24' hour
GROUP BY
substr(split_part(usagetype, ':', 1), strpos(split_part(usagetype, ':', 1), '-') + 1, 10),
substr(usagestartdate, 1, 13)
ORDER BY substr(usagestartdate, 1, 13) desc \
"""
[[metrics]]
## Summary of Overall Cost per hour
enabled = true
type = "dimension-per-row"
cwName = "TotalCost"
cwDimension = "cost"
cwType = "None"
sql = """
SELECT
'total' as dimension,
substr(usagestartdate, 1, 13) AS date,
sum(cast(**COST** as double)) AS value
FROM dbr.autodbr_**DATE**
WHERE length(usagestartdate) >= 19
AND cast(usagestartdate as timestamp) > now() - interval '24' hour
GROUP BY substr(usagestartdate, 1, 13)
ORDER BY substr(usagestartdate, 1, 13) desc \
"""
[[metrics]]
## Summary of Cost per service per hour
enabled = true
type = "dimension-per-row"
cwName = "ServiceCost"
cwDimension = "service"
cwType = "None"
sql = """
SELECT
productname AS dimension,
substr(usagestartdate, 1, 13) AS date,
sum(cast(**COST** as double)) AS value
FROM dbr.autodbr_**DATE**
WHERE length(usagestartdate) >= 19
AND cast(usagestartdate as timestamp) > now() - interval '24' hour
GROUP BY
productname,
substr(usagestartdate, 1, 13)
HAVING sum(cast(**COST** as double)) > 0
ORDER BY substr(usagestartdate, 1, 13), productname desc \
"""
[[metrics]]
## Count of Instance Types per Hour
enabled = true
type = "dimension-per-row"
cwName = "InstanceType"
cwDimension = "instance"
cwType = "Count"
sql = """
SELECT distinct
split_part(usagetype, ':', 2) AS dimension,
substr(usagestartdate, 1, 13) AS date,
count(*) AS value
FROM dbr.autodbr_**DATE**
WHERE productname = 'Amazon Elastic Compute Cloud'
AND operation like '%RunInstances%'
AND usagetype like '%Usage%'
AND cast(usagestartdate as timestamp) > now() - interval '24' hour
GROUP BY
split_part(usagetype, ':', 2),
substr(usagestartdate, 1, 13)
ORDER BY substr(usagestartdate, 1, 13), split_part(usagetype, ':', 2) desc
"""
[[metrics]]
## Cost per linked account per hour (sum of blended cost)
## Only enable this if you have linked accounts
enabled = false
type = "dimension-per-row"
cwName = "AccountCost"
cwDimension = "accountid"
cwType = "None"
sql = """
SELECT distinct
linkedaccountid AS dimension,
substr(usagestartdate, 1, 13) AS date,
sum(cast(blendedcost as double)) AS value
FROM dbr.autodbr_**DATE**
WHERE length(usagestartdate) >= 19
AND cast(usagestartdate as timestamp) > now() - interval '24' hour
AND length(linkedaccountid) > 1
GROUP BY
linkedaccountid,
substr(usagestartdate, 1, 13)
ORDER BY
substr(usagestartdate, 1, 13),
sum(cast(blendedcost as double)) desc
"""
[athena]
create_database = "create database if not exists `dbr` comment \"AutoDBR Athena Database\""
create_table = """
create external table if not exists `dbr.autodbr_**DATE**` (
`InvoiceID` string,
`PayerAccountId` string,
`LinkedAccountId` string,
`RecordType` string,
`RecordId` string,
`ProductName` string,
`RateId` string,
`SubscriptionId` string,
`PricingPlanId` string,
`UsageType` string,
`Operation` string,
`AvailabilityZone` string,
`ReservedInstance` string,
`ItemDescription` string,
`UsageStartDate` string,
`UsageEndDate` string,
`UsageQuantity` string,
`Rate` string,
`Cost` string
)
STORED AS PARQUET
LOCATION 's3://**BUCKET**/dbr-parquet/**ACCOUNT**-**DATE**/' \
"""
create_table_blended = """
create external table if not exists `dbr.autodbr_**DATE**` (
`InvoiceID` string,
`PayerAccountId` string,
`LinkedAccountId` string,
`RecordType` string,
`RecordId` string,
`ProductName` string,
`RateId` string,
`SubscriptionId` string,
`PricingPlanId` string,
`UsageType` string,
`Operation` string,
`AvailabilityZone` string,
`ReservedInstance` string,
`ItemDescription` string,
`UsageStartDate` string,
`UsageEndDate` string,
`UsageQuantity` string,
`BlendedRate` string,
`BlendedCost` string,
`UnBlendedRate` string,
`UnBlendedCost` string
)
STORED AS PARQUET
LOCATION 's3://**BUCKET**/dbr-parquet/**ACCOUNT**-**DATE**/' \
"""