@@ -11,11 +11,11 @@ def get_redis_ts():
     return get_redis_client().ts()


-@router.get("/builds-per-hour")
-def get_builds_per_hour():
+@router.get("/builds-per-day")
+def get_builds_per_day():
     ts = get_redis_ts()
     now = int(datetime.utcnow().timestamp() * 1000)
-    start = now - 7 * 24 * 60 * 60 * 1000  # last 24 hours
+    start = now - 30 * 24 * 60 * 60 * 1000  # last 30 days

     # aggregate all time series labeled with stats=builds
     results = ts.mrange(
@@ -24,24 +24,24 @@ def get_builds_per_hour():
         filters=["stats=builds"],
         with_labels=False,
         aggregation_type="sum",
-        bucket_size_msec=3600000,  # 1 hour
+        bucket_size_msec=86400000,  # 1 day (24 hours)
     )

     # create a map from timestamp to build count
-    hourly_counts = {}
+    daily_counts = {}

     for entry in results:
         data = list(entry.values())[0][1]
         for ts, value in data:
-            hourly_counts[ts] = hourly_counts.get(ts, 0) + int(value)
+            daily_counts[ts] = daily_counts.get(ts, 0) + int(value)

     # sort by timestamp
-    sorted_data = sorted(hourly_counts.items())
+    sorted_data = sorted(daily_counts.items())

     labels = [datetime.utcfromtimestamp(ts / 1000).isoformat() for ts, _ in sorted_data]
     values = [count for _, count in sorted_data]

     return {
         "labels": labels,
-        "datasets": [{"label": "Builds per hour", "data": values}],
+        "datasets": [{"label": "Builds per day", "data": values}],
     }
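
A minimal usage sketch of the new endpoint (not part of the patch): it seeds one series carrying the stats=builds label via redis-py and calls /builds-per-day with FastAPI's TestClient. The key name builds:ci, the app.main import path, and mounting the router without a path prefix are assumptions for illustration only; a running Redis with the RedisTimeSeries module is required.

# Sketch only, not part of the diff above.
from datetime import datetime

import redis
from fastapi.testclient import TestClient

from app.main import app  # hypothetical module exposing the FastAPI app with this router

r = redis.Redis()
# "builds:ci" is a hypothetical key; the endpoint aggregates any series labeled stats=builds
r.ts().create("builds:ci", labels={"stats": "builds"})

now_ms = int(datetime.utcnow().timestamp() * 1000)
for days_ago in range(3):
    # record one build per day for the last three days
    r.ts().add("builds:ci", now_ms - days_ago * 86_400_000, 1)

client = TestClient(app)
payload = client.get("/builds-per-day").json()
# payload follows the shape returned by the endpoint, e.g.
# {"labels": [...one ISO timestamp per daily bucket...],
#  "datasets": [{"label": "Builds per day", "data": [1, 1, 1]}]}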