Rewrite how historical stats are stored and calculated.

Closes #310.
Eugene Bujak 2018-09-06 02:11:36 +03:00
parent c7a5275d42
commit 04562dece3
3 changed files with 142 additions and 180 deletions
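The heart of the change is in stats.go below: instead of one hard-coded struct field per counter, every counter scraped from coredns is kept in a map keyed by its Prometheus metric name, and each key maps to a fixed-size history of per-interval deltas. What follows is a condensed, runnable sketch of that model, not the actual AdGuard code: the types and the rotate/delta helpers are taken from the diff, while the lastRotate/now bookkeeping is dropped and main() with its sample numbers is invented for illustration.

// A condensed sketch of the new model (simplified from the diff below):
// cumulative coredns counters are scraped, a per-tick delta is computed
// against the previous scrape, and that delta is accumulated into slot 0
// of a fixed-size history array kept per metric name.
package main

import "fmt"

const statsHistoryElements = 60 + 1 // +1 for calculating delta

type statsEntry map[string]float64
type statsEntries map[string][statsHistoryElements]float64

type periodicStats struct {
	entries statsEntries
}

// statsRotate shifts every metric's history by one slot, freeing slot 0.
func statsRotate(p *periodicStats) {
	for key, values := range p.entries {
		newValues := [statsHistoryElements]float64{}
		for i := 1; i < len(values); i++ {
			newValues[i] = values[i-1]
		}
		p.entries[key] = newValues
	}
}

// calcDelta returns current minus previously seen, per metric.
func calcDelta(current, seen statsEntry) statsEntry {
	delta := statsEntry{}
	for key, value := range current {
		delta[key] = value - seen[key]
	}
	return delta
}

// applyDelta adds the delta into slot 0 (the current interval).
func applyDelta(p *periodicStats, delta statsEntry) {
	for key, d := range delta {
		values := p.entries[key]
		values[0] += d
		p.entries[key] = values
	}
}

func main() {
	perSecond := periodicStats{entries: statsEntries{}}
	lastSeen := statsEntry{}

	// two consecutive scrapes of one cumulative counter (values invented)
	for _, scrape := range []statsEntry{
		{"coredns_dns_request_count_total": 100},
		{"coredns_dns_request_count_total": 103},
	} {
		statsRotate(&perSecond)
		applyDelta(&perSecond, calcDelta(scrape, lastSeen))
		lastSeen = scrape
	}

	// slot 0 now holds the 3 requests counted during the last tick
	fmt.Println(perSecond.entries["coredns_dns_request_count_total"][0]) // 3
}

Because slot 0 always holds what happened during the current interval, handleStats can sum a window of slots instead of subtracting cumulative snapshots, which is exactly what the first hunk below does.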


@@ -199,21 +199,23 @@ func handleStatus(w http.ResponseWriter, r *http.Request) {
 // stats
 // -----
 func handleStats(w http.ResponseWriter, r *http.Request) {
-	snap := &statistics.lastsnap
-
-	// generate from last 3 minutes
-	var last3mins statsSnapshot
-	last3mins.filteredTotal = snap.filteredTotal - statistics.perMinute.filteredTotal[2]
-	last3mins.filteredLists = snap.filteredLists - statistics.perMinute.filteredLists[2]
-	last3mins.filteredSafebrowsing = snap.filteredSafebrowsing - statistics.perMinute.filteredSafebrowsing[2]
-	last3mins.filteredParental = snap.filteredParental - statistics.perMinute.filteredParental[2]
-	last3mins.totalRequests = snap.totalRequests - statistics.perMinute.totalRequests[2]
-	last3mins.processingTimeSum = snap.processingTimeSum - statistics.perMinute.processingTimeSum[2]
-	last3mins.processingTimeCount = snap.processingTimeCount - statistics.perMinute.processingTimeCount[2]
-	// rate := computeRate(append([]float64(snap.totalRequests}, statistics.perMinute.totalRequests[0:2])
-	data := generateMapFromSnap(last3mins)
-	json, err := json.Marshal(data)
+	histrical := generateMapFromStats(&statistics.perMinute, 0, 2)
+	// sum them up
+	summed := map[string]interface{}{}
+	for key, values := range histrical {
+		summedValue := 0.0
+		floats, ok := values.([]float64)
+		if !ok {
+			continue
+		}
+		for _, v := range floats {
+			summedValue += v
+		}
+		summed[key] = summedValue
+	}
+	summed["stats_period"] = "3 minutes"
+
+	json, err := json.Marshal(summed)
 	if err != nil {
 		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
 		log.Println(errortext)
@@ -232,28 +234,29 @@ func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
 func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
 	// handle time unit and prepare our time window size
-	limitTime := time.Now()
-	timeUnit := r.URL.Query().Get("time_unit")
+	now := time.Now()
+	timeUnitString := r.URL.Query().Get("time_unit")
 	var stats *periodicStats
-	switch timeUnit {
+	var timeUnit time.Duration
+	switch timeUnitString {
 	case "seconds":
-		limitTime = limitTime.Add(statsHistoryElements * -1 * time.Second)
+		timeUnit = time.Second
 		stats = &statistics.perSecond
 	case "minutes":
-		limitTime = limitTime.Add(statsHistoryElements * -1 * time.Minute)
+		timeUnit = time.Minute
 		stats = &statistics.perMinute
 	case "hours":
-		limitTime = limitTime.Add(statsHistoryElements * -1 * time.Hour)
+		timeUnit = time.Hour
 		stats = &statistics.perHour
 	case "days":
-		limitTime = limitTime.Add(statsHistoryElements * -1 * time.Hour * 24)
+		timeUnit = time.Hour * 24
 		stats = &statistics.perDay
 	default:
 		http.Error(w, "Must specify valid time_unit parameter", 400)
 		return
 	}
-	// check if start time is within supported time range
+	// parse start and end time
 	startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
 	if err != nil {
 		errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
@@ -261,12 +264,6 @@ func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
 		http.Error(w, errortext, 400)
 		return
 	}
-	if startTime.Before(limitTime) {
-		http.Error(w, "start_time parameter is outside of supported range", 501)
-		return
-	}
-
-	// check if end time is within supported time range
 	endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
 	if err != nil {
 		errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
@@ -274,28 +271,22 @@ func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
 		http.Error(w, errortext, 400)
 		return
 	}
-	if endTime.Before(limitTime) {
+	// check if start and time times are within supported time range
+	timeRange := timeUnit * statsHistoryElements
+	if startTime.Add(timeRange).Before(now) {
+		http.Error(w, "start_time parameter is outside of supported range", 501)
+		return
+	}
+	if endTime.Add(timeRange).Before(now) {
 		http.Error(w, "end_time parameter is outside of supported range", 501)
 		return
 	}
-	// calculate how which slice range we need to provide
-	var start int
-	var end int
-	switch timeUnit {
-	case "seconds":
-		start = int(startTime.Sub(limitTime).Seconds())
-		end = int(endTime.Sub(limitTime).Seconds())
-	case "minutes":
-		start = int(startTime.Sub(limitTime).Minutes())
-		end = int(endTime.Sub(limitTime).Minutes())
-	case "hours":
-		start = int(startTime.Sub(limitTime).Hours())
-		end = int(endTime.Sub(limitTime).Hours())
-	case "days":
-		start = int(startTime.Sub(limitTime).Hours() / 24.0)
-		end = int(endTime.Sub(limitTime).Hours() / 24.0)
-	}
+	// calculate start and end of our array
+	// basically it's how many hours/minutes/etc have passed since now
+	start := int(now.Sub(endTime) / timeUnit)
+	end := int(now.Sub(startTime) / timeUnit)
 	// swap them around if they're inverted
 	if start > end {
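A worked example helps with the new index arithmetic above: slot 0 of every history array is the current interval, so a timestamp maps to an index by dividing its distance from now by the chosen unit, and the later timestamp (end_time) yields the smaller index. The times and values below are invented purely for illustration:

package main

import (
	"fmt"
	"time"
)

func main() {
	// hypothetical request: time_unit=minutes, a four-minute window
	now := time.Date(2018, 9, 6, 12, 0, 0, 0, time.UTC)
	startTime := now.Add(-5 * time.Minute) // start_time = 11:55
	endTime := now.Add(-2 * time.Minute)   // end_time   = 11:58
	timeUnit := time.Minute

	start := int(now.Sub(endTime) / timeUnit)  // 2
	end := int(now.Sub(startTime) / timeUnit)  // 5

	// generateMapFromStats(stats, 2, 5) would then return the per-minute
	// slots covering 11:55 through 11:58.
	fmt.Println(start, end) // 2 5
}

The next changed file replaces the computeRate and generateMapFromSnap helpers that the old handler relied on.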


@@ -51,33 +51,14 @@ func ensureDELETE(handler func(http.ResponseWriter, *http.Request)) func(http.Re
 // --------------------------
 // helper functions for stats
 // --------------------------
-func computeRate(input []float64) []float64 {
+func getSlice(input [statsHistoryElements]float64, start int, end int) []float64 {
 	output := make([]float64, 0)
-	for i := len(input) - 2; i >= 0; i-- {
-		value := input[i]
-		diff := value - input[i+1]
-		output = append([]float64{diff}, output...)
+	for i := start; i <= end; i++ {
+		output = append(output, input[i])
 	}
 	return output
 }
-
-func generateMapFromSnap(snap statsSnapshot) map[string]interface{} {
-	var avgProcessingTime float64
-	if snap.processingTimeCount > 0 {
-		avgProcessingTime = snap.processingTimeSum / snap.processingTimeCount
-	}
-	result := map[string]interface{}{
-		"dns_queries":           snap.totalRequests,
-		"blocked_filtering":     snap.filteredLists,
-		"replaced_safebrowsing": snap.filteredSafebrowsing,
-		"replaced_safesearch":   snap.filteredSafesearch,
-		"replaced_parental":     snap.filteredParental,
-		"avg_processing_time":   avgProcessingTime,
-	}
-	return result
-}
-
 func generateMapFromStats(stats *periodicStats, start int, end int) map[string]interface{} {
 	// clamp
 	start = clamp(start, 0, statsHistoryElements)
@@ -85,8 +66,8 @@ func generateMapFromStats(stats *periodicStats, start int, end int) map[string]i
 	avgProcessingTime := make([]float64, 0)
-	count := computeRate(stats.processingTimeCount[start:end])
-	sum := computeRate(stats.processingTimeSum[start:end])
+	count := getSlice(stats.entries[processingTimeCount], start, end)
+	sum := getSlice(stats.entries[processingTimeSum], start, end)
 	for i := 0; i < len(count); i++ {
 		var avg float64
 		if count[i] != 0 {
@@ -97,11 +78,11 @@ func generateMapFromStats(stats *periodicStats, start int, end int) map[string]i
 	}
 	result := map[string]interface{}{
-		"dns_queries":           computeRate(stats.totalRequests[start:end]),
-		"blocked_filtering":     computeRate(stats.filteredLists[start:end]),
-		"replaced_safebrowsing": computeRate(stats.filteredSafebrowsing[start:end]),
-		"replaced_safesearch":   computeRate(stats.filteredSafesearch[start:end]),
-		"replaced_parental":     computeRate(stats.filteredParental[start:end]),
+		"dns_queries":           getSlice(stats.entries[totalRequests], start, end),
+		"blocked_filtering":     getSlice(stats.entries[filteredLists], start, end),
+		"replaced_safebrowsing": getSlice(stats.entries[filteredSafebrowsing], start, end),
+		"replaced_safesearch":   getSlice(stats.entries[filteredSafesearch], start, end),
+		"replaced_parental":     getSlice(stats.entries[filteredParental], start, end),
 		"avg_processing_time":   avgProcessingTime,
 	}
 	return result
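The helper swap above follows directly from the new data layout: the old arrays held cumulative snapshots, so computeRate had to difference adjacent elements, while the new arrays hold per-interval deltas, so getSlice only copies the requested window and a caller such as handleStats can simply sum it. A small, hypothetical illustration (getSlice is copied from the diff above; the numbers are invented):

package main

import "fmt"

const statsHistoryElements = 60 + 1

func getSlice(input [statsHistoryElements]float64, start int, end int) []float64 {
	output := make([]float64, 0)
	for i := start; i <= end; i++ {
		output = append(output, input[i])
	}
	return output
}

func main() {
	// per-minute deltas, slot 0 = current minute (invented values)
	var perMinute [statsHistoryElements]float64
	perMinute[0], perMinute[1], perMinute[2] = 3, 5, 2

	window := getSlice(perMinute, 0, 2)
	total := 0.0
	for _, v := range window {
		total += v
	}
	// prints "[3 5 2] 10": the window handleStats sums for its "3 minutes" period
	fmt.Println(window, total)
}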

stats.go

@@ -8,70 +8,50 @@ import (
 	"net/http"
 	"net/url"
 	"os"
-	"regexp"
 	"strconv"
 	"strings"
 	"syscall"
 	"time"
 )

-type periodicStats struct {
-	totalRequests        []float64
-	filteredTotal        []float64
-	filteredLists        []float64
-	filteredSafebrowsing []float64
-	filteredSafesearch   []float64
-	filteredParental     []float64
-	processingTimeSum    []float64
-	processingTimeCount  []float64
-	lastRotate           time.Time // last time this data was rotated
-}
-
-type statsSnapshot struct {
-	totalRequests        float64
-	filteredTotal        float64
-	filteredLists        float64
-	filteredSafebrowsing float64
-	filteredSafesearch   float64
-	filteredParental     float64
-	processingTimeSum    float64
-	processingTimeCount  float64
-}
-
-type statsCollection struct {
-	perSecond periodicStats
-	perMinute periodicStats
-	perHour   periodicStats
-	perDay    periodicStats
-	lastsnap  statsSnapshot
-}
-
-var statistics statsCollection
-
 var client = &http.Client{
 	Timeout: time.Second * 30,
 }

-const statsHistoryElements = 60 + 1 // +1 for calculating delta
-
-var requestCountTotalRegex = regexp.MustCompile(`^coredns_dns_request_count_total`)
-var requestDurationSecondsSum = regexp.MustCompile(`^coredns_dns_request_duration_seconds_sum`)
-var requestDurationSecondsCount = regexp.MustCompile(`^coredns_dns_request_duration_seconds_count`)
-
-func initPeriodicStats(stats *periodicStats) {
-	stats.totalRequests = make([]float64, statsHistoryElements)
-	stats.filteredTotal = make([]float64, statsHistoryElements)
-	stats.filteredLists = make([]float64, statsHistoryElements)
-	stats.filteredSafebrowsing = make([]float64, statsHistoryElements)
-	stats.filteredSafesearch = make([]float64, statsHistoryElements)
-	stats.filteredParental = make([]float64, statsHistoryElements)
-	stats.processingTimeSum = make([]float64, statsHistoryElements)
-	stats.processingTimeCount = make([]float64, statsHistoryElements)
+// as seen over HTTP
+type statsEntry map[string]float64
+type statsEntries map[string][statsHistoryElements]float64
+
+const (
+	statsHistoryElements = 60 + 1 // +1 for calculating delta
+	totalRequests        = `coredns_dns_request_count_total`
+	filteredTotal        = `coredns_dnsfilter_filtered_total`
+	filteredLists        = `coredns_dnsfilter_filtered_lists_total`
+	filteredSafebrowsing = `coredns_dnsfilter_filtered_safebrowsing_total`
+	filteredSafesearch   = `coredns_dnsfilter_safesearch_total`
+	filteredParental     = `coredns_dnsfilter_filtered_parental_total`
+	processingTimeSum    = `coredns_dns_request_duration_seconds_sum`
+	processingTimeCount  = `coredns_dns_request_duration_seconds_count`
+)
+
+type periodicStats struct {
+	entries    statsEntries
+	lastRotate time.Time // last time this data was rotated
+}
+
+type stats struct {
+	perSecond periodicStats
+	perMinute periodicStats
+	perHour   periodicStats
+	perDay    periodicStats
+	lastSeen  statsEntry
+}
+
+var statistics stats
+
+func initPeriodicStats(periodic *periodicStats) {
+	periodic.entries = statsEntries{}
 }

 func init() {
@@ -106,37 +86,22 @@ func isConnRefused(err error) bool {
 	return false
 }

-func sliceRotate(slice *[]float64) {
-	a := (*slice)[:len(*slice)-1]
-	*slice = append([]float64{0}, a...)
-}
-
-func statsRotate(stats *periodicStats, now time.Time) {
-	sliceRotate(&stats.totalRequests)
-	sliceRotate(&stats.filteredTotal)
-	sliceRotate(&stats.filteredLists)
-	sliceRotate(&stats.filteredSafebrowsing)
-	sliceRotate(&stats.filteredSafesearch)
-	sliceRotate(&stats.filteredParental)
-	sliceRotate(&stats.processingTimeSum)
-	sliceRotate(&stats.processingTimeCount)
-	stats.lastRotate = now
-}
-
-func handleValue(input string, target *float64) {
-	value, err := strconv.ParseFloat(input, 64)
-	if err != nil {
-		log.Println("Failed to parse number input:", err)
-		return
+func statsRotate(periodic *periodicStats, now time.Time) {
+	for key, values := range periodic.entries {
+		newValues := [statsHistoryElements]float64{}
+		for i := 1; i < len(values); i++ {
+			newValues[i] = values[i-1]
+		}
+		periodic.entries[key] = newValues
 	}
-	*target = value
+	periodic.lastRotate = now
 }

 // called every second, accumulates stats for each second, minute, hour and day
 func collectStats() {
 	now := time.Now()
 	// rotate each second
-	// NOTE: since we are called every second, always rotate, otherwise aliasing problems cause the rotation to skip
+	// NOTE: since we are called every second, always rotate perSecond, otherwise aliasing problems cause the rotation to skip
 	if true {
 		statsRotate(&statistics.perSecond, now)
 	}
@@ -172,6 +137,8 @@ func collectStats() {
 		return
 	}

+	entry := statsEntry{}
+
 	// handle body
 	scanner := bufio.NewScanner(strings.NewReader(string(body)))
 	for scanner.Scan() {
@@ -181,38 +148,61 @@ func collectStats() {
 			continue
 		}
 		splitted := strings.Split(line, " ")
-		switch {
-		case splitted[0] == "coredns_dnsfilter_filtered_total":
-			handleValue(splitted[1], &statistics.lastsnap.filteredTotal)
-		case splitted[0] == "coredns_dnsfilter_filtered_lists_total":
-			handleValue(splitted[1], &statistics.lastsnap.filteredLists)
-		case splitted[0] == "coredns_dnsfilter_filtered_safebrowsing_total":
-			handleValue(splitted[1], &statistics.lastsnap.filteredSafebrowsing)
-		case splitted[0] == "coredns_dnsfilter_filtered_parental_total":
-			handleValue(splitted[1], &statistics.lastsnap.filteredParental)
-		case requestCountTotalRegex.MatchString(splitted[0]):
-			handleValue(splitted[1], &statistics.lastsnap.totalRequests)
-		case requestDurationSecondsSum.MatchString(splitted[0]):
-			handleValue(splitted[1], &statistics.lastsnap.processingTimeSum)
-		case requestDurationSecondsCount.MatchString(splitted[0]):
-			handleValue(splitted[1], &statistics.lastsnap.processingTimeCount)
+		if len(splitted) < 2 {
+			continue
 		}
+		value, err := strconv.ParseFloat(splitted[1], 64)
+		if err != nil {
+			log.Printf("Failed to parse number input %s: %s", splitted[1], err)
+			continue
+		}
+		key := splitted[0]
+		index := strings.IndexByte(key, '{')
+		if index >= 0 {
+			key = key[:index]
+		}
+		// empty keys are not ok
+		if key == "" {
+			continue
+		}
+		got, ok := entry[key]
+		if ok {
+			value += got
+		}
+		entry[key] = value
 	}

-	// put the snap into per-second, per-minute, per-hour and per-day
-	assignSnapToStats(&statistics.perSecond)
-	assignSnapToStats(&statistics.perMinute)
-	assignSnapToStats(&statistics.perHour)
-	assignSnapToStats(&statistics.perDay)
+	// calculate delta
+	delta := calcDelta(entry, statistics.lastSeen)
+
+	// apply delta to second/minute/hour/day
+	applyDelta(&statistics.perSecond, delta)
+	applyDelta(&statistics.perMinute, delta)
+	applyDelta(&statistics.perHour, delta)
+	applyDelta(&statistics.perDay, delta)
+
+	// save last seen
+	statistics.lastSeen = entry
 }

-func assignSnapToStats(stats *periodicStats) {
-	stats.totalRequests[0] = statistics.lastsnap.totalRequests
-	stats.filteredTotal[0] = statistics.lastsnap.filteredTotal
-	stats.filteredLists[0] = statistics.lastsnap.filteredLists
-	stats.filteredSafebrowsing[0] = statistics.lastsnap.filteredSafebrowsing
-	stats.filteredSafesearch[0] = statistics.lastsnap.filteredSafesearch
-	stats.filteredParental[0] = statistics.lastsnap.filteredParental
-	stats.processingTimeSum[0] = statistics.lastsnap.processingTimeSum
-	stats.processingTimeCount[0] = statistics.lastsnap.processingTimeCount
+func calcDelta(current, seen statsEntry) statsEntry {
+	delta := statsEntry{}
+	for key, currentValue := range current {
+		seenValue := seen[key]
+		deltaValue := currentValue - seenValue
+		delta[key] = deltaValue
+	}
+	return delta
+}
+
+func applyDelta(current *periodicStats, delta statsEntry) {
+	for key, deltaValue := range delta {
+		currentValues := current.entries[key]
+		currentValues[0] += deltaValue
+		current.entries[key] = currentValues
+	}
 }
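Finally, the new parsing loop in collectStats is easy to exercise on its own: each scraped line is split on spaces, the second field is parsed as the value, the metric name is cut at the first '{', and values for all label combinations of the same coredns counter end up summed under a single key before the delta is taken. A self-contained illustration with an invented scrape body:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

type statsEntry map[string]float64

func main() {
	// invented excerpt of a coredns /metrics response
	body := `coredns_dns_request_count_total{zone="."} 100
coredns_dns_request_count_total{zone="example.org."} 20
coredns_dnsfilter_filtered_total 7`

	entry := statsEntry{}
	for _, line := range strings.Split(body, "\n") {
		splitted := strings.Split(line, " ")
		if len(splitted) < 2 {
			continue
		}
		value, err := strconv.ParseFloat(splitted[1], 64)
		if err != nil {
			continue
		}
		key := splitted[0]
		if index := strings.IndexByte(key, '{'); index >= 0 {
			key = key[:index]
		}
		if key == "" {
			continue
		}
		entry[key] += value // labels stripped, so label sets are summed
	}

	fmt.Println(entry["coredns_dns_request_count_total"])  // 120
	fmt.Println(entry["coredns_dnsfilter_filtered_total"]) // 7
}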