Pull request: 2639 use testify require vol.1

Merge in DNS/adguard-home from 2639-testify-require-1 to master

Updates #2639.
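
The gist of the change: checks whose failure makes the rest of a test meaningless are moved to testify's require package, which stops the test immediately, while non-fatal checks stay on assert. A minimal sketch of the pattern, using hypothetical names rather than code from this diff:

package example_test

import (
	"io/ioutil"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestTempFile(t *testing.T) {
	f, err := ioutil.TempFile("", "agh-example")
	// require aborts the test right here on failure, so the code below
	// never touches a nil *os.File.
	require.Nil(t, err)

	t.Cleanup(func() {
		// assert records a failure but lets the rest of the cleanup run.
		assert.Nil(t, f.Close())
		assert.Nil(t, os.Remove(f.Name()))
	})

	assert.NotEmpty(t, f.Name())
}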

Squashed commit of the following:

commit da7d283c6b20b4dbbc0af4689fa812d14f022b52
Merge: c4af71b0 63e4adc0
Author: Eugene Burkov <e.burkov@adguard.com>
Date:   Tue Feb 9 14:27:41 2021 +0300

    Merge branch 'master' into 2639-testify-require-1

commit c4af71b002dc68785106328f60946d7fa73fb933
Author: Eugene Burkov <e.burkov@adguard.com>
Date:   Mon Feb 8 19:32:51 2021 +0300

    querylog: fix tests for windows

commit b616ea5de88a38550ffd42253d3054ea6f90cff9
Author: Eugene Burkov <e.burkov@adguard.com>
Date:   Mon Feb 8 18:29:28 2021 +0300

    querylog: imp tests again

commit 091a698df5fbe6c3e572fde12da395f527c88b95
Author: Eugene Burkov <e.burkov@adguard.com>
Date:   Mon Feb 8 15:49:38 2021 +0300

    querylog: imp tests
Eugene Burkov 2021-02-09 15:17:02 +03:00
parent 63e4adc0e7
commit 1fa4d55ae3
3 changed files with 588 additions and 631 deletions
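
Besides the assert-to-require swap, the diff below also folds repetitive sequential checks into table-driven subtests and replaces manual defer-based cleanup with t.Cleanup. A rough illustration of the table-driven shape, with hypothetical names and assuming the same testify imports as the sketch above:

func TestSum(t *testing.T) {
	testCases := []struct {
		name string
		args []int
		want int
	}{{
		name: "empty",
		args: nil,
		want: 0,
	}, {
		name: "two",
		args: []int{1, 2},
		want: 3,
	}}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			sum := 0
			for _, n := range tc.args {
				sum += n
			}

			assert.Equal(t, tc.want, sum)
		})
	}
}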

View File

@@ -1,9 +1,12 @@
package querylog
import (
"fmt"
"io/ioutil"
"math/rand"
"net"
"os"
"runtime"
"sort"
"testing"
"time"
@@ -14,226 +17,292 @@ import (
"github.com/AdguardTeam/AdGuardHome/internal/dnsfilter"
"github.com/miekg/dns"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestMain(m *testing.M) {
aghtest.DiscardLogOutput(m)
}
func prepareTestDir() string {
const dir = "./agh-test"
_ = os.RemoveAll(dir)
_ = os.MkdirAll(dir, 0o755)
func prepareTestDir(t *testing.T) string {
t.Helper()
wd, err := os.Getwd()
require.Nil(t, err)
dir, err := ioutil.TempDir(wd, "agh-tests")
require.Nil(t, err)
require.NotEmpty(t, dir)
t.Cleanup(func() {
// TODO(e.burkov): Replace with t.TempDir methods after updating
// go version to 1.15.
start := time.Now()
for {
err := os.RemoveAll(dir)
if err == nil {
break
}
if runtime.GOOS != "windows" || time.Since(start) >= 500*time.Millisecond {
break
}
time.Sleep(5 * time.Millisecond)
}
assert.Nil(t, err)
})
return dir
}
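// For comparison, the replacement the TODO above points to once the Go
// version is 1.15 or later (a hypothetical sketch, not code from this
// change): t.TempDir creates a per-test directory and registers its removal
// with the test's cleanup machinery automatically.
//
//	func prepareTestDir(t *testing.T) string {
//		t.Helper()
//
//		return t.TempDir()
//	}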
// Check adding and loading (with filtering) entries from disk and memory
// TestQueryLog tests adding and loading (with filtering) entries from disk and
// memory.
func TestQueryLog(t *testing.T) {
conf := Config{
l := newQueryLog(Config{
Enabled: true,
FileEnabled: true,
Interval: 1,
MemSize: 100,
}
conf.BaseDir = prepareTestDir()
defer func() { _ = os.RemoveAll(conf.BaseDir) }()
l := newQueryLog(conf)
BaseDir: prepareTestDir(t),
})
// add disk entries
// Add disk entries.
addEntry(l, "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
// write to disk (first file)
_ = l.flushLogBuffer(true)
// start writing to the second file
_ = l.rotate()
// add disk entries
// Write to disk (first file).
require.Nil(t, l.flushLogBuffer(true))
// Start writing to the second file.
require.Nil(t, l.rotate())
// Add disk entries.
addEntry(l, "example.org", net.IPv4(1, 1, 1, 2), net.IPv4(2, 2, 2, 2))
// write to disk
_ = l.flushLogBuffer(true)
// add memory entries
// Write to disk.
require.Nil(t, l.flushLogBuffer(true))
// Add memory entries.
addEntry(l, "test.example.org", net.IPv4(1, 1, 1, 3), net.IPv4(2, 2, 2, 3))
addEntry(l, "example.com", net.IPv4(1, 1, 1, 4), net.IPv4(2, 2, 2, 4))
// get all entries
params := newSearchParams()
entries, _ := l.search(params)
assert.Len(t, entries, 4)
assertLogEntry(t, entries[0], "example.com", net.IPv4(1, 1, 1, 4), net.IPv4(2, 2, 2, 4))
assertLogEntry(t, entries[1], "test.example.org", net.IPv4(1, 1, 1, 3), net.IPv4(2, 2, 2, 3))
assertLogEntry(t, entries[2], "example.org", net.IPv4(1, 1, 1, 2), net.IPv4(2, 2, 2, 2))
assertLogEntry(t, entries[3], "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
type tcAssertion struct {
num int
host string
answer, client net.IP
}
// search by domain (strict)
params = newSearchParams()
params.searchCriteria = append(params.searchCriteria, searchCriteria{
testCases := []struct {
name string
sCr []searchCriteria
want []tcAssertion
}{{
name: "all",
sCr: []searchCriteria{},
want: []tcAssertion{
{num: 0, host: "example.com", answer: net.IPv4(1, 1, 1, 4), client: net.IPv4(2, 2, 2, 4)},
{num: 1, host: "test.example.org", answer: net.IPv4(1, 1, 1, 3), client: net.IPv4(2, 2, 2, 3)},
{num: 2, host: "example.org", answer: net.IPv4(1, 1, 1, 2), client: net.IPv4(2, 2, 2, 2)},
{num: 3, host: "example.org", answer: net.IPv4(1, 1, 1, 1), client: net.IPv4(2, 2, 2, 1)},
},
}, {
name: "by_domain_strict",
sCr: []searchCriteria{{
criteriaType: ctDomainOrClient,
strict: true,
value: "TEST.example.org",
})
entries, _ = l.search(params)
assert.Len(t, entries, 1)
assertLogEntry(t, entries[0], "test.example.org", net.IPv4(1, 1, 1, 3), net.IPv4(2, 2, 2, 3))
// search by domain (not strict)
params = newSearchParams()
params.searchCriteria = append(params.searchCriteria, searchCriteria{
}},
want: []tcAssertion{{
num: 0, host: "test.example.org", answer: net.IPv4(1, 1, 1, 3), client: net.IPv4(2, 2, 2, 3),
}},
}, {
name: "by_domain_non-strict",
sCr: []searchCriteria{{
criteriaType: ctDomainOrClient,
strict: false,
value: "example.ORG",
})
entries, _ = l.search(params)
assert.Len(t, entries, 3)
assertLogEntry(t, entries[0], "test.example.org", net.IPv4(1, 1, 1, 3), net.IPv4(2, 2, 2, 3))
assertLogEntry(t, entries[1], "example.org", net.IPv4(1, 1, 1, 2), net.IPv4(2, 2, 2, 2))
assertLogEntry(t, entries[2], "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
// search by client IP (strict)
params = newSearchParams()
params.searchCriteria = append(params.searchCriteria, searchCriteria{
}},
want: []tcAssertion{
{num: 0, host: "test.example.org", answer: net.IPv4(1, 1, 1, 3), client: net.IPv4(2, 2, 2, 3)},
{num: 1, host: "example.org", answer: net.IPv4(1, 1, 1, 2), client: net.IPv4(2, 2, 2, 2)},
{num: 2, host: "example.org", answer: net.IPv4(1, 1, 1, 1), client: net.IPv4(2, 2, 2, 1)},
},
}, {
name: "by_client_ip_strict",
sCr: []searchCriteria{{
criteriaType: ctDomainOrClient,
strict: true,
value: "2.2.2.2",
})
entries, _ = l.search(params)
assert.Len(t, entries, 1)
assertLogEntry(t, entries[0], "example.org", net.IPv4(1, 1, 1, 2), net.IPv4(2, 2, 2, 2))
// search by client IP (part of)
params = newSearchParams()
params.searchCriteria = append(params.searchCriteria, searchCriteria{
}},
want: []tcAssertion{{
num: 0, host: "example.org", answer: net.IPv4(1, 1, 1, 2), client: net.IPv4(2, 2, 2, 2),
}},
}, {
name: "by_client_ip_non-strict",
sCr: []searchCriteria{{
criteriaType: ctDomainOrClient,
strict: false,
value: "2.2.2",
}},
want: []tcAssertion{
{num: 0, host: "example.com", answer: net.IPv4(1, 1, 1, 4), client: net.IPv4(2, 2, 2, 4)},
{num: 1, host: "test.example.org", answer: net.IPv4(1, 1, 1, 3), client: net.IPv4(2, 2, 2, 3)},
{num: 2, host: "example.org", answer: net.IPv4(1, 1, 1, 2), client: net.IPv4(2, 2, 2, 2)},
{num: 3, host: "example.org", answer: net.IPv4(1, 1, 1, 1), client: net.IPv4(2, 2, 2, 1)},
},
}}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
params := newSearchParams()
params.searchCriteria = tc.sCr
entries, _ := l.search(params)
require.Len(t, entries, len(tc.want))
for _, want := range tc.want {
assertLogEntry(t, entries[want.num], want.host, want.answer, want.client)
}
})
entries, _ = l.search(params)
assert.Len(t, entries, 4)
assertLogEntry(t, entries[0], "example.com", net.IPv4(1, 1, 1, 4), net.IPv4(2, 2, 2, 4))
assertLogEntry(t, entries[1], "test.example.org", net.IPv4(1, 1, 1, 3), net.IPv4(2, 2, 2, 3))
assertLogEntry(t, entries[2], "example.org", net.IPv4(1, 1, 1, 2), net.IPv4(2, 2, 2, 2))
assertLogEntry(t, entries[3], "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
}
}
func TestQueryLogOffsetLimit(t *testing.T) {
conf := Config{
l := newQueryLog(Config{
Enabled: true,
Interval: 1,
MemSize: 100,
}
conf.BaseDir = prepareTestDir()
defer func() { _ = os.RemoveAll(conf.BaseDir) }()
l := newQueryLog(conf)
BaseDir: prepareTestDir(t),
})
// add 10 entries to the log
for i := 0; i < 10; i++ {
addEntry(l, "second.example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
const (
entNum = 10
firstPageDomain = "first.example.org"
secondPageDomain = "second.example.org"
)
// Add entries to the log.
for i := 0; i < entNum; i++ {
addEntry(l, secondPageDomain, net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
}
// write them to disk (first file)
_ = l.flushLogBuffer(true)
// add 10 more entries to the log (memory)
for i := 0; i < 10; i++ {
addEntry(l, "first.example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
// Write them to the first file.
require.Nil(t, l.flushLogBuffer(true))
// Add more to the in-memory part of log.
for i := 0; i < entNum; i++ {
addEntry(l, firstPageDomain, net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
}
// First page
params := newSearchParams()
params.offset = 0
params.limit = 10
testCases := []struct {
name string
offset int
limit int
wantLen int
want string
}{{
name: "page_1",
offset: 0,
limit: 10,
wantLen: 10,
want: firstPageDomain,
}, {
name: "page_2",
offset: 10,
limit: 10,
wantLen: 10,
want: secondPageDomain,
}, {
name: "page_2.5",
offset: 15,
limit: 10,
wantLen: 5,
want: secondPageDomain,
}, {
name: "page_3",
offset: 20,
limit: 10,
wantLen: 0,
}}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
params.offset = tc.offset
params.limit = tc.limit
entries, _ := l.search(params)
assert.Len(t, entries, 10)
assert.Equal(t, entries[0].QHost, "first.example.org")
assert.Equal(t, entries[9].QHost, "first.example.org")
// Second page
params.offset = 10
params.limit = 10
entries, _ = l.search(params)
assert.Len(t, entries, 10)
assert.Equal(t, entries[0].QHost, "second.example.org")
assert.Equal(t, entries[9].QHost, "second.example.org")
require.Len(t, entries, tc.wantLen)
// Second and a half page
params.offset = 15
params.limit = 10
entries, _ = l.search(params)
assert.Len(t, entries, 5)
assert.Equal(t, entries[0].QHost, "second.example.org")
assert.Equal(t, entries[4].QHost, "second.example.org")
// Third page
params.offset = 20
params.limit = 10
entries, _ = l.search(params)
assert.Empty(t, entries)
if tc.wantLen > 0 {
assert.Equal(t, entries[0].QHost, tc.want)
assert.Equal(t, entries[tc.wantLen-1].QHost, tc.want)
}
})
}
}
func TestQueryLogMaxFileScanEntries(t *testing.T) {
conf := Config{
l := newQueryLog(Config{
Enabled: true,
FileEnabled: true,
Interval: 1,
MemSize: 100,
}
conf.BaseDir = prepareTestDir()
defer func() { _ = os.RemoveAll(conf.BaseDir) }()
l := newQueryLog(conf)
BaseDir: prepareTestDir(t),
})
// add 10 entries to the log
for i := 0; i < 10; i++ {
const entNum = 10
// Add entries to the log.
for i := 0; i < entNum; i++ {
addEntry(l, "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
}
// write them to disk (first file)
_ = l.flushLogBuffer(true)
// Write them to disk.
require.Nil(t, l.flushLogBuffer(true))
params := newSearchParams()
params.maxFileScanEntries = 5 // do not scan more than 5 records
entries, _ := l.search(params)
assert.Len(t, entries, 5)
params.maxFileScanEntries = 0 // disable the limit
entries, _ = l.search(params)
assert.Len(t, entries, 10)
for _, maxFileScanEntries := range []int{5, 0} {
t.Run(fmt.Sprintf("limit_%d", maxFileScanEntries), func(t *testing.T) {
params.maxFileScanEntries = maxFileScanEntries
entries, _ := l.search(params)
assert.Len(t, entries, entNum-maxFileScanEntries)
})
}
}
func TestQueryLogFileDisabled(t *testing.T) {
conf := Config{
l := newQueryLog(Config{
Enabled: true,
FileEnabled: false,
Interval: 1,
MemSize: 2,
}
conf.BaseDir = prepareTestDir()
defer func() { _ = os.RemoveAll(conf.BaseDir) }()
l := newQueryLog(conf)
BaseDir: prepareTestDir(t),
})
addEntry(l, "example1.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
addEntry(l, "example2.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
// The oldest entry is going to be removed from memory buffer.
addEntry(l, "example3.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
// the oldest entry is now removed from mem buffer
params := newSearchParams()
ll, _ := l.search(params)
assert.Len(t, ll, 2)
require.Len(t, ll, 2)
assert.Equal(t, "example3.org", ll[0].QHost)
assert.Equal(t, "example2.org", ll[1].QHost)
}
func addEntry(l *queryLog, host string, answerStr, client net.IP) {
q := dns.Msg{}
q.Question = append(q.Question, dns.Question{
q := dns.Msg{
Question: []dns.Question{{
Name: host + ".",
Qtype: dns.TypeA,
Qclass: dns.ClassINET,
})
}},
}
a := dns.Msg{}
a.Question = append(a.Question, q.Question[0])
answer := new(dns.A)
answer.Hdr = dns.RR_Header{
a := dns.Msg{
Question: q.Question,
Answer: []dns.RR{&dns.A{
Hdr: dns.RR_Header{
Name: q.Question[0].Name,
Rrtype: dns.TypeA,
Class: dns.ClassINET,
},
A: answerStr,
}},
}
answer.A = answerStr
a.Answer = append(a.Answer, answer)
res := dnsfilter.Result{
IsFiltered: true,
Reason: dnsfilter.Rewritten,
@@ -254,19 +323,22 @@ func addEntry(l *queryLog, host string, answerStr, client net.IP) {
l.Add(params)
}
func assertLogEntry(t *testing.T, entry *logEntry, host string, answer, client net.IP) bool {
func assertLogEntry(t *testing.T, entry *logEntry, host string, answer, client net.IP) {
t.Helper()
require.NotNil(t, entry)
assert.Equal(t, host, entry.QHost)
assert.Equal(t, client, entry.IP)
assert.Equal(t, "A", entry.QType)
assert.Equal(t, "IN", entry.QClass)
msg := new(dns.Msg)
assert.Nil(t, msg.Unpack(entry.Answer))
assert.Len(t, msg.Answer, 1)
msg := &dns.Msg{}
require.Nil(t, msg.Unpack(entry.Answer))
require.Len(t, msg.Answer, 1)
ip := proxyutil.GetIPFromDNSRecord(msg.Answer[0]).To16()
assert.NotNil(t, ip)
assert.Equal(t, answer, ip)
return true
}
func testEntries() (entries []*logEntry) {
@@ -332,8 +404,8 @@ func TestLogEntriesByTime_sort(t *testing.T) {
entries := testEntries()
sort.Sort(logEntriesByTimeDesc(entries))
for i := 1; i < len(entries); i++ {
assert.False(t, entries[i].Time.After(entries[i-1].Time),
"%s %s", entries[i].Time, entries[i-1].Time)
for i := range entries[1:] {
assert.False(t, entries[i+1].Time.After(entries[i].Time),
"%s %s", entries[i+1].Time, entries[i].Time)
}
}

View File

@@ -2,347 +2,340 @@ package querylog
import (
"encoding/binary"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"net"
"os"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestQLogFileEmpty(t *testing.T) {
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFile := prepareTestFile(testDir, 0)
// prepareTestFiles prepares several test query log files, each with the
// specified lines count.
func prepareTestFiles(t *testing.T, dir string, filesNum, linesNum int) []string {
t.Helper()
// create the new QLogFile instance
q, err := NewQLogFile(testFile)
assert.Nil(t, err)
assert.NotNil(t, q)
defer q.Close()
// seek to the start
pos, err := q.SeekStart()
assert.Nil(t, err)
assert.EqualValues(t, 0, pos)
// try reading anyway
line, err := q.ReadNext()
assert.Equal(t, io.EOF, err)
assert.Empty(t, line)
}
func TestQLogFileLarge(t *testing.T) {
// should be large enough
count := 50000
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFile := prepareTestFile(testDir, count)
// create the new QLogFile instance
q, err := NewQLogFile(testFile)
assert.Nil(t, err)
assert.NotNil(t, q)
defer q.Close()
// seek to the start
pos, err := q.SeekStart()
assert.Nil(t, err)
assert.NotEqualValues(t, 0, pos)
read := 0
var line string
for err == nil {
line, err = q.ReadNext()
if err == nil {
assert.NotZero(t, len(line))
read++
}
}
assert.Equal(t, count, read)
assert.Equal(t, io.EOF, err)
}
func TestQLogFileSeekLargeFile(t *testing.T) {
// more or less big file
count := 10000
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFile := prepareTestFile(testDir, count)
// create the new QLogFile instance
q, err := NewQLogFile(testFile)
assert.Nil(t, err)
assert.NotNil(t, q)
defer q.Close()
// CASE 1: NOT TOO OLD LINE
testSeekLineQLogFile(t, q, 300)
// CASE 2: OLD LINE
testSeekLineQLogFile(t, q, count-300)
// CASE 3: FIRST LINE
testSeekLineQLogFile(t, q, 0)
// CASE 4: LAST LINE
testSeekLineQLogFile(t, q, count)
// CASE 5: Seek non-existent (too low)
_, _, err = q.SeekTS(123)
assert.NotNil(t, err)
// CASE 6: Seek non-existent (too high)
ts, _ := time.Parse(time.RFC3339, "2100-01-02T15:04:05Z07:00")
_, _, err = q.SeekTS(ts.UnixNano())
assert.NotNil(t, err)
// CASE 7: "Almost" found
line, err := getQLogFileLine(q, count/2)
assert.Nil(t, err)
// ALMOST the record we need
timestamp := readQLogTimestamp(line) - 1
assert.NotEqualValues(t, 0, timestamp)
_, depth, err := q.SeekTS(timestamp)
assert.NotNil(t, err)
assert.LessOrEqual(t, depth, int(math.Log2(float64(count))+3))
}
func TestQLogFileSeekSmallFile(t *testing.T) {
// more or less big file
count := 10
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFile := prepareTestFile(testDir, count)
// create the new QLogFile instance
q, err := NewQLogFile(testFile)
assert.Nil(t, err)
assert.NotNil(t, q)
defer q.Close()
// CASE 1: NOT TOO OLD LINE
testSeekLineQLogFile(t, q, 2)
// CASE 2: OLD LINE
testSeekLineQLogFile(t, q, count-2)
// CASE 3: FIRST LINE
testSeekLineQLogFile(t, q, 0)
// CASE 4: LAST LINE
testSeekLineQLogFile(t, q, count)
// CASE 5: Seek non-existent (too low)
_, _, err = q.SeekTS(123)
assert.NotNil(t, err)
// CASE 6: Seek non-existent (too high)
ts, _ := time.Parse(time.RFC3339, "2100-01-02T15:04:05Z07:00")
_, _, err = q.SeekTS(ts.UnixNano())
assert.NotNil(t, err)
// CASE 7: "Almost" found
line, err := getQLogFileLine(q, count/2)
assert.Nil(t, err)
// ALMOST the record we need
timestamp := readQLogTimestamp(line) - 1
assert.NotEqualValues(t, 0, timestamp)
_, depth, err := q.SeekTS(timestamp)
assert.NotNil(t, err)
assert.LessOrEqual(t, depth, int(math.Log2(float64(count))+3))
}
func testSeekLineQLogFile(t *testing.T, q *QLogFile, lineNumber int) {
line, err := getQLogFileLine(q, lineNumber)
assert.Nil(t, err)
ts := readQLogTimestamp(line)
assert.NotEqualValues(t, 0, ts)
// try seeking to that line now
pos, _, err := q.SeekTS(ts)
assert.Nil(t, err)
assert.NotEqualValues(t, 0, pos)
testLine, err := q.ReadNext()
assert.Nil(t, err)
assert.Equal(t, line, testLine)
}
func getQLogFileLine(q *QLogFile, lineNumber int) (string, error) {
_, err := q.SeekStart()
if err != nil {
return "", err
}
for i := 1; i < lineNumber; i++ {
_, err := q.ReadNext()
if err != nil {
return "", err
}
}
return q.ReadNext()
}
// Check adding and loading (with filtering) entries from disk and memory
func TestQLogFile(t *testing.T) {
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFile := prepareTestFile(testDir, 2)
// create the new QLogFile instance
q, err := NewQLogFile(testFile)
assert.Nil(t, err)
assert.NotNil(t, q)
defer q.Close()
// seek to the start
pos, err := q.SeekStart()
assert.Nil(t, err)
assert.Greater(t, pos, int64(0))
// read first line
line, err := q.ReadNext()
assert.Nil(t, err)
assert.Contains(t, line, "0.0.0.2")
assert.True(t, strings.HasPrefix(line, "{"), line)
assert.True(t, strings.HasSuffix(line, "}"), line)
// read second line
line, err = q.ReadNext()
assert.Nil(t, err)
assert.EqualValues(t, 0, q.position)
assert.Contains(t, line, "0.0.0.1")
assert.True(t, strings.HasPrefix(line, "{"), line)
assert.True(t, strings.HasSuffix(line, "}"), line)
// try reading again (there's nothing to read anymore)
line, err = q.ReadNext()
assert.Equal(t, io.EOF, err)
assert.Empty(t, line)
}
// prepareTestFile - prepares a test query log file with the specified number of lines
func prepareTestFile(dir string, linesCount int) string {
return prepareTestFiles(dir, 1, linesCount)[0]
}
// prepareTestFiles - prepares several test query log files
// each of them -- with the specified linesCount
func prepareTestFiles(dir string, filesCount, linesCount int) []string {
format := `{"IP":"${IP}","T":"${TIMESTAMP}","QH":"example.org","QT":"A","QC":"IN","Answer":"AAAAAAABAAEAAAAAB2V4YW1wbGUDb3JnAAABAAEHZXhhbXBsZQNvcmcAAAEAAQAAAAAABAECAwQ=","Result":{},"Elapsed":0,"Upstream":"upstream"}`
const strV = "\"%s\""
const nl = "\n"
const format = `{"IP":` + strV + `,"T":` + strV + `,` +
`"QH":"example.org","QT":"A","QC":"IN",` +
`"Answer":"AAAAAAABAAEAAAAAB2V4YW1wbGUDb3JnAAABAAEHZXhhbXBsZQNvcmcAAAEAAQAAAAAABAECAwQ=",` +
`"Result":{},"Elapsed":0,"Upstream":"upstream"}` + nl
lineTime, _ := time.Parse(time.RFC3339Nano, "2020-02-18T22:36:35.920973+03:00")
lineIP := uint32(0)
files := make([]string, filesCount)
for j := 0; j < filesCount; j++ {
f, _ := ioutil.TempFile(dir, "*.txt")
files[filesCount-j-1] = f.Name()
files := make([]string, filesNum)
for j := range files {
f, err := ioutil.TempFile(dir, "*.txt")
require.Nil(t, err)
files[filesNum-j-1] = f.Name()
for i := 0; i < linesCount; i++ {
for i := 0; i < linesNum; i++ {
lineIP++
lineTime = lineTime.Add(time.Second)
ip := make(net.IP, 4)
binary.BigEndian.PutUint32(ip, lineIP)
line := format
line = strings.ReplaceAll(line, "${IP}", ip.String())
line = strings.ReplaceAll(line, "${TIMESTAMP}", lineTime.Format(time.RFC3339Nano))
line := fmt.Sprintf(format, ip, lineTime.Format(time.RFC3339Nano))
_, _ = f.WriteString(line)
_, _ = f.WriteString("\n")
_, err = f.WriteString(line)
require.Nil(t, err)
}
}
return files
}
func TestQLogSeek(t *testing.T) {
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
// prepareTestFile prepares a test query log file with the specified number of
// lines.
func prepareTestFile(t *testing.T, dir string, linesCount int) string {
t.Helper()
d := `{"T":"2020-08-31T18:44:23.911246629+03:00","QH":"wfqvjymurpwegyv","QT":"A","QC":"IN","CP":"","Answer":"","Result":{},"Elapsed":66286385,"Upstream":"tls://dns-unfiltered.adguard.com:853"}
{"T":"2020-08-31T18:44:25.376690873+03:00"}
{"T":"2020-08-31T18:44:25.382540454+03:00"}`
f, _ := ioutil.TempFile(testDir, "*.txt")
_, _ = f.WriteString(d)
defer f.Close()
q, err := NewQLogFile(f.Name())
assert.Nil(t, err)
defer q.Close()
target, _ := time.Parse(time.RFC3339, "2020-08-31T18:44:25.376690873+03:00")
_, depth, err := q.SeekTS(target.UnixNano())
assert.Nil(t, err)
assert.Equal(t, 1, depth)
return prepareTestFiles(t, dir, 1, linesCount)[0]
}
func TestQLogSeek_ErrTSTooLate(t *testing.T) {
testDir := prepareTestDir()
// newTestQLogFile creates new *QLogFile for tests and registers the required
// cleanup functions.
func newTestQLogFile(t *testing.T, linesNum int) (file *QLogFile) {
t.Helper()
testFile := prepareTestFile(t, prepareTestDir(t), linesNum)
// Create the new QLogFile instance.
file, err := NewQLogFile(testFile)
require.Nil(t, err)
assert.NotNil(t, file)
t.Cleanup(func() {
_ = os.RemoveAll(testDir)
assert.Nil(t, file.Close())
})
d := `{"T":"2020-08-31T18:44:23.911246629+03:00","QH":"wfqvjymurpwegyv","QT":"A","QC":"IN","CP":"","Answer":"","Result":{},"Elapsed":66286385,"Upstream":"tls://dns-unfiltered.adguard.com:853"}
{"T":"2020-08-31T18:44:25.376690873+03:00"}
{"T":"2020-08-31T18:44:25.382540454+03:00"}
`
f, err := ioutil.TempFile(testDir, "*.txt")
assert.Nil(t, err)
defer f.Close()
_, err = f.WriteString(d)
assert.Nil(t, err)
q, err := NewQLogFile(f.Name())
assert.Nil(t, err)
defer q.Close()
target, err := time.Parse(time.RFC3339, "2020-08-31T18:44:25.382540454+03:00")
assert.Nil(t, err)
_, depth, err := q.SeekTS(target.UnixNano() + int64(time.Second))
assert.Equal(t, ErrTSTooLate, err)
assert.Equal(t, 2, depth)
return file
}
func TestQLogSeek_ErrTSTooEarly(t *testing.T) {
testDir := prepareTestDir()
func TestQLogFile_ReadNext(t *testing.T) {
testCases := []struct {
name string
linesNum int
}{{
name: "empty",
linesNum: 0,
}, {
name: "large",
linesNum: 50000,
}}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
q := newTestQLogFile(t, tc.linesNum)
// Calculate the expected position.
fileInfo, err := q.file.Stat()
require.Nil(t, err)
var expPos int64
if expPos = fileInfo.Size(); expPos > 0 {
expPos--
}
// Seek to the start.
pos, err := q.SeekStart()
require.Nil(t, err)
require.EqualValues(t, expPos, pos)
var read int
var line string
for err == nil {
line, err = q.ReadNext()
if err == nil {
assert.NotEmpty(t, line)
read++
}
}
require.Equal(t, io.EOF, err)
assert.Equal(t, tc.linesNum, read)
})
}
}
func TestQLogFile_SeekTS_good(t *testing.T) {
linesCases := []struct {
name string
num int
}{{
name: "large",
num: 10000,
}, {
name: "small",
num: 10,
}}
for _, l := range linesCases {
testCases := []struct {
name string
linesNum int
line int
}{{
name: "not_too_old",
line: 2,
}, {
name: "old",
line: l.num - 2,
}, {
name: "first",
line: 0,
}, {
name: "last",
line: l.num,
}}
q := newTestQLogFile(t, l.num)
for _, tc := range testCases {
t.Run(l.name+"_"+tc.name, func(t *testing.T) {
line, err := getQLogFileLine(q, tc.line)
require.Nil(t, err)
ts := readQLogTimestamp(line)
assert.NotEqualValues(t, 0, ts)
// Try seeking to that line now.
pos, _, err := q.SeekTS(ts)
require.Nil(t, err)
assert.NotEqualValues(t, 0, pos)
testLine, err := q.ReadNext()
require.Nil(t, err)
assert.Equal(t, line, testLine)
})
}
}
}
func TestQLogFile_SeekTS_bad(t *testing.T) {
linesCases := []struct {
name string
num int
}{{
name: "large",
num: 10000,
}, {
name: "small",
num: 10,
}}
for _, l := range linesCases {
testCases := []struct {
name string
ts int64
leq bool
}{{
name: "non-existent_long_ago",
}, {
name: "non-existent_far_ahead",
}, {
name: "almost",
leq: true,
}}
q := newTestQLogFile(t, l.num)
testCases[0].ts = 123
lateTS, _ := time.Parse(time.RFC3339, "2100-01-02T15:04:05Z07:00")
testCases[1].ts = lateTS.UnixNano()
line, err := getQLogFileLine(q, l.num/2)
require.Nil(t, err)
testCases[2].ts = readQLogTimestamp(line) - 1
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
assert.NotEqualValues(t, 0, tc.ts)
_, depth, err := q.SeekTS(tc.ts)
assert.NotEmpty(t, l.num)
require.NotNil(t, err)
if tc.leq {
assert.LessOrEqual(t, depth, int(math.Log2(float64(l.num))+3))
}
})
}
}
}
func getQLogFileLine(q *QLogFile, lineNumber int) (line string, err error) {
if _, err = q.SeekStart(); err != nil {
return line, err
}
for i := 1; i < lineNumber; i++ {
if _, err = q.ReadNext(); err != nil {
return line, err
}
}
return q.ReadNext()
}
// Check adding and loading (with filtering) entries from disk and memory.
func TestQLogFile(t *testing.T) {
// Create the new QLogFile instance.
q := newTestQLogFile(t, 2)
// Seek to the start.
pos, err := q.SeekStart()
require.Nil(t, err)
assert.Greater(t, pos, int64(0))
// Read first line.
line, err := q.ReadNext()
require.Nil(t, err)
assert.Contains(t, line, "0.0.0.2")
assert.True(t, strings.HasPrefix(line, "{"), line)
assert.True(t, strings.HasSuffix(line, "}"), line)
// Read second line.
line, err = q.ReadNext()
require.Nil(t, err)
assert.EqualValues(t, 0, q.position)
assert.Contains(t, line, "0.0.0.1")
assert.True(t, strings.HasPrefix(line, "{"), line)
assert.True(t, strings.HasSuffix(line, "}"), line)
// Try reading again (there's nothing to read anymore).
line, err = q.ReadNext()
require.Equal(t, io.EOF, err)
assert.Empty(t, line)
}
func NewTestQLogFileData(t *testing.T, data string) (file *QLogFile) {
f, err := ioutil.TempFile(prepareTestDir(t), "*.txt")
require.Nil(t, err)
t.Cleanup(func() {
_ = os.RemoveAll(testDir)
assert.Nil(t, f.Close())
})
d := `{"T":"2020-08-31T18:44:23.911246629+03:00","QH":"wfqvjymurpwegyv","QT":"A","QC":"IN","CP":"","Answer":"","Result":{},"Elapsed":66286385,"Upstream":"tls://dns-unfiltered.adguard.com:853"}
{"T":"2020-08-31T18:44:25.376690873+03:00"}
{"T":"2020-08-31T18:44:25.382540454+03:00"}
`
f, err := ioutil.TempFile(testDir, "*.txt")
assert.Nil(t, err)
defer f.Close()
_, err = f.WriteString(data)
require.Nil(t, err)
_, err = f.WriteString(d)
assert.Nil(t, err)
file, err = NewQLogFile(f.Name())
require.Nil(t, err)
t.Cleanup(func() {
assert.Nil(t, file.Close())
})
q, err := NewQLogFile(f.Name())
assert.Nil(t, err)
defer q.Close()
target, err := time.Parse(time.RFC3339, "2020-08-31T18:44:23.911246629+03:00")
assert.Nil(t, err)
_, depth, err := q.SeekTS(target.UnixNano() - int64(time.Second))
assert.Equal(t, ErrTSTooEarly, err)
assert.Equal(t, 1, depth)
return file
}
func TestQLog_Seek(t *testing.T) {
const nl = "\n"
const strV = "%s"
const recs = `{"T":"` + strV + `","QH":"wfqvjymurpwegyv","QT":"A","QC":"IN","CP":"","Answer":"","Result":{},"Elapsed":66286385,"Upstream":"tls://dns-unfiltered.adguard.com:853"}` + nl +
`{"T":"` + strV + `"}` + nl +
`{"T":"` + strV + `"}` + nl
timestamp, _ := time.Parse(time.RFC3339Nano, "2020-08-31T18:44:25.376690873+03:00")
testCases := []struct {
name string
delta int
wantErr error
wantDepth int
}{{
name: "ok",
delta: 0,
wantErr: nil,
wantDepth: 2,
}, {
name: "too_late",
delta: 2,
wantErr: ErrTSTooLate,
wantDepth: 2,
}, {
name: "too_early",
delta: -2,
wantErr: ErrTSTooEarly,
wantDepth: 1,
}}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
data := fmt.Sprintf(recs,
timestamp.Add(-time.Second).Format(time.RFC3339Nano),
timestamp.Format(time.RFC3339Nano),
timestamp.Add(time.Second).Format(time.RFC3339Nano),
)
q := NewTestQLogFileData(t, data)
_, depth, err := q.SeekTS(timestamp.Add(time.Second * time.Duration(tc.delta)).UnixNano())
require.Truef(t, errors.Is(err, tc.wantErr), "%v", err)
assert.Equal(t, tc.wantDepth, depth)
})
}
}

View File

@@ -3,110 +3,77 @@ package querylog
import (
"errors"
"io"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestQLogReaderEmpty(t *testing.T) {
r, err := NewQLogReader([]string{})
assert.Nil(t, err)
assert.NotNil(t, r)
defer r.Close()
// newTestQLogReader creates new *QLogReader for tests and registers the
// required cleanup functions.
func newTestQLogReader(t *testing.T, filesNum, linesNum int) (reader *QLogReader) {
t.Helper()
// seek to the start
err = r.SeekStart()
assert.Nil(t, err)
testFiles := prepareTestFiles(t, prepareTestDir(t), filesNum, linesNum)
line, err := r.ReadNext()
assert.Empty(t, line)
assert.Equal(t, io.EOF, err)
// Create the new QLogReader instance.
reader, err := NewQLogReader(testFiles)
require.Nil(t, err)
assert.NotNil(t, reader)
t.Cleanup(func() {
assert.Nil(t, reader.Close())
})
return reader
}
func TestQLogReaderOneFile(t *testing.T) {
// let's do one small file
count := 10
filesCount := 1
func TestQLogReader(t *testing.T) {
testCases := []struct {
name string
filesNum int
linesNum int
}{{
name: "empty",
filesNum: 0,
linesNum: 0,
}, {
name: "one_file",
filesNum: 1,
linesNum: 10,
}, {
name: "multiple_files",
filesNum: 5,
linesNum: 10000,
}}
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFiles := prepareTestFiles(testDir, filesCount, count)
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
r := newTestQLogReader(t, tc.filesNum, tc.linesNum)
r, err := NewQLogReader(testFiles)
assert.Nil(t, err)
assert.NotNil(t, r)
defer r.Close()
// Seek to the start.
err := r.SeekStart()
require.Nil(t, err)
// seek to the start
err = r.SeekStart()
assert.Nil(t, err)
// read everything
read := 0
// Read everything.
var read int
var line string
for err == nil {
line, err = r.ReadNext()
if err == nil {
assert.True(t, len(line) > 0)
assert.NotEmpty(t, line)
read++
}
}
assert.Equal(t, count*filesCount, read)
assert.Equal(t, io.EOF, err)
require.Equal(t, io.EOF, err)
assert.Equal(t, tc.filesNum*tc.linesNum, read)
})
}
func TestQLogReaderMultipleFiles(t *testing.T) {
// should be large enough
count := 10000
filesCount := 5
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFiles := prepareTestFiles(testDir, filesCount, count)
r, err := NewQLogReader(testFiles)
assert.Nil(t, err)
assert.NotNil(t, r)
defer r.Close()
// seek to the start
err = r.SeekStart()
assert.Nil(t, err)
// read everything
read := 0
var line string
for err == nil {
line, err = r.ReadNext()
if err == nil {
assert.True(t, len(line) > 0)
read++
}
}
assert.Equal(t, count*filesCount, read)
assert.Equal(t, io.EOF, err)
}
func TestQLogReader_Seek(t *testing.T) {
count := 10000
filesCount := 2
testDir := prepareTestDir()
t.Cleanup(func() {
_ = os.RemoveAll(testDir)
})
testFiles := prepareTestFiles(testDir, filesCount, count)
r, err := NewQLogReader(testFiles)
assert.Nil(t, err)
assert.NotNil(t, r)
t.Cleanup(func() {
_ = r.Close()
})
r := newTestQLogReader(t, 2, 10000)
testCases := []struct {
name string
@@ -114,7 +81,7 @@ func TestQLogReader_Seek(t *testing.T) {
want error
}{{
name: "not_too_old",
time: "2020-02-19T04:04:56.920973+03:00",
time: "2020-02-18T22:39:35.920973+03:00",
want: nil,
}, {
name: "old",
@@ -122,7 +89,7 @@ func TestQLogReader_Seek(t *testing.T) {
want: nil,
}, {
name: "first",
time: "2020-02-19T04:09:55.920973+03:00",
time: "2020-02-18T22:36:36.920973+03:00",
want: nil,
}, {
name: "last",
@@ -147,28 +114,20 @@ func TestQLogReader_Seek(t *testing.T) {
timestamp, err := time.Parse(time.RFC3339Nano, tc.time)
assert.Nil(t, err)
if tc.name == "first" {
assert.True(t, true)
}
err = r.SeekTS(timestamp.UnixNano())
assert.True(t, errors.Is(err, tc.want), err)
assert.True(t, errors.Is(err, tc.want))
})
}
}
func TestQLogReader_ReadNext(t *testing.T) {
count := 10
filesCount := 1
testDir := prepareTestDir()
t.Cleanup(func() {
_ = os.RemoveAll(testDir)
})
testFiles := prepareTestFiles(testDir, filesCount, count)
r, err := NewQLogReader(testFiles)
assert.Nil(t, err)
assert.NotNil(t, r)
t.Cleanup(func() {
_ = r.Close()
})
const linesNum = 10
const filesNum = 1
r := newTestQLogReader(t, filesNum, linesNum)
testCases := []struct {
name string
@@ -180,7 +139,7 @@ func TestQLogReader_ReadNext(t *testing.T) {
want: nil,
}, {
name: "too_big",
start: count + 1,
start: linesNum + 1,
want: io.EOF,
}}
@@ -199,70 +158,3 @@ func TestQLogReader_ReadNext(t *testing.T) {
})
}
}
// TODO(e.burkov): Remove the tests below. Make tests above more compelling.
func TestQLogReaderSeek(t *testing.T) {
// more or less big file
count := 10000
filesCount := 2
testDir := prepareTestDir()
defer func() { _ = os.RemoveAll(testDir) }()
testFiles := prepareTestFiles(testDir, filesCount, count)
r, err := NewQLogReader(testFiles)
assert.Nil(t, err)
assert.NotNil(t, r)
defer r.Close()
// CASE 1: NOT TOO OLD LINE
testSeekLineQLogReader(t, r, 300)
// CASE 2: OLD LINE
testSeekLineQLogReader(t, r, count-300)
// CASE 3: FIRST LINE
testSeekLineQLogReader(t, r, 0)
// CASE 4: LAST LINE
testSeekLineQLogReader(t, r, count)
// CASE 5: Seek non-existent (too low)
err = r.SeekTS(123)
assert.NotNil(t, err)
// CASE 6: Seek non-existent (too high)
ts, _ := time.Parse(time.RFC3339, "2100-01-02T15:04:05Z07:00")
err = r.SeekTS(ts.UnixNano())
assert.NotNil(t, err)
}
func testSeekLineQLogReader(t *testing.T, r *QLogReader, lineNumber int) {
line, err := getQLogReaderLine(r, lineNumber)
assert.Nil(t, err)
ts := readQLogTimestamp(line)
assert.NotEqualValues(t, 0, ts)
// try seeking to that line now
err = r.SeekTS(ts)
assert.Nil(t, err)
testLine, err := r.ReadNext()
assert.Nil(t, err)
assert.Equal(t, line, testLine)
}
func getQLogReaderLine(r *QLogReader, lineNumber int) (string, error) {
err := r.SeekStart()
if err != nil {
return "", err
}
for i := 1; i < lineNumber; i++ {
_, err := r.ReadNext()
if err != nil {
return "", err
}
}
return r.ReadNext()
}