package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net"
	"net/http"
	"os"
	"strconv"
	"strings"
	"time"

	"github.com/AdguardTeam/AdGuardHome/dnsforward"
	"github.com/AdguardTeam/dnsproxy/upstream"
	"github.com/miekg/dns"
	"gopkg.in/asaskevich/govalidator.v4"
)

const updatePeriod = time.Minute * 30

// cached version.json to avoid hammering github.io for each page reload
var versionCheckJSON []byte
var versionCheckLastTime time.Time

const versionCheckURL = "https://adguardteam.github.io/AdGuardHome/version.json"
const versionCheckPeriod = time.Hour * 8

var client = &http.Client{
	Timeout: time.Second * 30,
}

// -------------------
// dns run control
// -------------------
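// writeAllConfigsAndReloadDNS writes the current configuration to disk and then reloads the DNS server so the new settings take effect.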
func writeAllConfigsAndReloadDNS() error {
	err := writeAllConfigs()
	if err != nil {
		log.Printf("Couldn't write all configs: %s", err)
		return err
	}
	reconfigureDNSServer()
	return nil
}
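
// httpUpdateConfigReloadDNSReturnOK persists the configuration, reloads the DNS server and replies with a plain-text "OK", or reports a 500 error if writing the configuration failed.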
func httpUpdateConfigReloadDNSReturnOK(w http.ResponseWriter, r *http.Request) {
	err := writeAllConfigsAndReloadDNS()
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
	returnOK(w, r)
}

func returnOK(w http.ResponseWriter, r *http.Request) {
	_, err := fmt.Fprintf(w, "OK\n")
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
	}
}
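
// handleStatus returns the current DNS settings and run state as JSON.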
func handleStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"dns_address":        config.BindHost,
		"dns_port":           config.DNS.Port,
		"protection_enabled": config.DNS.ProtectionEnabled,
		"querylog_enabled":   config.DNS.QueryLogEnabled,
		"running":            isRunning(),
		"bootstrap_dns":      config.DNS.BootstrapDNS,
		"upstream_dns":       config.DNS.UpstreamDNS,
		"version":            VersionString,
		"language":           config.Language,
	}

	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}

func handleProtectionEnable(w http.ResponseWriter, r *http.Request) {
	config.DNS.ProtectionEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleProtectionDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.ProtectionEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

// -----
// stats
// -----
func handleQueryLogEnable(w http.ResponseWriter, r *http.Request) {
	config.DNS.QueryLogEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleQueryLogDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.QueryLogEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}
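
// httpError logs the formatted message and sends it to the client with the given HTTP status code.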
func httpError(w http.ResponseWriter, code int, format string, args ...interface{}) {
	text := fmt.Sprintf(format, args...)
	log.Println(text)
	http.Error(w, text, code)
}
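
// handleSetUpstreamDNS replaces the upstream DNS servers with the whitespace-separated list from the request body; an empty body restores the default servers.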
func handleSetUpstreamDNS(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	// if empty body -- user is asking for default servers
	hosts := strings.Fields(string(body))

	if len(hosts) == 0 {
		config.DNS.UpstreamDNS = defaultDNS
	} else {
		config.DNS.UpstreamDNS = hosts
	}

	err = writeAllConfigs()
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}
	reconfigureDNSServer()
	_, err = fmt.Fprintf(w, "OK %d servers\n", len(hosts))
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}
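
// handleTestUpstreamDNS probes every server listed in the request body with checkDNS and returns a JSON map of server address to "OK" or the error text.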
func handleTestUpstreamDNS(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	hosts := strings.Fields(string(body))

	if len(hosts) == 0 {
		errorText := "No servers specified"
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	result := map[string]string{}

	for _, host := range hosts {
		err = checkDNS(host)
		if err != nil {
			log.Println(err)
			result[host] = err.Error()
		} else {
			result[host] = "OK"
		}
	}

	jsonVal, err := json.Marshal(result)
	if err != nil {
		errorText := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}
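
// checkDNS sends an A query for google-public-dns-a.google.com to the given upstream and verifies that the answer resolves to 8.8.8.8.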
func checkDNS(input string) error {
	log.Printf("Checking if DNS %s works...", input)
	u, err := upstream.AddressToUpstream(input, "", dnsforward.DefaultTimeout)
	if err != nil {
		return fmt.Errorf("Failed to choose upstream for %s: %s", input, err)
	}

	req := dns.Msg{}
	req.Id = dns.Id()
	req.RecursionDesired = true
	req.Question = []dns.Question{
		{Name: "google-public-dns-a.google.com.", Qtype: dns.TypeA, Qclass: dns.ClassINET},
	}
	reply, err := u.Exchange(&req)
	if err != nil {
		return fmt.Errorf("couldn't communicate with DNS server %s: %s", input, err)
	}
	if len(reply.Answer) != 1 {
		return fmt.Errorf("DNS server %s returned wrong answer", input)
	}
	if t, ok := reply.Answer[0].(*dns.A); ok {
		if !net.IPv4(8, 8, 8, 8).Equal(t.A) {
			return fmt.Errorf("DNS server %s returned wrong answer: %v", input, t.A)
		}
	}

	log.Printf("DNS %s works OK", input)
	return nil
}
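
// handleGetVersionJSON proxies version.json from versionCheckURL, caching the response for versionCheckPeriod to avoid hitting github.io on every page reload.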
func handleGetVersionJSON(w http.ResponseWriter, r *http.Request) {
	now := time.Now()
	if now.Sub(versionCheckLastTime) <= versionCheckPeriod && len(versionCheckJSON) != 0 {
		// return cached copy
		w.Header().Set("Content-Type", "application/json")
		w.Write(versionCheckJSON)
		return
	}

	resp, err := client.Get(versionCheckURL)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't get version check json from %s: %T %s\n", versionCheckURL, err, err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}
	if resp != nil && resp.Body != nil {
		defer resp.Body.Close()
	}

	// read the body entirely
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't read response body from %s: %s", versionCheckURL, err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(body)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
	}

	versionCheckLastTime = now
	versionCheckJSON = body
}

// ---------
// filtering
// ---------
func handleFilteringEnable(w http.ResponseWriter, r *http.Request) {
	config.DNS.FilteringEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleFilteringDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.FilteringEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}
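
// handleFilteringStatus returns the filtering state, the configured filter lists and the user-defined rules as JSON.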
func handleFilteringStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.DNS.FilteringEnabled,
	}

	config.RLock()
	data["filters"] = config.Filters
	data["user_rules"] = config.UserRules
	jsonVal, err := json.Marshal(data)
	config.RUnlock()

	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}
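
// handleFilteringAddURL validates the filter URL from the JSON request body, rejects duplicates, downloads and saves the filter, appends it to the configuration and reloads the DNS server.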
func handleFilteringAddURL(w http.ResponseWriter, r *http.Request) {
	filter := filter{}
	err := json.NewDecoder(r.Body).Decode(&filter)
	if err != nil {
		httpError(w, http.StatusBadRequest, "Failed to parse request body json: %s", err)
		return
	}

	if len(filter.URL) == 0 {
		http.Error(w, "URL parameter was not specified", http.StatusBadRequest)
		return
	}

	if valid := govalidator.IsRequestURL(filter.URL); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	// Check for duplicates
	for i := range config.Filters {
		if config.Filters[i].URL == filter.URL {
			errorText := fmt.Sprintf("Filter URL already added -- %s", filter.URL)
			log.Println(errorText)
			http.Error(w, errorText, http.StatusBadRequest)
			return
		}
	}

	// Set necessary properties
	filter.ID = assignUniqueFilterID()
	filter.Enabled = true

	// Download the filter contents
	ok, err := filter.update(true)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't fetch filter from url %s: %s", filter.URL, err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	if filter.RulesCount == 0 {
		errorText := fmt.Sprintf("Filter at the url %s has no rules (maybe it points to blank page?)", filter.URL)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	if !ok {
		errorText := fmt.Sprintf("Filter at the url %s is invalid (maybe it points to blank page?)", filter.URL)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	// Save the filter contents
	err = filter.save()
	if err != nil {
		errorText := fmt.Sprintf("Failed to save filter %d due to %s", filter.ID, err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	// URL is deemed valid, append it to filters, update config, write new filter file and tell dns to reload it
	// TODO: since we directly feed filters in-memory, revisit if writing configs is always necessary
	config.Filters = append(config.Filters, filter)
	err = writeAllConfigs()
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}

	reconfigureDNSServer()

	_, err = fmt.Fprintf(w, "OK %d rules\n", filter.RulesCount)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}
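
// handleFilteringRemoveURL deletes the filter with the given URL from the configuration, removes its file from disk and reloads the DNS server.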
func handleFilteringRemoveURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", http.StatusBadRequest)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	// go through each element and delete if url matches
	newFilters := config.Filters[:0]
	for _, filter := range config.Filters {
		if filter.URL != url {
			newFilters = append(newFilters, filter)
		} else {
			// Remove the filter file
			err := os.Remove(filter.Path())
			if err != nil {
				errorText := fmt.Sprintf("Couldn't remove the filter file: %s", err)
				http.Error(w, errorText, http.StatusInternalServerError)
				return
			}
		}
	}
	// Update the configuration after removing filter files
	config.Filters = newFilters
	httpUpdateConfigReloadDNSReturnOK(w, r)
}
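
// handleFilteringEnableURL marks a previously added filter URL as enabled, refreshes the filter rules and reloads the DNS server.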
func handleFilteringEnableURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", http.StatusBadRequest)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	found := false
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we will be operating on a copy
		if filter.URL == url {
			filter.Enabled = true
			found = true
		}
	}

	if !found {
		http.Error(w, "URL parameter was not previously added", http.StatusBadRequest)
		return
	}

	// kick off refresh of rules from new URLs
	refreshFiltersIfNeccessary(false)
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleFilteringDisableURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", http.StatusBadRequest)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	found := false
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we will be operating on a copy
		if filter.URL == url {
			filter.Enabled = false
			found = true
		}
	}

	if !found {
		http.Error(w, "URL parameter was not previously added", http.StatusBadRequest)
		return
	}

	httpUpdateConfigReloadDNSReturnOK(w, r)
}
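
// handleFilteringSetRules replaces the user-defined filtering rules with the newline-separated rules from the request body.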
func handleFilteringSetRules(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	config.UserRules = strings.Split(string(body), "\n")
	httpUpdateConfigReloadDNSReturnOK(w, r)
}
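
// handleFilteringRefresh kicks off a filter refresh via refreshFiltersIfNeccessary (forced when the "force" query parameter is non-empty) and reports how many filters were updated.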
func handleFilteringRefresh(w http.ResponseWriter, r *http.Request) {
	force := r.URL.Query().Get("force")
	updated := refreshFiltersIfNeccessary(force != "")
	fmt.Fprintf(w, "OK %d filters updated\n", updated)
}

// ------------
// safebrowsing
// ------------
func handleSafeBrowsingEnable(w http.ResponseWriter, r *http.Request) {
	config.DNS.SafeBrowsingEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeBrowsingDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.SafeBrowsingEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeBrowsingStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.DNS.SafeBrowsingEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}

// --------
// parental
// --------
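// handleParentalEnable reads the "sensitivity" parameter (a numeric level or one of the named levels), stores it in the configuration and turns parental control on.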
func handleParentalEnable(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errortext := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadRequest)
		return
	}

	sensitivity, ok := parameters["sensitivity"]
	if !ok {
		http.Error(w, "Sensitivity parameter was not specified", http.StatusBadRequest)
		return
	}

	switch sensitivity {
	case "3", "10", "13", "17":
		// already a numeric sensitivity value
	case "EARLY_CHILDHOOD":
		sensitivity = "3"
	case "YOUNG":
		sensitivity = "10"
	case "TEEN":
		sensitivity = "13"
	case "MATURE":
		sensitivity = "17"
	default:
		http.Error(w, "Sensitivity must be set to valid value", http.StatusBadRequest)
		return
	}
	i, err := strconv.Atoi(sensitivity)
	if err != nil {
		http.Error(w, "Sensitivity must be set to valid value", http.StatusBadRequest)
		return
	}
	config.DNS.ParentalSensitivity = i
	config.DNS.ParentalEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleParentalDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.ParentalEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleParentalStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.DNS.ParentalEnabled,
	}
	if config.DNS.ParentalEnabled {
		data["sensitivity"] = config.DNS.ParentalSensitivity
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}

// ----------
// safesearch
// ----------
func handleSafeSearchEnable(w http.ResponseWriter, r *http.Request) {
	config.DNS.SafeSearchEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeSearchDisable(w http.ResponseWriter, r *http.Request) {
	config.DNS.SafeSearchEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeSearchStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.DNS.SafeSearchEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}
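
// registerControlHandlers attaches all /control/... endpoints to the default HTTP mux, wrapping each handler with optionalAuth and the appropriate HTTP method guard.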
func registerControlHandlers() {
	http.HandleFunc("/control/status", optionalAuth(ensureGET(handleStatus)))
	http.HandleFunc("/control/enable_protection", optionalAuth(ensurePOST(handleProtectionEnable)))
	http.HandleFunc("/control/disable_protection", optionalAuth(ensurePOST(handleProtectionDisable)))
	http.HandleFunc("/control/querylog", optionalAuth(ensureGET(dnsforward.HandleQueryLog)))
	http.HandleFunc("/control/querylog_enable", optionalAuth(ensurePOST(handleQueryLogEnable)))
	http.HandleFunc("/control/querylog_disable", optionalAuth(ensurePOST(handleQueryLogDisable)))
	http.HandleFunc("/control/set_upstream_dns", optionalAuth(ensurePOST(handleSetUpstreamDNS)))
	http.HandleFunc("/control/test_upstream_dns", optionalAuth(ensurePOST(handleTestUpstreamDNS)))
	http.HandleFunc("/control/i18n/change_language", optionalAuth(ensurePOST(handleI18nChangeLanguage)))
	http.HandleFunc("/control/i18n/current_language", optionalAuth(ensureGET(handleI18nCurrentLanguage)))
	http.HandleFunc("/control/stats_top", optionalAuth(ensureGET(dnsforward.HandleStatsTop)))
	http.HandleFunc("/control/stats", optionalAuth(ensureGET(dnsforward.HandleStats)))
	http.HandleFunc("/control/stats_history", optionalAuth(ensureGET(dnsforward.HandleStatsHistory)))
	http.HandleFunc("/control/stats_reset", optionalAuth(ensurePOST(dnsforward.HandleStatsReset)))
	http.HandleFunc("/control/version.json", optionalAuth(handleGetVersionJSON))
	http.HandleFunc("/control/filtering/enable", optionalAuth(ensurePOST(handleFilteringEnable)))
	http.HandleFunc("/control/filtering/disable", optionalAuth(ensurePOST(handleFilteringDisable)))
	http.HandleFunc("/control/filtering/add_url", optionalAuth(ensurePUT(handleFilteringAddURL)))
	http.HandleFunc("/control/filtering/remove_url", optionalAuth(ensureDELETE(handleFilteringRemoveURL)))
	http.HandleFunc("/control/filtering/enable_url", optionalAuth(ensurePOST(handleFilteringEnableURL)))
	http.HandleFunc("/control/filtering/disable_url", optionalAuth(ensurePOST(handleFilteringDisableURL)))
	http.HandleFunc("/control/filtering/refresh", optionalAuth(ensurePOST(handleFilteringRefresh)))
	http.HandleFunc("/control/filtering/status", optionalAuth(ensureGET(handleFilteringStatus)))
	http.HandleFunc("/control/filtering/set_rules", optionalAuth(ensurePUT(handleFilteringSetRules)))
	http.HandleFunc("/control/safebrowsing/enable", optionalAuth(ensurePOST(handleSafeBrowsingEnable)))
	http.HandleFunc("/control/safebrowsing/disable", optionalAuth(ensurePOST(handleSafeBrowsingDisable)))
	http.HandleFunc("/control/safebrowsing/status", optionalAuth(ensureGET(handleSafeBrowsingStatus)))
	http.HandleFunc("/control/parental/enable", optionalAuth(ensurePOST(handleParentalEnable)))
	http.HandleFunc("/control/parental/disable", optionalAuth(ensurePOST(handleParentalDisable)))
	http.HandleFunc("/control/parental/status", optionalAuth(ensureGET(handleParentalStatus)))
	http.HandleFunc("/control/safesearch/enable", optionalAuth(ensurePOST(handleSafeSearchEnable)))
	http.HandleFunc("/control/safesearch/disable", optionalAuth(ensurePOST(handleSafeSearchDisable)))
	http.HandleFunc("/control/safesearch/status", optionalAuth(ensureGET(handleSafeSearchStatus)))
	http.HandleFunc("/control/dhcp/status", optionalAuth(ensureGET(handleDHCPStatus)))
	http.HandleFunc("/control/dhcp/interfaces", optionalAuth(ensureGET(handleDHCPInterfaces)))
	http.HandleFunc("/control/dhcp/set_config", optionalAuth(ensurePOST(handleDHCPSetConfig)))
	http.HandleFunc("/control/dhcp/find_active_dhcp", optionalAuth(ensurePOST(handleDHCPFindActiveServer)))
}