Merge branch 'master' into 5615-rm-raw

commit 59a8265e31

CHANGELOG.md
@@ -25,6 +25,14 @@ NOTE: Add new changes BELOW THIS COMMENT.
 
 ### Added
 
+- Two new HTTP APIs, `PUT /control/stats/config/update` and `GET
+  /control/stats/config`, which can be used to set and receive the statistics
+  configuration. See openapi/openapi.yaml for the full description.
+- Two new HTTP APIs, `PUT /control/querylog/config/update` and `GET
+  /control/querylog/config`, which can be used to set and receive the query log
+  configuration. See openapi/openapi.yaml for the full description.
+- The ability to set custom IP for EDNS Client Subnet by using the DNS-server
+  configuration section on the DNS settings page in the UI ([#1472]).
 - The ability to manage safesearch for each service by using the new
   `safe_search` field ([#1163]).
 
@@ -35,8 +43,26 @@ NOTE: Add new changes BELOW THIS COMMENT.
 
 #### Configuration Changes
 
-In this release, the schema version has changed from 17 to 19.
+In this release, the schema version has changed from 17 to 20.
 
+- Property `statistics.interval`, which in schema versions 19 and earlier used
+  to be an integer number of days, is now a string with a human-readable
+  duration:
+
+  ```yaml
+  # BEFORE:
+  'statistics':
+    # …
+    'interval': 1
+
+  # AFTER:
+  'statistics':
+    # …
+    'interval': '24h'
+  ```
+
+  To roll back this change, convert the property back into days and change the
+  `schema_version` back to `19`.
 - The `dns.safesearch_enabled` field has been replaced with `safe_search`
   object containing per-service settings.
 - The `clients.persistent.safesearch_enabled` field has been replaced with
@@ -62,12 +88,41 @@ In this release, the schema version has changed from 17 to 19.
   client's specific `clients.persistent.safesearch` and then change the
   `schema_version` back to `17`.
 
+### Deprecated
+
+- The `POST /control/safesearch/enable` HTTP API is deprecated. Use the new
+  `PUT /control/safesearch/settings` API.
+- The `POST /control/safesearch/disable` HTTP API is deprecated. Use the new
+  `PUT /control/safesearch/settings` API.
+- The `safesearch_enabled` field is deprecated in the following HTTP APIs:
+  - `GET /control/clients`
+  - `POST /control/clients/add`
+  - `POST /control/clients/update`
+  - `GET /control/clients/find?ip0=...&ip1=...&ip2=...`
+
+  Check `openapi/openapi.yaml` for more details.
+- The `GET /control/stats_info` HTTP API; use the new `GET
+  /control/stats/config` API instead.
+
+  **NOTE:** If the interval is custom, it will be equal to `90` days for
+  compatibility reasons. See openapi/openapi.yaml and `openapi/CHANGELOG.md`.
+- The `POST /control/stats_config` HTTP API; use the new `PUT
+  /control/stats/config/update` API instead.
+- The `GET /control/querylog_info` HTTP API; use the new `GET
+  /control/querylog/config` API instead.
+
+  **NOTE:** If the interval is custom, it will be equal to `90` days for
+  compatibility reasons. See openapi/openapi.yaml and `openapi/CHANGELOG.md`.
+- The `POST /control/querylog_config` HTTP API; use the new `PUT
+  /control/querylog/config/update` API instead.
+
 ### Fixed
 
 - Panic caused by empty top-level domain name label in `/etc/hosts` files
   ([#5584]).
 
 [#1163]: https://github.com/AdguardTeam/AdGuardHome/issues/1163
+[#1472]: https://github.com/AdguardTeam/AdGuardHome/issues/1472
 [#5567]: https://github.com/AdguardTeam/AdGuardHome/issues/5567
 [#5584]: https://github.com/AdguardTeam/AdGuardHome/issues/5584
 
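To make the safe search changes above concrete, here is a minimal sketch of the per-service object that the new `PUT /control/safesearch/settings` API accepts. The service keys follow the `SafeSearchConfig` shape documented in the web client's API wrapper later in this commit; the bare `fetch` call is illustrative only, since the web UI goes through its own request helper.

```js
// Illustrative only: the per-service safe search settings object.  The same
// keys are used by the new `safe_search` field in the configuration file.
const safeSearchConfig = {
    enabled: true,
    bing: true,
    duckduckgo: true,
    google: true,
    pixabay: true,
    yandex: true,
    youtube: false,
};

// A hand-rolled request against the new endpoint (the project's own client
// wraps this in its Api class instead of calling fetch directly).
async function updateSafeSearch(config) {
    const resp = await fetch('/control/safesearch/settings', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(config),
    });
    if (!resp.ok) {
        throw new Error(`unexpected status: ${resp.status}`);
    }
}

updateSafeSearch(safeSearchConfig).catch(console.error);
```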
@@ -100,8 +155,6 @@ See also the [v0.107.26 GitHub milestone][ms-v0.107.26].
 
 #### Configuration Changes
 
-In this release, the schema version has changed from 16 to 17.
-
 - Property `edns_client_subnet`, which in schema versions 16 and earlier used
   to be a part of the `dns` object, is now part of the `dns.edns_client_subnet`
   object:
 
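The statistics and query log configuration endpoints follow the same pattern. A rough sketch of the old-to-new mapping, using plain `fetch` for illustration and assuming the update body mirrors the fields returned by the corresponding `GET`; the exact request and response bodies are described in `openapi/openapi.yaml`, not here.

```js
// Assumed helper for brevity; paths are relative to the AdGuard Home API root.
const api = (path, method, body) => fetch(`/control/${path}`, {
    method,
    headers: { 'Content-Type': 'application/json' },
    body: body === undefined ? undefined : JSON.stringify(body),
});

async function migrationSketch() {
    // Old: GET /control/stats_info and POST /control/stats_config.
    // New: read and update the statistics configuration like this.
    const statsConfig = await (await api('stats/config', 'GET')).json();
    await api('stats/config/update', 'PUT', statsConfig);

    // Old: GET /control/querylog_info and POST /control/querylog_config.
    // New: same pair of calls for the query log configuration.
    const querylogConfig = await (await api('querylog/config', 'GET')).json();
    await api('querylog/config/update', 'PUT', querylogConfig);
}
```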
@@ -167,6 +167,7 @@
     "enabled_parental_toast": "Enabled Parental Control",
     "disabled_safe_search_toast": "Disabled Safe Search",
     "enabled_save_search_toast": "Enabled Safe Search",
+    "updated_save_search_toast": "Safe Search settings updated",
     "enabled_table_header": "Enabled",
     "name_table_header": "Name",
     "list_url_table_header": "List URL",
@@ -290,6 +291,8 @@
     "rate_limit": "Rate limit",
     "edns_enable": "Enable EDNS client subnet",
     "edns_cs_desc": "Add the EDNS Client Subnet option (ECS) to upstream requests and log the values sent by the clients in the query log.",
+    "edns_use_custom_ip": "Use custom IP for EDNS",
+    "edns_use_custom_ip_desc": "Allow to use custom IP for EDNS",
     "rate_limit_desc": "The number of requests per second allowed per client. Setting it to 0 means no limit.",
     "blocking_ipv4_desc": "IP address to be returned for a blocked A request",
     "blocking_ipv6_desc": "IP address to be returned for a blocked AAAA request",
@@ -523,6 +526,10 @@
     "statistics_retention_confirm": "Are you sure you want to change statistics retention? If you decrease the interval value, some data will be lost",
     "statistics_cleared": "Statistics successfully cleared",
     "statistics_enable": "Enable statistics",
+    "ignore_domains": "Ignored domains (separated by newline)",
+    "ignore_domains_title": "Ignored domains",
+    "ignore_domains_desc_stats": "Queries for these domains are not written to the statistics",
+    "ignore_domains_desc_query": "Queries for these domains are not written to the query log",
     "interval_hours": "{{count}} hour",
     "interval_hours_plural": "{{count}} hours",
     "filters_configuration": "Filters configuration",
@@ -642,5 +649,6 @@
     "anonymizer_notification": "<0>Note:</0> IP anonymization is enabled. You can disable it in <1>General settings</1>.",
     "confirm_dns_cache_clear": "Are you sure you want to clear DNS cache?",
     "cache_cleared": "DNS cache successfully cleared",
-    "clear_cache": "Clear cache"
+    "clear_cache": "Clear cache",
+    "make_static": "Make static"
 }
@@ -24,6 +24,12 @@ import { getFilteringStatus, setRules } from './filtering';
 export const toggleSettingStatus = createAction('SETTING_STATUS_TOGGLE');
 export const showSettingsFailure = createAction('SETTINGS_FAILURE_SHOW');
 
+/**
+ *
+ * @param {*} settingKey = SETTINGS_NAMES
+ * @param {*} status: boolean | SafeSearchConfig
+ * @returns
+ */
 export const toggleSetting = (settingKey, status) => async (dispatch) => {
     let successMessage = '';
     try {
@@ -49,14 +55,9 @@ export const toggleSetting = (settingKey, status) => async (dispatch) => {
             dispatch(toggleSettingStatus({ settingKey }));
             break;
         case SETTINGS_NAMES.safesearch:
-            if (status) {
-                successMessage = 'disabled_safe_search_toast';
-                await apiClient.disableSafesearch();
-            } else {
-                successMessage = 'enabled_save_search_toast';
-                await apiClient.enableSafesearch();
-            }
-            dispatch(toggleSettingStatus({ settingKey }));
+            successMessage = 'updated_save_search_toast';
+            await apiClient.updateSafesearch(status);
+            dispatch(toggleSettingStatus({ settingKey, value: status }));
             break;
         default:
             break;
@@ -71,7 +72,9 @@ export const initSettingsRequest = createAction('SETTINGS_INIT_REQUEST');
 export const initSettingsFailure = createAction('SETTINGS_INIT_FAILURE');
 export const initSettingsSuccess = createAction('SETTINGS_INIT_SUCCESS');
 
-export const initSettings = (settingsList) => async (dispatch) => {
+export const initSettings = (settingsList = {
+    safebrowsing: {}, parental: {},
+}) => async (dispatch) => {
     dispatch(initSettingsRequest());
     try {
         const safebrowsingStatus = await apiClient.getSafebrowsingStatus();
@@ -80,7 +83,6 @@ export const initSettings = (settingsList) => async (dispatch) => {
         const {
             safebrowsing,
             parental,
-            safesearch,
         } = settingsList;
         const newSettingsList = {
             safebrowsing: {
@@ -92,8 +94,7 @@ export const initSettings = (settingsList) => async (dispatch) => {
                 enabled: parentalStatus.enabled,
             },
             safesearch: {
-                ...safesearch,
-                enabled: safesearchStatus.enabled,
+                ...safesearchStatus,
             },
         };
         dispatch(initSettingsSuccess({ settingsList: newSettingsList }));
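The reworked `toggleSetting` action above now takes either a boolean or a whole safe search configuration object when the key is `safesearch`. A minimal sketch of dispatching it from application code; the import paths are illustrative and `store` is an assumed, already-configured Redux store.

```js
// Illustrative import paths.
import { toggleSetting } from './actions';
import { SETTINGS_NAMES } from './helpers/constants';

function enableSafeSearchEverywhere(store) {
    // With an object payload, toggleSetting calls apiClient.updateSafesearch()
    // and stores the object via toggleSettingStatus({ settingKey, value }).
    store.dispatch(toggleSetting(SETTINGS_NAMES.safesearch, {
        enabled: true,
        bing: true,
        duckduckgo: true,
        google: true,
        pixabay: true,
        yandex: true,
        youtube: false,
    }));
}
```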
@@ -177,7 +177,7 @@ export const getLogsConfigSuccess = createAction('GET_LOGS_CONFIG_SUCCESS');
 export const getLogsConfig = () => async (dispatch) => {
     dispatch(getLogsConfigRequest());
     try {
-        const data = await apiClient.getQueryLogInfo();
+        const data = await apiClient.getQueryLogConfig();
         dispatch(getLogsConfigSuccess(data));
     } catch (error) {
         dispatch(addErrorToast({ error }));

@@ -13,7 +13,7 @@ export const getStatsConfigSuccess = createAction('GET_STATS_CONFIG_SUCCESS');
 export const getStatsConfig = () => async (dispatch) => {
     dispatch(getStatsConfigRequest());
     try {
-        const data = await apiClient.getStatsInfo();
+        const data = await apiClient.getStatsConfig();
         dispatch(getStatsConfigSuccess(data));
     } catch (error) {
         dispatch(addErrorToast({ error }));
@@ -208,24 +208,40 @@ class Api {
     // Safesearch
     SAFESEARCH_STATUS = { path: 'safesearch/status', method: 'GET' };
 
-    SAFESEARCH_ENABLE = { path: 'safesearch/enable', method: 'POST' };
+    SAFESEARCH_UPDATE = { path: 'safesearch/settings', method: 'PUT' };
 
-    SAFESEARCH_DISABLE = { path: 'safesearch/disable', method: 'POST' };
-
     getSafesearchStatus() {
         const { path, method } = this.SAFESEARCH_STATUS;
         return this.makeRequest(path, method);
     }
 
-    enableSafesearch() {
-        const { path, method } = this.SAFESEARCH_ENABLE;
-        return this.makeRequest(path, method);
+    /**
+     * interface SafeSearchConfig {
+            "enabled": boolean,
+            "bing": boolean,
+            "duckduckgo": boolean,
+            "google": boolean,
+            "pixabay": boolean,
+            "yandex": boolean,
+            "youtube": boolean
+     * }
+     * @param {*} data - SafeSearchConfig
+     * @returns 200 ok
+     */
+    updateSafesearch(data) {
+        const { path, method } = this.SAFESEARCH_UPDATE;
+        return this.makeRequest(path, method, { data });
     }
 
-    disableSafesearch() {
-        const { path, method } = this.SAFESEARCH_DISABLE;
-        return this.makeRequest(path, method);
-    }
+    // enableSafesearch() {
+    //     const { path, method } = this.SAFESEARCH_ENABLE;
+    //     return this.makeRequest(path, method);
+    // }
+
+    // disableSafesearch() {
+    //     const { path, method } = this.SAFESEARCH_DISABLE;
+    //     return this.makeRequest(path, method);
+    // }
 
     // Language
 
@@ -497,9 +513,9 @@ class Api {
     // Settings for statistics
     GET_STATS = { path: 'stats', method: 'GET' };
 
-    STATS_INFO = { path: 'stats_info', method: 'GET' };
+    GET_STATS_CONFIG = { path: 'stats/config', method: 'GET' };
 
-    STATS_CONFIG = { path: 'stats_config', method: 'POST' };
+    UPDATE_STATS_CONFIG = { path: 'stats/config/update', method: 'PUT' };
 
     STATS_RESET = { path: 'stats_reset', method: 'POST' };
 
@@ -508,13 +524,13 @@ class Api {
         return this.makeRequest(path, method);
     }
 
-    getStatsInfo() {
-        const { path, method } = this.STATS_INFO;
+    getStatsConfig() {
+        const { path, method } = this.GET_STATS_CONFIG;
         return this.makeRequest(path, method);
     }
 
     setStatsConfig(data) {
-        const { path, method } = this.STATS_CONFIG;
+        const { path, method } = this.UPDATE_STATS_CONFIG;
         const config = {
             data,
         };
@@ -529,9 +545,9 @@ class Api {
     // Query log
     GET_QUERY_LOG = { path: 'querylog', method: 'GET' };
 
-    QUERY_LOG_CONFIG = { path: 'querylog_config', method: 'POST' };
+    UPDATE_QUERY_LOG_CONFIG = { path: 'querylog/config/update', method: 'PUT' };
 
-    QUERY_LOG_INFO = { path: 'querylog_info', method: 'GET' };
+    GET_QUERY_LOG_CONFIG = { path: 'querylog/config', method: 'GET' };
 
     QUERY_LOG_CLEAR = { path: 'querylog_clear', method: 'POST' };
 
@@ -543,13 +559,13 @@ class Api {
         return this.makeRequest(url, method);
     }
 
-    getQueryLogInfo() {
-        const { path, method } = this.QUERY_LOG_INFO;
+    getQueryLogConfig() {
+        const { path, method } = this.GET_QUERY_LOG_CONFIG;
         return this.makeRequest(path, method);
     }
 
     setQueryLogConfig(data) {
-        const { path, method } = this.QUERY_LOG_CONFIG;
+        const { path, method } = this.UPDATE_QUERY_LOG_CONFIG;
         const config = {
             data,
         };
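A short sketch of how the renamed wrappers above are used; `apiClient` is the singleton instance of this `Api` class that the action creators import, and the field values are illustrative. The millisecond interval and the `ignored` list mirror what the settings forms in this commit send.

```js
// A sketch only, not project code.
async function exampleUsage(apiClient) {
    // GET stats/config
    const statsConfig = await apiClient.getStatsConfig();
    console.log(statsConfig.interval, statsConfig.ignored);

    // PUT stats/config/update: the settings form in this commit sends the
    // interval in milliseconds and ignored domains as an array of strings.
    await apiClient.setStatsConfig({
        enabled: true,
        interval: 7 * 24 * 60 * 60 * 1000,
        ignored: ['example.org'],
    });

    // PUT safesearch/settings with a SafeSearchConfig object; the UI always
    // sends the full set of service flags, abbreviated here.
    await apiClient.updateSafesearch({
        enabled: true,
        google: true,
        youtube: false,
    });
}
```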
@@ -29,8 +29,11 @@ const BlockedDomains = ({
     blockedFiltering,
     replacedSafebrowsing,
     replacedParental,
+    replacedSafesearch,
 }) => {
-    const totalBlocked = blockedFiltering + replacedSafebrowsing + replacedParental;
+    const totalBlocked = (
+        blockedFiltering + replacedSafebrowsing + replacedParental + replacedSafesearch
+    );
 
     return (
         <Card
@@ -71,6 +74,7 @@ BlockedDomains.propTypes = {
     topBlockedDomains: PropTypes.array.isRequired,
     blockedFiltering: PropTypes.number.isRequired,
     replacedSafebrowsing: PropTypes.number.isRequired,
+    replacedSafesearch: PropTypes.number.isRequired,
     replacedParental: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
     subtitle: PropTypes.string.isRequired,

@@ -151,6 +151,7 @@ const Dashboard = ({
                     topBlockedDomains={stats.topBlockedDomains}
                     blockedFiltering={stats.numBlockedFiltering}
                     replacedSafebrowsing={stats.numReplacedSafebrowsing}
+                    replacedSafesearch={stats.numReplacedSafesearch}
                     replacedParental={stats.numReplacedParental}
                     refreshButton={refreshButton}
                 />
@@ -7,6 +7,7 @@ import { useDispatch, useSelector } from 'react-redux';
 import ReactTable from 'react-table';
 
 import { getAllBlockedServices } from '../../../../actions/services';
+import { initSettings } from '../../../../actions';
 import {
     splitByNewLine,
     countClientsStatistics,
@@ -38,9 +39,13 @@ const ClientsTable = ({
     const [t] = useTranslation();
     const dispatch = useDispatch();
     const services = useSelector((store) => store?.services);
+    const globalSettings = useSelector((store) => store?.settings.settingsList) || {};
+
+    const { safesearch } = globalSettings;
+
     useEffect(() => {
         dispatch(getAllBlockedServices());
+        dispatch(initSettings());
     }, []);
 
     const handleFormAdd = (values) => {
@@ -107,6 +112,7 @@ const ClientsTable = ({
             tags: [],
             use_global_settings: true,
             use_global_blocked_services: true,
+            safe_search: { ...(safesearch || {}) },
         };
     };
 
@@ -11,7 +11,7 @@ import Select from 'react-select';
 import i18n from '../../../i18n';
 import Tabs from '../../ui/Tabs';
 import Examples from '../Dns/Upstream/Examples';
-import { toggleAllServices, trimLinesAndRemoveEmpty } from '../../../helpers/helpers';
+import { toggleAllServices, trimLinesAndRemoveEmpty, captitalizeWords } from '../../../helpers/helpers';
 import {
     renderInputField,
     renderGroupField,
@@ -40,10 +40,6 @@ const settingsCheckboxes = [
         name: 'parental_enabled',
         placeholder: 'use_adguard_parental',
     },
-    {
-        name: 'safesearch_enabled',
-        placeholder: 'enforce_safe_search',
-    },
 ];
 const validate = (values) => {
     const errors = {};
@@ -139,8 +135,12 @@ let Form = (props) => {
         processingUpdating,
         invalid,
         tagsOptions,
+        initialValues,
     } = props;
     const services = useSelector((store) => store?.services);
+    const { safe_search } = initialValues;
+    const safeSearchServices = { ...safe_search };
+    delete safeSearchServices.enabled;
 
     const [activeTabLabel, setActiveTabLabel] = useState('settings');
 
@@ -163,6 +163,28 @@ let Form = (props) => {
                     />
                 </div>
             ))}
+            <div className="form__group">
+                <Field
+                    name="safe_search.enabled"
+                    type="checkbox"
+                    component={CheckboxField}
+                    placeholder={t('enforce_safe_search')}
+                    disabled={useGlobalSettings}
+                />
+            </div>
+            <div className='form__group--inner'>
+                {Object.keys(safeSearchServices).map((searchKey) => (
+                    <div key={searchKey}>
+                        <Field
+                            name={`safe_search.${searchKey}`}
+                            type="checkbox"
+                            component={CheckboxField}
+                            placeholder={captitalizeWords(searchKey)}
+                            disabled={useGlobalSettings}
+                        />
+                    </div>
+                ))}
+            </div>
         </div>,
     },
     block_services: {
@@ -358,6 +380,7 @@ Form.propTypes = {
     processingUpdating: PropTypes.bool.isRequired,
     invalid: PropTypes.bool.isRequired,
     tagsOptions: PropTypes.array.isRequired,
+    initialValues: PropTypes.object,
 };
 
 const selector = formValueSelector(FORM_NAME.CLIENT);
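For the per-client form above, the visible effect is on the request payload: the deprecated `safesearch_enabled` boolean gives way to a `safe_search` object in the `/control/clients/add`, `/control/clients/update`, and related APIs named in the changelog. A sketch of the shape, with illustrative values; only `safe_search` is new here, the other defaults match what the clients table in this commit pre-fills.

```js
// Illustrative payload for the client-management endpoints.
const client = {
    name: 'kids-laptop',          // illustrative
    ids: ['192.168.1.50'],        // illustrative
    tags: [],
    use_global_settings: false,
    use_global_blocked_services: true,
    safe_search: {
        enabled: true,
        bing: true,
        duckduckgo: true,
        google: true,
        pixabay: true,
        yandex: true,
        youtube: true,
    },
};
// Sent to POST /control/clients/add or POST /control/clients/update through
// the existing client-management wrappers in the web client.
```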
@@ -1,9 +1,11 @@
 import React, { Component } from 'react';
+import { connect } from 'react-redux';
 import PropTypes from 'prop-types';
 import ReactTable from 'react-table';
 import { Trans, withTranslation } from 'react-i18next';
 import { LEASES_TABLE_DEFAULT_PAGE_SIZE } from '../../../helpers/constants';
 import { sortIp } from '../../../helpers/helpers';
+import { toggleLeaseModal } from '../../../actions';
 
 class Leases extends Component {
     cellWrap = ({ value }) => (
@@ -14,6 +16,30 @@ class Leases extends Component {
         </div>
     );
 
+    convertToStatic = (data) => () => {
+        const { dispatch } = this.props;
+        dispatch(toggleLeaseModal(data));
+    }
+
+    makeStatic = ({ row }) => {
+        const { t, disabledLeasesButton } = this.props;
+        return (
+            <div className="logs__row logs__row--center">
+                <button
+                    type="button"
+                    className="btn btn-icon btn-icon--green btn-outline-secondary btn-sm"
+                    title={t('make_static')}
+                    onClick={this.convertToStatic(row)}
+                    disabled={disabledLeasesButton}
+                >
+                    <svg className="icons icon12">
+                        <use xlinkHref="#plus" />
+                    </svg>
+                </button>
+            </div>
+        );
+    }
+
     render() {
         const { leases, t } = this.props;
         return (
@@ -23,20 +49,27 @@ class Leases extends Component {
                     {
                         Header: 'MAC',
                         accessor: 'mac',
+                        minWidth: 180,
                         Cell: this.cellWrap,
                     }, {
                         Header: 'IP',
                         accessor: 'ip',
+                        minWidth: 230,
                         Cell: this.cellWrap,
                         sortMethod: sortIp,
                     }, {
                         Header: <Trans>dhcp_table_hostname</Trans>,
                         accessor: 'hostname',
+                        minWidth: 230,
                         Cell: this.cellWrap,
                     }, {
                         Header: <Trans>dhcp_table_expires</Trans>,
                         accessor: 'expires',
+                        minWidth: 220,
                         Cell: this.cellWrap,
+                    }, {
+                        Header: <Trans>actions_table_header</Trans>,
+                        Cell: this.makeStatic,
                     },
                 ]}
                 pageSize={LEASES_TABLE_DEFAULT_PAGE_SIZE}
@@ -53,6 +86,8 @@
 Leases.propTypes = {
     leases: PropTypes.array,
     t: PropTypes.func,
+    dispatch: PropTypes.func,
+    disabledLeasesButton: PropTypes.bool,
 };
 
-export default withTranslation()(Leases);
+export default withTranslation()(connect(() => ({}), (dispatch) => ({ dispatch }))(Leases));
@@ -2,7 +2,7 @@ import React from 'react';
 import PropTypes from 'prop-types';
 import { Field, reduxForm } from 'redux-form';
 import { Trans, useTranslation } from 'react-i18next';
-import { useDispatch } from 'react-redux';
+import { useDispatch, useSelector, shallowEqual } from 'react-redux';
 
 import { renderInputField, normalizeMac } from '../../../../helpers/form';
 import {
@@ -25,6 +25,7 @@ const Form = ({
 }) => {
     const { t } = useTranslation();
     const dispatch = useDispatch();
+    const dynamicLease = useSelector((store) => store.dhcp.leaseModalConfig, shallowEqual);
 
     const onClick = () => {
         reset();
@@ -87,7 +88,7 @@ const Form = ({
             <button
                 type="submit"
                 className="btn btn-success btn-standard"
-                disabled={submitting || pristine || processingAdding}
+                disabled={submitting || processingAdding || (pristine && !dynamicLease)}
             >
                 <Trans>save_btn</Trans>
             </button>

@@ -2,7 +2,7 @@ import React from 'react';
 import PropTypes from 'prop-types';
 import { Trans, withTranslation } from 'react-i18next';
 import ReactModal from 'react-modal';
-import { useDispatch } from 'react-redux';
+import { shallowEqual, useDispatch, useSelector } from 'react-redux';
 import Form from './Form';
 import { toggleLeaseModal } from '../../../../actions';
 
@@ -18,6 +18,9 @@ const Modal = ({
     const dispatch = useDispatch();
 
     const toggleModal = () => dispatch(toggleLeaseModal());
+    const leaseInitialData = useSelector(
+        (state) => state.dhcp.leaseModalConfig, shallowEqual,
+    ) || {};
 
     return (
         <ReactModal
@@ -37,9 +40,9 @@ const Modal = ({
             </div>
             <Form
                 initialValues={{
-                    mac: '',
-                    ip: '',
-                    hostname: '',
+                    mac: leaseInitialData.mac ?? '',
+                    ip: leaseInitialData.ip ?? '',
+                    hostname: leaseInitialData.hostname ?? '',
                     cidr,
                     rangeStart,
                     rangeEnd,
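The DHCP pieces above add a "make static" flow: the leases table dispatches `toggleLeaseModal` with the selected lease, the `dhcp` reducer (changed later in this commit) stores it as `leaseModalConfig`, and the modal pre-fills the static-lease form from it. A condensed sketch, assuming a configured Redux store and an illustrative import path:

```js
import { toggleLeaseModal } from './actions'; // illustrative path

// Opens the static-lease modal for a dynamic lease; the dhcp reducer stores
// the payload under state.dhcp.leaseModalConfig and the modal reads it back
// as the form's initial values.
function makeLeaseStatic(store, lease) {
    store.dispatch(toggleLeaseModal(lease));
}

// Example lease row (values illustrative):
// makeLeaseStatic(store, { mac: '00:11:22:33:44:55', ip: '192.168.1.50', hostname: 'laptop' });
```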
@@ -54,17 +54,20 @@ const StaticLeases = ({
                     {
                         Header: 'MAC',
                         accessor: 'mac',
+                        minWidth: 180,
                         Cell: cellWrap,
                     },
                     {
                         Header: 'IP',
                         accessor: 'ip',
+                        minWidth: 230,
                         sortMethod: sortIp,
                         Cell: cellWrap,
                     },
                     {
                         Header: <Trans>dhcp_table_hostname</Trans>,
                         accessor: 'hostname',
+                        minWidth: 230,
                         Cell: cellWrap,
                     },
                     {

@@ -188,8 +188,8 @@ const Dhcp = () => {
 
     const inputtedIPv4values = dhcp?.values?.v4?.gateway_ip && dhcp?.values?.v4?.subnet_mask;
     const isEmptyConfig = !Object.values(dhcp?.values?.v4 ?? {}).some(Boolean);
-    const disabledLeasesButton = dhcp?.syncErrors || interfaces?.syncErrors
-        || !isInterfaceIncludesIpv4 || isEmptyConfig || processingConfig || !inputtedIPv4values;
+    const disabledLeasesButton = Boolean(dhcp?.syncErrors || interfaces?.syncErrors
+        || !isInterfaceIncludesIpv4 || isEmptyConfig || processingConfig || !inputtedIPv4values);
     const cidr = inputtedIPv4values ? `${dhcp?.values?.v4?.gateway_ip}/${subnetMaskToBitMask(dhcp?.values?.v4?.subnet_mask)}` : '';
 
     return <>
@@ -260,7 +260,7 @@ const Dhcp = () => {
                 >
                     <div className="row">
                         <div className="col">
-                            <Leases leases={leases} />
+                            <Leases leases={leases} disabledLeasesButton={disabledLeasesButton}/>
                         </div>
                     </div>
                 </Card>}
@@ -13,15 +13,11 @@ import {
     validateIpv4,
     validateIpv6,
     validateRequiredValue,
+    validateIp,
 } from '../../../../helpers/validators';
 import { BLOCKING_MODES, FORM_NAME, UINT32_RANGE } from '../../../../helpers/constants';
 
 const checkboxes = [
-    {
-        name: 'edns_cs_enabled',
-        placeholder: 'edns_enable',
-        subtitle: 'edns_cs_desc',
-    },
     {
         name: 'dnssec_enabled',
         placeholder: 'dnssec_enable',
@@ -66,6 +62,8 @@ const Form = ({
     const { t } = useTranslation();
     const {
         blocking_mode,
+        edns_cs_enabled,
+        edns_cs_use_custom,
     } = useSelector((state) => state.form[FORM_NAME.BLOCKING_MODE].values ?? {}, shallowEqual);
 
     return <form onSubmit={handleSubmit}>
@@ -92,6 +90,39 @@ const Form = ({
                 />
             </div>
         </div>
+        <div className="col-12">
+            <div className="form__group form__group--settings">
+                <Field
+                    name="edns_cs_enabled"
+                    type="checkbox"
+                    component={CheckboxField}
+                    placeholder={t('edns_enable')}
+                    disabled={processing}
+                    subtitle={t('edns_cs_desc')}
+                />
+            </div>
+        </div>
+        <div className="col-12 form__group form__group--inner">
+            <div className="form__group ">
+                <Field
+                    name="edns_cs_use_custom"
+                    type="checkbox"
+                    component={CheckboxField}
+                    placeholder={t('edns_use_custom_ip')}
+                    disabled={processing || !edns_cs_enabled}
+                    subtitle={t('edns_use_custom_ip_desc')}
+                />
+            </div>
+
+            {edns_cs_use_custom && (<Field
+                name="edns_cs_custom_ip"
+                component={renderInputField}
+                className="form-control"
+                placeholder={t('form_enter_ip')}
+                validate={[validateIp, validateRequiredValue]}
+            />)}
+
+        </div>
         {checkboxes.map(({ name, placeholder, subtitle }) => <div className="col-12" key={name}>
             <div className="form__group form__group--settings">
                 <Field
@@ -14,6 +14,8 @@ const Config = () => {
         blocking_ipv4,
         blocking_ipv6,
         edns_cs_enabled,
+        edns_cs_use_custom,
+        edns_cs_custom_ip,
         dnssec_enabled,
         disable_ipv6,
         processingSetConfig,
@@ -39,6 +41,8 @@ const Config = () => {
                     edns_cs_enabled,
                     disable_ipv6,
                     dnssec_enabled,
+                    edns_cs_use_custom,
+                    edns_cs_custom_ip,
                 }}
                 onSubmit={handleFormSubmit}
                 processing={processingSetConfig}
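Taken together, the two DNS-settings files above add three ECS-related values to the form. A sketch of the resulting settings object; the field names come from this commit, the values are illustrative.

```js
// Shape of the DNS-settings values handled by the form above.
const dnsSettingsValues = {
    edns_cs_enabled: true,            // send EDNS Client Subnet upstream
    edns_cs_use_custom: true,         // use a fixed address instead of the client's
    edns_cs_custom_ip: '203.0.113.1', // checked with validateIp and validateRequiredValue
    dnssec_enabled: true,
    disable_ipv6: false,
};
// The custom-IP input is only rendered while edns_cs_use_custom is checked,
// and both ECS checkboxes are disabled while the form is processing.
```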
@@ -4,18 +4,28 @@ import { Field, reduxForm } from 'redux-form';
 import { Trans, withTranslation } from 'react-i18next';
 import flow from 'lodash/flow';
 
-import { CheckboxField, renderRadioField, toFloatNumber } from '../../../helpers/form';
-import { FORM_NAME, QUERY_LOG_INTERVALS_DAYS } from '../../../helpers/constants';
+import {
+    CheckboxField,
+    renderRadioField,
+    toFloatNumber,
+    renderTextareaField,
+} from '../../../helpers/form';
+import {
+    FORM_NAME,
+    QUERY_LOG_INTERVALS_DAYS,
+    HOUR,
+    DAY,
+} from '../../../helpers/constants';
 import '../FormButton.css';
 
 const getIntervalTitle = (interval, t) => {
     switch (interval) {
-        case 0.25:
+        case 6 * HOUR:
             return t('interval_6_hour');
-        case 1:
+        case DAY:
             return t('interval_24_hour');
         default:
-            return t('interval_days', { count: interval });
+            return t('interval_days', { count: interval / DAY });
     }
 };
 
@@ -66,6 +76,22 @@ const Form = (props) => {
                 {getIntervalFields(processing, t, toFloatNumber)}
             </div>
         </div>
+        <label className="form__label form__label--with-desc">
+            <Trans>ignore_domains_title</Trans>
+        </label>
+        <div className="form__desc form__desc--top">
+            <Trans>ignore_domains_desc_query</Trans>
+        </div>
+        <div className="form__group form__group--settings">
+            <Field
+                name="ignored"
+                type="textarea"
+                className="form-control form-control--textarea font-monospace text-input"
+                component={renderTextareaField}
+                placeholder={t('ignore_domains')}
+                disabled={processing}
+            />
+        </div>
         <div className="mt-5">
             <button
                 type="submit"
@@ -10,13 +10,15 @@ class LogsConfig extends Component {
         const { t, interval: prevInterval } = this.props;
         const { interval } = values;
 
+        const data = { ...values, ignored: values.ignored ? values.ignored.split('\n') : [] };
+
         if (interval !== prevInterval) {
             // eslint-disable-next-line no-alert
             if (window.confirm(t('query_log_retention_confirm'))) {
-                this.props.setLogsConfig(values);
+                this.props.setLogsConfig(data);
             }
         } else {
-            this.props.setLogsConfig(values);
+            this.props.setLogsConfig(data);
         }
     };
 
@@ -30,7 +32,7 @@ class LogsConfig extends Component {
 
     render() {
         const {
-            t, enabled, interval, processing, processingClear, anonymize_client_ip,
+            t, enabled, interval, processing, processingClear, anonymize_client_ip, ignored,
         } = this.props;
 
         return (
@@ -45,6 +47,7 @@ class LogsConfig extends Component {
                     enabled,
                     interval,
                     anonymize_client_ip,
+                    ignored: ignored.join('\n'),
                 }}
                 onSubmit={this.handleFormSubmit}
                 processing={processing}
@@ -62,6 +65,7 @@ LogsConfig.propTypes = {
     enabled: PropTypes.bool.isRequired,
     anonymize_client_ip: PropTypes.bool.isRequired,
     processing: PropTypes.bool.isRequired,
+    ignored: PropTypes.array.isRequired,
     processingClear: PropTypes.bool.isRequired,
     setLogsConfig: PropTypes.func.isRequired,
     clearLogs: PropTypes.func.isRequired,
@@ -4,23 +4,31 @@ import { Field, reduxForm } from 'redux-form';
 import { Trans, withTranslation } from 'react-i18next';
 import flow from 'lodash/flow';
 
-import { renderRadioField, toNumber, CheckboxField } from '../../../helpers/form';
-import { FORM_NAME, STATS_INTERVALS_DAYS, DISABLED_STATS_INTERVAL } from '../../../helpers/constants';
+import {
+    renderRadioField,
+    toNumber,
+    CheckboxField,
+    renderTextareaField,
+} from '../../../helpers/form';
+import {
+    FORM_NAME,
+    STATS_INTERVALS_DAYS,
+    DAY,
+} from '../../../helpers/constants';
 import '../FormButton.css';
 
-const getIntervalTitle = (interval, t) => {
-    switch (interval) {
+const getIntervalTitle = (intervalMs, t) => {
+    switch (intervalMs / DAY) {
         case 1:
             return t('interval_24_hour');
         default:
-            return t('interval_days', { count: interval });
+            return t('interval_days', { count: intervalMs / DAY });
     }
 };
 
 const Form = (props) => {
     const {
         handleSubmit,
-        change,
         processing,
         submitting,
         invalid,
@@ -38,13 +46,6 @@ const Form = (props) => {
                 component={CheckboxField}
                 placeholder={t('statistics_enable')}
                 disabled={processing}
-                onChange={(event) => {
-                    if (event.target.checked) {
-                        change('interval', STATS_INTERVALS_DAYS[0]);
-                    } else {
-                        change('interval', DISABLED_STATS_INTERVAL);
-                    }
-                }}
             />
         </div>
         <label className="form__label form__label--with-desc">
@@ -65,15 +66,26 @@ const Form = (props) => {
                 placeholder={getIntervalTitle(interval, t)}
                 normalize={toNumber}
                 disabled={processing}
-                onChange={(event) => {
-                    if (event.target.checked) {
-                        change('enabled', true);
-                    }
-                }}
             />
         ))}
         </div>
     </div>
+        <label className="form__label form__label--with-desc">
+            <Trans>ignore_domains_title</Trans>
+        </label>
+        <div className="form__desc form__desc--top">
+            <Trans>ignore_domains_desc_stats</Trans>
+        </div>
+        <div className="form__group form__group--settings">
+            <Field
+                name="ignored"
+                type="textarea"
+                className="form-control form-control--textarea font-monospace text-input"
+                component={renderTextareaField}
+                placeholder={t('ignore_domains')}
+                disabled={processing}
+            />
+        </div>
         <div className="mt-5">
             <button
                 type="submit"
@@ -6,9 +6,13 @@ import Card from '../../ui/Card';
 import Form from './Form';
 
 class StatsConfig extends Component {
-    handleFormSubmit = (values) => {
+    handleFormSubmit = ({ enabled, interval, ignored }) => {
         const { t, interval: prevInterval } = this.props;
-        const config = { interval: values.interval };
+        const config = {
+            enabled,
+            interval,
+            ignored: ignored ? ignored.split('\n') : [],
+        };
 
         if (config.interval < prevInterval) {
             if (window.confirm(t('statistics_retention_confirm'))) {
@@ -29,7 +33,7 @@ class StatsConfig extends Component {
 
     render() {
         const {
-            t, interval, processing, processingReset,
+            t, interval, processing, processingReset, ignored, enabled,
         } = this.props;
 
         return (
@@ -42,7 +46,8 @@ class StatsConfig extends Component {
             <Form
                 initialValues={{
                     interval,
-                    enabled: !!interval,
+                    enabled,
+                    ignored: ignored.join('\n'),
                 }}
                 onSubmit={this.handleFormSubmit}
                 processing={processing}
@@ -57,6 +62,8 @@ class StatsConfig extends Component {
 
 StatsConfig.propTypes = {
     interval: PropTypes.number.isRequired,
+    ignored: PropTypes.array.isRequired,
+    enabled: PropTypes.bool.isRequired,
     processing: PropTypes.bool.isRequired,
     processingReset: PropTypes.bool.isRequired,
     setStatsConfig: PropTypes.func.isRequired,
@@ -10,7 +10,7 @@ import Checkbox from '../ui/Checkbox';
 import Loading from '../ui/Loading';
 import PageTitle from '../ui/PageTitle';
 import Card from '../ui/Card';
-import { getObjectKeysSorted } from '../../helpers/helpers';
+import { getObjectKeysSorted, captitalizeWords } from '../../helpers/helpers';
 import './Settings.css';
 
 const ORDER_KEY = 'order';
@@ -28,12 +28,6 @@ const SETTINGS = {
         subtitle: 'use_adguard_parental_hint',
         [ORDER_KEY]: 1,
     },
-    safesearch: {
-        enabled: false,
-        title: 'enforce_safe_search',
-        subtitle: 'enforce_save_search_hint',
-        [ORDER_KEY]: 2,
-    },
 };
 
 class Settings extends Component {
@@ -44,7 +38,7 @@ class Settings extends Component {
         this.props.getFilteringStatus();
     }
 
-    renderSettings = (settings) => getObjectKeysSorted(settings, ORDER_KEY)
+    renderSettings = (settings) => getObjectKeysSorted(SETTINGS, ORDER_KEY)
         .map((key) => {
             const setting = settings[key];
             const { enabled } = setting;
@@ -55,6 +49,35 @@ class Settings extends Component {
             />;
         });
 
+    renderSafeSearch = () => {
+        const { settings: { settingsList: { safesearch } } } = this.props;
+        const { enabled } = safesearch || {};
+        const searches = { ...(safesearch || {}) };
+        delete searches.enabled;
+        return (
+            <>
+                <Checkbox
+                    enabled={enabled}
+                    title='enforce_safe_search'
+                    subtitle='enforce_save_search_hint'
+                    handleChange={({ target: { checked: enabled } }) => this.props.toggleSetting('safesearch', { ...safesearch, enabled })}
+                />
+                <div className='form__group--inner'>
+                    {Object.keys(searches).map((searchKey) => (
+                        <Checkbox
+                            key={searchKey}
+                            enabled={searches[searchKey]}
+                            title={captitalizeWords(searchKey)}
+                            subtitle=''
+                            disabled={!safesearch.enabled}
+                            handleChange={({ target: { checked } }) => this.props.toggleSetting('safesearch', { ...safesearch, [searchKey]: checked })}
+                        />
+                    ))}
+                </div>
+            </>
+        );
+    };
+
     render() {
         const {
             settings,
@@ -92,12 +115,14 @@ class Settings extends Component {
                             setFiltersConfig={setFiltersConfig}
                         />
                         {this.renderSettings(settings.settingsList)}
+                        {this.renderSafeSearch()}
                     </div>
                 </Card>
             </div>
             <div className="col-md-12">
                 <LogsConfig
                     enabled={queryLogs.enabled}
+                    ignored={queryLogs.ignored}
                     interval={queryLogs.interval}
                     anonymize_client_ip={queryLogs.anonymize_client_ip}
                     processing={queryLogs.processingSetConfig}
@@ -109,6 +134,8 @@ class Settings extends Component {
             <div className="col-md-12">
                 <StatsConfig
                     interval={stats.interval}
+                    ignored={stats.ignored}
+                    enabled={stats.enabled}
                     processing={stats.processingSetConfig}
                     processingReset={stats.processingReset}
                     setStatsConfig={setStatsConfig}
@@ -139,6 +166,8 @@ Settings.propTypes = {
     stats: PropTypes.shape({
         processingGetConfig: PropTypes.bool,
         interval: PropTypes.number,
+        enabled: PropTypes.bool,
+        ignored: PropTypes.array,
         processingSetConfig: PropTypes.bool,
         processingReset: PropTypes.bool,
     }),
@@ -149,6 +178,7 @@ Settings.propTypes = {
         processingSetConfig: PropTypes.bool,
         processingClear: PropTypes.bool,
         processingGetConfig: PropTypes.bool,
+        ignored: PropTypes.array,
     }),
     filtering: PropTypes.shape({
         interval: PropTypes.number,
@@ -11,13 +11,14 @@ class Checkbox extends Component {
             subtitle,
             enabled,
             handleChange,
+            disabled,
             t,
         } = this.props;
         return (
             <div className="form__group form__group--checkbox">
                 <label className="checkbox checkbox--settings">
                     <span className="checkbox__marker"/>
-                    <input type="checkbox" className="checkbox__input" onChange={handleChange} checked={enabled}/>
+                    <input type="checkbox" className="checkbox__input" onChange={handleChange} checked={enabled} disabled={disabled}/>
                     <span className="checkbox__label">
                         <span className="checkbox__label-text">
                             <span className="checkbox__label-title">{ t(title) }</span>
@@ -35,6 +36,7 @@ Checkbox.propTypes = {
     subtitle: PropTypes.string.isRequired,
     enabled: PropTypes.bool.isRequired,
     handleChange: PropTypes.func.isRequired,
+    disabled: PropTypes.bool,
     t: PropTypes.func,
 };
 
@@ -211,9 +211,14 @@ export const FILTERED = 'Filtered';
 export const NOT_FILTERED = 'NotFiltered';
 
 export const DISABLED_STATS_INTERVAL = 0;
-export const STATS_INTERVALS_DAYS = [1, 7, 30, 90];
 
-export const QUERY_LOG_INTERVALS_DAYS = [0.25, 1, 7, 30, 90];
+export const HOUR = 60 * 60 * 1000;
+
+export const DAY = HOUR * 24;
+
+export const STATS_INTERVALS_DAYS = [DAY, DAY * 7, DAY * 30, DAY * 90];
+
+export const QUERY_LOG_INTERVALS_DAYS = [HOUR * 6, DAY, DAY * 7, DAY * 30, DAY * 90];
 
 export const FILTERS_INTERVALS_HOURS = [0, 1, 12, 24, 72, 168];
 
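The interval constants above switch from day counts to plain millisecond values, which is what the query log and statistics forms in this commit divide by `DAY` when rendering titles. A small illustration of the arithmetic:

```js
const HOUR = 60 * 60 * 1000; // 3600000, as defined above
const DAY = HOUR * 24;       // 86400000

console.log(6 * HOUR);        // 21600000 -> the "6 hours" query log option
console.log(DAY * 90);        // 7776000000 -> the "90 days" option
console.log((DAY * 7) / DAY); // 7 -> what getIntervalTitle() renders as "7 days"
```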
@@ -124,10 +124,11 @@ const dhcp = handleActions(
 staticLeases: [],
 }),

-[actions.toggleLeaseModal]: (state) => {
+[actions.toggleLeaseModal]: (state, { payload }) => {
 const newState = {
 ...state,
 isModalOpen: !state.isModalOpen,
+leaseModalConfig: payload,
 };
 return newState;
 },
@@ -200,6 +201,7 @@ const dhcp = handleActions(
 leases: [],
 staticLeases: [],
 isModalOpen: false,
+leaseModalConfig: undefined,
 dhcp_available: false,
 },
 );
@@ -22,11 +22,11 @@ const settings = handleActions(
 },
 [actions.toggleSettingStatus]: (state, { payload }) => {
 const { settingsList } = state;
-const { settingKey } = payload;
+const { settingKey, value } = payload;

 const setting = settingsList[settingKey];

-const newSetting = {
+const newSetting = value || {
 ...setting,
 enabled: !setting.enabled,
 };
@@ -25,7 +25,7 @@ const stats = handleActions(
 [actions.getStatsConfigFailure]: (state) => ({ ...state, processingGetConfig: false }),
 [actions.getStatsConfigSuccess]: (state, { payload }) => ({
 ...state,
-interval: payload.interval,
+...payload,
 processingGetConfig: false,
 }),

@@ -33,7 +33,7 @@ const stats = handleActions(
 [actions.setStatsConfigFailure]: (state) => ({ ...state, processingSetConfig: false }),
 [actions.setStatsConfigSuccess]: (state, { payload }) => ({
 ...state,
-interval: payload.interval,
+...payload,
 processingSetConfig: false,
 }),
@@ -1,47 +1,14 @@
 package aghnet

 import (
-"net"
-"strconv"
+"fmt"
+"net/netip"
+"strings"
+
+"github.com/AdguardTeam/golibs/errors"
+"github.com/AdguardTeam/golibs/stringutil"
 )

-// The maximum lengths of generated hostnames for different IP versions.
-const (
-ipv4HostnameMaxLen = len("192-168-100-100")
-ipv6HostnameMaxLen = len("ff80-f076-0000-0000-0000-0000-0000-0010")
-)
-
-// generateIPv4Hostname generates the hostname by IP address version 4.
-func generateIPv4Hostname(ipv4 net.IP) (hostname string) {
-hnData := make([]byte, 0, ipv4HostnameMaxLen)
-for i, part := range ipv4 {
-if i > 0 {
-hnData = append(hnData, '-')
-}
-hnData = strconv.AppendUint(hnData, uint64(part), 10)
-}
-
-return string(hnData)
-}
-
-// generateIPv6Hostname generates the hostname by IP address version 6.
-func generateIPv6Hostname(ipv6 net.IP) (hostname string) {
-hnData := make([]byte, 0, ipv6HostnameMaxLen)
-for i, partsNum := 0, net.IPv6len/2; i < partsNum; i++ {
-if i > 0 {
-hnData = append(hnData, '-')
-}
-for _, val := range ipv6[i*2 : i*2+2] {
-if val < 10 {
-hnData = append(hnData, '0')
-}
-hnData = strconv.AppendUint(hnData, uint64(val), 16)
-}
-}
-
-return string(hnData)
-}
-
 // GenerateHostname generates the hostname from ip. In case of using IPv4 the
 // result should be like:
 //
@@ -52,10 +19,42 @@ func generateIPv6Hostname(ipv6 net.IP) (hostname string) {
 // ff80-f076-0000-0000-0000-0000-0000-0010
 //
 // ip must be either an IPv4 or an IPv6.
-func GenerateHostname(ip net.IP) (hostname string) {
-if ipv4 := ip.To4(); ipv4 != nil {
-return generateIPv4Hostname(ipv4)
+func GenerateHostname(ip netip.Addr) (hostname string) {
+if !ip.IsValid() {
+// TODO(s.chzhen): Get rid of it.
+panic("aghnet generate hostname: invalid ip")
 }

-return generateIPv6Hostname(ip)
+ip = ip.Unmap()
+hostname = ip.StringExpanded()
+
+if ip.Is4() {
+return strings.Replace(hostname, ".", "-", -1)
+}
+
+return strings.Replace(hostname, ":", "-", -1)
+}
+
+// NewDomainNameSet returns nil and error, if list has duplicate or empty
+// domain name. Otherwise returns a set, which contains non-FQDN domain names,
+// and nil error.
+func NewDomainNameSet(list []string) (set *stringutil.Set, err error) {
+set = stringutil.NewSet()
+
+for i, v := range list {
+host := strings.ToLower(strings.TrimSuffix(v, "."))
+// TODO(a.garipov): Think about ignoring empty (".") names in the
+// future.
+if host == "" {
+return nil, errors.Error("host name is empty")
+}
+
+if set.Has(host) {
+return nil, fmt.Errorf("duplicate host name %q at index %d", host, i)
+}
+
+set.Add(host)
+}
+
+return set, nil
 }
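For reference, the hostname-generation technique introduced above can be exercised in isolation with the standard library alone: `netip.Addr.StringExpanded` plus a separator replacement yields the `192-168-100-100` and `ff80-f076-…` forms described in the doc comment. The following is a minimal standalone sketch of that approach, not a call into the AdGuard Home package itself; the `main` wrapper and sample addresses are illustrative only.

```go
package main

import (
	"fmt"
	"net/netip"
	"strings"
)

// hostnameFromAddr mirrors the approach of the new GenerateHostname: unmap
// 4-in-6 addresses, expand the textual form, and replace the separators with
// hyphens so the result is usable as a DNS label.
func hostnameFromAddr(ip netip.Addr) string {
	ip = ip.Unmap()
	s := ip.StringExpanded()
	if ip.Is4() {
		return strings.ReplaceAll(s, ".", "-")
	}

	return strings.ReplaceAll(s, ":", "-")
}

func main() {
	fmt.Println(hostnameFromAddr(netip.MustParseAddr("192.168.100.100"))) // 192-168-100-100
	fmt.Println(hostnameFromAddr(netip.MustParseAddr("::ffff:1.2.3.4")))  // 1-2-3-4
	fmt.Println(hostnameFromAddr(netip.MustParseAddr("fe00::1")))         // fe00-0000-0000-0000-0000-0000-0000-0001
}
```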
@@ -1,7 +1,7 @@
 package aghnet

 import (
-"net"
+"net/netip"
 "testing"

 "github.com/stretchr/testify/assert"
@@ -12,19 +12,19 @@ func TestGenerateHostName(t *testing.T) {
 testCases := []struct {
 name string
 want string
-ip net.IP
+ip netip.Addr
 }{{
 name: "good_ipv4",
 want: "127-0-0-1",
-ip: net.IP{127, 0, 0, 1},
+ip: netip.MustParseAddr("127.0.0.1"),
 }, {
 name: "good_ipv6",
 want: "fe00-0000-0000-0000-0000-0000-0000-0001",
-ip: net.ParseIP("fe00::1"),
+ip: netip.MustParseAddr("fe00::1"),
 }, {
 name: "4to6",
 want: "1-2-3-4",
-ip: net.ParseIP("::ffff:1.2.3.4"),
+ip: netip.MustParseAddr("::ffff:1.2.3.4"),
 }}

 for _, tc := range testCases {
@@ -36,29 +36,6 @@ func TestGenerateHostName(t *testing.T) {
 })

 t.Run("invalid", func(t *testing.T) {
-testCases := []struct {
-name string
-ip net.IP
-}{{
-name: "bad_ipv4",
-ip: net.IP{127, 0, 0, 1, 0},
-}, {
-name: "bad_ipv6",
-ip: net.IP{
-0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff,
-},
-}, {
-name: "nil",
-ip: nil,
-}}
-
-for _, tc := range testCases {
-t.Run(tc.name, func(t *testing.T) {
-assert.Panics(t, func() { GenerateHostname(tc.ip) })
-})
-}
+assert.Panics(t, func() { GenerateHostname(netip.Addr{}) })
 })
 }
@@ -6,6 +6,7 @@ import (
 "encoding/json"
 "fmt"
 "net"
+"net/netip"
 "os"
 "time"

@@ -32,6 +33,8 @@ func normalizeIP(ip net.IP) net.IP {
 }

 // Load lease table from DB
+//
+// TODO(s.chzhen): Decrease complexity.
 func (s *server) dbLoad() (err error) {
 dynLeases := []*Lease{}
 staticLeases := []*Lease{}
@@ -57,14 +60,15 @@ func (s *server) dbLoad() (err error) {
 for i := range obj {
 obj[i].IP = normalizeIP(obj[i].IP)

-if !(len(obj[i].IP) == 4 || len(obj[i].IP) == 16) {
+ip, ok := netip.AddrFromSlice(obj[i].IP)
+if !ok {
 log.Info("dhcp: invalid IP: %s", obj[i].IP)
 continue
 }

 lease := Lease{
 HWAddr: obj[i].HWAddr,
-IP: obj[i].IP,
+IP: ip,
 Hostname: obj[i].Hostname,
 Expiry: time.Unix(obj[i].Expiry, 0),
 }
@@ -145,7 +149,7 @@ func (s *server) dbStore() (err error) {

 lease := leaseJSON{
 HWAddr: l.HWAddr,
-IP: l.IP,
+IP: l.IP.AsSlice(),
 Hostname: l.Hostname,
 Expiry: l.Expiry.Unix(),
 }
@@ -162,7 +166,7 @@ func (s *server) dbStore() (err error) {

 lease := leaseJSON{
 HWAddr: l.HWAddr,
-IP: l.IP,
+IP: l.IP.AsSlice(),
 Hostname: l.Hostname,
 Expiry: l.Expiry.Unix(),
 }
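Since leases.db keeps the address as a raw byte slice, the load and store paths above convert in both directions with `AsSlice` and `AddrFromSlice`. A small hedged sketch of that round trip, using only the standard library; the sample address is arbitrary.

```go
package main

import (
	"fmt"
	"net/netip"
)

func main() {
	// Storing: a netip.Addr is flattened to the byte-slice form kept in the DB.
	addr := netip.MustParseAddr("192.168.10.100")
	raw := addr.AsSlice() // []byte{192, 168, 10, 100}

	// Loading: the slice is converted back; invalid slices are skipped.
	back, ok := netip.AddrFromSlice(raw)
	if !ok {
		fmt.Println("invalid IP in DB, skipping lease")

		return
	}

	fmt.Println(back == addr) // true
}
```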
@@ -41,13 +41,16 @@ type Lease struct {
 // of 1 means that this is a static lease.
 Expiry time.Time `json:"expires"`

+// Hostname of the client.
 Hostname string `json:"hostname"`

+// HWAddr is the physical hardware address (MAC address).
 HWAddr net.HardwareAddr `json:"mac"`

 // IP is the IP address leased to the client.
 //
-// TODO(a.garipov): Migrate leases.db and use netip.Addr.
-IP net.IP `json:"ip"`
+// TODO(a.garipov): Migrate leases.db.
+IP netip.Addr `json:"ip"`
 }

 // Clone returns a deep copy of l.
@@ -60,7 +63,7 @@ func (l *Lease) Clone() (clone *Lease) {
 Expiry: l.Expiry,
 Hostname: l.Hostname,
 HWAddr: slices.Clone(l.HWAddr),
-IP: slices.Clone(l.IP),
+IP: l.IP,
 }
 }
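The switch from `net.IP` to `netip.Addr` in the `Lease` struct is what lets the rest of the diff drop `slices.Clone` and `IP.Equal`: `netip.Addr` is an immutable, comparable value, so plain assignment copies it and `==` compares it. A brief standalone sketch of that property, with sample values only:

```go
package main

import (
	"fmt"
	"net/netip"
)

func main() {
	a := netip.MustParseAddr("192.168.10.150")
	b := a // value copy, no deep clone needed

	fmt.Println(a == b)                                     // true
	fmt.Println(a == netip.MustParseAddr("192.168.10.151")) // false

	// The zero value is "invalid", which is what the handlers below check
	// with IsValid instead of comparing against nil.
	var zero netip.Addr
	fmt.Println(zero.IsValid()) // false
}
```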
@@ -48,11 +48,11 @@ func TestDB(t *testing.T) {
 Expiry: time.Now().Add(time.Hour),
 Hostname: "static-1.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 100},
+IP: netip.MustParseAddr("192.168.10.100"),
 }, {
 Hostname: "static-2.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xBB},
-IP: net.IP{192, 168, 10, 101},
+IP: netip.MustParseAddr("192.168.10.101"),
 }}

 srv4, ok := s.srv4.(*v4Server)
@@ -96,7 +96,7 @@ func TestNormalizeLeases(t *testing.T) {

 staticLeases := []*Lease{{
 HWAddr: net.HardwareAddr{1, 2, 3, 4},
-IP: net.IP{0, 2, 3, 4},
+IP: netip.MustParseAddr("0.2.3.4"),
 }, {
 HWAddr: net.HardwareAddr{2, 2, 3, 4},
 }}
@@ -496,18 +496,18 @@ func (s *server) handleDHCPAddStaticLease(w http.ResponseWriter, r *http.Request
 return
 }

-if l.IP == nil {
+if !l.IP.IsValid() {
 aghhttp.Error(r, w, http.StatusBadRequest, "invalid IP")

 return
 }

+l.IP = l.IP.Unmap()
+
 var srv DHCPServer
-if ip4 := l.IP.To4(); ip4 != nil {
-l.IP = ip4
+if l.IP.Is4() {
 srv = s.srv4
 } else {
-l.IP = l.IP.To16()
 srv = s.srv6
 }

@@ -528,27 +528,22 @@ func (s *server) handleDHCPRemoveStaticLease(w http.ResponseWriter, r *http.Requ
 return
 }

-if l.IP == nil {
+if !l.IP.IsValid() {
 aghhttp.Error(r, w, http.StatusBadRequest, "invalid IP")

 return
 }

-ip4 := l.IP.To4()
+l.IP = l.IP.Unmap()

-if ip4 == nil {
-l.IP = l.IP.To16()
-
-err = s.srv6.RemoveStaticLease(l)
-if err != nil {
-aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)
-}
-
-return
-}
-
-l.IP = ip4
-err = s.srv4.RemoveStaticLease(l)
+var srv DHCPServer
+if l.IP.Is4() {
+srv = s.srv4
+} else {
+srv = s.srv6
+}
+
+err = srv.RemoveStaticLease(l)
 if err != nil {
 aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)
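The rewritten handlers above pick the v4 or v6 server with a single `Unmap` plus `Is4` check instead of the earlier `To4`/`To16` juggling. A hedged, standalone sketch of that dispatch pattern follows; the string results stand in for the real `DHCPServer` values.

```go
package main

import (
	"fmt"
	"net/netip"
)

// pickServer mirrors the dispatch in the lease handlers: reject invalid
// addresses, unmap 4-in-6 addresses, then branch on the address family.
func pickServer(ip netip.Addr) (srv string, ok bool) {
	if !ip.IsValid() {
		return "", false
	}

	ip = ip.Unmap()
	if ip.Is4() {
		return "srv4", true
	}

	return "srv6", true
}

func main() {
	fmt.Println(pickServer(netip.MustParseAddr("::ffff:192.168.10.10"))) // srv4 true
	fmt.Println(pickServer(netip.MustParseAddr("2001::1")))              // srv6 true
}
```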
@@ -0,0 +1,161 @@
+//go:build darwin || freebsd || linux || openbsd
+
+package dhcpd
+
+import (
+"bytes"
+"encoding/json"
+"net"
+"net/http"
+"net/http/httptest"
+"net/netip"
+"testing"
+"time"
+
+"github.com/stretchr/testify/assert"
+"github.com/stretchr/testify/require"
+)
+
+func TestServer_handleDHCPStatus(t *testing.T) {
+const staticName = "static-client"
+
+staticIP := netip.MustParseAddr("192.168.10.10")
+staticMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
+
+staticLease := &Lease{
+Expiry: time.Unix(leaseExpireStatic, 0),
+Hostname: staticName,
+HWAddr: staticMAC,
+IP: staticIP,
+}
+
+s, err := Create(&ServerConfig{
+Enabled: true,
+Conf4: *defaultV4ServerConf(),
+WorkDir: t.TempDir(),
+DBFilePath: dbFilename,
+ConfigModified: func() {},
+})
+require.NoError(t, err)
+
+// checkStatus is a helper that asserts the response of
+// [*server.handleDHCPStatus].
+checkStatus := func(t *testing.T, want *dhcpStatusResponse) {
+w := httptest.NewRecorder()
+var req *http.Request
+req, err = http.NewRequest(http.MethodGet, "", nil)
+require.NoError(t, err)
+
+b := &bytes.Buffer{}
+err = json.NewEncoder(b).Encode(&want)
+require.NoError(t, err)
+
+s.handleDHCPStatus(w, req)
+assert.Equal(t, http.StatusOK, w.Code)
+
+assert.JSONEq(t, b.String(), w.Body.String())
+}
+
+// defaultResponse is a helper that returs the response with default
+// configuration.
+defaultResponse := func() *dhcpStatusResponse {
+conf4 := defaultV4ServerConf()
+conf4.LeaseDuration = 86400
+
+resp := &dhcpStatusResponse{
+V4: *conf4,
+V6: V6ServerConf{},
+Leases: []*Lease{},
+StaticLeases: []*Lease{},
+Enabled: true,
+}
+
+return resp
+}
+
+ok := t.Run("status", func(t *testing.T) {
+resp := defaultResponse()
+
+checkStatus(t, resp)
+})
+require.True(t, ok)
+
+ok = t.Run("add_static_lease", func(t *testing.T) {
+w := httptest.NewRecorder()
+
+b := &bytes.Buffer{}
+err = json.NewEncoder(b).Encode(staticLease)
+require.NoError(t, err)
+
+var r *http.Request
+r, err = http.NewRequest(http.MethodPost, "", b)
+require.NoError(t, err)
+
+s.handleDHCPAddStaticLease(w, r)
+assert.Equal(t, http.StatusOK, w.Code)
+
+resp := defaultResponse()
+resp.StaticLeases = []*Lease{staticLease}
+
+checkStatus(t, resp)
+})
+require.True(t, ok)
+
+ok = t.Run("add_invalid_lease", func(t *testing.T) {
+w := httptest.NewRecorder()
+
+b := &bytes.Buffer{}
+
+err = json.NewEncoder(b).Encode(&Lease{})
+require.NoError(t, err)
+
+var r *http.Request
+r, err = http.NewRequest(http.MethodPost, "", b)
+require.NoError(t, err)
+
+s.handleDHCPAddStaticLease(w, r)
+assert.Equal(t, http.StatusBadRequest, w.Code)
+})
+require.True(t, ok)
+
+ok = t.Run("remove_static_lease", func(t *testing.T) {
+w := httptest.NewRecorder()
+
+b := &bytes.Buffer{}
+err = json.NewEncoder(b).Encode(staticLease)
+require.NoError(t, err)
+
+var r *http.Request
+r, err = http.NewRequest(http.MethodPost, "", b)
+require.NoError(t, err)
+
+s.handleDHCPRemoveStaticLease(w, r)
+assert.Equal(t, http.StatusOK, w.Code)
+
+resp := defaultResponse()
+
+checkStatus(t, resp)
+})
+require.True(t, ok)
+
+ok = t.Run("set_config", func(t *testing.T) {
+w := httptest.NewRecorder()
+
+resp := defaultResponse()
+resp.Enabled = false
+
+b := &bytes.Buffer{}
+err = json.NewEncoder(b).Encode(&resp)
+require.NoError(t, err)
+
+var r *http.Request
+r, err = http.NewRequest(http.MethodPost, "", b)
+require.NoError(t, err)
+
+s.handleDHCPSetConfig(w, r)
+assert.Equal(t, http.StatusOK, w.Code)
+
+checkStatus(t, resp)
+})
+require.True(t, ok)
+}
@@ -16,6 +16,8 @@ import (
 //
 // TODO(a.garipov): Perhaps create an optimized version with uint32 for IPv4
 // ranges? Or use one of uint128 packages?
+//
+// TODO(e.burkov): Use netip.Addr.
 type ipRange struct {
 start *big.Int
 end *big.Int
@@ -27,8 +29,6 @@ const maxRangeLen = math.MaxUint32

 // newIPRange creates a new IP address range. start must be less than end. The
 // resulting range must not be greater than maxRangeLen.
-//
-// TODO(e.burkov): Use netip.Addr.
 func newIPRange(start, end net.IP) (r *ipRange, err error) {
 defer func() { err = errors.Annotate(err, "invalid ip range: %w") }()
@@ -96,7 +96,7 @@ func normalizeHostname(hostname string) (norm string, err error) {
 // validHostnameForClient accepts the hostname sent by the client and its IP and
 // returns either a normalized version of that hostname, or a new hostname
 // generated from the IP address, or an empty string.
-func (s *v4Server) validHostnameForClient(cliHostname string, ip net.IP) (hostname string) {
+func (s *v4Server) validHostnameForClient(cliHostname string, ip netip.Addr) (hostname string) {
 hostname, err := normalizeHostname(cliHostname)
 if err != nil {
 log.Info("dhcpv4: %s", err)
@@ -209,9 +209,8 @@ func (s *v4Server) FindMACbyIP(ip netip.Addr) (mac net.HardwareAddr) {
 return nil
 }

-netIP := ip.AsSlice()
 for _, l := range s.leases {
-if l.IP.Equal(netIP) {
+if l.IP == ip {
 if l.Expiry.After(now) || l.IsStatic() {
 return l.HWAddr
 }
@@ -245,7 +244,8 @@ func (s *v4Server) rmLeaseByIndex(i int) {
 s.leases = append(s.leases[:i], s.leases[i+1:]...)

 r := s.conf.ipRange
-offset, ok := r.offset(l.IP)
+leaseIP := net.IP(l.IP.AsSlice())
+offset, ok := r.offset(leaseIP)
 if ok {
 s.leasedOffsets.set(offset, false)
 }
@@ -261,7 +261,7 @@ func (s *v4Server) rmDynamicLease(lease *Lease) (err error) {
 for i, l := range s.leases {
 isStatic := l.IsStatic()

-if bytes.Equal(l.HWAddr, lease.HWAddr) || l.IP.Equal(lease.IP) {
+if bytes.Equal(l.HWAddr, lease.HWAddr) || l.IP == lease.IP {
 if isStatic {
 return errors.Error("static lease already exists")
 }
@@ -289,13 +289,13 @@ const ErrDupHostname = errors.Error("hostname is not unique")
 // addLease adds a dynamic or static lease.
 func (s *v4Server) addLease(l *Lease) (err error) {
 r := s.conf.ipRange
-offset, inOffset := r.offset(l.IP)
+leaseIP := net.IP(l.IP.AsSlice())
+offset, inOffset := r.offset(leaseIP)

 if l.IsStatic() {
 // TODO(a.garipov, d.seregin): Subnet can be nil when dhcp server is
 // disabled.
-addr := netip.AddrFrom4(*(*[4]byte)(l.IP.To4()))
-if sn := s.conf.subnet; !sn.Contains(addr) {
+if sn := s.conf.subnet; !sn.Contains(l.IP) {
 return fmt.Errorf("subnet %s does not contain the ip %q", sn, l.IP)
 }
 } else if !inOffset {
@@ -323,7 +323,7 @@ func (s *v4Server) rmLease(lease *Lease) (err error) {
 }

 for i, l := range s.leases {
-if l.IP.Equal(lease.IP) {
+if l.IP == lease.IP {
 if !bytes.Equal(l.HWAddr, lease.HWAddr) || l.Hostname != lease.Hostname {
 return fmt.Errorf("lease for ip %s is different: %+v", lease.IP, l)
 }
@@ -350,10 +350,11 @@ func (s *v4Server) AddStaticLease(l *Lease) (err error) {
 return ErrUnconfigured
 }

-ip := l.IP.To4()
-if ip == nil {
+l.IP = l.IP.Unmap()
+
+if !l.IP.Is4() {
 return fmt.Errorf("invalid ip %q, only ipv4 is supported", l.IP)
-} else if gwIP := s.conf.GatewayIP; gwIP == netip.AddrFrom4(*(*[4]byte)(ip)) {
+} else if gwIP := s.conf.GatewayIP; gwIP == l.IP {
 return fmt.Errorf("can't assign the gateway IP %s to the lease", gwIP)
 }

@@ -394,7 +395,7 @@ func (s *v4Server) AddStaticLease(l *Lease) (err error) {
 if err != nil {
 err = fmt.Errorf(
 "removing dynamic leases for %s (%s): %w",
-ip,
+l.IP,
 l.HWAddr,
 err,
 )
@@ -404,7 +405,7 @@ func (s *v4Server) AddStaticLease(l *Lease) (err error) {

 err = s.addLease(l)
 if err != nil {
-err = fmt.Errorf("adding static lease for %s (%s): %w", ip, l.HWAddr, err)
+err = fmt.Errorf("adding static lease for %s (%s): %w", l.IP, l.HWAddr, err)

 return
 }
@@ -427,7 +428,7 @@ func (s *v4Server) RemoveStaticLease(l *Lease) (err error) {
 return ErrUnconfigured
 }

-if len(l.IP) != 4 {
+if !l.IP.Is4() {
 return fmt.Errorf("invalid IP")
 }

@@ -540,8 +541,8 @@ func (s *v4Server) findExpiredLease() int {
 func (s *v4Server) reserveLease(mac net.HardwareAddr) (l *Lease, err error) {
 l = &Lease{HWAddr: slices.Clone(mac)}

-l.IP = s.nextIP()
-if l.IP == nil {
+nextIP := s.nextIP()
+if nextIP == nil {
 i := s.findExpiredLease()
 if i < 0 {
 return nil, nil
@@ -552,6 +553,13 @@ func (s *v4Server) reserveLease(mac net.HardwareAddr) (l *Lease, err error) {
 return s.leases[i], nil
 }

+netIP, ok := netip.AddrFromSlice(nextIP)
+if !ok {
+return nil, errors.Error("invalid ip")
+}
+
+l.IP = netIP
+
 err = s.addLease(l)
 if err != nil {
 return nil, err
@@ -601,7 +609,8 @@ func (s *v4Server) allocateLease(mac net.HardwareAddr) (l *Lease, err error) {
 return nil, nil
 }

-if s.addrAvailable(l.IP) {
+leaseIP := l.IP.AsSlice()
+if s.addrAvailable(leaseIP) {
 return l, nil
 }

@@ -621,8 +630,9 @@ func (s *v4Server) handleDiscover(req, resp *dhcpv4.DHCPv4) (l *Lease, err error
 l = s.findLease(mac)
 if l != nil {
 reqIP := req.RequestedIPAddress()
-if len(reqIP) != 0 && !reqIP.Equal(l.IP) {
-log.Debug("dhcpv4: different RequestedIP: %s != %s", reqIP, l.IP)
+leaseIP := net.IP(l.IP.AsSlice())
+if len(reqIP) != 0 && !reqIP.Equal(leaseIP) {
+log.Debug("dhcpv4: different RequestedIP: %s != %s", reqIP, leaseIP)
 }

 resp.UpdateOption(dhcpv4.OptMessageType(dhcpv4.MessageTypeOffer))
@@ -672,12 +682,19 @@ func (s *v4Server) checkLease(mac net.HardwareAddr, ip net.IP) (lease *Lease, mi
 s.leasesLock.Lock()
 defer s.leasesLock.Unlock()

+netIP, ok := netip.AddrFromSlice(ip)
+if !ok {
+log.Info("check lease: invalid IP: %s", ip)
+
+return nil, false
+}
+
 for _, l := range s.leases {
 if !bytes.Equal(l.HWAddr, mac) {
 continue
 }

-if l.IP.Equal(ip) {
+if l.IP == netIP {
 return l, false
 }
@@ -876,9 +893,16 @@ func (s *v4Server) handleDecline(req, resp *dhcpv4.DHCPv4) (err error) {
 reqIP = req.ClientIPAddr
 }

+netIP, ok := netip.AddrFromSlice(reqIP)
+if !ok {
+log.Info("dhcpv4: invalid IP: %s", reqIP)
+
+return nil
+}
+
 var oldLease *Lease
 for _, l := range s.leases {
-if bytes.Equal(l.HWAddr, mac) && l.IP.Equal(reqIP) {
+if bytes.Equal(l.HWAddr, mac) && l.IP == netIP {
 oldLease = l

 break
@@ -918,8 +942,7 @@ func (s *v4Server) handleDecline(req, resp *dhcpv4.DHCPv4) (err error) {

 log.Info("dhcpv4: changed ip from %s to %s for %s", reqIP, newLease.IP, mac)

-resp.YourIPAddr = make([]byte, 4)
-copy(resp.YourIPAddr, newLease.IP)
+resp.YourIPAddr = net.IP(newLease.IP.AsSlice())

 resp.UpdateOption(dhcpv4.OptMessageType(dhcpv4.MessageTypeAck))

@@ -942,8 +965,15 @@ func (s *v4Server) handleRelease(req, resp *dhcpv4.DHCPv4) (err error) {
 s.leasesLock.Lock()
 defer s.leasesLock.Unlock()

+netIP, ok := netip.AddrFromSlice(reqIP)
+if !ok {
+log.Info("dhcpv4: invalid IP: %s", reqIP)
+
+return nil
+}
+
 for _, l := range s.leases {
-if !bytes.Equal(l.HWAddr, mac) || !l.IP.Equal(reqIP) {
+if !bytes.Equal(l.HWAddr, mac) || l.IP != netIP {
 continue
 }

@@ -1016,7 +1046,7 @@ func (s *v4Server) handle(req, resp *dhcpv4.DHCPv4) int {
 }

 if l != nil {
-resp.YourIPAddr = slices.Clone(l.IP)
+resp.YourIPAddr = net.IP(l.IP.AsSlice())
 }

 s.updateOptions(req, resp)
@@ -60,7 +60,7 @@ func TestV4Server_leasing(t *testing.T) {
 anotherName = "another-client"
 )

-staticIP := net.IP{192, 168, 10, 10}
+staticIP := netip.MustParseAddr("192.168.10.10")
 anotherIP := DefaultRangeStart
 staticMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
 anotherMAC := net.HardwareAddr{0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB}
@@ -81,7 +81,7 @@ func TestV4Server_leasing(t *testing.T) {
 Expiry: time.Unix(leaseExpireStatic, 0),
 Hostname: staticName,
 HWAddr: anotherMAC,
-IP: anotherIP.AsSlice(),
+IP: anotherIP,
 })
 assert.ErrorIs(t, err, ErrDupHostname)
 })
@@ -95,7 +95,7 @@ func TestV4Server_leasing(t *testing.T) {
 Expiry: time.Unix(leaseExpireStatic, 0),
 Hostname: anotherName,
 HWAddr: staticMAC,
-IP: anotherIP.AsSlice(),
+IP: anotherIP,
 })
 testutil.AssertErrorMsg(t, wantErrMsg, err)
 })
@@ -122,13 +122,14 @@ func TestV4Server_leasing(t *testing.T) {
 discoverAnOffer := func(
 t *testing.T,
 name string,
-ip net.IP,
+netIP netip.Addr,
 mac net.HardwareAddr,
 ) (resp *dhcpv4.DHCPv4) {
 testutil.CleanupAndRequireSuccess(t, func() (err error) {
 return s.ResetLeases(s.GetLeases(LeasesStatic))
 })

+ip := net.IP(netIP.AsSlice())
 req, err := dhcpv4.NewDiscovery(
 mac,
 dhcpv4.WithOption(dhcpv4.OptHostName(name)),
@@ -149,7 +150,7 @@ func TestV4Server_leasing(t *testing.T) {
 }

 t.Run("same_name", func(t *testing.T) {
-resp := discoverAnOffer(t, staticName, anotherIP.AsSlice(), anotherMAC)
+resp := discoverAnOffer(t, staticName, anotherIP, anotherMAC)

 req, err := dhcpv4.NewRequestFromOffer(resp, dhcpv4.WithOption(
 dhcpv4.OptHostName(staticName),
@@ -159,11 +160,15 @@ func TestV4Server_leasing(t *testing.T) {
 res := s4.handle(req, resp)
 require.Positive(t, res)

-assert.Equal(t, aghnet.GenerateHostname(resp.YourIPAddr), resp.HostName())
+var netIP netip.Addr
+netIP, ok = netip.AddrFromSlice(resp.YourIPAddr)
+require.True(t, ok)
+
+assert.Equal(t, aghnet.GenerateHostname(netIP), resp.HostName())
 })

 t.Run("same_mac", func(t *testing.T) {
-resp := discoverAnOffer(t, anotherName, anotherIP.AsSlice(), staticMAC)
+resp := discoverAnOffer(t, anotherName, anotherIP, staticMAC)

 req, err := dhcpv4.NewRequestFromOffer(resp, dhcpv4.WithOption(
 dhcpv4.OptHostName(anotherName),
@@ -177,7 +182,8 @@ func TestV4Server_leasing(t *testing.T) {
 require.Len(t, fqdnOptData, 3+len(staticName))
 assert.Equal(t, []uint8(staticName), fqdnOptData[3:])

-assert.Equal(t, staticIP, resp.YourIPAddr)
+ip := net.IP(staticIP.AsSlice())
+assert.Equal(t, ip, resp.YourIPAddr)
 })

 t.Run("same_ip", func(t *testing.T) {
@@ -210,7 +216,7 @@ func TestV4Server_AddRemove_static(t *testing.T) {
 lease: &Lease{
 Hostname: "success.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 },
 name: "success",
 wantErrMsg: "",
@@ -218,7 +224,7 @@ func TestV4Server_AddRemove_static(t *testing.T) {
 lease: &Lease{
 Hostname: "probably-router.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: DefaultGatewayIP.AsSlice(),
+IP: DefaultGatewayIP,
 },
 name: "with_gateway_ip",
 wantErrMsg: "dhcpv4: adding static lease: " +
@@ -227,7 +233,7 @@ func TestV4Server_AddRemove_static(t *testing.T) {
 lease: &Lease{
 Hostname: "ip6.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.ParseIP("ffff::1"),
+IP: netip.MustParseAddr("ffff::1"),
 },
 name: "ipv6",
 wantErrMsg: `dhcpv4: adding static lease: ` +
@@ -236,7 +242,7 @@ func TestV4Server_AddRemove_static(t *testing.T) {
 lease: &Lease{
 Hostname: "bad-mac.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 },
 name: "bad_mac",
 wantErrMsg: `dhcpv4: adding static lease: bad mac address "aa:aa": ` +
@@ -245,7 +251,7 @@ func TestV4Server_AddRemove_static(t *testing.T) {
 lease: &Lease{
 Hostname: "bad-lbl-.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 },
 name: "bad_hostname",
 wantErrMsg: `dhcpv4: adding static lease: validating hostname: ` +
@@ -287,11 +293,11 @@ func TestV4_AddReplace(t *testing.T) {
 dynLeases := []Lease{{
 Hostname: "dynamic-1.local",
 HWAddr: net.HardwareAddr{0x11, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 }, {
 Hostname: "dynamic-2.local",
 HWAddr: net.HardwareAddr{0x22, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 151},
+IP: netip.MustParseAddr("192.168.10.151"),
 }}

 for i := range dynLeases {
@@ -302,11 +308,11 @@ func TestV4_AddReplace(t *testing.T) {
 stLeases := []*Lease{{
 Hostname: "static-1.local",
 HWAddr: net.HardwareAddr{0x33, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 }, {
 Hostname: "static-2.local",
 HWAddr: net.HardwareAddr{0x22, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 152},
+IP: netip.MustParseAddr("192.168.10.152"),
 }}

 for _, l := range stLeases {
@@ -318,7 +324,7 @@ func TestV4_AddReplace(t *testing.T) {
 require.Len(t, ls, 2)

 for i, l := range ls {
-assert.True(t, stLeases[i].IP.Equal(l.IP))
+assert.Equal(t, stLeases[i].IP, l.IP)
 assert.Equal(t, stLeases[i].HWAddr, l.HWAddr)
 assert.True(t, l.IsStatic())
 }
@@ -511,7 +517,7 @@ func TestV4StaticLease_Get(t *testing.T) {
 l := &Lease{
 Hostname: "static-1.local",
 HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
-IP: net.IP{192, 168, 10, 150},
+IP: netip.MustParseAddr("192.168.10.150"),
 }
 err := s.AddStaticLease(l)
 require.NoError(t, err)
@@ -537,7 +543,9 @@ func TestV4StaticLease_Get(t *testing.T) {
 t.Run("offer", func(t *testing.T) {
 assert.Equal(t, dhcpv4.MessageTypeOffer, resp.MessageType())
 assert.Equal(t, mac, resp.ClientHWAddr)
-assert.True(t, l.IP.Equal(resp.YourIPAddr))
+
+ip := net.IP(l.IP.AsSlice())
+assert.True(t, ip.Equal(resp.YourIPAddr))

 assert.True(t, resp.Router()[0].Equal(s.conf.GatewayIP.AsSlice()))
 assert.True(t, resp.ServerIdentifier().Equal(s.conf.GatewayIP.AsSlice()))
@@ -562,7 +570,9 @@ func TestV4StaticLease_Get(t *testing.T) {
 t.Run("ack", func(t *testing.T) {
 assert.Equal(t, dhcpv4.MessageTypeAck, resp.MessageType())
 assert.Equal(t, mac, resp.ClientHWAddr)
-assert.True(t, l.IP.Equal(resp.YourIPAddr))
+
+ip := net.IP(l.IP.AsSlice())
+assert.True(t, ip.Equal(resp.YourIPAddr))

 assert.True(t, resp.Router()[0].Equal(s.conf.GatewayIP.AsSlice()))
 assert.True(t, resp.ServerIdentifier().Equal(s.conf.GatewayIP.AsSlice()))
@@ -581,7 +591,7 @@ func TestV4StaticLease_Get(t *testing.T) {
 ls := s.GetLeases(LeasesStatic)
 require.Len(t, ls, 1)

-assert.True(t, l.IP.Equal(ls[0].IP))
+assert.Equal(t, l.IP, ls[0].IP)
 assert.Equal(t, mac, ls[0].HWAddr)
 })
 }
@@ -679,7 +689,8 @@ func TestV4DynamicLease_Get(t *testing.T) {
 ls := s.GetLeases(LeasesDynamic)
 require.Len(t, ls, 1)

-assert.True(t, net.IP{192, 168, 10, 100}.Equal(ls[0].IP))
+ip := netip.MustParseAddr("192.168.10.100")
+assert.Equal(t, ip, ls[0].IP)
 assert.Equal(t, mac, ls[0].HWAddr)
 })
 }
@@ -860,3 +871,143 @@ func TestV4Server_Send(t *testing.T) {
 assert.True(t, resp.IsBroadcast())
 })
 }
+
+func TestV4Server_FindMACbyIP(t *testing.T) {
+const (
+staticName = "static-client"
+anotherName = "another-client"
+)
+
+staticIP := netip.MustParseAddr("192.168.10.10")
+staticMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
+
+anotherIP := netip.MustParseAddr("192.168.100.100")
+anotherMAC := net.HardwareAddr{0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB}
+
+s := &v4Server{
+leases: []*Lease{{
+Expiry: time.Unix(leaseExpireStatic, 0),
+Hostname: staticName,
+HWAddr: staticMAC,
+IP: staticIP,
+}, {
+Expiry: time.Unix(10, 0),
+Hostname: anotherName,
+HWAddr: anotherMAC,
+IP: anotherIP,
+}},
+}
+
+testCases := []struct {
+want net.HardwareAddr
+ip netip.Addr
+name string
+}{{
+name: "basic",
+ip: staticIP,
+want: staticMAC,
+}, {
+name: "not_found",
+ip: netip.MustParseAddr("1.2.3.4"),
+want: nil,
+}, {
+name: "expired",
+ip: anotherIP,
+want: nil,
+}, {
+name: "v6",
+ip: netip.MustParseAddr("ffff::1"),
+want: nil,
+}}
+
+for _, tc := range testCases {
+t.Run(tc.name, func(t *testing.T) {
+mac := s.FindMACbyIP(tc.ip)
+
+require.Equal(t, tc.want, mac)
+})
+}
+}
+
+func TestV4Server_handleDecline(t *testing.T) {
+const (
+dynamicName = "dynamic-client"
+anotherName = "another-client"
+)
+
+dynamicIP := netip.MustParseAddr("192.168.10.200")
+dynamicMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
+
+s := defaultSrv(t)
+
+s4, ok := s.(*v4Server)
+require.True(t, ok)
+
+s4.leases = []*Lease{{
+Hostname: dynamicName,
+HWAddr: dynamicMAC,
+IP: dynamicIP,
+}}
+
+req, err := dhcpv4.New(
+dhcpv4.WithOption(dhcpv4.OptRequestedIPAddress(net.IP(dynamicIP.AsSlice()))),
+)
+require.NoError(t, err)
+
+req.ClientIPAddr = net.IP(dynamicIP.AsSlice())
+req.ClientHWAddr = dynamicMAC
+
+resp := &dhcpv4.DHCPv4{}
+err = s4.handleDecline(req, resp)
+require.NoError(t, err)
+
+wantResp := &dhcpv4.DHCPv4{
+YourIPAddr: net.IP(s4.conf.RangeStart.AsSlice()),
+Options: dhcpv4.OptionsFromList(
+dhcpv4.OptMessageType(dhcpv4.MessageTypeAck),
+),
+}
+
+require.Equal(t, wantResp, resp)
+}
+
+func TestV4Server_handleRelease(t *testing.T) {
+const (
+dynamicName = "dymamic-client"
+anotherName = "another-client"
+)
+
+dynamicIP := netip.MustParseAddr("192.168.10.200")
+dynamicMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
+
+s := defaultSrv(t)
+
+s4, ok := s.(*v4Server)
+require.True(t, ok)
+
+s4.leases = []*Lease{{
+Hostname: dynamicName,
+HWAddr: dynamicMAC,
+IP: dynamicIP,
+}}
+
+req, err := dhcpv4.New(
+dhcpv4.WithOption(dhcpv4.OptRequestedIPAddress(net.IP(dynamicIP.AsSlice()))),
+)
+require.NoError(t, err)
+
+req.ClientIPAddr = net.IP(dynamicIP.AsSlice())
+req.ClientHWAddr = dynamicMAC
+
+resp := &dhcpv4.DHCPv4{}
+err = s4.handleRelease(req, resp)
+require.NoError(t, err)
+
+wantResp := &dhcpv4.DHCPv4{
+Options: dhcpv4.OptionsFromList(
+dhcpv4.OptMessageType(dhcpv4.MessageTypeAck),
+),
+}
+
+require.Equal(t, wantResp, resp)
+}
@@ -61,13 +61,13 @@ func ip6InRange(start, ip net.IP) bool {

 // ResetLeases resets leases.
 func (s *v6Server) ResetLeases(leases []*Lease) (err error) {
-defer func() { err = errors.Annotate(err, "dhcpv4: %w") }()
+defer func() { err = errors.Annotate(err, "dhcpv6: %w") }()

 s.leases = nil
 for _, l := range leases {
+ip := net.IP(l.IP.AsSlice())
 if l.Expiry.Unix() != leaseExpireStatic &&
-!ip6InRange(s.conf.ipStart, l.IP) {
+!ip6InRange(s.conf.ipStart, ip) {

 log.Debug("dhcpv6: skipping a lease with IP %v: not within current IP range", l.IP)

@@ -119,9 +119,8 @@ func (s *v6Server) FindMACbyIP(ip netip.Addr) (mac net.HardwareAddr) {
 return nil
 }

-netIP := ip.AsSlice()
 for _, l := range s.leases {
-if l.IP.Equal(netIP) {
+if l.IP == ip {
 if l.Expiry.After(now) || l.IsStatic() {
 return l.HWAddr
 }
@@ -133,7 +132,8 @@ func (s *v6Server) FindMACbyIP(ip netip.Addr) (mac net.HardwareAddr) {

 // Remove (swap) lease by index
 func (s *v6Server) leaseRemoveSwapByIndex(i int) {
-s.ipAddrs[s.leases[i].IP[15]] = 0
+leaseIP := s.leases[i].IP.As16()
+s.ipAddrs[leaseIP[15]] = 0
 log.Debug("dhcpv6: removed lease %s", s.leases[i].HWAddr)

 n := len(s.leases)
@@ -162,7 +162,7 @@ func (s *v6Server) rmDynamicLease(lease *Lease) (err error) {
 l = s.leases[i]
 }

-if net.IP.Equal(l.IP, lease.IP) {
+if l.IP == lease.IP {
 if l.Expiry.Unix() == leaseExpireStatic {
 return fmt.Errorf("static lease already exists")
 }
@@ -178,7 +178,7 @@ func (s *v6Server) rmDynamicLease(lease *Lease) (err error) {
 func (s *v6Server) AddStaticLease(l *Lease) (err error) {
 defer func() { err = errors.Annotate(err, "dhcpv6: %w") }()

-if len(l.IP) != net.IPv6len {
+if !l.IP.Is6() {
 return fmt.Errorf("invalid IP")
 }

@@ -210,7 +210,7 @@ func (s *v6Server) AddStaticLease(l *Lease) (err error) {
 func (s *v6Server) RemoveStaticLease(l *Lease) (err error) {
 defer func() { err = errors.Annotate(err, "dhcpv6: %w") }()

-if len(l.IP) != 16 {
+if !l.IP.Is6() {
 return fmt.Errorf("invalid IP")
 }

@@ -234,14 +234,15 @@ func (s *v6Server) RemoveStaticLease(l *Lease) (err error) {
 // Add a lease
 func (s *v6Server) addLease(l *Lease) {
 s.leases = append(s.leases, l)
-s.ipAddrs[l.IP[15]] = 1
+ip := l.IP.As16()
+s.ipAddrs[ip[15]] = 1
 log.Debug("dhcpv6: added lease %s <-> %s", l.IP, l.HWAddr)
 }

 // Remove a lease with the same properties
 func (s *v6Server) rmLease(lease *Lease) (err error) {
 for i, l := range s.leases {
-if net.IP.Equal(l.IP, lease.IP) {
+if l.IP == lease.IP {
 if !bytes.Equal(l.HWAddr, lease.HWAddr) ||
 l.Hostname != lease.Hostname {
 return fmt.Errorf("lease not found")
@@ -308,18 +309,27 @@ func (s *v6Server) reserveLease(mac net.HardwareAddr) *Lease {
 s.leasesLock.Lock()
 defer s.leasesLock.Unlock()

-copy(l.IP, s.conf.ipStart)
-l.IP = s.findFreeIP()
-if l.IP == nil {
+ip := s.findFreeIP()
+if ip == nil {
 i := s.findExpiredLease()
 if i < 0 {
 return nil
 }

 copy(s.leases[i].HWAddr, mac)

 return s.leases[i]
 }

+netIP, ok := netip.AddrFromSlice(ip)
+if !ok {
+return nil
+}
+
+l.IP = netIP
+
 s.addLease(&l)

 return &l
 }

@@ -388,7 +398,8 @@ func (s *v6Server) checkIA(msg *dhcpv6.Message, lease *Lease) error {
 return fmt.Errorf("no IANA.Addr option in %s", msg.Type().String())
 }

-if !oiaAddr.IPv6Addr.Equal(lease.IP) {
+leaseIP := net.IP(lease.IP.AsSlice())
+if !oiaAddr.IPv6Addr.Equal(leaseIP) {
 return fmt.Errorf("invalid IANA.Addr option in %s", msg.Type().String())
 }
 }
@@ -475,7 +486,7 @@ func (s *v6Server) process(msg *dhcpv6.Message, req, resp dhcpv6.DHCPv6) bool {
 copy(oia.IaId[:], []byte(valueIAID))
 }
 oiaAddr := &dhcpv6.OptIAAddress{
-IPv6Addr: lease.IP,
+IPv6Addr: net.IP(lease.IP.AsSlice()),
 PreferredLifetime: lifetime,
 ValidLifetime: lifetime,
 }
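In the v6 server above, the per-address bookkeeping indexes the last byte of the lease IP; with `netip.Addr` that is done through `As16`, which returns a fixed-size array. A small standalone sketch of that detail; the sample address is arbitrary.

```go
package main

import (
	"fmt"
	"net/netip"
)

func main() {
	// As16 yields a [16]byte array, so the last byte can be indexed directly
	// without converting back to a slice.
	addr := netip.MustParseAddr("2001::1f")
	a16 := addr.As16()
	fmt.Println(a16[15]) // 31
}
```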
@@ -4,7 +4,9 @@ package dhcpd
 import (
 	"net"
+	"net/netip"
 	"testing"
+	"time"
 
 	"github.com/insomniacslk/dhcp/dhcpv6"
 	"github.com/insomniacslk/dhcp/iana"

@@ -27,7 +29,7 @@ func TestV6_AddRemove_static(t *testing.T) {
 
 	// Add static lease.
 	l := &Lease{
-		IP:     net.ParseIP("2001::1"),
+		IP:     netip.MustParseAddr("2001::1"),
 		HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}
 	err = s.AddStaticLease(l)

@@ -46,7 +48,7 @@ func TestV6_AddRemove_static(t *testing.T) {
 
 	// Try to remove non-existent static lease.
 	err = s.RemoveStaticLease(&Lease{
-		IP:     net.ParseIP("2001::2"),
+		IP:     netip.MustParseAddr("2001::2"),
 		HWAddr: l.HWAddr,
 	})
 	require.Error(t, err)

@@ -71,10 +73,10 @@ func TestV6_AddReplace(t *testing.T) {
 
 	// Add dynamic leases.
 	dynLeases := []*Lease{{
-		IP:     net.ParseIP("2001::1"),
+		IP:     netip.MustParseAddr("2001::1"),
 		HWAddr: net.HardwareAddr{0x11, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}, {
-		IP:     net.ParseIP("2001::2"),
+		IP:     netip.MustParseAddr("2001::2"),
 		HWAddr: net.HardwareAddr{0x22, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}}
 

@@ -83,10 +85,10 @@ func TestV6_AddReplace(t *testing.T) {
 	}
 
 	stLeases := []*Lease{{
-		IP:     net.ParseIP("2001::1"),
+		IP:     netip.MustParseAddr("2001::1"),
 		HWAddr: net.HardwareAddr{0x33, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}, {
-		IP:     net.ParseIP("2001::3"),
+		IP:     netip.MustParseAddr("2001::3"),
 		HWAddr: net.HardwareAddr{0x22, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}}
 

@@ -99,7 +101,7 @@ func TestV6_AddReplace(t *testing.T) {
 	require.Len(t, ls, 2)
 
 	for i, l := range ls {
-		assert.True(t, stLeases[i].IP.Equal(l.IP))
+		assert.Equal(t, stLeases[i].IP, l.IP)
 		assert.Equal(t, stLeases[i].HWAddr, l.HWAddr)
 		assert.EqualValues(t, leaseExpireStatic, l.Expiry.Unix())
 	}

@@ -126,7 +128,7 @@ func TestV6GetLease(t *testing.T) {
 	}
 
 	l := &Lease{
-		IP:     net.ParseIP("2001::1"),
+		IP:     netip.MustParseAddr("2001::1"),
 		HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 	}
 	err = s.AddStaticLease(l)

@@ -158,7 +160,8 @@ func TestV6GetLease(t *testing.T) {
 		oia = resp.Options.OneIANA()
 		oiaAddr = oia.Options.OneAddress()
 
-		assert.Equal(t, l.IP, oiaAddr.IPv6Addr)
+		ip := net.IP(l.IP.AsSlice())
+		assert.Equal(t, ip, oiaAddr.IPv6Addr)
 		assert.Equal(t, s.conf.leaseTime.Seconds(), oiaAddr.ValidLifetime.Seconds())
 	})
 

@@ -182,7 +185,8 @@ func TestV6GetLease(t *testing.T) {
 		oia = resp.Options.OneIANA()
 		oiaAddr = oia.Options.OneAddress()
 
-		assert.Equal(t, l.IP, oiaAddr.IPv6Addr)
+		ip := net.IP(l.IP.AsSlice())
+		assert.Equal(t, ip, oiaAddr.IPv6Addr)
 		assert.Equal(t, s.conf.leaseTime.Seconds(), oiaAddr.ValidLifetime.Seconds())
 	})
 

@@ -308,3 +312,72 @@ func TestIP6InRange(t *testing.T) {
 		})
 	}
 }
+
+func TestV6_FindMACbyIP(t *testing.T) {
+	const (
+		staticName  = "static-client"
+		anotherName = "another-client"
+	)
+
+	staticIP := netip.MustParseAddr("2001::1")
+	staticMAC := net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA}
+
+	anotherIP := netip.MustParseAddr("2001::100")
+	anotherMAC := net.HardwareAddr{0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB}
+
+	s := &v6Server{
+		leases: []*Lease{{
+			Expiry:   time.Unix(leaseExpireStatic, 0),
+			Hostname: staticName,
+			HWAddr:   staticMAC,
+			IP:       staticIP,
+		}, {
+			Expiry:   time.Unix(10, 0),
+			Hostname: anotherName,
+			HWAddr:   anotherMAC,
+			IP:       anotherIP,
+		}},
+	}
+
+	s.leases = []*Lease{{
+		Expiry:   time.Unix(leaseExpireStatic, 0),
+		Hostname: staticName,
+		HWAddr:   staticMAC,
+		IP:       staticIP,
+	}, {
+		Expiry:   time.Unix(10, 0),
+		Hostname: anotherName,
+		HWAddr:   anotherMAC,
+		IP:       anotherIP,
+	}}
+
+	testCases := []struct {
+		want net.HardwareAddr
+		ip   netip.Addr
+		name string
+	}{{
+		name: "basic",
+		ip:   staticIP,
+		want: staticMAC,
+	}, {
+		name: "not_found",
+		ip:   netip.MustParseAddr("ffff::1"),
+		want: nil,
+	}, {
+		name: "expired",
+		ip:   anotherIP,
+		want: nil,
+	}, {
+		name: "v4",
+		ip:   netip.MustParseAddr("1.2.3.4"),
+		want: nil,
+	}}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			mac := s.FindMACbyIP(tc.ip)
+
+			require.Equal(t, tc.want, mac)
+		})
+	}
+}
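TestV6_FindMACbyIP pins down an IP-keyed lookup over the lease slice. The method itself is not part of this hunk; the sketch below only illustrates the lookup shape with a simplified lease type, and it deliberately ignores the expiry handling that the "expired" case also checks.

```go
package main

import (
	"fmt"
	"net"
	"net/netip"
)

// lease is a simplified stand-in for the package's Lease type.
type lease struct {
	HWAddr net.HardwareAddr
	IP     netip.Addr
}

// findMACByIP returns the MAC of the lease with the given IP, or nil when
// there is none.  Equality on netip.Addr values replaces net.IP.Equal here.
func findMACByIP(leases []*lease, ip netip.Addr) (mac net.HardwareAddr) {
	for _, l := range leases {
		if l.IP == ip {
			return l.HWAddr
		}
	}

	return nil
}

func main() {
	leases := []*lease{{
		HWAddr: net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
		IP:     netip.MustParseAddr("2001::1"),
	}}

	fmt.Println(findMACByIP(leases, netip.MustParseAddr("2001::1"))) // aa:aa:aa:aa:aa:aa
	fmt.Println(findMACByIP(leases, netip.MustParseAddr("ffff::1"))) // empty line: no match
}
```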
@@ -200,7 +200,7 @@ type FilteringConfig struct {
 // EDNSClientSubnet is the settings list for EDNS Client Subnet.
 type EDNSClientSubnet struct {
 	// CustomIP for EDNS Client Subnet.
-	CustomIP string `yaml:"custom_ip"`
+	CustomIP netip.Addr `yaml:"custom_ip"`
 
 	// Enabled defines if EDNS Client Subnet is enabled.
 	Enabled bool `yaml:"enabled"`

@@ -340,15 +340,8 @@ func (s *Server) createProxyConfig() (conf proxy.Config, err error) {
 	}
 
 	if srvConf.EDNSClientSubnet.UseCustom {
-		// TODO(s.chzhen): Add wrapper around netip.Addr.
-		var ip net.IP
-		ip, err = netutil.ParseIP(srvConf.EDNSClientSubnet.CustomIP)
-		if err != nil {
-			return conf, fmt.Errorf("edns: %w", err)
-		}
-
 		// TODO(s.chzhen): Use netip.Addr instead of net.IP inside dnsproxy.
-		conf.EDNSAddr = ip
+		conf.EDNSAddr = net.IP(srvConf.EDNSClientSubnet.CustomIP.AsSlice())
 	}
 
 	if srvConf.CacheSize != 0 {

@@ -377,7 +370,7 @@ func (s *Server) createProxyConfig() (conf proxy.Config, err error) {
 
 	err = s.prepareTLS(&conf)
 	if err != nil {
-		return conf, fmt.Errorf("validating tls: %w", err)
+		return proxy.Config{}, fmt.Errorf("validating tls: %w", err)
 	}
 
 	if c := srvConf.DNSCryptConfig; c.Enabled {

@@ -388,7 +381,7 @@ func (s *Server) createProxyConfig() (conf proxy.Config, err error) {
 	}
 
 	if conf.UpstreamConfig == nil || len(conf.UpstreamConfig.Upstreams) == 0 {
-		return conf, errors.Error("no default upstream servers configured")
+		return proxy.Config{}, errors.Error("no default upstream servers configured")
 	}
 
 	return conf, nil
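createProxyConfig now only has to convert the already-validated netip.Addr to a net.IP for dnsproxy, instead of parsing a string. A sketch of that boundary with stand-in types (ednsClientSubnet and proxyConfig below are illustrative, not the real dnsforward or proxy types):

```go
package main

import (
	"fmt"
	"net"
	"net/netip"
)

// ednsClientSubnet mirrors the fields used by the hunk above.
type ednsClientSubnet struct {
	CustomIP  netip.Addr
	Enabled   bool
	UseCustom bool
}

// proxyConfig stands in for dnsproxy's proxy.Config, which still uses net.IP.
type proxyConfig struct {
	EDNSAddr net.IP
}

func main() {
	ecs := ednsClientSubnet{
		CustomIP:  netip.MustParseAddr("1.2.3.4"),
		Enabled:   true,
		UseCustom: true,
	}

	conf := proxyConfig{}
	if ecs.UseCustom {
		// The same conversion as in the hunk: no string parsing and no error
		// path, just a slice copy at the library boundary.
		conf.EDNSAddr = net.IP(ecs.CustomIP.AsSlice())
	}

	fmt.Println(conf.EDNSAddr) // 1.2.3.4
}
```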
@@ -243,17 +243,16 @@ func (s *Server) onDHCPLeaseChanged(flags int) {
 		lowhost := strings.ToLower(l.Hostname + "." + s.localDomainSuffix)
 
 		// Assume that we only process IPv4 now.
-		//
-		// TODO(a.garipov): Remove once we switch to netip.Addr more fully.
-		ip, err := netutil.IPToAddr(l.IP, netutil.AddrFamilyIPv4)
-		if err != nil {
-			log.Debug("dnsforward: skipping invalid ip %v from dhcp: %s", l.IP, err)
+		if !l.IP.Is4() {
+			log.Debug("dnsforward: skipping invalid ip from dhcp: bad ipv4 net.IP %v", l.IP)
 
 			continue
 		}
 
-		ipToHost[ip] = lowhost
-		hostToIP[lowhost] = ip
+		leaseIP := l.IP
+
+		ipToHost[leaseIP] = lowhost
+		hostToIP[lowhost] = leaseIP
 	}
 
 	s.setTableHostToIP(hostToIP)

@@ -1073,7 +1073,7 @@ var testDHCP = &dhcpd.MockInterface{
 	OnEnabled: func() (ok bool) { return true },
 	OnLeases: func(flags dhcpd.GetLeasesFlags) (leases []*dhcpd.Lease) {
 		return []*dhcpd.Lease{{
-			IP:       net.IP{192, 168, 12, 34},
+			IP:       netip.MustParseAddr("192.168.12.34"),
 			HWAddr:   net.HardwareAddr{0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA},
 			Hostname: "myhost",
 		}}
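The host tables in onDHCPLeaseChanged can be keyed by the lease address directly because netip.Addr is a comparable value type, which net.IP (a byte slice) is not. A small sketch of that property:

```go
package main

import (
	"fmt"
	"net/netip"
)

func main() {
	ipToHost := map[netip.Addr]string{}
	hostToIP := map[string]netip.Addr{}

	leaseIP := netip.MustParseAddr("192.168.12.34")
	ipToHost[leaseIP] = "myhost.lan"
	hostToIP["myhost.lan"] = leaseIP

	// Two independent parses of the same address compare equal, so lookups
	// need no normalization of byte slices.
	fmt.Println(ipToHost[netip.MustParseAddr("192.168.12.34")]) // myhost.lan
	fmt.Println(hostToIP["myhost.lan"])                         // 192.168.12.34
}
```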
@@ -23,26 +23,78 @@ import (
 )
 
 // jsonDNSConfig is the JSON representation of the DNS server configuration.
+//
+// TODO(s.chzhen): Split it into smaller pieces. Use aghalg.NullBool instead
+// of *bool.
 type jsonDNSConfig struct {
+	// Upstreams is the list of upstream DNS servers.
 	Upstreams *[]string `json:"upstream_dns"`
+
+	// UpstreamsFile is the file containing upstream DNS servers.
 	UpstreamsFile *string `json:"upstream_dns_file"`
+
+	// Bootstraps is the list of DNS servers resolving IP addresses of the
+	// upstream DoH/DoT resolvers.
 	Bootstraps *[]string `json:"bootstrap_dns"`
+
+	// ProtectionEnabled defines if protection is enabled.
 	ProtectionEnabled *bool `json:"protection_enabled"`
+
+	// RateLimit is the number of requests per second allowed per client.
 	RateLimit *uint32 `json:"ratelimit"`
+
+	// BlockingMode defines the way blocked responses are constructed.
 	BlockingMode *BlockingMode `json:"blocking_mode"`
+
+	// EDNSCSEnabled defines if EDNS Client Subnet is enabled.
 	EDNSCSEnabled *bool `json:"edns_cs_enabled"`
+
+	// EDNSCSUseCustom defines if EDNSCSCustomIP should be used.
+	EDNSCSUseCustom *bool `json:"edns_cs_use_custom"`
+
+	// DNSSECEnabled defines if DNSSEC is enabled.
 	DNSSECEnabled *bool `json:"dnssec_enabled"`
+
+	// DisableIPv6 defines if IPv6 addresses should be dropped.
 	DisableIPv6 *bool `json:"disable_ipv6"`
+
+	// UpstreamMode defines the way DNS requests are constructed.
 	UpstreamMode *string `json:"upstream_mode"`
+
+	// CacheSize in bytes.
 	CacheSize *uint32 `json:"cache_size"`
+
+	// CacheMinTTL is custom minimum TTL for cached DNS responses.
 	CacheMinTTL *uint32 `json:"cache_ttl_min"`
+
+	// CacheMaxTTL is custom maximum TTL for cached DNS responses.
 	CacheMaxTTL *uint32 `json:"cache_ttl_max"`
+
+	// CacheOptimistic defines if expired entries should be served.
 	CacheOptimistic *bool `json:"cache_optimistic"`
+
+	// ResolveClients defines if clients IPs should be resolved into hostnames.
 	ResolveClients *bool `json:"resolve_clients"`
+
+	// UsePrivateRDNS defines if privates DNS resolvers should be used.
 	UsePrivateRDNS *bool `json:"use_private_ptr_resolvers"`
+
+	// LocalPTRUpstreams is the list of local private DNS resolvers.
 	LocalPTRUpstreams *[]string `json:"local_ptr_upstreams"`
+
+	// BlockingIPv4 is custom IPv4 address for blocked A requests.
 	BlockingIPv4 net.IP `json:"blocking_ipv4"`
+
+	// BlockingIPv6 is custom IPv6 address for blocked AAAA requests.
 	BlockingIPv6 net.IP `json:"blocking_ipv6"`
+
+	// EDNSCSCustomIP is custom IP for EDNS Client Subnet.
+	EDNSCSCustomIP netip.Addr `json:"edns_cs_custom_ip"`
+
+	// DefaultLocalPTRUpstreams is used to pass the addresses from
+	// systemResolvers to the front-end. It's not a pointer to the slice since
+	// there is no need to omit it while decoding from JSON.
+	DefaultLocalPTRUpstreams []string `json:"default_local_ptr_upstreams,omitempty"`
 }
 
 func (s *Server) getDNSConfig() (c *jsonDNSConfig) {

@@ -57,7 +109,11 @@ func (s *Server) getDNSConfig() (c *jsonDNSConfig) {
 	blockingIPv4 := s.conf.BlockingIPv4
 	blockingIPv6 := s.conf.BlockingIPv6
 	ratelimit := s.conf.Ratelimit
+
+	customIP := s.conf.EDNSClientSubnet.CustomIP
 	enableEDNSClientSubnet := s.conf.EDNSClientSubnet.Enabled
+	useCustom := s.conf.EDNSClientSubnet.UseCustom
+
 	enableDNSSEC := s.conf.EnableDNSSEC
 	aaaaDisabled := s.conf.AAAADisabled
 	cacheSize := s.conf.CacheSize

@@ -74,6 +130,11 @@ func (s *Server) getDNSConfig() (c *jsonDNSConfig) {
 		upstreamMode = "parallel"
 	}
 
+	defLocalPTRUps, err := s.filterOurDNSAddrs(s.sysResolvers.Get())
+	if err != nil {
+		log.Debug("getting dns configuration: %s", err)
+	}
+
 	return &jsonDNSConfig{
 		Upstreams:     &upstreams,
 		UpstreamsFile: &upstreamFile,

@@ -83,7 +144,9 @@ func (s *Server) getDNSConfig() (c *jsonDNSConfig) {
 		BlockingIPv4:    blockingIPv4,
 		BlockingIPv6:    blockingIPv6,
 		RateLimit:       &ratelimit,
+		EDNSCSCustomIP:  customIP,
 		EDNSCSEnabled:   &enableEDNSClientSubnet,
+		EDNSCSUseCustom: &useCustom,
 		DNSSECEnabled:   &enableDNSSEC,
 		DisableIPv6:     &aaaaDisabled,
 		CacheSize:       &cacheSize,

@@ -94,26 +157,13 @@ func (s *Server) getDNSConfig() (c *jsonDNSConfig) {
 		ResolveClients:    &resolveClients,
 		UsePrivateRDNS:    &usePrivateRDNS,
 		LocalPTRUpstreams: &localPTRUpstreams,
+		DefaultLocalPTRUpstreams: defLocalPTRUps,
 	}
 }
 
+// handleGetConfig handles requests to the GET /control/dns_info endpoint.
 func (s *Server) handleGetConfig(w http.ResponseWriter, r *http.Request) {
-	defLocalPTRUps, err := s.filterOurDNSAddrs(s.sysResolvers.Get())
-	if err != nil {
-		log.Debug("getting dns configuration: %s", err)
-	}
-
-	resp := struct {
-		jsonDNSConfig
-		// DefautLocalPTRUpstreams is used to pass the addresses from
-		// systemResolvers to the front-end. It's not a pointer to the slice
-		// since there is no need to omit it while decoding from JSON.
-		DefautLocalPTRUpstreams []string `json:"default_local_ptr_upstreams,omitempty"`
-	}{
-		jsonDNSConfig:           *s.getDNSConfig(),
-		DefautLocalPTRUpstreams: defLocalPTRUps,
-	}
+	resp := s.getDNSConfig()
 
 	_ = aghhttp.WriteJSONResponse(w, r, resp)
 }

@@ -204,6 +254,7 @@ func (req *jsonDNSConfig) checkCacheTTL() bool {
 	return min <= max
 }
 
+// handleSetConfig handles requests to the POST /control/dns_config endpoint.
 func (s *Server) handleSetConfig(w http.ResponseWriter, r *http.Request) {
 	req := &jsonDNSConfig{}
 	err := json.NewDecoder(r.Body).Decode(req)

@@ -231,8 +282,8 @@ func (s *Server) handleSetConfig(w http.ResponseWriter, r *http.Request) {
 	}
 }
 
-// setConfigRestartable sets the server parameters. shouldRestart is true if
-// the server should be restarted to apply changes.
+// setConfig sets the server parameters. shouldRestart is true if the server
+// should be restarted to apply changes.
 func (s *Server) setConfig(dc *jsonDNSConfig) (shouldRestart bool) {
 	s.serverLock.Lock()
 	defer s.serverLock.Unlock()

@@ -250,6 +301,10 @@ func (s *Server) setConfig(dc *jsonDNSConfig) (shouldRestart bool) {
 		s.conf.FastestAddr = *dc.UpstreamMode == "fastest_addr"
 	}
 
+	if dc.EDNSCSUseCustom != nil && *dc.EDNSCSUseCustom {
+		s.conf.EDNSClientSubnet.CustomIP = dc.EDNSCSCustomIP
+	}
+
 	setIfNotNil(&s.conf.ProtectionEnabled, dc.ProtectionEnabled)
 	setIfNotNil(&s.conf.EnableDNSSEC, dc.DNSSECEnabled)
 	setIfNotNil(&s.conf.AAAADisabled, dc.DisableIPv6)

@@ -281,6 +336,7 @@ func (s *Server) setConfigRestartable(dc *jsonDNSConfig) (shouldRestart bool) {
 		setIfNotNil(&s.conf.UpstreamDNSFileName, dc.UpstreamsFile),
 		setIfNotNil(&s.conf.BootstrapDNS, dc.Bootstraps),
 		setIfNotNil(&s.conf.EDNSClientSubnet.Enabled, dc.EDNSCSEnabled),
+		setIfNotNil(&s.conf.EDNSClientSubnet.UseCustom, dc.EDNSCSUseCustom),
 		setIfNotNil(&s.conf.CacheSize, dc.CacheSize),
 		setIfNotNil(&s.conf.CacheMinTTL, dc.CacheMinTTL),
 		setIfNotNil(&s.conf.CacheMaxTTL, dc.CacheMaxTTL),
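setConfig only touches settings whose pointer fields arrived non-nil in the request, the same pattern setIfNotNil applies to the restartable settings above. The helper below is an illustrative generic sketch of that idea, not AdGuard Home's setIfNotNil:

```go
package main

import "fmt"

// applyIfSet copies *src into *dst when src is non-nil and reports whether it
// did.  Hypothetical helper, shown only to illustrate the pattern.
func applyIfSet[T any](dst, src *T) (set bool) {
	if src == nil {
		return false
	}

	*dst = *src

	return true
}

func main() {
	type conf struct {
		CacheSize    uint32
		EDNSCSCustom bool
	}

	c := conf{CacheSize: 4096}

	newSize := uint32(65536)
	restart := applyIfSet(&c.CacheSize, &newSize)         // field present in the request
	restart = applyIfSet(&c.EDNSCSCustom, nil) || restart // field omitted: left untouched

	fmt.Println(c.CacheSize, c.EDNSCSCustom, restart) // 65536 false true
}
```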
@@ -181,6 +181,12 @@ func TestDNSForwardHTTP_handleSetConfig(t *testing.T) {
 	}, {
 		name:    "edns_cs_enabled",
 		wantSet: "",
+	}, {
+		name:    "edns_cs_use_custom",
+		wantSet: "",
+	}, {
+		name:    "edns_cs_use_custom_bad_ip",
+		wantSet: "decoding request: ParseAddr(\"bad.ip\"): unexpected character (at \"bad.ip\")",
 	}, {
 		name:    "dnssec_enabled",
 		wantSet: "",

@@ -222,16 +228,20 @@ func TestDNSForwardHTTP_handleSetConfig(t *testing.T) {
 		Req  json.RawMessage `json:"req"`
 		Want json.RawMessage `json:"want"`
 	}
-	loadTestData(t, t.Name()+jsonExt, &data)
+
+	testData := t.Name() + jsonExt
+	loadTestData(t, testData, &data)
 
 	for _, tc := range testCases {
+		// NOTE: Do not use require.Contains, because the size of the data
+		// prevents it from printing a meaningful error message.
 		caseData, ok := data[tc.name]
-		require.True(t, ok)
+		require.Truef(t, ok, "%q does not contain test data for test case %s", testData, tc.name)
 
 		t.Run(tc.name, func(t *testing.T) {
 			t.Cleanup(func() {
 				s.conf = defaultConf
-				s.conf.FilteringConfig.EDNSClientSubnet.Enabled = false
+				s.conf.FilteringConfig.EDNSClientSubnet = &EDNSClientSubnet{}
 			})
 
 			rBody := io.NopCloser(bytes.NewReader(caseData.Req))
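The expected error in the edns_cs_use_custom_bad_ip case comes from netip.Addr's encoding.TextUnmarshaler implementation, which encoding/json calls for the edns_cs_custom_ip string; the handler only adds its own "decoding request: " prefix. A standalone sketch:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/netip"
)

type req struct {
	EDNSCSCustomIP netip.Addr `json:"edns_cs_custom_ip"`
}

func main() {
	var r req

	// A valid address round-trips as a plain JSON string.
	_ = json.Unmarshal([]byte(`{"edns_cs_custom_ip":"1.2.3.4"}`), &r)
	fmt.Println(r.EDNSCSCustomIP) // 1.2.3.4

	// An invalid one fails inside UnmarshalText with the message the test
	// above matches against.
	err := json.Unmarshal([]byte(`{"edns_cs_custom_ip":"bad.ip"}`), &r)
	fmt.Println(err) // ParseAddr("bad.ip"): unexpected character (at "bad.ip")
}
```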
@@ -26,7 +26,9 @@
     "cache_optimistic": false,
     "resolve_clients": false,
     "use_private_ptr_resolvers": false,
-    "local_ptr_upstreams": []
+    "local_ptr_upstreams": [],
+    "edns_cs_use_custom": false,
+    "edns_cs_custom_ip": ""
   },
   "fastest_addr": {
     "upstream_dns": [

@@ -55,7 +57,9 @@
     "cache_optimistic": false,
     "resolve_clients": false,
     "use_private_ptr_resolvers": false,
-    "local_ptr_upstreams": []
+    "local_ptr_upstreams": [],
+    "edns_cs_use_custom": false,
+    "edns_cs_custom_ip": ""
   },
   "parallel": {
     "upstream_dns": [

@@ -84,6 +88,8 @@
     "cache_optimistic": false,
     "resolve_clients": false,
     "use_private_ptr_resolvers": false,
-    "local_ptr_upstreams": []
+    "local_ptr_upstreams": [],
+    "edns_cs_use_custom": false,
+    "edns_cs_custom_ip": ""
   }
 }
@@ -33,7 +33,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "bootstraps": {

@@ -66,7 +68,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "blocking_mode_good": {

@@ -100,7 +104,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
      "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "blocking_mode_bad": {

@@ -134,7 +140,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "ratelimit": {

@@ -168,7 +176,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "edns_cs_enabled": {

@@ -202,7 +212,85 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
+    }
+  },
+  "edns_cs_use_custom": {
+    "req": {
+      "edns_cs_enabled": true,
+      "edns_cs_use_custom": true,
+      "edns_cs_custom_ip": "1.2.3.4"
+    },
+    "want": {
+      "upstream_dns": [
+        "8.8.8.8:53",
+        "8.8.4.4:53"
+      ],
+      "upstream_dns_file": "",
+      "bootstrap_dns": [
+        "9.9.9.10",
+        "149.112.112.10",
+        "2620:fe::10",
+        "2620:fe::fe:10"
+      ],
+      "protection_enabled": true,
+      "ratelimit": 0,
+      "blocking_mode": "default",
+      "blocking_ipv4": "",
+      "blocking_ipv6": "",
+      "edns_cs_enabled": true,
+      "dnssec_enabled": false,
+      "disable_ipv6": false,
+      "upstream_mode": "",
+      "cache_size": 0,
+      "cache_ttl_min": 0,
+      "cache_ttl_max": 0,
+      "cache_optimistic": false,
+      "resolve_clients": false,
+      "use_private_ptr_resolvers": false,
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": true,
+      "edns_cs_custom_ip": "1.2.3.4"
+    }
+  },
+  "edns_cs_use_custom_bad_ip": {
+    "req": {
+      "edns_cs_enabled": true,
+      "edns_cs_use_custom": true,
+      "edns_cs_custom_ip": "bad.ip"
+    },
+    "want": {
+      "upstream_dns": [
+        "8.8.8.8:53",
+        "8.8.4.4:53"
+      ],
+      "upstream_dns_file": "",
+      "bootstrap_dns": [
+        "9.9.9.10",
+        "149.112.112.10",
+        "2620:fe::10",
+        "2620:fe::fe:10"
+      ],
+      "protection_enabled": true,
+      "ratelimit": 0,
+      "blocking_mode": "default",
+      "blocking_ipv4": "",
+      "blocking_ipv6": "",
+      "edns_cs_enabled": false,
+      "dnssec_enabled": false,
+      "disable_ipv6": false,
+      "upstream_mode": "",
+      "cache_size": 0,
+      "cache_ttl_min": 0,
+      "cache_ttl_max": 0,
+      "cache_optimistic": false,
+      "resolve_clients": false,
+      "use_private_ptr_resolvers": false,
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "dnssec_enabled": {

@@ -236,7 +324,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "cache_size": {

@@ -270,7 +360,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "upstream_mode_parallel": {

@@ -304,7 +396,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "upstream_mode_fastest_addr": {

@@ -338,7 +432,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "upstream_dns_bad": {

@@ -374,7 +470,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "bootstraps_bad": {

@@ -410,7 +508,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "cache_bad_ttl": {

@@ -445,7 +545,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "upstream_mode_bad": {

@@ -479,7 +581,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "local_ptr_upstreams_good": {

@@ -517,7 +621,9 @@
       "use_private_ptr_resolvers": false,
       "local_ptr_upstreams": [
         "123.123.123.123"
-      ]
+      ],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "local_ptr_upstreams_bad": {

@@ -554,7 +660,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   },
   "local_ptr_upstreams_null": {

@@ -588,7 +696,9 @@
       "cache_optimistic": false,
       "resolve_clients": false,
       "use_private_ptr_resolvers": false,
-      "local_ptr_upstreams": []
+      "local_ptr_upstreams": [],
+      "edns_cs_use_custom": false,
+      "edns_cs_custom_ip": ""
     }
   }
 }
@@ -461,6 +461,7 @@ func (d *DNSFilter) RegisterFilteringHandlers() {
 	registerHTTP(http.MethodPost, "/control/safesearch/enable", d.handleSafeSearchEnable)
 	registerHTTP(http.MethodPost, "/control/safesearch/disable", d.handleSafeSearchDisable)
 	registerHTTP(http.MethodGet, "/control/safesearch/status", d.handleSafeSearchStatus)
+	registerHTTP(http.MethodPut, "/control/safesearch/settings", d.handleSafeSearchSettings)
 
 	registerHTTP(http.MethodGet, "/control/rewrite/list", d.handleRewriteList)
 	registerHTTP(http.MethodPost, "/control/rewrite/add", d.handleRewriteAdd)

@@ -17,7 +17,7 @@ type SafeSearch interface {
 // SafeSearchConfig is a struct with safe search related settings.
 type SafeSearchConfig struct {
 	// CustomResolver is the resolver used by safe search.
-	CustomResolver Resolver `yaml:"-"`
+	CustomResolver Resolver `yaml:"-" json:"-"`
 
 	// Enabled indicates if safe search is enabled entirely.
 	Enabled bool `yaml:"enabled" json:"enabled"`
@@ -1,29 +1,63 @@
 package filtering
 
 import (
+	"encoding/json"
 	"net/http"
 
 	"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
 )
 
-// TODO(d.kolyshev): Replace handlers below with the new API.
+// handleSafeSearchEnable is the handler for POST /control/safesearch/enable
+// HTTP API.
+//
+// Deprecated: Use handleSafeSearchSettings.
 func (d *DNSFilter) handleSafeSearchEnable(w http.ResponseWriter, r *http.Request) {
 	setProtectedBool(&d.confLock, &d.Config.SafeSearchConf.Enabled, true)
 	d.Config.ConfigModified()
 }
 
+// handleSafeSearchDisable is the handler for POST /control/safesearch/disable
+// HTTP API.
+//
+// Deprecated: Use handleSafeSearchSettings.
 func (d *DNSFilter) handleSafeSearchDisable(w http.ResponseWriter, r *http.Request) {
 	setProtectedBool(&d.confLock, &d.Config.SafeSearchConf.Enabled, false)
 	d.Config.ConfigModified()
 }
 
+// handleSafeSearchStatus is the handler for GET /control/safesearch/status
+// HTTP API.
 func (d *DNSFilter) handleSafeSearchStatus(w http.ResponseWriter, r *http.Request) {
-	resp := &struct {
-		Enabled bool `json:"enabled"`
-	}{
-		Enabled: protectedBool(&d.confLock, &d.Config.SafeSearchConf.Enabled),
-	}
+	var resp SafeSearchConfig
+	func() {
+		d.confLock.RLock()
+		defer d.confLock.RUnlock()
+
+		resp = d.Config.SafeSearchConf
+	}()
 
 	_ = aghhttp.WriteJSONResponse(w, r, resp)
 }
+
+// handleSafeSearchSettings is the handler for PUT /control/safesearch/settings
+// HTTP API.
+func (d *DNSFilter) handleSafeSearchSettings(w http.ResponseWriter, r *http.Request) {
+	req := &SafeSearchConfig{}
+	err := json.NewDecoder(r.Body).Decode(req)
+	if err != nil {
+		aghhttp.Error(r, w, http.StatusBadRequest, "reading req: %s", err)
+
+		return
+	}
+
+	func() {
+		d.confLock.Lock()
+		defer d.confLock.Unlock()
+
+		d.Config.SafeSearchConf = *req
+	}()
+
+	d.Config.ConfigModified()
+
+	aghhttp.OK(w)
+}
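The new PUT /control/safesearch/settings handler decodes the whole SafeSearchConfig from the request body and replaces the stored settings. A hedged client-side sketch of exercising it (address and authentication are placeholders; only the enabled key is shown, the per-service flags travel in the same object):

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Placeholder address; a real call also needs AdGuard Home's usual
	// authentication (session cookie or basic auth).
	const settingsURL = "http://127.0.0.1:3000/control/safesearch/settings"

	body := bytes.NewReader([]byte(`{"enabled": true}`))

	req, err := http.NewRequest(http.MethodPut, settingsURL, body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)

		return
	}
	defer resp.Body.Close()

	// The handler answers with a plain OK on success.
	fmt.Println(resp.Status)
}
```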
@@ -19,7 +19,6 @@ import (
 	"github.com/AdguardTeam/dnsproxy/upstream"
 	"github.com/AdguardTeam/golibs/errors"
 	"github.com/AdguardTeam/golibs/log"
-	"github.com/AdguardTeam/golibs/netutil"
 	"github.com/AdguardTeam/golibs/stringutil"
 	"golang.org/x/exp/maps"
 	"golang.org/x/exp/slices"

@@ -859,15 +858,7 @@ func (clients *clientsContainer) updateFromDHCP(add bool) {
 			continue
 		}
 
-		// TODO(a.garipov): Remove once we switch to netip.Addr more fully.
-		ipAddr, err := netutil.IPToAddrNoMapped(l.IP)
-		if err != nil {
-			log.Error("clients: bad client ip %v from dhcp: %s", l.IP, err)
-
-			continue
-		}
-
-		ok := clients.addHostLocked(ipAddr, l.Hostname, ClientSourceDHCP)
+		ok := clients.addHostLocked(l.IP, l.Hostname, ClientSourceDHCP)
 		if ok {
 			n++
 		}

@@ -275,7 +275,7 @@ func TestClientsAddExisting(t *testing.T) {
 		t.Skip("skipping dhcp test on windows")
 	}
 
-	ip := net.IP{1, 2, 3, 4}
+	ip := netip.MustParseAddr("1.2.3.4")
 
 	// First, init a DHCP server with a single static lease.
 	config := &dhcpd.ServerConfig{
@@ -28,6 +28,7 @@ type clientJSON struct {
 	DisallowedRule *string `json:"disallowed_rule,omitempty"`
 
 	WHOISInfo *RuntimeClientWHOISInfo `json:"whois_info,omitempty"`
+	SafeSearchConf *filtering.SafeSearchConfig `json:"safe_search"`
 
 	Name string `json:"name"`
 

@@ -59,7 +60,7 @@ type clientListJSON struct {
 	Tags []string `json:"supported_tags"`
 }
 
-// respond with information about configured clients
+// handleGetClients is the handler for GET /control/clients HTTP API.
 func (clients *clientsContainer) handleGetClients(w http.ResponseWriter, r *http.Request) {
 	data := clientListJSON{}
 

@@ -88,11 +89,15 @@ func (clients *clientsContainer) handleGetClients(w http.ResponseWriter, r *http
 	_ = aghhttp.WriteJSONResponse(w, r, data)
 }
 
-// Convert JSON object to Client object
+// jsonToClient converts JSON object to Client object.
 func jsonToClient(cj clientJSON) (c *Client) {
+	var safeSearchConf filtering.SafeSearchConfig
+	if cj.SafeSearchConf != nil {
+		safeSearchConf = *cj.SafeSearchConf
+	} else {
 		// TODO(d.kolyshev): Remove after cleaning the deprecated
 		// [clientJSON.SafeSearchEnabled] field.
-		safeSearchConf := filtering.SafeSearchConfig{Enabled: cj.SafeSearchEnabled}
+		safeSearchConf = filtering.SafeSearchConfig{Enabled: cj.SafeSearchEnabled}
 
 		// Set default service flags for enabled safesearch.
 		if safeSearchConf.Enabled {

@@ -103,6 +108,7 @@ func jsonToClient(cj clientJSON) (c *Client) {
 			safeSearchConf.Yandex = true
 			safeSearchConf.YouTube = true
 		}
+	}
 
 	return &Client{
 		Name: cj.Name,

@@ -111,9 +117,8 @@ func jsonToClient(cj clientJSON) (c *Client) {
 		UseOwnSettings:   !cj.UseGlobalSettings,
 		FilteringEnabled: cj.FilteringEnabled,
 		ParentalEnabled:  cj.ParentalEnabled,
-		safeSearchConf:      safeSearchConf,
 		SafeBrowsingEnabled: cj.SafeBrowsingEnabled,
+		safeSearchConf:      safeSearchConf,
 		UseOwnBlockedServices: !cj.UseGlobalBlockedServices,
 		BlockedServices:       cj.BlockedServices,
 

@@ -121,7 +126,7 @@ func jsonToClient(cj clientJSON) (c *Client) {
 	}
 }
 
-// Convert Client object to JSON
+// clientToJSON converts Client object to JSON.
 func clientToJSON(c *Client) (cj *clientJSON) {
 	// TODO(d.kolyshev): Remove after cleaning the deprecated
 	// [clientJSON.SafeSearchEnabled] field.

@@ -136,6 +141,7 @@ func clientToJSON(c *Client) (cj *clientJSON) {
 		FilteringEnabled:  c.FilteringEnabled,
 		ParentalEnabled:   c.ParentalEnabled,
 		SafeSearchEnabled: safeSearchConf.Enabled,
+		SafeSearchConf:    safeSearchConf,
 		SafeBrowsingEnabled: c.SafeBrowsingEnabled,
 
 		UseGlobalBlockedServices: !c.UseOwnBlockedServices,

@@ -145,7 +151,7 @@ func clientToJSON(c *Client) (cj *clientJSON) {
 	}
 }
 
-// Add a new client
+// handleAddClient is the handler for POST /control/clients/add HTTP API.
 func (clients *clientsContainer) handleAddClient(w http.ResponseWriter, r *http.Request) {
 	cj := clientJSON{}
 	err := json.NewDecoder(r.Body).Decode(&cj)

@@ -172,7 +178,7 @@ func (clients *clientsContainer) handleAddClient(w http.ResponseWriter, r *http.
 	onConfigModified()
 }
 
-// Remove client
+// handleDelClient is the handler for POST /control/clients/delete HTTP API.
 func (clients *clientsContainer) handleDelClient(w http.ResponseWriter, r *http.Request) {
 	cj := clientJSON{}
 	err := json.NewDecoder(r.Body).Decode(&cj)

@@ -202,7 +208,7 @@ type updateJSON struct {
 	Data clientJSON `json:"data"`
 }
 
-// Update client's properties
+// handleUpdateClient is the handler for POST /control/clients/update HTTP API.
 func (clients *clientsContainer) handleUpdateClient(w http.ResponseWriter, r *http.Request) {
 	dj := updateJSON{}
 	err := json.NewDecoder(r.Body).Decode(&dj)

@@ -229,7 +235,7 @@ func (clients *clientsContainer) handleUpdateClient(w http.ResponseWriter, r *ht
 	onConfigModified()
 }
 
-// Get the list of clients by IP address list
+// handleFindClient is the handler for GET /control/clients/find HTTP API.
 func (clients *clientsContainer) handleFindClient(w http.ResponseWriter, r *http.Request) {
 	q := r.URL.Query()
 	data := []map[string]*clientJSON{}
@@ -228,34 +228,32 @@ type tlsConfigSettings struct {
 }
 
 type queryLogConfig struct {
+	// Ignored is the list of host names, which should not be written to log.
+	Ignored []string `yaml:"ignored"`
+
+	// Interval is the interval for query log's files rotation.
+	Interval timeutil.Duration `yaml:"interval"`
+
+	// MemSize is the number of entries kept in memory before they are flushed
+	// to disk.
+	MemSize uint32 `yaml:"size_memory"`
+
 	// Enabled defines if the query log is enabled.
 	Enabled bool `yaml:"enabled"`
 
 	// FileEnabled defines, if the query log is written to the file.
 	FileEnabled bool `yaml:"file_enabled"`
-
-	// Interval is the interval for query log's files rotation.
-	Interval timeutil.Duration `yaml:"interval"`
-
-	// MemSize is the number of entries kept in memory before they are
-	// flushed to disk.
-	MemSize uint32 `yaml:"size_memory"`
-
-	// Ignored is the list of host names, which should not be written to
-	// log.
-	Ignored []string `yaml:"ignored"`
 }
 
 type statsConfig struct {
-	// Enabled defines if the statistics are enabled.
-	Enabled bool `yaml:"enabled"`
-
-	// Interval is the time interval for flushing statistics to the disk in
-	// days.
-	Interval uint32 `yaml:"interval"`
-
 	// Ignored is the list of host names, which should not be counted.
 	Ignored []string `yaml:"ignored"`
+
+	// Interval is the retention interval for statistics.
+	Interval timeutil.Duration `yaml:"interval"`
+
+	// Enabled defines if the statistics are enabled.
+	Enabled bool `yaml:"enabled"`
 }
 
 // config is the global configuration structure.

@@ -286,7 +284,7 @@ var config = &configuration{
 		CacheSize: 4 * 1024 * 1024,
 
 		EDNSClientSubnet: &dnsforward.EDNSClientSubnet{
-			CustomIP:  "",
+			CustomIP:  netip.Addr{},
 			Enabled:   false,
 			UseCustom: false,
 		},

@@ -322,7 +320,7 @@ var config = &configuration{
 	},
 	Stats: statsConfig{
 		Enabled:  true,
-		Interval: 1,
+		Interval: timeutil.Duration{Duration: 1 * timeutil.Day},
 		Ignored:  []string{},
 	},
 	// NOTE: Keep these parameters in sync with the one put into

@@ -503,7 +501,7 @@ func (c *configuration) write() (err error) {
 	if Context.stats != nil {
 		statsConf := stats.Config{}
 		Context.stats.WriteDiskConfig(&statsConf)
-		config.Stats.Interval = statsConf.LimitDays
+		config.Stats.Interval = timeutil.Duration{Duration: statsConf.Limit}
 		config.Stats.Enabled = statsConf.Enabled
 		config.Stats.Ignored = statsConf.Ignored.Values()
 		slices.Sort(config.Stats.Ignored)
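With Stats.Interval stored as a timeutil.Duration, the old integer day count and the new value convert into each other through the timeutil.Day constant already used in the hunks above; that is also the conversion a manual rollback to the previous schema needs. A small sketch:

```go
package main

import (
	"fmt"
	"time"

	"github.com/AdguardTeam/golibs/timeutil"
)

func main() {
	// Days -> duration, as the default configuration and the 19-to-20
	// upgrade now store the statistics retention.
	days := 1
	ivl := timeutil.Duration{Duration: time.Duration(days) * timeutil.Day}

	// Duration -> days, the reverse conversion for a rollback to schema 19.
	back := int(ivl.Duration / timeutil.Day)

	fmt.Println(ivl.Duration, back) // 24h0m0s 1
}
```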
@@ -8,7 +8,6 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
-	"strings"
 
 	"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
 	"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"

@@ -22,7 +21,6 @@ import (
 	"github.com/AdguardTeam/golibs/errors"
 	"github.com/AdguardTeam/golibs/log"
 	"github.com/AdguardTeam/golibs/netutil"
-	"github.com/AdguardTeam/golibs/stringutil"
 	"github.com/ameshkov/dnscrypt/v2"
 	yaml "gopkg.in/yaml.v3"
 )

@@ -54,13 +52,13 @@ func initDNS() (err error) {
 
 	statsConf := stats.Config{
 		Filename:       filepath.Join(baseDir, "stats.db"),
-		LimitDays:      config.Stats.Interval,
+		Limit:          config.Stats.Interval.Duration,
 		ConfigModified: onConfigModified,
 		HTTPRegister:   httpRegister,
 		Enabled:        config.Stats.Enabled,
 	}
 
-	set, err := nonDupEmptyHostNames(config.Stats.Ignored)
+	set, err := aghnet.NewDomainNameSet(config.Stats.Ignored)
 	if err != nil {
 		return fmt.Errorf("statistics: ignored list: %w", err)
 	}

@@ -84,13 +82,16 @@ func initDNS() (err error) {
 		FileEnabled: config.QueryLog.FileEnabled,
 	}
 
-	set, err = nonDupEmptyHostNames(config.QueryLog.Ignored)
+	set, err = aghnet.NewDomainNameSet(config.QueryLog.Ignored)
 	if err != nil {
 		return fmt.Errorf("querylog: ignored list: %w", err)
 	}
 
 	conf.Ignored = set
-	Context.queryLog = querylog.New(conf)
+	Context.queryLog, err = querylog.New(conf)
+	if err != nil {
+		return fmt.Errorf("init querylog: %w", err)
+	}
 
 	Context.filters, err = filtering.New(config.DNS.DnsfilterConf, nil)
 	if err != nil {

@@ -535,30 +536,6 @@ func closeDNSServer() {
 	log.Debug("all dns modules are closed")
 }
 
-// nonDupEmptyHostNames returns nil and error, if list has duplicate or empty
-// host name. Otherwise returns a set, which contains lowercase host names
-// without dot at the end, and nil error.
-func nonDupEmptyHostNames(list []string) (set *stringutil.Set, err error) {
-	set = stringutil.NewSet()
-
-	for _, v := range list {
-		host := strings.ToLower(strings.TrimSuffix(v, "."))
-		// TODO(a.garipov): Think about ignoring empty (".") names in
-		// the future.
-		if host == "" {
-			return nil, errors.Error("host name is empty")
-		}
-
-		if set.Has(host) {
-			return nil, fmt.Errorf("duplicate host name %q", host)
-		}
-
-		set.Add(host)
-	}
-
-	return set, nil
-}
-
 // safeSearchResolver is a [filtering.Resolver] implementation used for safe
 // search.
 type safeSearchResolver struct{}
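The removed nonDupEmptyHostNames helper spells out what the ignored-list constructor has to guarantee: lowercased names without a trailing dot, no empty entries, no duplicates; aghnet.NewDomainNameSet now fills that role. The sketch below re-implements the documented checks over a plain map as an illustration, not as the aghnet implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// newDomainNameSet mirrors the checks of the removed helper: lowercase each
// name, strip a trailing dot, and reject empty or duplicate entries.  It is a
// stand-in for illustration, not aghnet.NewDomainNameSet itself.
func newDomainNameSet(list []string) (set map[string]struct{}, err error) {
	set = make(map[string]struct{}, len(list))
	for _, v := range list {
		host := strings.ToLower(strings.TrimSuffix(v, "."))
		if host == "" {
			return nil, fmt.Errorf("host name is empty")
		}

		if _, ok := set[host]; ok {
			return nil, fmt.Errorf("duplicate host name %q", host)
		}

		set[host] = struct{}{}
	}

	return set, nil
}

func main() {
	set, err := newDomainNameSet([]string{"Example.ORG.", "test.example"})
	fmt.Println(len(set), err) // 2 <nil>

	_, err = newDomainNameSet([]string{"a.example", "A.example"})
	fmt.Println(err) // duplicate host name "a.example"
}
```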
@ -22,7 +22,7 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// currentSchemaVersion is the current schema version.
|
// currentSchemaVersion is the current schema version.
|
||||||
const currentSchemaVersion = 19
|
const currentSchemaVersion = 20
|
||||||
|
|
||||||
// These aliases are provided for convenience.
|
// These aliases are provided for convenience.
|
||||||
type (
|
type (
|
||||||
|
@ -92,6 +92,7 @@ func upgradeConfigSchema(oldVersion int, diskConf yobj) (err error) {
|
||||||
upgradeSchema16to17,
|
upgradeSchema16to17,
|
||||||
upgradeSchema17to18,
|
upgradeSchema17to18,
|
||||||
upgradeSchema18to19,
|
upgradeSchema18to19,
|
||||||
|
upgradeSchema19to20,
|
||||||
}
|
}
|
||||||
|
|
||||||
n := 0
|
n := 0
|
||||||
|
@@ -1064,6 +1065,47 @@ func upgradeSchema18to19(diskConf yobj) (err error) {
 	return nil
 }
 
+// upgradeSchema19to20 performs the following changes:
+//
+//	# BEFORE:
+//	'statistics':
+//	  'interval': 1
+//
+//	# AFTER:
+//	'statistics':
+//	  'interval': 24h
+func upgradeSchema19to20(diskConf yobj) (err error) {
+	log.Printf("Upgrade yaml: 19 to 20")
+	diskConf["schema_version"] = 20
+
+	statsVal, ok := diskConf["statistics"]
+	if !ok {
+		return nil
+	}
+
+	var stats yobj
+	stats, ok = statsVal.(yobj)
+	if !ok {
+		return fmt.Errorf("unexpected type of stats: %T", statsVal)
+	}
+
+	const field = "interval"
+
+	// Set the initial value from the global configuration structure.
+	statsIvl := 1
+	statsIvlVal, ok := stats[field]
+	if ok {
+		statsIvl, ok = statsIvlVal.(int)
+		if !ok {
+			return fmt.Errorf("unexpected type of %s: %T", field, statsIvlVal)
+		}
+	}
+
+	stats[field] = timeutil.Duration{Duration: time.Duration(statsIvl) * timeutil.Day}
+
+	return nil
+}
+
 // TODO(a.garipov): Replace with log.Output when we port it to our logging
 // package.
 func funcName() string {
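To make the conversion above concrete, here is a minimal, dependency-free sketch of the 19-to-20 interval migration; the `yobj` alias is treated as a plain `map[string]any`, and the local `day` constant and `migrateStatsInterval` helper are illustrative stand-ins rather than the exact code from `upgrade.go` (which also tolerates a missing `statistics` object).

```go
package main

import (
	"fmt"
	"time"
)

// day mirrors timeutil.Day; defined locally so the sketch has no external
// dependencies.
const day = 24 * time.Hour

// migrateStatsInterval mimics the core of upgradeSchema19to20: it reads an
// integer number of days from the "statistics" object and replaces it with a
// duration value.
func migrateStatsInterval(diskConf map[string]any) error {
	stats, ok := diskConf["statistics"].(map[string]any)
	if !ok {
		return fmt.Errorf("unexpected type of stats: %T", diskConf["statistics"])
	}

	days, ok := stats["interval"].(int)
	if !ok {
		return fmt.Errorf("unexpected type of interval: %T", stats["interval"])
	}

	stats["interval"] = time.Duration(days) * day

	return nil
}

func main() {
	conf := map[string]any{
		"schema_version": 19,
		"statistics":     map[string]any{"interval": 1},
	}

	if err := migrateStatsInterval(conf); err != nil {
		panic(err)
	}

	conf["schema_version"] = 20
	fmt.Println(conf) // map[schema_version:20 statistics:map[interval:24h0m0s]]
}
```

A configuration rotated this way then serializes the interval as a human-readable duration such as `24h`, which is what schema 20 expects.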
@ -951,3 +951,98 @@ func TestUpgradeSchema18to19(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestUpgradeSchema19to20(t *testing.T) {
|
||||||
|
testCases := []struct {
|
||||||
|
ivl any
|
||||||
|
want any
|
||||||
|
wantErr string
|
||||||
|
name string
|
||||||
|
}{{
|
||||||
|
ivl: 1,
|
||||||
|
want: timeutil.Duration{Duration: timeutil.Day},
|
||||||
|
wantErr: "",
|
||||||
|
name: "success",
|
||||||
|
}, {
|
||||||
|
ivl: 0.25,
|
||||||
|
want: 0,
|
||||||
|
wantErr: "unexpected type of interval: float64",
|
||||||
|
name: "fail",
|
||||||
|
}}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
conf := yobj{
|
||||||
|
"statistics": yobj{
|
||||||
|
"interval": tc.ivl,
|
||||||
|
},
|
||||||
|
"schema_version": 19,
|
||||||
|
}
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
err := upgradeSchema19to20(conf)
|
||||||
|
|
||||||
|
if tc.wantErr != "" {
|
||||||
|
require.Error(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, tc.wantErr, err.Error())
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, conf["schema_version"], 20)
|
||||||
|
|
||||||
|
statsVal, ok := conf["statistics"]
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
var stats yobj
|
||||||
|
stats, ok = statsVal.(yobj)
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
var newIvl timeutil.Duration
|
||||||
|
newIvl, ok = stats["interval"].(timeutil.Duration)
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
assert.Equal(t, tc.want, newIvl)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run("no_stats", func(t *testing.T) {
|
||||||
|
err := upgradeSchema19to20(yobj{})
|
||||||
|
|
||||||
|
assert.NoError(t, err)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("bad_stats", func(t *testing.T) {
|
||||||
|
err := upgradeSchema19to20(yobj{
|
||||||
|
"statistics": 0,
|
||||||
|
})
|
||||||
|
|
||||||
|
testutil.AssertErrorMsg(t, "unexpected type of stats: int", err)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("no_field", func(t *testing.T) {
|
||||||
|
conf := yobj{
|
||||||
|
"statistics": yobj{},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := upgradeSchema19to20(conf)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
statsVal, ok := conf["statistics"]
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
var stats yobj
|
||||||
|
stats, ok = statsVal.(yobj)
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
var ivl any
|
||||||
|
ivl, ok = stats["interval"]
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
var ivlVal timeutil.Duration
|
||||||
|
ivlVal, ok = ivl.(timeutil.Duration)
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
assert.Equal(t, 24*time.Hour, ivlVal.Duration)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
|
@@ -13,9 +13,11 @@ import (
 
 	"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
 	"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
+	"github.com/AdguardTeam/AdGuardHome/internal/aghnet"
 	"github.com/AdguardTeam/golibs/log"
 	"github.com/AdguardTeam/golibs/stringutil"
 	"github.com/AdguardTeam/golibs/timeutil"
+	"golang.org/x/exp/slices"
 	"golang.org/x/net/idna"
 )

@@ -25,8 +27,8 @@ type configJSON struct {
 	// fractional numbers and not mess the API users by changing the units.
 	Interval float64 `json:"interval"`
 
-	// Enabled shows if the querylog is enabled. It is an [aghalg.NullBool]
-	// to be able to tell when it's set without using pointers.
+	// Enabled shows if the querylog is enabled. It is an aghalg.NullBool to
+	// be able to tell when it's set without using pointers.
 	Enabled aghalg.NullBool `json:"enabled"`
 
 	// AnonymizeClientIP shows if the clients' IP addresses must be anonymized.

@@ -35,12 +37,39 @@ type configJSON struct {
 	AnonymizeClientIP aghalg.NullBool `json:"anonymize_client_ip"`
 }
 
+// getConfigResp is the JSON structure for the querylog configuration.
+type getConfigResp struct {
+	// Ignored is the list of host names, which should not be written to log.
+	Ignored []string `json:"ignored"`
+
+	// Interval is the querylog rotation interval in milliseconds.
+	Interval float64 `json:"interval"`
+
+	// Enabled shows if the querylog is enabled. It is an aghalg.NullBool to
+	// be able to tell when it's set without using pointers.
+	Enabled aghalg.NullBool `json:"enabled"`
+
+	// AnonymizeClientIP shows if the clients' IP addresses must be anonymized.
+	// It is an aghalg.NullBool to be able to tell when it's set without using
+	// pointers.
+	//
+	// TODO(a.garipov): Consider using separate setting for statistics.
+	AnonymizeClientIP aghalg.NullBool `json:"anonymize_client_ip"`
+}
+
 // Register web handlers
 func (l *queryLog) initWeb() {
 	l.conf.HTTPRegister(http.MethodGet, "/control/querylog", l.handleQueryLog)
 	l.conf.HTTPRegister(http.MethodGet, "/control/querylog_info", l.handleQueryLogInfo)
 	l.conf.HTTPRegister(http.MethodPost, "/control/querylog_clear", l.handleQueryLogClear)
 	l.conf.HTTPRegister(http.MethodPost, "/control/querylog_config", l.handleQueryLogConfig)
+
+	l.conf.HTTPRegister(http.MethodGet, "/control/querylog/config", l.handleGetQueryLogConfig)
+	l.conf.HTTPRegister(
+		http.MethodPut,
+		"/control/querylog/config/update",
+		l.handlePutQueryLogConfig,
+	)
 }
 
 func (l *queryLog) handleQueryLog(w http.ResponseWriter, r *http.Request) {
@@ -64,11 +93,41 @@ func (l *queryLog) handleQueryLogClear(_ http.ResponseWriter, _ *http.Request) {
 	l.clear()
 }
 
-// Get configuration
+// handleQueryLogInfo handles requests to the GET /control/querylog_info
+// endpoint.
+//
+// Deprecated: Remove it when migration to the new API is over.
 func (l *queryLog) handleQueryLogInfo(w http.ResponseWriter, r *http.Request) {
+	l.lock.Lock()
+	defer l.lock.Unlock()
+
+	ivl := l.conf.RotationIvl
+
+	if !checkInterval(ivl) {
+		// NOTE: If interval is custom we set it to 90 days for compatibility
+		// with old API.
+		ivl = timeutil.Day * 90
+	}
+
 	_ = aghhttp.WriteJSONResponse(w, r, configJSON{
 		Enabled:           aghalg.BoolToNullBool(l.conf.Enabled),
-		Interval:          l.conf.RotationIvl.Hours() / 24,
+		Interval:          ivl.Hours() / 24,
+		AnonymizeClientIP: aghalg.BoolToNullBool(l.conf.AnonymizeClientIP),
+	})
+}
+
+// handleGetQueryLogConfig handles requests to the GET /control/querylog/config
+// endpoint.
+func (l *queryLog) handleGetQueryLogConfig(w http.ResponseWriter, r *http.Request) {
+	l.lock.Lock()
+	defer l.lock.Unlock()
+
+	ignored := l.conf.Ignored.Values()
+	slices.Sort(ignored)
+	_ = aghhttp.WriteJSONResponse(w, r, getConfigResp{
+		Ignored:           ignored,
+		Interval:          float64(l.conf.RotationIvl.Milliseconds()),
+		Enabled:           aghalg.BoolToNullBool(l.conf.Enabled),
 		AnonymizeClientIP: aghalg.BoolToNullBool(l.conf.AnonymizeClientIP),
 	})
 }
@@ -88,6 +147,8 @@ func AnonymizeIP(ip net.IP) {
 }
 
 // handleQueryLogConfig handles the POST /control/querylog_config queries.
+//
+// Deprecated: Remove it when migration to the new API is over.
 func (l *queryLog) handleQueryLogConfig(w http.ResponseWriter, r *http.Request) {
 	// Set NaN as initial value to be able to know if it changed later by
 	// comparing it to NaN.

@@ -103,6 +164,7 @@ func (l *queryLog) handleQueryLogConfig(w http.ResponseWriter, r *http.Request)
 	}
 
 	ivl := time.Duration(float64(timeutil.Day) * newConf.Interval)
+
 	hasIvl := !math.IsNaN(newConf.Interval)
 	if hasIvl && !checkInterval(ivl) {
 		aghhttp.Error(r, w, http.StatusBadRequest, "unsupported interval")

@@ -115,8 +177,6 @@ func (l *queryLog) handleQueryLogConfig(w http.ResponseWriter, r *http.Request)
 	l.lock.Lock()
 	defer l.lock.Unlock()
 
-	// Copy data, modify it, then activate. Other threads (readers) don't need
-	// to use this lock.
 	conf := *l.conf
 	if newConf.Enabled != aghalg.NBNull {
 		conf.Enabled = newConf.Enabled == aghalg.NBTrue

@@ -138,6 +198,65 @@ func (l *queryLog) handleQueryLogConfig(w http.ResponseWriter, r *http.Request)
 	l.conf = &conf
 }
 
+// handlePutQueryLogConfig handles the PUT /control/querylog/config/update
+// queries.
+func (l *queryLog) handlePutQueryLogConfig(w http.ResponseWriter, r *http.Request) {
+	newConf := &getConfigResp{}
+	err := json.NewDecoder(r.Body).Decode(newConf)
+	if err != nil {
+		aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)
+
+		return
+	}
+
+	set, err := aghnet.NewDomainNameSet(newConf.Ignored)
+	if err != nil {
+		aghhttp.Error(r, w, http.StatusUnprocessableEntity, "ignored: %s", err)
+
+		return
+	}
+
+	ivl := time.Duration(newConf.Interval) * time.Millisecond
+	err = validateIvl(ivl)
+	if err != nil {
+		aghhttp.Error(r, w, http.StatusUnprocessableEntity, "unsupported interval: %s", err)
+
+		return
+	}
+
+	if newConf.Enabled == aghalg.NBNull {
+		aghhttp.Error(r, w, http.StatusUnprocessableEntity, "enabled is null")
+
+		return
+	}
+
+	if newConf.AnonymizeClientIP == aghalg.NBNull {
+		aghhttp.Error(r, w, http.StatusUnprocessableEntity, "anonymize_client_ip is null")
+
+		return
+	}
+
+	defer l.conf.ConfigModified()
+
+	l.lock.Lock()
+	defer l.lock.Unlock()
+
+	conf := *l.conf
+
+	conf.Ignored = set
+	conf.RotationIvl = ivl
+	conf.Enabled = newConf.Enabled == aghalg.NBTrue
+
+	conf.AnonymizeClientIP = newConf.AnonymizeClientIP == aghalg.NBTrue
+	if conf.AnonymizeClientIP {
+		l.anonymizer.Store(AnonymizeIP)
+	} else {
+		l.anonymizer.Store(nil)
+	}
+
+	l.conf = &conf
+}
+
 // "value" -> value, return TRUE
 func getDoubleQuotesEnclosedValue(s *string) bool {
 	t := *s
@@ -132,6 +132,20 @@ func checkInterval(ivl time.Duration) (ok bool) {
 	return ivl == quarterDay || ivl == day || ivl == week || ivl == month || ivl == threeMonths
 }
 
+// validateIvl returns an error if ivl is less than an hour or more than a
+// year.
+func validateIvl(ivl time.Duration) (err error) {
+	if ivl < time.Hour {
+		return errors.Error("less than an hour")
+	}
+
+	if ivl > timeutil.Day*365 {
+		return errors.Error("more than a year")
+	}
+
+	return nil
+}
+
 func (l *queryLog) WriteDiskConfig(c *Config) {
 	*c = *l.conf
 }

@@ -258,6 +272,9 @@ func (l *queryLog) Add(params *AddParams) {
 
 // ShouldLog returns true if request for the host should be logged.
 func (l *queryLog) ShouldLog(host string, _, _ uint16) bool {
+	l.lock.Lock()
+	defer l.lock.Unlock()
+
 	return !l.isIgnored(host)
 }
 
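For quick reference, here is a standalone sketch of the bounds that the new `validateIvl` enforces; the `day` constant mirrors `timeutil.Day`, and `validIvl` is a simplified stand-in rather than the function from the hunk above.

```go
package main

import (
	"fmt"
	"time"
)

const day = 24 * time.Hour // mirrors timeutil.Day

// validIvl reproduces the bounds from validateIvl: at least an hour and at
// most a year.
func validIvl(ivl time.Duration) bool {
	return ivl >= time.Hour && ivl <= 365*day
}

func main() {
	fmt.Println(validIvl(30 * time.Minute)) // false: less than an hour
	fmt.Println(validIvl(90 * day))         // true
	fmt.Println(validIvl(366 * day))        // false: more than a year
}
```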
@ -22,13 +22,14 @@ func TestMain(m *testing.M) {
|
||||||
// TestQueryLog tests adding and loading (with filtering) entries from disk and
|
// TestQueryLog tests adding and loading (with filtering) entries from disk and
|
||||||
// memory.
|
// memory.
|
||||||
func TestQueryLog(t *testing.T) {
|
func TestQueryLog(t *testing.T) {
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
FileEnabled: true,
|
FileEnabled: true,
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
MemSize: 100,
|
MemSize: 100,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Add disk entries.
|
// Add disk entries.
|
||||||
addEntry(l, "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
addEntry(l, "example.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
||||||
|
@ -125,12 +126,13 @@ func TestQueryLog(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryLogOffsetLimit(t *testing.T) {
|
func TestQueryLogOffsetLimit(t *testing.T) {
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
MemSize: 100,
|
MemSize: 100,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
entNum = 10
|
entNum = 10
|
||||||
|
@ -199,13 +201,14 @@ func TestQueryLogOffsetLimit(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryLogMaxFileScanEntries(t *testing.T) {
|
func TestQueryLogMaxFileScanEntries(t *testing.T) {
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
FileEnabled: true,
|
FileEnabled: true,
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
MemSize: 100,
|
MemSize: 100,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
const entNum = 10
|
const entNum = 10
|
||||||
// Add entries to the log.
|
// Add entries to the log.
|
||||||
|
@ -227,13 +230,14 @@ func TestQueryLogMaxFileScanEntries(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryLogFileDisabled(t *testing.T) {
|
func TestQueryLogFileDisabled(t *testing.T) {
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
FileEnabled: false,
|
FileEnabled: false,
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
MemSize: 2,
|
MemSize: 2,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
addEntry(l, "example1.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
addEntry(l, "example1.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
||||||
addEntry(l, "example2.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
addEntry(l, "example2.org", net.IPv4(1, 1, 1, 1), net.IPv4(2, 2, 2, 1))
|
||||||
|
@ -254,13 +258,14 @@ func TestQueryLogShouldLog(t *testing.T) {
|
||||||
)
|
)
|
||||||
set := stringutil.NewSet(ignored1, ignored2)
|
set := stringutil.NewSet(ignored1, ignored2)
|
||||||
|
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
MemSize: 100,
|
MemSize: 100,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
Ignored: set,
|
Ignored: set,
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
testCases := []struct {
|
testCases := []struct {
|
||||||
name string
|
name string
|
||||||
|
|
|
@@ -1,6 +1,7 @@
 package querylog
 
 import (
+	"fmt"
 	"net"
 	"path/filepath"
 	"time"

@@ -9,9 +10,7 @@ import (
 	"github.com/AdguardTeam/AdGuardHome/internal/aghnet"
 	"github.com/AdguardTeam/AdGuardHome/internal/filtering"
 	"github.com/AdguardTeam/golibs/errors"
-	"github.com/AdguardTeam/golibs/log"
 	"github.com/AdguardTeam/golibs/stringutil"
-	"github.com/AdguardTeam/golibs/timeutil"
 	"github.com/miekg/dns"
 )
 

@@ -135,12 +134,12 @@ func (p *AddParams) validate() (err error) {
 }
 
 // New creates a new instance of the query log.
-func New(conf Config) (ql QueryLog) {
+func New(conf Config) (ql QueryLog, err error) {
 	return newQueryLog(conf)
 }
 
 // newQueryLog crates a new queryLog.
-func newQueryLog(conf Config) (l *queryLog) {
+func newQueryLog(conf Config) (l *queryLog, err error) {
 	findClient := conf.FindClient
 	if findClient == nil {
 		findClient = func(_ []string) (_ *Client, _ error) {

@@ -158,13 +157,10 @@ func newQueryLog(conf Config) (l *queryLog) {
 	l.conf = &Config{}
 	*l.conf = conf
 
-	if !checkInterval(conf.RotationIvl) {
-		log.Info(
-			"querylog: warning: unsupported rotation interval %s, setting to 1 day",
-			conf.RotationIvl,
-		)
-		l.conf.RotationIvl = timeutil.Day
+	err = validateIvl(conf.RotationIvl)
+	if err != nil {
+		return nil, fmt.Errorf("unsupported interval: %w", err)
 	}
 
-	return l
+	return l, nil
 }
@ -35,7 +35,7 @@ func TestQueryLog_Search_findClient(t *testing.T) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
l := newQueryLog(Config{
|
l, err := newQueryLog(Config{
|
||||||
FindClient: findClient,
|
FindClient: findClient,
|
||||||
BaseDir: t.TempDir(),
|
BaseDir: t.TempDir(),
|
||||||
RotationIvl: timeutil.Day,
|
RotationIvl: timeutil.Day,
|
||||||
|
@ -44,6 +44,7 @@ func TestQueryLog_Search_findClient(t *testing.T) {
|
||||||
FileEnabled: true,
|
FileEnabled: true,
|
||||||
AnonymizeClientIP: false,
|
AnonymizeClientIP: false,
|
||||||
})
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
t.Cleanup(l.Close)
|
t.Cleanup(l.Close)
|
||||||
|
|
||||||
q := &dns.Msg{
|
q := &dns.Msg{
|
||||||
|
|
|
@ -7,8 +7,12 @@ import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
|
||||||
"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
|
"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
|
||||||
|
"github.com/AdguardTeam/AdGuardHome/internal/aghnet"
|
||||||
"github.com/AdguardTeam/golibs/log"
|
"github.com/AdguardTeam/golibs/log"
|
||||||
|
"github.com/AdguardTeam/golibs/timeutil"
|
||||||
|
"golang.org/x/exp/slices"
|
||||||
)
|
)
|
||||||
|
|
||||||
// topAddrs is an alias for the types of the TopFoo fields of statsResponse.
|
// topAddrs is an alias for the types of the TopFoo fields of statsResponse.
|
||||||
|
@ -44,7 +48,7 @@ func (s *StatsCtx) handleStats(w http.ResponseWriter, r *http.Request) {
|
||||||
defer s.lock.Unlock()
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
resp, ok := s.getData(s.limitHours)
|
resp, ok := s.getData(uint32(s.limit.Hours()))
|
||||||
log.Debug("stats: prepared data in %v", time.Since(start))
|
log.Debug("stats: prepared data in %v", time.Since(start))
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
|
@ -63,20 +67,62 @@ type configResp struct {
|
||||||
IntervalDays uint32 `json:"interval"`
|
IntervalDays uint32 `json:"interval"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// getConfigResp is the response to the GET /control/stats_info.
|
||||||
|
type getConfigResp struct {
|
||||||
|
// Ignored is the list of host names, which should not be counted.
|
||||||
|
Ignored []string `json:"ignored"`
|
||||||
|
|
||||||
|
// Interval is the statistics rotation interval in milliseconds.
|
||||||
|
Interval float64 `json:"interval"`
|
||||||
|
|
||||||
|
// Enabled shows if statistics are enabled. It is an aghalg.NullBool to be
|
||||||
|
// able to tell when it's set without using pointers.
|
||||||
|
Enabled aghalg.NullBool `json:"enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
// handleStatsInfo handles requests to the GET /control/stats_info endpoint.
|
// handleStatsInfo handles requests to the GET /control/stats_info endpoint.
|
||||||
|
//
|
||||||
|
// Deprecated: Remove it when migration to the new API is over.
|
||||||
func (s *StatsCtx) handleStatsInfo(w http.ResponseWriter, r *http.Request) {
|
func (s *StatsCtx) handleStatsInfo(w http.ResponseWriter, r *http.Request) {
|
||||||
s.lock.Lock()
|
s.lock.Lock()
|
||||||
defer s.lock.Unlock()
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
resp := configResp{IntervalDays: s.limitHours / 24}
|
days := uint32(s.limit / timeutil.Day)
|
||||||
|
ok := checkInterval(days)
|
||||||
|
if !ok || (s.enabled && days == 0) {
|
||||||
|
// NOTE: If interval is custom we set it to 90 days for compatibility
|
||||||
|
// with old API.
|
||||||
|
days = 90
|
||||||
|
}
|
||||||
|
|
||||||
|
resp := configResp{IntervalDays: days}
|
||||||
if !s.enabled {
|
if !s.enabled {
|
||||||
resp.IntervalDays = 0
|
resp.IntervalDays = 0
|
||||||
}
|
}
|
||||||
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// handleGetStatsConfig handles requests to the GET /control/stats/config
|
||||||
|
// endpoint.
|
||||||
|
func (s *StatsCtx) handleGetStatsConfig(w http.ResponseWriter, r *http.Request) {
|
||||||
|
s.lock.Lock()
|
||||||
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
|
ignored := s.ignored.Values()
|
||||||
|
slices.Sort(ignored)
|
||||||
|
|
||||||
|
resp := getConfigResp{
|
||||||
|
Ignored: ignored,
|
||||||
|
Interval: float64(s.limit.Milliseconds()),
|
||||||
|
Enabled: aghalg.BoolToNullBool(s.enabled),
|
||||||
|
}
|
||||||
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
||||||
|
}
|
||||||
|
|
||||||
// handleStatsConfig handles requests to the POST /control/stats_config
|
// handleStatsConfig handles requests to the POST /control/stats_config
|
||||||
// endpoint.
|
// endpoint.
|
||||||
|
//
|
||||||
|
// Deprecated: Remove it when migration to the new API is over.
|
||||||
func (s *StatsCtx) handleStatsConfig(w http.ResponseWriter, r *http.Request) {
|
func (s *StatsCtx) handleStatsConfig(w http.ResponseWriter, r *http.Request) {
|
||||||
reqData := configResp{}
|
reqData := configResp{}
|
||||||
err := json.NewDecoder(r.Body).Decode(&reqData)
|
err := json.NewDecoder(r.Body).Decode(&reqData)
|
||||||
|
@ -92,8 +138,55 @@ func (s *StatsCtx) handleStatsConfig(w http.ResponseWriter, r *http.Request) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
s.setLimit(int(reqData.IntervalDays))
|
defer s.configModified()
|
||||||
s.configModified()
|
|
||||||
|
s.lock.Lock()
|
||||||
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
|
limit := time.Duration(reqData.IntervalDays) * timeutil.Day
|
||||||
|
s.setLimit(limit)
|
||||||
|
}
|
||||||
|
|
||||||
|
// handlePutStatsConfig handles requests to the PUT /control/stats/config/update
|
||||||
|
// endpoint.
|
||||||
|
func (s *StatsCtx) handlePutStatsConfig(w http.ResponseWriter, r *http.Request) {
|
||||||
|
reqData := getConfigResp{}
|
||||||
|
err := json.NewDecoder(r.Body).Decode(&reqData)
|
||||||
|
if err != nil {
|
||||||
|
aghhttp.Error(r, w, http.StatusBadRequest, "json decode: %s", err)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
set, err := aghnet.NewDomainNameSet(reqData.Ignored)
|
||||||
|
if err != nil {
|
||||||
|
aghhttp.Error(r, w, http.StatusUnprocessableEntity, "ignored: %s", err)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ivl := time.Duration(reqData.Interval) * time.Millisecond
|
||||||
|
err = validateIvl(ivl)
|
||||||
|
if err != nil {
|
||||||
|
aghhttp.Error(r, w, http.StatusUnprocessableEntity, "unsupported interval: %s", err)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if reqData.Enabled == aghalg.NBNull {
|
||||||
|
aghhttp.Error(r, w, http.StatusUnprocessableEntity, "enabled is null")
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
defer s.configModified()
|
||||||
|
|
||||||
|
s.lock.Lock()
|
||||||
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
|
s.ignored = set
|
||||||
|
s.limit = ivl
|
||||||
|
s.enabled = reqData.Enabled == aghalg.NBTrue
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleStatsReset handles requests to the POST /control/stats_reset endpoint.
|
// handleStatsReset handles requests to the POST /control/stats_reset endpoint.
|
||||||
|
@ -114,4 +207,7 @@ func (s *StatsCtx) initWeb() {
|
||||||
s.httpRegister(http.MethodPost, "/control/stats_reset", s.handleStatsReset)
|
s.httpRegister(http.MethodPost, "/control/stats_reset", s.handleStatsReset)
|
||||||
s.httpRegister(http.MethodPost, "/control/stats_config", s.handleStatsConfig)
|
s.httpRegister(http.MethodPost, "/control/stats_config", s.handleStatsConfig)
|
||||||
s.httpRegister(http.MethodGet, "/control/stats_info", s.handleStatsInfo)
|
s.httpRegister(http.MethodGet, "/control/stats_info", s.handleStatsInfo)
|
||||||
|
|
||||||
|
s.httpRegister(http.MethodGet, "/control/stats/config", s.handleGetStatsConfig)
|
||||||
|
s.httpRegister(http.MethodPut, "/control/stats/config/update", s.handlePutStatsConfig)
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,152 @@
|
||||||
|
package stats
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
|
||||||
|
"github.com/AdguardTeam/golibs/testutil"
|
||||||
|
"github.com/AdguardTeam/golibs/timeutil"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestHandleStatsConfig(t *testing.T) {
|
||||||
|
const (
|
||||||
|
smallIvl = 1 * time.Minute
|
||||||
|
minIvl = 1 * time.Hour
|
||||||
|
maxIvl = 365 * timeutil.Day
|
||||||
|
)
|
||||||
|
|
||||||
|
conf := Config{
|
||||||
|
Filename: filepath.Join(t.TempDir(), "stats.db"),
|
||||||
|
Limit: time.Hour * 24,
|
||||||
|
Enabled: true,
|
||||||
|
UnitID: func() (id uint32) { return 0 },
|
||||||
|
ConfigModified: func() {},
|
||||||
|
}
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
body getConfigResp
|
||||||
|
wantCode int
|
||||||
|
wantErr string
|
||||||
|
}{{
|
||||||
|
name: "set_ivl_1_minIvl",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(minIvl.Milliseconds()),
|
||||||
|
Ignored: []string{},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusOK,
|
||||||
|
wantErr: "",
|
||||||
|
}, {
|
||||||
|
name: "small_interval",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(smallIvl.Milliseconds()),
|
||||||
|
Ignored: []string{},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusUnprocessableEntity,
|
||||||
|
wantErr: "unsupported interval: less than an hour\n",
|
||||||
|
}, {
|
||||||
|
name: "big_interval",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(maxIvl.Milliseconds() + minIvl.Milliseconds()),
|
||||||
|
Ignored: []string{},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusUnprocessableEntity,
|
||||||
|
wantErr: "unsupported interval: more than a year\n",
|
||||||
|
}, {
|
||||||
|
name: "set_ignored_ivl_1_maxIvl",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(maxIvl.Milliseconds()),
|
||||||
|
Ignored: []string{
|
||||||
|
"ignor.ed",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusOK,
|
||||||
|
wantErr: "",
|
||||||
|
}, {
|
||||||
|
name: "ignored_duplicate",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(minIvl.Milliseconds()),
|
||||||
|
Ignored: []string{
|
||||||
|
"ignor.ed",
|
||||||
|
"ignor.ed",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusUnprocessableEntity,
|
||||||
|
wantErr: "ignored: duplicate host name \"ignor.ed\" at index 1\n",
|
||||||
|
}, {
|
||||||
|
name: "ignored_empty",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBTrue,
|
||||||
|
Interval: float64(minIvl.Milliseconds()),
|
||||||
|
Ignored: []string{
|
||||||
|
"",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusUnprocessableEntity,
|
||||||
|
wantErr: "ignored: host name is empty\n",
|
||||||
|
}, {
|
||||||
|
name: "enabled_is_null",
|
||||||
|
body: getConfigResp{
|
||||||
|
Enabled: aghalg.NBNull,
|
||||||
|
Interval: float64(minIvl.Milliseconds()),
|
||||||
|
Ignored: []string{},
|
||||||
|
},
|
||||||
|
wantCode: http.StatusUnprocessableEntity,
|
||||||
|
wantErr: "enabled is null\n",
|
||||||
|
}}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
s, err := New(conf)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
s.Start()
|
||||||
|
testutil.CleanupAndRequireSuccess(t, s.Close)
|
||||||
|
|
||||||
|
buf, err := json.Marshal(tc.body)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
const (
|
||||||
|
configGet = "/control/stats/config"
|
||||||
|
configPut = "/control/stats/config/update"
|
||||||
|
)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPut, configPut, bytes.NewReader(buf))
|
||||||
|
rw := httptest.NewRecorder()
|
||||||
|
|
||||||
|
s.handlePutStatsConfig(rw, req)
|
||||||
|
require.Equal(t, tc.wantCode, rw.Code)
|
||||||
|
|
||||||
|
if tc.wantCode != http.StatusOK {
|
||||||
|
assert.Equal(t, tc.wantErr, rw.Body.String())
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
resp := httptest.NewRequest(http.MethodGet, configGet, nil)
|
||||||
|
rw = httptest.NewRecorder()
|
||||||
|
|
||||||
|
s.handleGetStatsConfig(rw, resp)
|
||||||
|
require.Equal(t, http.StatusOK, rw.Code)
|
||||||
|
|
||||||
|
ans := getConfigResp{}
|
||||||
|
err = json.Unmarshal(rw.Body.Bytes(), &ans)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, tc.body, ans)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
|
@@ -16,6 +16,7 @@ import (
 	"github.com/AdguardTeam/golibs/errors"
 	"github.com/AdguardTeam/golibs/log"
 	"github.com/AdguardTeam/golibs/stringutil"
+	"github.com/AdguardTeam/golibs/timeutil"
 	"go.etcd.io/bbolt"
 )
 

@@ -25,6 +26,20 @@ func checkInterval(days uint32) (ok bool) {
 	return days == 0 || days == 1 || days == 7 || days == 30 || days == 90
 }
 
+// validateIvl returns an error if ivl is less than an hour or more than a
+// year.
+func validateIvl(ivl time.Duration) (err error) {
+	if ivl < time.Hour {
+		return errors.Error("less than an hour")
+	}
+
+	if ivl > timeutil.Day*365 {
+		return errors.Error("more than a year")
+	}
+
+	return nil
+}
+
 // Config is the configuration structure for the statistics collecting.
 type Config struct {
 	// UnitID is the function to generate the identifier for current unit. If
@@ -42,9 +57,8 @@ type Config struct {
 	// Filename is the name of the database file.
 	Filename string
 
-	// LimitDays is the maximum number of days to collect statistics into the
-	// current unit.
-	LimitDays uint32
+	// Limit is an upper limit for collecting statistics.
+	Limit time.Duration
 
 	// Enabled tells if the statistics are enabled.
 	Enabled bool

@@ -105,11 +119,8 @@ type StatsCtx struct {
 	// enabled tells if the statistics are enabled.
 	enabled bool
 
-	// limitHours is the maximum number of hours to collect statistics into the
-	// current unit.
-	//
-	// TODO(s.chzhen): Rewrite to use time.Duration.
-	limitHours uint32
+	// limit is an upper limit for collecting statistics.
+	limit time.Duration
 
 	// ignored is the list of host names, which should not be counted.
 	ignored *stringutil.Set
@ -128,9 +139,14 @@ func New(conf Config) (s *StatsCtx, err error) {
|
||||||
httpRegister: conf.HTTPRegister,
|
httpRegister: conf.HTTPRegister,
|
||||||
ignored: conf.Ignored,
|
ignored: conf.Ignored,
|
||||||
}
|
}
|
||||||
if s.limitHours = conf.LimitDays * 24; !checkInterval(conf.LimitDays) {
|
|
||||||
s.limitHours = 24
|
err = validateIvl(conf.Limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("unsupported interval: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
s.limit = conf.Limit
|
||||||
|
|
||||||
if s.unitIDGen = newUnitID; conf.UnitID != nil {
|
if s.unitIDGen = newUnitID; conf.UnitID != nil {
|
||||||
s.unitIDGen = conf.UnitID
|
s.unitIDGen = conf.UnitID
|
||||||
}
|
}
|
||||||
|
@ -150,7 +166,7 @@ func New(conf Config) (s *StatsCtx, err error) {
|
||||||
return nil, fmt.Errorf("stats: opening a transaction: %w", err)
|
return nil, fmt.Errorf("stats: opening a transaction: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
deleted := deleteOldUnits(tx, id-s.limitHours-1)
|
deleted := deleteOldUnits(tx, id-uint32(s.limit.Hours())-1)
|
||||||
udb = loadUnitFromDB(tx, id)
|
udb = loadUnitFromDB(tx, id)
|
||||||
|
|
||||||
err = finishTxn(tx, deleted > 0)
|
err = finishTxn(tx, deleted > 0)
|
||||||
|
@ -231,7 +247,7 @@ func (s *StatsCtx) Update(e Entry) {
|
||||||
s.lock.Lock()
|
s.lock.Lock()
|
||||||
defer s.lock.Unlock()
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
if !s.enabled || s.limitHours == 0 {
|
if !s.enabled || s.limit == 0 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -263,7 +279,7 @@ func (s *StatsCtx) WriteDiskConfig(dc *Config) {
|
||||||
s.lock.Lock()
|
s.lock.Lock()
|
||||||
defer s.lock.Unlock()
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
dc.LimitDays = s.limitHours / 24
|
dc.Limit = s.limit
|
||||||
dc.Enabled = s.enabled
|
dc.Enabled = s.enabled
|
||||||
dc.Ignored = s.ignored
|
dc.Ignored = s.ignored
|
||||||
}
|
}
|
||||||
|
@ -273,7 +289,7 @@ func (s *StatsCtx) TopClientsIP(maxCount uint) (ips []netip.Addr) {
|
||||||
s.lock.Lock()
|
s.lock.Lock()
|
||||||
defer s.lock.Unlock()
|
defer s.lock.Unlock()
|
||||||
|
|
||||||
limit := s.limitHours
|
limit := uint32(s.limit.Hours())
|
||||||
if !s.enabled || limit == 0 {
|
if !s.enabled || limit == 0 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -377,7 +393,7 @@ func (s *StatsCtx) flush() (cont bool, sleepFor time.Duration) {
|
||||||
return false, 0
|
return false, 0
|
||||||
}
|
}
|
||||||
|
|
||||||
limit := s.limitHours
|
limit := uint32(s.limit.Hours())
|
||||||
if limit == 0 || ptr.id == id {
|
if limit == 0 || ptr.id == id {
|
||||||
return true, time.Second
|
return true, time.Second
|
||||||
}
|
}
|
||||||
|
@ -436,14 +452,14 @@ func (s *StatsCtx) periodicFlush() {
|
||||||
log.Debug("periodic flushing finished")
|
log.Debug("periodic flushing finished")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *StatsCtx) setLimit(limitDays int) {
|
// setLimit sets the limit. s.lock is expected to be locked.
|
||||||
s.lock.Lock()
|
//
|
||||||
defer s.lock.Unlock()
|
// TODO(s.chzhen): Remove it when migration to the new API is over.
|
||||||
|
func (s *StatsCtx) setLimit(limit time.Duration) {
|
||||||
if limitDays != 0 {
|
if limit != 0 {
|
||||||
s.enabled = true
|
s.enabled = true
|
||||||
s.limitHours = uint32(24 * limitDays)
|
s.limit = limit
|
||||||
log.Debug("stats: set limit: %d days", limitDays)
|
log.Debug("stats: set limit: %d days", limit/timeutil.Day)
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,6 +9,7 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/AdguardTeam/golibs/testutil"
|
"github.com/AdguardTeam/golibs/testutil"
|
||||||
|
"github.com/AdguardTeam/golibs/timeutil"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
@ -37,7 +38,7 @@ func TestStats_races(t *testing.T) {
|
||||||
conf := Config{
|
conf := Config{
|
||||||
UnitID: idGen,
|
UnitID: idGen,
|
||||||
Filename: filepath.Join(t.TempDir(), "./stats.db"),
|
Filename: filepath.Join(t.TempDir(), "./stats.db"),
|
||||||
LimitDays: 1,
|
Limit: timeutil.Day,
|
||||||
}
|
}
|
||||||
|
|
||||||
s, err := New(conf)
|
s, err := New(conf)
|
||||||
|
|
|
@ -13,6 +13,7 @@ import (
|
||||||
"github.com/AdguardTeam/AdGuardHome/internal/stats"
|
"github.com/AdguardTeam/AdGuardHome/internal/stats"
|
||||||
"github.com/AdguardTeam/golibs/netutil"
|
"github.com/AdguardTeam/golibs/netutil"
|
||||||
"github.com/AdguardTeam/golibs/testutil"
|
"github.com/AdguardTeam/golibs/testutil"
|
||||||
|
"github.com/AdguardTeam/golibs/timeutil"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
@ -52,7 +53,7 @@ func TestStats(t *testing.T) {
|
||||||
handlers := map[string]http.Handler{}
|
handlers := map[string]http.Handler{}
|
||||||
conf := stats.Config{
|
conf := stats.Config{
|
||||||
Filename: filepath.Join(t.TempDir(), "stats.db"),
|
Filename: filepath.Join(t.TempDir(), "stats.db"),
|
||||||
LimitDays: 1,
|
Limit: timeutil.Day,
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
UnitID: constUnitID,
|
UnitID: constUnitID,
|
||||||
HTTPRegister: func(_, url string, handler http.HandlerFunc) {
|
HTTPRegister: func(_, url string, handler http.HandlerFunc) {
|
||||||
|
@ -158,7 +159,7 @@ func TestLargeNumbers(t *testing.T) {
|
||||||
|
|
||||||
conf := stats.Config{
|
conf := stats.Config{
|
||||||
Filename: filepath.Join(t.TempDir(), "stats.db"),
|
Filename: filepath.Join(t.TempDir(), "stats.db"),
|
||||||
LimitDays: 1,
|
Limit: timeutil.Day,
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
UnitID: func() (id uint32) { return atomic.LoadUint32(&curHour) },
|
UnitID: func() (id uint32) { return atomic.LoadUint32(&curHour) },
|
||||||
HTTPRegister: func(_, url string, handler http.HandlerFunc) { handlers[url] = handler },
|
HTTPRegister: func(_, url string, handler http.HandlerFunc) { handlers[url] = handler },
|
||||||
|
|
|
@@ -4,6 +4,155 @@
 
 ## v0.108.0: API changes
 
+## v0.107.27: API changes
+
+### The new optional fields `"edns_cs_use_custom"` and `"edns_cs_custom_ip"` in `DNSConfig`
+
+* The new optional fields `"edns_cs_use_custom"` and `"edns_cs_custom_ip"` in
+  the `POST /control/dns_config` method make AdGuard Home use or ignore the
+  custom IP for EDNS Client Subnet.
+
+* The new optional fields `"edns_cs_use_custom"` and `"edns_cs_custom_ip"` in
+  the `GET /control/dns_info` method are set if AdGuard Home uses a custom IP
+  for EDNS Client Subnet.
+
+
+## v0.107.27: API changes
+
+### Deprecated statistics APIs
+
+* The `GET /control/stats_info` HTTP API; use the new `GET
+  /control/stats/config` API instead.
+
+  **NOTE:** If `interval` was set by editing the configuration file or through
+  the new `PUT /control/stats/config/update` HTTP API and it does not match
+  one of the previously allowed enum values, the deprecated API reports it as
+  `90` days for compatibility reasons.
+
+* The `POST /control/stats_config` HTTP API; use the new `PUT
+  /control/stats/config/update` API instead.
+
+### New statistics APIs
+
+* The new `GET /control/stats/config` HTTP API.
+
+* The new `PUT /control/stats/config/update` HTTP API allows config updates.
+
+These `control/stats/config/update` and `control/stats/config` APIs accept and
+return a JSON object with the following format:
+
+```json
+{
+  "enabled": true,
+  "interval": 3600,
+  "ignored": ["example.com"]
+}
+```
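As an illustration of the new statistics API described above, here is a minimal Go client sketch that sends `PUT /control/stats/config/update`. The base URL, the absence of authentication, and the `statsConfig` struct name are assumptions for the sketch; per the handler code in this change, `interval` is a duration in milliseconds.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// statsConfig matches the JSON object accepted by the new endpoint.
type statsConfig struct {
	Enabled  bool     `json:"enabled"`
	Interval float64  `json:"interval"` // milliseconds
	Ignored  []string `json:"ignored"`
}

func main() {
	body, err := json.Marshal(statsConfig{
		Enabled:  true,
		Interval: 24 * 60 * 60 * 1000, // one day
		Ignored:  []string{"example.com"},
	})
	if err != nil {
		panic(err)
	}

	// A local, unauthenticated instance is assumed here; a real setup
	// usually requires credentials.
	req, err := http.NewRequest(
		http.MethodPut,
		"http://127.0.0.1:3000/control/stats/config/update",
		bytes.NewReader(body),
	)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println(resp.Status)
}
```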
+
+### Deprecated query log APIs
+
+* The `GET /control/querylog_info` HTTP API; use the new `GET
+  /control/querylog/config` API instead.
+
+  **NOTE:** If `interval` was set by editing the configuration file or through
+  the new `PUT /control/querylog/config/update` HTTP API and it does not match
+  one of the previously allowed enum values, the deprecated API reports it as
+  `90` days for compatibility reasons.
+
+* The `POST /control/querylog_config` HTTP API; use the new `PUT
+  /control/querylog/config/update` API instead.
+
+### New query log APIs
+
+* The new `GET /control/querylog/config` HTTP API.
+
+* The new `PUT /control/querylog/config/update` HTTP API allows config updates.
+
+These `control/querylog/config/update` and `control/querylog/config` APIs
+accept and return a JSON object with the following format:
+
+```json
+{
+  "enabled": true,
+  "anonymize_client_ip": false,
+  "interval": 3600,
+  "ignored": ["example.com"]
+}
+```
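A companion sketch reads the current settings back through the new `GET /control/querylog/config` endpoint; the local base URL and the lack of authentication are again assumptions, and the fields mirror the JSON object shown above.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// queryLogConfig mirrors the response of the new endpoint; interval is in
// milliseconds per the handler code in this change.
type queryLogConfig struct {
	Enabled           bool     `json:"enabled"`
	AnonymizeClientIP bool     `json:"anonymize_client_ip"`
	Interval          float64  `json:"interval"`
	Ignored           []string `json:"ignored"`
}

func main() {
	// A local, unauthenticated instance is assumed here.
	resp, err := http.Get("http://127.0.0.1:3000/control/querylog/config")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var conf queryLogConfig
	if err = json.NewDecoder(resp.Body).Decode(&conf); err != nil {
		panic(err)
	}

	rotation := time.Duration(conf.Interval) * time.Millisecond
	fmt.Printf("enabled=%v rotation=%v ignored=%v\n", conf.Enabled, rotation, conf.Ignored)
}
```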
+
+
+## v0.107.27: API changes
+
+### Deprecated HTTP APIs
+
+The following HTTP APIs are deprecated:
+
+* `POST /control/safesearch/enable` is deprecated. Use the new
+  `PUT /control/safesearch/settings`.
+
+* `POST /control/safesearch/disable` is deprecated. Use the new
+  `PUT /control/safesearch/settings`.
+
+### New HTTP API `PUT /control/safesearch/settings`
+
+* The new `PUT /control/safesearch/settings` HTTP API allows safesearch
+  settings updates. It accepts a JSON object with the following format:
+
+```json
+{
+  "enabled": true,
+  "bing": false,
+  "duckduckgo": true,
+  "google": false,
+  "pixabay": false,
+  "yandex": true,
+  "youtube": false
+}
+```
+
+### `GET /control/safesearch/status`
+
+* The `control/safesearch/status` HTTP API has been changed. It now returns a
+  JSON object with the following format:
+
+```json
+{
+  "enabled": true,
+  "bing": false,
+  "duckduckgo": true,
+  "google": false,
+  "pixabay": false,
+  "yandex": true,
+  "youtube": false
+}
+```
+
+### `/control/clients` HTTP APIs
+
+The following HTTP APIs have been changed:
+
+* `GET /control/clients`;
+* `GET /control/clients/find?ip0=...&ip1=...&ip2=...`;
+* `POST /control/clients/add`;
+* `POST /control/clients/update`.
+
+The `safesearch_enabled` field is deprecated. The new field `safe_search` has
+been added to JSON objects. It has the following format:
+
+```json
+{
+  "enabled": true,
+  "bing": false,
+  "duckduckgo": true,
+  "google": false,
+  "pixabay": false,
+  "yandex": true,
+  "youtube": false
+}
+```
+
+
 ## v0.107.23: API changes
@ -226,6 +226,14 @@
|
||||||
'$ref': '#/components/schemas/QueryLog'
|
'$ref': '#/components/schemas/QueryLog'
|
||||||
'/querylog_info':
|
'/querylog_info':
|
||||||
'get':
|
'get':
|
||||||
|
'deprecated': true
|
||||||
|
'description': |
|
||||||
|
Deprecated: Use `GET /querylog/config` instead.
|
||||||
|
|
||||||
|
NOTE: If `interval` was configured by editing configuration file or new
|
||||||
|
HTTP API call `PUT /querylog/config/update` and it's not equal to
|
||||||
|
previous allowed enum values then it will be equal to `90` days for
|
||||||
|
compatibility reasons.
|
||||||
'tags':
|
'tags':
|
||||||
- 'log'
|
- 'log'
|
||||||
'operationId': 'queryLogInfo'
|
'operationId': 'queryLogInfo'
|
||||||
|
@ -239,6 +247,9 @@
|
||||||
'$ref': '#/components/schemas/QueryLogConfig'
|
'$ref': '#/components/schemas/QueryLogConfig'
|
||||||
'/querylog_config':
|
'/querylog_config':
|
||||||
'post':
|
'post':
|
||||||
|
'deprecated': true
|
||||||
|
'description': >
|
||||||
|
Deprecated: Use `PUT /querylog/config/update` instead.
|
||||||
'tags':
|
'tags':
|
||||||
- 'log'
|
- 'log'
|
||||||
'operationId': 'queryLogConfig'
|
'operationId': 'queryLogConfig'
|
||||||
|
@ -260,6 +271,34 @@
|
||||||
'responses':
|
'responses':
|
||||||
'200':
|
'200':
|
||||||
'description': 'OK.'
|
'description': 'OK.'
|
||||||
|
'/querylog/config':
|
||||||
|
'get':
|
||||||
|
'tags':
|
||||||
|
- 'log'
|
||||||
|
'operationId': 'getQueryLogConfig'
|
||||||
|
'summary': 'Get query log parameters'
|
||||||
|
'responses':
|
||||||
|
'200':
|
||||||
|
'description': 'OK.'
|
||||||
|
'content':
|
||||||
|
'application/json':
|
||||||
|
'schema':
|
||||||
|
'$ref': '#/components/schemas/GetQueryLogConfigResponse'
|
||||||
|
'/querylog/config/update':
|
||||||
|
'put':
|
||||||
|
'tags':
|
||||||
|
- 'log'
|
||||||
|
'operationId': 'putQueryLogConfig'
|
||||||
|
'summary': 'Set query log parameters'
|
||||||
|
'requestBody':
|
||||||
|
'content':
|
||||||
|
'application/json':
|
||||||
|
'schema':
|
||||||
|
'$ref': '#/components/schemas/PutQueryLogConfigUpdateRequest'
|
||||||
|
'required': true
|
||||||
|
'responses':
|
||||||
|
'200':
|
||||||
|
'description': 'OK.'
|
||||||
'/stats':
|
'/stats':
|
||||||
'get':
|
'get':
|
||||||
'tags':
|
'tags':
|
||||||
|
@ -284,6 +323,14 @@
|
||||||
'description': 'OK.'
|
'description': 'OK.'
|
||||||
'/stats_info':
|
'/stats_info':
|
||||||
'get':
|
'get':
|
||||||
|
'deprecated': true
|
||||||
|
'description': |
|
||||||
|
Deprecated: Use `GET /stats/config` instead.
|
||||||
|
|
||||||
|
NOTE: If `interval` was configured by editing configuration file or new
|
||||||
|
HTTP API call `PUT /stats/config/update` and it's not equal to
|
||||||
|
previous allowed enum values then it will be equal to `90` days for
|
||||||
|
compatibility reasons.
|
||||||
'tags':
|
'tags':
|
||||||
- 'stats'
|
- 'stats'
|
||||||
'operationId': 'statsInfo'
|
'operationId': 'statsInfo'
|
||||||
|
@ -297,6 +344,9 @@
|
||||||
'$ref': '#/components/schemas/StatsConfig'
|
'$ref': '#/components/schemas/StatsConfig'
|
||||||
'/stats_config':
|
'/stats_config':
|
||||||
'post':
|
'post':
|
||||||
|
'deprecated': true
|
||||||
|
'description': >
|
||||||
|
Deprecated: Use `PUT /stats/config/update` instead.
|
||||||
'tags':
|
'tags':
|
||||||
- 'stats'
|
- 'stats'
|
||||||
'operationId': 'statsConfig'
|
'operationId': 'statsConfig'
|
||||||
|
@ -309,6 +359,34 @@
|
||||||
'responses':
|
'responses':
|
||||||
'200':
|
'200':
|
||||||
'description': 'OK.'
|
'description': 'OK.'
|
||||||
|
'/stats/config':
|
||||||
|
'get':
|
||||||
|
'tags':
|
||||||
|
- 'stats'
|
||||||
|
'operationId': 'getStatsConfig'
|
||||||
|
'summary': 'Get statistics parameters'
|
||||||
|
'responses':
|
||||||
|
'200':
|
||||||
|
'description': 'OK.'
|
||||||
|
'content':
|
||||||
|
'application/json':
|
||||||
|
'schema':
|
||||||
|
'$ref': '#/components/schemas/GetStatsConfigResponse'
|
||||||
|
'/stats/config/update':
|
||||||
|
'put':
|
||||||
|
'tags':
|
||||||
|
- 'stats'
|
||||||
|
'operationId': 'putStatsConfig'
|
||||||
|
'summary': 'Set statistics parameters'
|
||||||
|
'requestBody':
|
||||||
|
'content':
|
||||||
|
'application/json':
|
||||||
|
'schema':
|
||||||
|
'$ref': '#/components/schemas/PutStatsConfigUpdateRequest'
|
||||||
|
'required': true
|
||||||
|
'responses':
|
||||||
|
'200':
|
||||||
|
'description': 'OK.'
|
||||||
'/tls/status':
|
'/tls/status':
|
||||||
'get':
|
'get':
|
||||||
'tags':
|
'tags':
|
||||||
|
@ -717,6 +795,7 @@
|
||||||
'sensitivity': 13
|
'sensitivity': 13
|
||||||
'/safesearch/enable':
|
'/safesearch/enable':
|
||||||
'post':
|
'post':
|
||||||
|
'deprecated': true
|
||||||
'tags':
|
'tags':
|
||||||
- 'safesearch'
|
- 'safesearch'
|
||||||
'operationId': 'safesearchEnable'
|
'operationId': 'safesearchEnable'
|
||||||
|
@ -726,6 +805,7 @@
|
||||||
'description': 'OK.'
|
'description': 'OK.'
|
||||||
'/safesearch/disable':
|
'/safesearch/disable':
|
||||||
'post':
|
'post':
|
||||||
|
'deprecated': true
|
||||||
'tags':
|
'tags':
|
||||||
- 'safesearch'
|
- 'safesearch'
|
||||||
'operationId': 'safesearchDisable'
|
'operationId': 'safesearchDisable'
|
||||||
|
@ -733,6 +813,20 @@
|
||||||
'responses':
|
'responses':
|
||||||
'200':
|
'200':
|
||||||
'description': 'OK.'
|
'description': 'OK.'
|
||||||
|
'/safesearch/settings':
|
||||||
|
'put':
|
||||||
|
'tags':
|
||||||
|
- 'safesearch'
|
||||||
|
'operationId': 'safesearchSettings'
|
||||||
|
'summary': 'Update safesearch settings'
|
||||||
|
'requestBody':
|
||||||
|
'content':
|
||||||
|
'application/json':
|
||||||
|
'schema':
|
||||||
|
'$ref': '#/components/schemas/SafeSearchConfig'
|
||||||
|
'responses':
|
||||||
|
'200':
|
||||||
|
'description': 'OK.'
|
||||||
'/safesearch/status':
|
'/safesearch/status':
|
||||||
'get':
|
'get':
|
||||||
'tags':
|
'tags':
|
||||||
|
@ -745,14 +839,7 @@
|
||||||
'content':
|
'content':
|
||||||
'application/json':
|
'application/json':
|
||||||
'schema':
|
'schema':
|
||||||
'type': 'object'
|
'$ref': '#/components/schemas/SafeSearchConfig'
|
||||||
'properties':
|
|
||||||
'enabled':
|
|
||||||
'type': 'boolean'
|
|
||||||
'examples':
|
|
||||||
'response':
|
|
||||||
'value':
|
|
||||||
'enabled': false
|
|
||||||
'/clients':
|
'/clients':
|
||||||
'get':
|
'get':
|
||||||
'tags':
|
'tags':
|
||||||
|
@ -1254,7 +1341,7 @@
|
||||||
'example': 'en'
|
'example': 'en'
|
||||||
'DNSConfig':
|
'DNSConfig':
|
||||||
'type': 'object'
|
'type': 'object'
|
||||||
'description': 'Query log configuration'
|
'description': 'DNS server configuration'
|
||||||
'properties':
|
'properties':
|
||||||
'bootstrap_dns':
|
'bootstrap_dns':
|
||||||
'type': 'array'
|
'type': 'array'
|
||||||
|
@ -1280,8 +1367,6 @@
|
||||||
'type': 'string'
|
'type': 'string'
|
||||||
'protection_enabled':
|
'protection_enabled':
|
||||||
'type': 'boolean'
|
'type': 'boolean'
|
||||||
'dhcp_available':
|
|
||||||
'type': 'boolean'
|
|
||||||
'ratelimit':
|
'ratelimit':
|
||||||
'type': 'integer'
|
'type': 'integer'
|
||||||
'blocking_mode':
|
'blocking_mode':
|
||||||
|
@ -1298,6 +1383,10 @@
|
||||||
'type': 'string'
|
'type': 'string'
|
||||||
'edns_cs_enabled':
|
'edns_cs_enabled':
|
||||||
'type': 'boolean'
|
'type': 'boolean'
|
||||||
|
'edns_cs_use_custom':
|
||||||
|
'type': 'boolean'
|
||||||
|
'edns_cs_custom_ip':
|
||||||
|
'type': 'string'
|
||||||
'disable_ipv6':
|
'disable_ipv6':
|
||||||
'type': 'boolean'
|
'type': 'boolean'
|
||||||
'dnssec_enabled':
|
'dnssec_enabled':
|
||||||
|
@@ -1654,6 +1743,27 @@
         - 30
         - 90
         'type': 'integer'
+    'GetStatsConfigResponse':
+      'type': 'object'
+      'description': 'Statistics configuration'
+      'required':
+      - 'enabled'
+      - 'interval'
+      - 'ignored'
+      'properties':
+        'enabled':
+          'description': 'Are statistics enabled'
+          'type': 'boolean'
+        'interval':
+          'description': 'Statistics rotation interval'
+          'type': 'number'
+        'ignored':
+          'description': 'List of host names, which should not be counted'
+          'type': 'array'
+          'items':
+            'type': 'string'
+    'PutStatsConfigUpdateRequest':
+      '$ref': '#/components/schemas/GetStatsConfigResponse'
     'DhcpConfig':
       'type': 'object'
       'properties':
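Because `PutStatsConfigUpdateRequest` is only a `$ref` back to `GetStatsConfigResponse`, a single client-side type can serve both directions. A hedged sketch with an illustrative struct name and example values; the unit of `interval` is not fixed by this schema:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// statsConfig mirrors GetStatsConfigResponse; since PutStatsConfigUpdateRequest
// is a $ref to the same schema, one struct serves both reading and updating.
type statsConfig struct {
	Enabled  bool     `json:"enabled"`
	Interval float64  `json:"interval"`
	Ignored  []string `json:"ignored"`
}

func main() {
	// Example response body; the interval value here is purely illustrative.
	raw := []byte(`{"enabled": true, "interval": 86400000, "ignored": ["example.test"]}`)

	var c statsConfig
	if err := json.Unmarshal(raw, &c); err != nil {
		panic(err)
	}

	// Flip a field and marshal it back as an update request body.
	c.Enabled = false
	out, err := json.Marshal(c)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```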
@@ -2057,6 +2167,32 @@
         'anonymize_client_ip':
           'type': 'boolean'
           'description': "Anonymize clients' IP addresses"
+    'GetQueryLogConfigResponse':
+      'type': 'object'
+      'description': 'Query log configuration'
+      'required':
+      - 'enabled'
+      - 'interval'
+      - 'anonymize_client_ip'
+      - 'ignored'
+      'properties':
+        'enabled':
+          'type': 'boolean'
+          'description': 'Is query log enabled'
+        'interval':
+          'description': >
+            Time period for query log rotation.
+          'type': 'number'
+        'anonymize_client_ip':
+          'type': 'boolean'
+          'description': "Anonymize clients' IP addresses"
+        'ignored':
+          'description': 'List of host names, which should not be written to log'
+          'type': 'array'
+          'items':
+            'type': 'string'
+    'PutQueryLogConfigUpdateRequest':
+      '$ref': '#/components/schemas/GetQueryLogConfigResponse'
     'ResultRule':
       'description': 'Applied rule.'
       'properties':
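The query log configuration object mirrors the statistics one, with the extra `anonymize_client_ip` flag; here too the PUT request schema is a `$ref` to the GET response. An illustrative sketch of building such a payload; names and values are examples, not taken from the diff:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// queryLogConfig mirrors GetQueryLogConfigResponse.
type queryLogConfig struct {
	Enabled           bool     `json:"enabled"`
	Interval          float64  `json:"interval"`
	AnonymizeClientIP bool     `json:"anonymize_client_ip"`
	Ignored           []string `json:"ignored"`
}

func main() {
	// A configuration that keeps the log but hides client addresses and
	// skips one noisy host.
	c := queryLogConfig{
		Enabled:           true,
		Interval:          86400000, // unit not fixed by this schema
		AnonymizeClientIP: true,
		Ignored:           []string{"metrics.example.test"},
	}

	b, err := json.MarshalIndent(c, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```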
@@ -2267,6 +2403,24 @@
       - 'name'
       - 'language'
       - 'theme'
+    'SafeSearchConfig':
+      'type': 'object'
+      'description': 'Safe search settings.'
+      'properties':
+        'enabled':
+          'type': 'boolean'
+        'bing':
+          'type': 'boolean'
+        'duckduckgo':
+          'type': 'boolean'
+        'google':
+          'type': 'boolean'
+        'pixabay':
+          'type': 'boolean'
+        'yandex':
+          'type': 'boolean'
+        'youtube':
+          'type': 'boolean'
     'Client':
       'type': 'object'
       'description': 'Client information.'
@@ -2289,7 +2443,10 @@
         'safebrowsing_enabled':
           'type': 'boolean'
         'safesearch_enabled':
+          'deprecated': true
           'type': 'boolean'
+        'safe_search':
+          '$ref': '#/components/schemas/SafeSearchConfig'
         'use_global_blocked_services':
           'type': 'boolean'
         'blocked_services':
@@ -2350,6 +2507,7 @@
             'parental_enabled': true
             'safebrowsing_enabled': true
             'safesearch_enabled': true
+            'safe_search': {}
             'use_global_blocked_services': true
             'blocked_services': null
             'upstreams': null
@@ -2364,6 +2522,7 @@
             'parental_enabled': true
             'safebrowsing_enabled': true
             'safesearch_enabled': true
+            'safe_search': {}
             'use_global_blocked_services': true
             'blocked_services': null
             'upstreams': null
@@ -2424,7 +2583,10 @@
         'safebrowsing_enabled':
           'type': 'boolean'
         'safesearch_enabled':
+          'deprecated': true
           'type': 'boolean'
+        'safe_search':
+          '$ref': '#/components/schemas/SafeSearchConfig'
         'use_global_blocked_services':
           'type': 'boolean'
         'blocked_services':
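`SafeSearchConfig` is one master switch plus a boolean per supported service, and `Client` now carries it next to the deprecated `safesearch_enabled` flag (the `'safe_search': {}` lines in the example hunks above). A sketch of how a client could read both fields during the transition; the struct names are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// safeSearchConfig mirrors the new SafeSearchConfig schema: one master switch
// plus one boolean per supported service.
type safeSearchConfig struct {
	Enabled    bool `json:"enabled"`
	Bing       bool `json:"bing"`
	DuckDuckGo bool `json:"duckduckgo"`
	Google     bool `json:"google"`
	Pixabay    bool `json:"pixabay"`
	Yandex     bool `json:"yandex"`
	YouTube    bool `json:"youtube"`
}

// clientFragment keeps only the two fields relevant here: the deprecated
// boolean and its structured replacement.
type clientFragment struct {
	SafeSearchEnabled bool              `json:"safesearch_enabled"` // deprecated
	SafeSearch        *safeSearchConfig `json:"safe_search"`
}

func main() {
	// Matches the shape shown in the hunks above, where "safe_search"
	// appears next to the old boolean.
	raw := []byte(`{"safesearch_enabled": true, "safe_search": {"enabled": true, "youtube": true}}`)

	var c clientFragment
	if err := json.Unmarshal(raw, &c); err != nil {
		panic(err)
	}
	fmt.Printf("legacy flag: %v, per-service: %+v\n", c.SafeSearchEnabled, *c.SafeSearch)
}
```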
@@ -178,22 +178,28 @@ manifest file templates, and helper scripts.

 ### Usage

-* `npm install`: install dependencies. Run this first.
-* `npm run locales:download`: download and save all translations.
-* `npm run locales:upload`: upload the base `en` locale.
-* `npm run locales:summary`: show the current locales summary.
-* `npm run locales:unused`: show the list of unused strings.
+* `go run main.go help`: print usage.
+
+* `go run main.go download [-n <count>]`: download and save all translations.
+  `n` is optional flag where count is a number of concurrent downloads.
+
+* `go run main.go upload`: upload the base `en` locale.
+
+* `go run main.go summary`: show the current locales summary.
+
+* `go run main.go unused`: show the list of unused strings.

 After the download you'll find the output locales in the `client/src/__locales/`
 directory.

 Optional environment:

-* `SLEEP_TIME`: set the sleep time between downloads for `locales:download`,
-  in milliseconds. The default is 250 ms.
-
-* `UPLOAD_LANGUAGE`: set an alternative language for `locales:upload` to
-  upload.
+* `UPLOAD_LANGUAGE`: set an alternative language for `upload`.
+
+* `TWOSKY_URI`: set an alternative URL for `download` or `upload`.
+
+* `TWOSKY_PROJECT_ID`: set an alternative project ID for `download` or
+  `upload`.
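The environment variables listed above are plain overrides. A minimal sketch of the look-up-with-fallback pattern the tool uses for them; the URI and project ID defaults are the constants visible in `main.go` further down, while the `en` fallback here is only illustrative (the real tool falls back to the `base_locale` from `.twosky.json`):

```go
package main

import (
	"fmt"
	"os"
)

// envOr returns the value of the environment variable key, or fallback when
// the variable is unset or empty.
func envOr(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

func main() {
	uri := envOr("TWOSKY_URI", "https://twosky.int.agrd.dev/api/v1")
	project := envOr("TWOSKY_PROJECT_ID", "home")
	lang := envOr("UPLOAD_LANGUAGE", "en") // illustrative fallback

	fmt.Println(uri, project, lang)
}
```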
@@ -182,6 +182,7 @@ run_linter gocyclo --over 10\
     ./internal/version/\
     ./scripts/blocked-services/\
     ./scripts/vetted-filters/\
+    ./scripts/translations/\
     ./main.go\
     ;
@@ -1 +0,0 @@
-node_modules
@@ -1,41 +0,0 @@
-const path = require('path');
-const twoskyConfig = require('../../.twosky.json')[0];
-
-const {languages} = twoskyConfig;
-const LOCALES_DIR = '../../client/src/__locales';
-const LOCALES_LIST = Object.keys(languages);
-const BASE_FILE = 'en.json';
-
-const main = () => {
-    const pathToBaseFile = path.join(LOCALES_DIR, BASE_FILE);
-    const baseLanguageJson = require(pathToBaseFile);
-
-    const summary = {};
-
-    LOCALES_LIST.forEach((locale) => {
-        const pathToFile = path.join(LOCALES_DIR, `${locale}.json`);
-        if (pathToFile === pathToBaseFile) {
-            return;
-        }
-
-        let total = 0;
-        let translated = 0;
-
-        const languageJson = require(pathToFile);
-        for (let key in baseLanguageJson) {
-            total += 1;
-            if (key in languageJson) {
-                translated += 1;
-            }
-        }
-
-        summary[locale] = Math.round(translated / total * 10000) / 100;
-    });
-
-    console.log('Translations summary:');
-    for (let key in summary) {
-        console.log(`${key}, translated ${summary[key]}%`);
-    }
-}
-
-main();
@@ -1,125 +0,0 @@
-// TODO(a.garipov): Rewrite this in Go; add better concurrency controls; add
-// features for easier maintenance.
-
-const fs = require('fs');
-const path = require('path');
-const requestPromise = require('request-promise');
-const twoskyConfig = require('../../.twosky.json')[0];
-
-const { project_id: TWOSKY_PROJECT_ID, languages } = twoskyConfig;
-const LOCALES_DIR = '../../client/src/__locales';
-const LOCALES_LIST = Object.keys(languages);
-const BASE_FILE = 'en.json';
-const TWOSKY_URI = process.env.TWOSKY_URI;
-
-/**
- * Prepare params to get translations from twosky
- * @param {string} locale language shortcut
- * @param {object} twosky config twosky
- */
-const getRequestUrl = (locale, url, projectId) => {
-    return `${url}/download?format=json&language=${locale}&filename=${BASE_FILE}&project=${projectId}`;
-};
-
-/**
- * Promise wrapper for writing in file
- * @param {string} filename
- * @param {any} body
- */
-function writeInFile(filename, body) {
-    let normalizedBody = removeEmpty(JSON.parse(body));
-
-    return new Promise((resolve, reject) => {
-        if (typeof normalizedBody !== 'string') {
-            try {
-                normalizedBody = JSON.stringify(normalizedBody, null, 4) + '\n'; // eslint-disable-line
-            } catch (err) {
-                reject(err);
-            }
-        }
-
-        fs.writeFile(filename, normalizedBody, (err) => {
-            if (err) reject(err);
-            resolve('Ok');
-        });
-    });
-}
-
-/**
- * Clear initial from empty value keys
- * @param {object} initialObject
- */
-function removeEmpty(initialObject) {
-    let processedObject = {};
-    Object.keys(initialObject).forEach(prop => {
-        if (initialObject[prop]) {
-            processedObject[prop] = initialObject[prop];
-        }
-    });
-    return processedObject;
-}
-
-/**
- * Request twosky
- * @param {string} url
- * @param {string} locale
- */
-const request = (url, locale) => (
-    requestPromise.get(url)
-        .then((res) => {
-            if (res.length) {
-                const pathToFile = path.join(LOCALES_DIR, `${locale}.json`);
-                return writeInFile(pathToFile, res);
-            }
-            return null;
-        })
-        .then((res) => {
-            let result = locale;
-            result += res ? ' - OK' : ' - Empty';
-            return result;
-        })
-        .catch((err) => {
-            console.log(err);
-            return `${locale} - Not OK`;
-        }));
-
-/**
- * Sleep.
- * @param {number} ms
- */
-const sleep = (ms) => new Promise((resolve) => {
-    setTimeout(resolve, ms);
-});
-
-/**
- * Download locales
- */
-const download = async () => {
-    const locales = LOCALES_LIST;
-
-    if (!TWOSKY_URI) {
-        console.error('No credentials');
-        return;
-    }
-
-    const requests = [];
-    for (let i = 0; i < locales.length; i++) {
-        const locale = locales[i];
-        const url = getRequestUrl(locale, TWOSKY_URI, TWOSKY_PROJECT_ID);
-        requests.push(request(url, locale));
-
-        // Don't request the Crowdin API too aggressively to prevent spurious
-        // 400 errors.
-        const sleepTime = process.env.SLEEP_TIME || 250;
-        await sleep(sleepTime);
-    }
-
-    Promise
-        .all(requests)
-        .then((res) => {
-            res.forEach(item => console.log(item));
-        })
-        .catch(err => console.log(err));
-};
-
-download();
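The removed script throttled its sequential requests with `SLEEP_TIME`; the Go rewrite that follows replaces that with a fixed-size worker pool selected by the `-n` flag. A stripped-down sketch of that pattern, with all names illustrative rather than taken from the diff:

```go
package main

import (
	"fmt"
	"sync"
)

// fetch stands in for one translation download; in the real tool the work is
// an HTTP GET plus a file write.
func fetch(lang string) string {
	return lang + ".json"
}

func main() {
	langs := []string{"de", "es", "fr", "ja"}
	const numWorker = 2 // what the -n flag controls in the real tool

	jobs := make(chan string, len(langs))
	var wg sync.WaitGroup

	// Fixed-size pool: numWorker goroutines drain the channel, so at most
	// numWorker downloads are in flight at once, replacing the old
	// per-request sleep.
	for i := 0; i < numWorker; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for lang := range jobs {
				fmt.Println("saved", fetch(lang))
			}
		}()
	}

	for _, lang := range langs {
		jobs <- lang
	}
	close(jobs)
	wg.Wait()
}
```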
@@ -0,0 +1,464 @@
+// translations downloads translations, uploads translations, prints summary
+// for translations, prints unused strings.
+package main
+
+import (
+    "bytes"
+    "encoding/json"
+    "flag"
+    "fmt"
+    "io"
+    "net/http"
+    "net/url"
+    "os"
+    "path/filepath"
+    "strings"
+    "sync"
+    "time"
+
+    "github.com/AdguardTeam/AdGuardHome/internal/aghio"
+    "github.com/AdguardTeam/golibs/errors"
+    "github.com/AdguardTeam/golibs/log"
+    "golang.org/x/exp/maps"
+    "golang.org/x/exp/slices"
+)
+
+const (
+    twoskyConfFile   = "./.twosky.json"
+    localesDir       = "./client/src/__locales"
+    defaultBaseFile  = "en.json"
+    defaultProjectID = "home"
+    srcDir           = "./client/src"
+    twoskyURI        = "https://twosky.int.agrd.dev/api/v1"
+
+    readLimit = 1 * 1024 * 1024
+)
+
+// langCode is a language code.
+type langCode string
+
+// languages is a map, where key is language code and value is display name.
+type languages map[langCode]string
+
+// textlabel is a text label of localization.
+type textLabel string
+
+// locales is a map, where key is text label and value is translation.
+type locales map[textLabel]string
+
+func main() {
+    if len(os.Args) == 1 {
+        usage("need a command")
+    }
+
+    if os.Args[1] == "help" {
+        usage("")
+    }
+
+    uriStr := os.Getenv("TWOSKY_URI")
+    if uriStr == "" {
+        uriStr = twoskyURI
+    }
+
+    uri, err := url.Parse(uriStr)
+    check(err)
+
+    projectID := os.Getenv("TWOSKY_PROJECT_ID")
+    if projectID == "" {
+        projectID = defaultProjectID
+    }
+
+    conf, err := readTwoskyConf()
+    check(err)
+
+    switch os.Args[1] {
+    case "summary":
+        err = summary(conf.Languages)
+        check(err)
+    case "download":
+        err = download(uri, projectID, conf.Languages)
+        check(err)
+    case "unused":
+        err = unused()
+        check(err)
+    case "upload":
+        err = upload(uri, projectID, conf.BaseLangcode)
+        check(err)
+    default:
+        usage("unknown command")
+    }
+}
+
+// check is a simple error-checking helper for scripts.
+func check(err error) {
+    if err != nil {
+        panic(err)
+    }
+}
+
+// usage prints usage. If addStr is not empty print addStr and exit with code
+// 1, otherwise exit with code 0.
+func usage(addStr string) {
+    const usageStr = `Usage: go run main.go <command> [<args>]
+Commands:
+  help
+        Print usage.
+  summary
+        Print summary.
+  download [-n <count>]
+        Download translations. count is a number of concurrent downloads.
+  unused
+        Print unused strings.
+  upload
+        Upload translations.`
+
+    if addStr != "" {
+        fmt.Printf("%s\n%s\n", addStr, usageStr)
+
+        os.Exit(1)
+    }
+
+    fmt.Println(usageStr)
+
+    os.Exit(0)
+}
+
+// twoskyConf is the configuration structure for localization.
+type twoskyConf struct {
+    Languages        languages `json:"languages"`
+    ProjectID        string    `json:"project_id"`
+    BaseLangcode     langCode  `json:"base_locale"`
+    LocalizableFiles []string  `json:"localizable_files"`
+}
+
+// readTwoskyConf returns configuration.
+func readTwoskyConf() (t twoskyConf, err error) {
+    b, err := os.ReadFile(twoskyConfFile)
+    if err != nil {
+        // Don't wrap the error since it's informative enough as is.
+        return twoskyConf{}, err
+    }
+
+    var tsc []twoskyConf
+    err = json.Unmarshal(b, &tsc)
+    if err != nil {
+        err = fmt.Errorf("unmarshalling %q: %w", twoskyConfFile, err)
+
+        return twoskyConf{}, err
+    }
+
+    if len(tsc) == 0 {
+        err = fmt.Errorf("%q is empty", twoskyConfFile)
+
+        return twoskyConf{}, err
+    }
+
+    conf := tsc[0]
+
+    for _, lang := range conf.Languages {
+        if lang == "" {
+            return twoskyConf{}, errors.Error("language is empty")
+        }
+    }
+
+    return conf, nil
+}
+
+// readLocales reads file with name fn and returns a map, where key is text
+// label and value is localization.
+func readLocales(fn string) (loc locales, err error) {
+    b, err := os.ReadFile(fn)
+    if err != nil {
+        // Don't wrap the error since it's informative enough as is.
+        return nil, err
+    }
+
+    loc = make(locales)
+    err = json.Unmarshal(b, &loc)
+    if err != nil {
+        err = fmt.Errorf("unmarshalling %q: %w", fn, err)
+
+        return nil, err
+    }
+
+    return loc, nil
+}
+
+// summary prints summary for translations.
+func summary(langs languages) (err error) {
+    basePath := filepath.Join(localesDir, defaultBaseFile)
+    baseLoc, err := readLocales(basePath)
+    if err != nil {
+        return fmt.Errorf("summary: %w", err)
+    }
+
+    size := float64(len(baseLoc))
+
+    keys := maps.Keys(langs)
+    slices.Sort(keys)
+
+    for _, lang := range keys {
+        name := filepath.Join(localesDir, string(lang)+".json")
+        if name == basePath {
+            continue
+        }
+
+        var loc locales
+        loc, err = readLocales(name)
+        if err != nil {
+            return fmt.Errorf("summary: reading locales: %w", err)
+        }
+
+        f := float64(len(loc)) * 100 / size
+
+        fmt.Printf("%s\t %6.2f %%\n", lang, f)
+    }
+
+    return nil
+}
+
+// download and save all translations. uri is the base URL. projectID is the
+// name of the project.
+func download(uri *url.URL, projectID string, langs languages) (err error) {
+    var numWorker int
+
+    flagSet := flag.NewFlagSet("download", flag.ExitOnError)
+    flagSet.Usage = func() {
+        usage("download command error")
+    }
+    flagSet.IntVar(&numWorker, "n", 1, "number of concurrent downloads")
+
+    err = flagSet.Parse(os.Args[2:])
+    if err != nil {
+        // Don't wrap the error since there is exit on error.
+        return err
+    }
+
+    if numWorker < 1 {
+        usage("count must be positive")
+    }
+
+    downloadURI := uri.JoinPath("download")
+
+    client := &http.Client{
+        Timeout: 10 * time.Second,
+    }
+
+    var wg sync.WaitGroup
+    uriCh := make(chan *url.URL, len(langs))
+
+    for i := 0; i < numWorker; i++ {
+        wg.Add(1)
+        go downloadWorker(&wg, client, uriCh)
+    }
+
+    for lang := range langs {
+        uri = translationURL(downloadURI, defaultBaseFile, projectID, lang)
+
+        uriCh <- uri
+    }
+
+    close(uriCh)
+    wg.Wait()
+
+    return nil
+}
+
+// downloadWorker downloads translations by received urls and saves them.
+func downloadWorker(wg *sync.WaitGroup, client *http.Client, uriCh <-chan *url.URL) {
+    defer wg.Done()
+
+    for uri := range uriCh {
+        data, err := getTranslation(client, uri.String())
+        if err != nil {
+            log.Error("download worker: getting translation: %s", err)
+
+            continue
+        }
+
+        q := uri.Query()
+        code := q.Get("language")
+
+        name := filepath.Join(localesDir, code+".json")
+        err = os.WriteFile(name, data, 0o664)
+        if err != nil {
+            log.Error("download worker: writing file: %s", err)
+
+            continue
+        }
+
+        fmt.Println(name)
+    }
+}
+
+// getTranslation returns received translation data or error.
+func getTranslation(client *http.Client, url string) (data []byte, err error) {
+    resp, err := client.Get(url)
+    if err != nil {
+        return nil, fmt.Errorf("requesting: %w", err)
+    }
+
+    defer log.OnCloserError(resp.Body, log.ERROR)
+
+    if resp.StatusCode != http.StatusOK {
+        err = fmt.Errorf("url: %q; status code: %s", url, http.StatusText(resp.StatusCode))
+
+        return nil, err
+    }
+
+    limitReader, err := aghio.LimitReader(resp.Body, readLimit)
+    if err != nil {
+        err = fmt.Errorf("limit reading: %w", err)
+
+        return nil, err
+    }
+
+    data, err = io.ReadAll(limitReader)
+    if err != nil {
+        err = fmt.Errorf("reading all: %w", err)
+
+        return nil, err
+    }
+
+    return data, nil
+}
+
+// translationURL returns a new url.URL with provided query parameters.
+func translationURL(oldURL *url.URL, baseFile, projectID string, lang langCode) (uri *url.URL) {
+    uri = &url.URL{}
+    *uri = *oldURL
+
+    q := uri.Query()
+    q.Set("format", "json")
+    q.Set("filename", baseFile)
+    q.Set("project", projectID)
+    q.Set("language", string(lang))
+
+    uri.RawQuery = q.Encode()
+
+    return uri
+}
+
+// unused prints unused text labels.
+func unused() (err error) {
+    fileNames := []string{}
+    basePath := filepath.Join(localesDir, defaultBaseFile)
+    baseLoc, err := readLocales(basePath)
+    if err != nil {
+        return fmt.Errorf("unused: %w", err)
+    }
+
+    locDir := filepath.Clean(localesDir)
+
+    err = filepath.Walk(srcDir, func(name string, info os.FileInfo, err error) error {
+        if err != nil {
+            log.Info("accessing a path %q: %s", name, err)
+
+            return nil
+        }
+
+        if info.IsDir() {
+            return nil
+        }
+
+        if strings.HasPrefix(name, locDir) {
+            return nil
+        }
+
+        ext := filepath.Ext(name)
+        if ext == ".js" || ext == ".json" {
+            fileNames = append(fileNames, name)
+        }
+
+        return nil
+    })
+
+    if err != nil {
+        return fmt.Errorf("filepath walking %q: %w", srcDir, err)
+    }
+
+    err = removeUnused(fileNames, baseLoc)
+
+    return errors.Annotate(err, "removing unused: %w")
+}
+
+func removeUnused(fileNames []string, loc locales) (err error) {
+    knownUsed := []textLabel{
+        "blocking_mode_refused",
+        "blocking_mode_nxdomain",
+        "blocking_mode_custom_ip",
+    }
+
+    for _, v := range knownUsed {
+        delete(loc, v)
+    }
+
+    for _, fn := range fileNames {
+        var buf []byte
+        buf, err = os.ReadFile(fn)
+        if err != nil {
+            // Don't wrap the error since it's informative enough as is.
+            return err
+        }
+
+        for k := range loc {
+            if bytes.Contains(buf, []byte(k)) {
+                delete(loc, k)
+            }
+        }
+    }
+
+    printUnused(loc)
+
+    return nil
+}
+
+// printUnused text labels to stdout.
+func printUnused(loc locales) {
+    keys := maps.Keys(loc)
+    slices.Sort(keys)
+
+    for _, v := range keys {
+        fmt.Println(v)
+    }
+}
+
+// upload base translation. uri is the base URL. projectID is the name of the
+// project. baseLang is the base language code.
+func upload(uri *url.URL, projectID string, baseLang langCode) (err error) {
+    uploadURI := uri.JoinPath("upload")
+
+    lang := baseLang
+
+    langStr := os.Getenv("UPLOAD_LANGUAGE")
+    if langStr != "" {
+        lang = langCode(langStr)
+    }
+
+    basePath := filepath.Join(localesDir, defaultBaseFile)
+    b, err := os.ReadFile(basePath)
+    if err != nil {
+        return fmt.Errorf("upload: %w", err)
+    }
+
+    var buf bytes.Buffer
+    buf.Write(b)
+
+    uri = translationURL(uploadURI, defaultBaseFile, projectID, lang)
+
+    var client http.Client
+    resp, err := client.Post(uri.String(), "application/json", &buf)
+    if err != nil {
+        return fmt.Errorf("upload: client post: %w", err)
+    }
+
+    defer func() {
+        err = errors.WithDeferred(err, resp.Body.Close())
+    }()
+
+    if resp.StatusCode != http.StatusOK {
+        return fmt.Errorf("status code is not ok: %q", http.StatusText(resp.StatusCode))
+    }
+
+    return nil
+}
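`getTranslation` above caps response bodies with the project's `aghio.LimitReader` helper. For readers unfamiliar with it, the standard library's `io.LimitReader` gives a similar effect, although it silently truncates at the cap instead of reporting an error the way the project helper presumably does. A self-contained sketch using only the standard library; the URL is a placeholder:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

const readLimit = 1 * 1024 * 1024 // same 1 MB cap as in main.go

// fetchLimited downloads a body but never reads more than limit bytes,
// mirroring what getTranslation does with the limited reader.
func fetchLimited(client *http.Client, url string, limit int64) ([]byte, error) {
	resp, err := client.Get(url)
	if err != nil {
		return nil, fmt.Errorf("requesting: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("url: %q; status code: %s", url, http.StatusText(resp.StatusCode))
	}

	return io.ReadAll(io.LimitReader(resp.Body, limit))
}

func main() {
	// Placeholder URL for illustration only.
	data, err := fetchLimited(http.DefaultClient, "https://example.com/", readLimit)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("read", len(data), "bytes; looks like HTML:", strings.Contains(string(data), "<html"))
}
```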
@@ -1,838 +0,0 @@
[Removed npm lockfile: 838 lines pinning `request`, `request-promise`, and their transitive dependencies for the deleted Node.js translation scripts.]
|
|
||||||
"integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
|
|
||||||
"requires": {
|
|
||||||
"punycode": "^2.1.0"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"punycode": {
|
|
||||||
"version": "2.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
|
||||||
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"uuid": {
|
|
||||||
"version": "3.3.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
|
||||||
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
|
|
||||||
},
|
|
||||||
"verror": {
|
|
||||||
"version": "1.10.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
|
|
||||||
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
|
|
||||||
"requires": {
|
|
||||||
"assert-plus": "^1.0.0",
|
|
||||||
"core-util-is": "1.0.2",
|
|
||||||
"extsprintf": "^1.2.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,14 +0,0 @@
{
  "name": "translations",
  "version": "0.2.0",
  "scripts": {
    "locales:download": "TWOSKY_URI=https://twosky.int.agrd.dev/api/v1 TWOSKY_PROJECT_ID=home node download.js ; node count.js",
    "locales:upload": "TWOSKY_URI=https://twosky.int.agrd.dev/api/v1 TWOSKY_PROJECT_ID=home node upload.js",
    "locales:summary": "node count.js",
    "locales:unused": "node unused.js"
  },
  "dependencies": {
    "request": "^2.88.0",
    "request-promise": "^4.2.2"
  }
}
@@ -1,63 +0,0 @@
const fs = require('fs');
const path = require('path');

const SRC_DIR = '../../client/src/'
const LOCALES_DIR = '../../client/src/__locales';
const BASE_FILE = path.join(LOCALES_DIR, 'en.json');

// Strings that may be found by the algorithm,
// but in fact they are used.
const KNOWN_USED_STRINGS = {
    'blocking_mode_refused': true,
    'blocking_mode_nxdomain': true,
    'blocking_mode_custom_ip': true,
}

function traverseDir(dir, callback) {
    fs.readdirSync(dir).forEach(file => {
        let fullPath = path.join(dir, file);
        if (fs.lstatSync(fullPath).isDirectory()) {
            traverseDir(fullPath, callback);
        } else {
            callback(fullPath);
        }
    });
}

const contains = (key, files) => {
    for (let file of files) {
        if (file.includes(key)) {
            return true;
        }
    }

    return false;
}

const main = () => {
    const baseLanguage = require(BASE_FILE);
    const files = [];

    traverseDir(SRC_DIR, (path) => {
        const canContain = (path.endsWith('.js') || path.endsWith('.json')) &&
            !path.includes(LOCALES_DIR);

        if (canContain) {
            files.push(fs.readFileSync(path).toString());
        }
    });

    const unused = [];
    for (let key in baseLanguage) {
        if (!contains(key, files) && !KNOWN_USED_STRINGS[key]) {
            unused.push(key);
        }
    }

    console.log('Unused keys:');
    for (let key of unused) {
        console.log(key);
    }
}

main();
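The detection in `unused.js` above is a plain substring search over the concatenated source files, so a translation key that is only assembled at runtime never appears verbatim in the sources and would be reported as unused; the `KNOWN_USED_STRINGS` allowlist exists to cover exactly those keys. A minimal sketch of that failure mode, using a hypothetical component snippet (the variable names below are illustrative only, not from the repository):

```js
// Hypothetical component code: the full key is built at runtime, so it never
// appears verbatim in any source file.
const mode = 'refused';
const translationKey = `blocking_mode_${mode}`;

// The check in unused.js amounts to a substring search like this one, which
// therefore misses the dynamically built key and would flag it as unused.
const sourceText = 'const translationKey = `blocking_mode_${mode}`;';
console.log(sourceText.includes('blocking_mode_refused')); // false
```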
@@ -1,47 +0,0 @@
const path = require('path');
const fs = require('fs');
const request = require('request-promise');
const twoskyConfig = require('../../.twosky.json')[0];

const { project_id: TWOSKY_PROJECT_ID, base_locale: DEFAULT_LANGUAGE } = twoskyConfig;
const LOCALES_DIR = '../../client/src/__locales';
const BASE_FILE = 'en.json';
const TWOSKY_URI = process.env.TWOSKY_URI;

/**
 * Prepare post params
 */
const getRequestData = (url, projectId) => {
    const language = process.env.UPLOAD_LANGUAGE || DEFAULT_LANGUAGE;
    const formData = {
        format: 'json',
        language: language,
        filename: BASE_FILE,
        project: projectId,
        file: fs.createReadStream(path.resolve(LOCALES_DIR, `${language}.json`)),
    };

    console.log(`uploading ${language}`);

    return {
        url: `${url}/upload`,
        formData
    };
};

/**
 * Make request to twosky to upload new json
 */
const upload = () => {
    if (!TWOSKY_URI) {
        console.error('No credentials');
        return;
    }

    const { url, formData } = getRequestData(TWOSKY_URI, TWOSKY_PROJECT_ID);
    request
        .post({ url, formData })
        .catch(err => console.log(err));
};

upload();
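For reference, `upload.js` takes its endpoint from the `TWOSKY_URI` environment variable, the project id and base locale from `.twosky.json`, and an optional `UPLOAD_LANGUAGE` override for which locale file to send. A minimal sketch of driving it programmatically, assuming the working directory is the script's own directory (the URI below is a placeholder, not a real endpoint):

```js
// Hypothetical driver; the URI and language are placeholders, not real values.
process.env.TWOSKY_URI = 'https://example.invalid/api/v1';
process.env.UPLOAD_LANGUAGE = 'en';

// upload.js reads TWOSKY_URI at load time and calls upload() immediately;
// if TWOSKY_URI is unset it only logs "No credentials" and returns.
require('./upload');
```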