
github.com/pi-hole/pi-hole.git
author    Adam Warner <me@adamwarner.co.uk>  2022-02-12 23:04:20 +0300
committer GitHub <noreply@github.com>        2022-02-12 23:04:20 +0300
commit    6ffa2ba1b2a68c7b0919689e137017ae344aed3c (patch)
tree      14a995c7c538d412eb4e1154f7fc4886bfa164d1
parent    5d68dac90eafe2fd2eb9693507acc1911a048050 (diff)
parent    e9250d62c562a2a174e2768f0b2513f68f264746 (diff)

Merge pull request #4547 from pi-hole/development (tag: v5.9)

Pi-hole Core v5.9
 advanced/Scripts/database_migration/gravity-db.sh |  30
 advanced/Scripts/list.sh                          |  20
 advanced/Scripts/piholeARPTable.sh                |   4
 advanced/Scripts/piholeDebug.sh                   |  32
 advanced/Scripts/piholeLogFlush.sh                |   2
 advanced/Scripts/query.sh                         |  10
 advanced/Scripts/update.sh                        |   2
 advanced/Scripts/utils.sh                         |  35
 advanced/Scripts/webpage.sh                       |  52
 advanced/Templates/gravity_copy.sql               |   3
 automated install/basic-install.sh                |  13
 gravity.sh                                        |  89
 manpages/pihole-FTL.8                             |   4
 manpages/pihole-FTL.conf.5                        | 313
 pihole                                            |  13
 test/_centos_8.Dockerfile                         |   2
 test/test_any_automated_install.py (renamed from test/test_automated_install.py) |   4
 test/test_any_utils.py                            |  16
 test/tox.centos_7.ini                             |   2
 test/tox.centos_8.ini                             |   2
 test/tox.debian_10.ini                            |   2
 test/tox.debian_11.ini                            |   2
 test/tox.debian_9.ini                             |   2
 test/tox.fedora_33.ini                            |   2
 test/tox.fedora_34.ini                            |   2
 test/tox.ubuntu_16.ini                            |   2
 test/tox.ubuntu_18.ini                            |   2
 test/tox.ubuntu_20.ini                            |   2
 test/tox.ubuntu_21.ini                            |   2
 29 files changed, 221 insertions(+), 445 deletions(-)
diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh
index 09dc1727..a7ba60a9 100755
--- a/advanced/Scripts/database_migration/gravity-db.sh
+++ b/advanced/Scripts/database_migration/gravity-db.sh
@@ -19,13 +19,13 @@ upgrade_gravityDB(){
auditFile="${piholeDir}/auditlog.list"
# Get database version
- version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"
+ version="$(pihole-FTL sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"
if [[ "$version" == "1" ]]; then
# This migration script upgrades the gravity.db file by
# adding the domain_audit table
echo -e " ${INFO} Upgrading gravity database from version 1 to 2"
- sqlite3 "${database}" < "${scriptPath}/1_to_2.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/1_to_2.sql"
version=2
# Store audit domains in database table
@@ -40,28 +40,28 @@ upgrade_gravityDB(){
# renaming the regex table to regex_blacklist, and
# creating a new regex_whitelist table + corresponding linking table and views
echo -e " ${INFO} Upgrading gravity database from version 2 to 3"
- sqlite3 "${database}" < "${scriptPath}/2_to_3.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/2_to_3.sql"
version=3
fi
if [[ "$version" == "3" ]]; then
# This migration script unifies the formally separated domain
# lists into a single table with a UNIQUE domain constraint
echo -e " ${INFO} Upgrading gravity database from version 3 to 4"
- sqlite3 "${database}" < "${scriptPath}/3_to_4.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/3_to_4.sql"
version=4
fi
if [[ "$version" == "4" ]]; then
# This migration script upgrades the gravity and list views
# implementing necessary changes for per-client blocking
echo -e " ${INFO} Upgrading gravity database from version 4 to 5"
- sqlite3 "${database}" < "${scriptPath}/4_to_5.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/4_to_5.sql"
version=5
fi
if [[ "$version" == "5" ]]; then
# This migration script upgrades the adlist view
# to return an ID used in gravity.sh
echo -e " ${INFO} Upgrading gravity database from version 5 to 6"
- sqlite3 "${database}" < "${scriptPath}/5_to_6.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/5_to_6.sql"
version=6
fi
if [[ "$version" == "6" ]]; then
@@ -69,7 +69,7 @@ upgrade_gravityDB(){
# which is automatically associated to all clients not
# having their own group assignments
echo -e " ${INFO} Upgrading gravity database from version 6 to 7"
- sqlite3 "${database}" < "${scriptPath}/6_to_7.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/6_to_7.sql"
version=7
fi
if [[ "$version" == "7" ]]; then
@@ -77,21 +77,21 @@ upgrade_gravityDB(){
# to ensure uniqueness on the group name
# We also add date_added and date_modified columns
echo -e " ${INFO} Upgrading gravity database from version 7 to 8"
- sqlite3 "${database}" < "${scriptPath}/7_to_8.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/7_to_8.sql"
version=8
fi
if [[ "$version" == "8" ]]; then
# This migration fixes some issues that were introduced
# in the previous migration script.
echo -e " ${INFO} Upgrading gravity database from version 8 to 9"
- sqlite3 "${database}" < "${scriptPath}/8_to_9.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/8_to_9.sql"
version=9
fi
if [[ "$version" == "9" ]]; then
# This migration drops unused tables and creates triggers to remove
# obsolete groups assignments when the linked items are deleted
echo -e " ${INFO} Upgrading gravity database from version 9 to 10"
- sqlite3 "${database}" < "${scriptPath}/9_to_10.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/9_to_10.sql"
version=10
fi
if [[ "$version" == "10" ]]; then
@@ -101,31 +101,31 @@ upgrade_gravityDB(){
# to keep the copying process generic (needs the same columns in both the
# source and the destination databases).
echo -e " ${INFO} Upgrading gravity database from version 10 to 11"
- sqlite3 "${database}" < "${scriptPath}/10_to_11.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/10_to_11.sql"
version=11
fi
if [[ "$version" == "11" ]]; then
# Rename group 0 from "Unassociated" to "Default"
echo -e " ${INFO} Upgrading gravity database from version 11 to 12"
- sqlite3 "${database}" < "${scriptPath}/11_to_12.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/11_to_12.sql"
version=12
fi
if [[ "$version" == "12" ]]; then
# Add column date_updated to adlist table
echo -e " ${INFO} Upgrading gravity database from version 12 to 13"
- sqlite3 "${database}" < "${scriptPath}/12_to_13.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/12_to_13.sql"
version=13
fi
if [[ "$version" == "13" ]]; then
# Add columns number and status to adlist table
echo -e " ${INFO} Upgrading gravity database from version 13 to 14"
- sqlite3 "${database}" < "${scriptPath}/13_to_14.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/13_to_14.sql"
version=14
fi
if [[ "$version" == "14" ]]; then
# Changes the vw_adlist created in 5_to_6
echo -e " ${INFO} Upgrading gravity database from version 14 to 15"
- sqlite3 "${database}" < "${scriptPath}/14_to_15.sql"
+ pihole-FTL sqlite3 "${database}" < "${scriptPath}/14_to_15.sql"
version=15
fi
}
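
Every migration step above makes the same substitution: direct calls to a system sqlite3 binary are routed through the SQLite shell embedded in pihole-FTL, so the standalone sqlite3 package is no longer required (see the PIHOLE_DEPS changes in basic-install.sh further down). A minimal sketch of the invocation pattern, assuming the default /etc/pihole/gravity.db location; the query and migration file below are illustrative:

    # Query the gravity database through FTL's embedded SQLite shell
    pihole-FTL sqlite3 /etc/pihole/gravity.db \
        "SELECT value FROM info WHERE property = 'version';"

    # Migration scripts are applied the same way, fed on stdin
    # (scriptPath is the migration directory used by gravity-db.sh)
    pihole-FTL sqlite3 /etc/pihole/gravity.db < "${scriptPath}/1_to_2.sql"
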
diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh
index 8945047e..f3f97da2 100755
--- a/advanced/Scripts/list.sh
+++ b/advanced/Scripts/list.sh
@@ -142,18 +142,18 @@ AddDomain() {
domain="$1"
# Is the domain in the list we want to add it to?
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
+ num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
requestedListname="$(GetListnameFromTypeId "${typeId}")"
if [[ "${num}" -ne 0 ]]; then
- existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
+ existingTypeId="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
if [[ "${existingTypeId}" == "${typeId}" ]]; then
if [[ "${verbose}" == true ]]; then
echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!"
fi
else
existingListname="$(GetListnameFromTypeId "${existingTypeId}")"
- sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
+ pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
if [[ "${verbose}" == true ]]; then
echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!"
fi
@@ -169,10 +169,10 @@ AddDomain() {
# Insert only the domain here. The enabled and date_added fields will be filled
# with their default values (enabled = true, date_added = current timestamp)
if [[ -z "${comment}" ]]; then
- sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
+ pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
else
# also add comment when variable has been set through the "--comment" option
- sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
+ pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
fi
}
@@ -181,7 +181,7 @@ RemoveDomain() {
domain="$1"
# Is the domain in the list we want to remove it from?
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"
+ num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"
requestedListname="$(GetListnameFromTypeId "${typeId}")"
@@ -198,14 +198,14 @@ RemoveDomain() {
fi
reload=true
# Remove it from the current list
- sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
+ pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
}
Displaylist() {
local count num_pipes domain enabled status nicedate requestedListname
requestedListname="$(GetListnameFromTypeId "${typeId}")"
- data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"
+ data="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"
if [[ -z $data ]]; then
echo -e "Not showing empty list"
@@ -243,10 +243,10 @@ Displaylist() {
}
NukeList() {
- count=$(sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};")
+ count=$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};")
listname="$(GetListnameFromTypeId "${typeId}")"
if [ "$count" -gt 0 ];then
- sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
+ pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
echo " ${TICK} Removed ${count} domain(s) from the ${listname}"
else
echo " ${INFO} ${listname} already empty. Nothing to do!"
diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh
index 66d05bf9..5daa025d 100755
--- a/advanced/Scripts/piholeARPTable.sh
+++ b/advanced/Scripts/piholeARPTable.sh
@@ -39,7 +39,7 @@ flushARP(){
# Truncate network_addresses table in pihole-FTL.db
# This needs to be done before we can truncate the network table due to
# foreign key constraints
- if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
+ if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table"
echo " Database location: ${DBFILE}"
echo " Output: ${output}"
@@ -47,7 +47,7 @@ flushARP(){
fi
# Truncate network table in pihole-FTL.db
- if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then
+ if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then
echo -e "${OVER} ${CROSS} Failed to truncate network table"
echo " Database location: ${DBFILE}"
echo " Output: ${output}"
diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh
index dc4a2729..7d3e7acf 100755
--- a/advanced/Scripts/piholeDebug.sh
+++ b/advanced/Scripts/piholeDebug.sh
@@ -753,7 +753,7 @@ check_required_ports() {
# Sort the addresses and remove duplicates
while IFS= read -r line; do
ports_in_use+=( "$line" )
- done < <( ss --listening --numeric --tcp --udp --processes --oneline --no-header )
+ done < <( ss --listening --numeric --tcp --udp --processes --no-header )
# Now that we have the values stored,
for i in "${!ports_in_use[@]}"; do
@@ -779,6 +779,21 @@ check_required_ports() {
done
}
+ip_command() {
+ # Obtain and log information from "ip XYZ show" commands
+ echo_current_diagnostic "${2}"
+ local entries=()
+ mapfile -t entries < <(ip "${1}" show)
+ for line in "${entries[@]}"; do
+ log_write " ${line}"
+ done
+}
+
+check_ip_command() {
+ ip_command "addr" "Network interfaces and addresses"
+ ip_command "route" "Network routing table"
+}
+
check_networking() {
# Runs through several of the functions made earlier; we just clump them
# together since they are all related to the networking aspect of things
@@ -787,7 +802,9 @@ check_networking() {
detect_ip_addresses "6"
ping_gateway "4"
ping_gateway "6"
- check_required_ports
+ # Skip the following check if installed in docker container. Unpriv'ed containers do not have access to the information required
+ # to resolve the service name listening - and the container should not start if there was a port conflict anyway
+ [ -z "${PIHOLE_DOCKER_TAG}" ] && check_required_ports
}
check_x_headers() {
@@ -871,7 +888,7 @@ dig_at() {
# This helps emulate queries to different domains that a user might query
# It will also give extra assurance that Pi-hole is correctly resolving and blocking domains
local random_url
- random_url=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1")
+ random_url=$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1")
# Next we need to check if Pi-hole can resolve a domain when the query is sent to it's IP address
# This better emulates how clients will interact with Pi-hole as opposed to above where Pi-hole is
@@ -1185,7 +1202,7 @@ show_db_entries() {
IFS=$'\r\n'
local entries=()
mapfile -t entries < <(\
- sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \
+ pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \
-cmd ".headers on" \
-cmd ".mode column" \
-cmd ".width ${widths}" \
@@ -1210,7 +1227,7 @@ show_FTL_db_entries() {
IFS=$'\r\n'
local entries=()
mapfile -t entries < <(\
- sqlite3 "${PIHOLE_FTL_DB_FILE}" \
+ pihole-FTL sqlite3 "${PIHOLE_FTL_DB_FILE}" \
-cmd ".headers on" \
-cmd ".mode column" \
-cmd ".width ${widths}" \
@@ -1267,7 +1284,7 @@ analyze_gravity_list() {
log_write "${COL_GREEN}${gravity_permissions}${COL_NC}"
show_db_entries "Info table" "SELECT property,value FROM info" "20 40"
- gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")"
+ gravity_updated_raw="$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")"
gravity_updated="$(date -d @"${gravity_updated_raw}")"
log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}"
log_write ""
@@ -1275,7 +1292,7 @@ analyze_gravity_list() {
OLD_IFS="$IFS"
IFS=$'\r\n'
local gravity_sample=()
- mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10")
+ mapfile -t gravity_sample < <(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10")
log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}"
for line in "${gravity_sample[@]}"; do
@@ -1452,6 +1469,7 @@ check_selinux
check_firewalld
processor_check
disk_usage
+check_ip_command
check_networking
check_name_resolution
check_dhcp_servers
diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh
index 5c6a2c68..7547a5fd 100755
--- a/advanced/Scripts/piholeLogFlush.sh
+++ b/advanced/Scripts/piholeLogFlush.sh
@@ -63,7 +63,7 @@ else
fi
fi
# Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history)
- deleted=$(sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1")
+ deleted=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1")
# Restart pihole-FTL to force reloading history
sudo pihole restartdns
diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh
index 0fd9871a..20c891bf 100755
--- a/advanced/Scripts/query.sh
+++ b/advanced/Scripts/query.sh
@@ -121,7 +121,7 @@ scanDatabaseTable() {
fi
# Send prepared query to gravity database
- result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null
+ result="$(pihole-FTL sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null
if [[ -z "${result}" ]]; then
# Return early when there are no matches in this table
return
@@ -164,7 +164,7 @@ scanRegexDatabaseTable() {
type="${3:-}"
# Query all regex from the corresponding database tables
- mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null)
+ mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null)
# If we have regexps to process
if [[ "${#regexList[@]}" -ne 0 ]]; then
@@ -233,7 +233,7 @@ for result in "${results[@]}"; do
adlistAddress="${extra/|*/}"
extra="${extra#*|}"
if [[ "${extra}" == "0" ]]; then
- extra="(disabled)"
+ extra=" (disabled)"
else
extra=""
fi
@@ -241,7 +241,7 @@ for result in "${results[@]}"; do
if [[ -n "${blockpage}" ]]; then
echo "0 ${adlistAddress}"
elif [[ -n "${exact}" ]]; then
- echo " - ${adlistAddress} ${extra}"
+ echo " - ${adlistAddress}${extra}"
else
if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then
count=""
@@ -256,7 +256,7 @@ for result in "${results[@]}"; do
[[ "${count}" -gt "${max_count}" ]] && continue
echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}"
else
- echo " ${match} ${extra}"
+ echo " ${match}${extra}"
fi
fi
done
diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh
index d18d2e78..9da85c89 100755
--- a/advanced/Scripts/update.sh
+++ b/advanced/Scripts/update.sh
@@ -41,7 +41,7 @@ GitCheckUpdateAvail() {
cd "${directory}" || return
# Fetch latest changes in this repo
- git fetch --tags --quiet origin
+ git fetch --quiet origin
# Check current branch. If it is master, then check for the latest available tag instead of latest commit.
curBranch=$(git rev-parse --abbrev-ref HEAD)
diff --git a/advanced/Scripts/utils.sh b/advanced/Scripts/utils.sh
new file mode 100755
index 00000000..887816cc
--- /dev/null
+++ b/advanced/Scripts/utils.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Pi-hole: A black hole for Internet advertisements
+# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
+# Network-wide ad blocking via your own hardware.
+#
+# Script to hold utility functions for use in other scripts
+#
+# This file is copyright under the latest version of the EUPL.
+# Please see LICENSE file for your rights under this license.
+
+# Basic Housekeeping rules
+# - Functions must be self contained
+# - Functions must be added in alphabetical order
+# - Functions must be documented
+# - New functions must have a test added for them in test/test_any_utils.py
+
+#######################
+# Takes three arguments key, value, and file.
+# Checks the target file for the existence of the key
+# - If it exists, it changes the value
+# - If it does not exist, it adds the value
+#
+# Example usage:
+# addOrEditKeyValuePair "BLOCKING_ENABLED" "true" "/etc/pihole/setupVars.conf"
+#######################
+addOrEditKeyValPair() {
+ local key="${1}"
+ local value="${2}"
+ local file="${3}"
+ if grep -q "^${key}=" "${file}"; then
+ sed -i "/^${key}=/c\\${key}=${value}" "${file}"
+ else
+ echo "${key}=${value}" >> "${file}"
+ fi
+}
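
The new helper is sourced by the pihole entry script (see the pihole diff below) and exercised by test/test_any_utils.py. A short usage sketch, assuming the installed copy at /opt/pihole/utils.sh; the key, value, and target file are illustrative:

    # Source the helper, then add or update keys idempotently
    source /opt/pihole/utils.sh
    addOrEditKeyValPair "BLOCKING_ENABLED" "true" "/etc/pihole/setupVars.conf"   # appends the line if the key is absent
    addOrEditKeyValPair "BLOCKING_ENABLED" "false" "/etc/pihole/setupVars.conf"  # rewrites the existing line in place
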
diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh
index 4f44eca8..aa4795dd 100755
--- a/advanced/Scripts/webpage.sh
+++ b/advanced/Scripts/webpage.sh
@@ -37,15 +37,16 @@ Example: pihole -a -p password
Set options for the Admin Console
Options:
- -p, password Set Admin Console password
- -c, celsius Set Celsius as preferred temperature unit
- -f, fahrenheit Set Fahrenheit as preferred temperature unit
- -k, kelvin Set Kelvin as preferred temperature unit
- -e, email Set an administrative contact address for the Block Page
- -h, --help Show this help dialog
- -i, interface Specify dnsmasq's interface listening behavior
- -l, privacylevel Set privacy level (0 = lowest, 3 = highest)
- -t, teleporter Backup configuration as an archive"
+ -p, password Set Admin Console password
+ -c, celsius Set Celsius as preferred temperature unit
+ -f, fahrenheit Set Fahrenheit as preferred temperature unit
+ -k, kelvin Set Kelvin as preferred temperature unit
+ -e, email Set an administrative contact address for the Block Page
+ -h, --help Show this help dialog
+ -i, interface Specify dnsmasq's interface listening behavior
+ -l, privacylevel Set privacy level (0 = lowest, 3 = highest)
+ -t, teleporter Backup configuration as an archive
+ -t, teleporter myname.tar.gz Backup configuration to archive with name myname.tar.gz as specified"
exit 0
}
@@ -523,13 +524,13 @@ CustomizeAdLists() {
if CheckUrl "${address}"; then
if [[ "${args[2]}" == "enable" ]]; then
- sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'"
+ pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'"
elif [[ "${args[2]}" == "disable" ]]; then
- sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'"
+ pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'"
elif [[ "${args[2]}" == "add" ]]; then
- sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')"
+ pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')"
elif [[ "${args[2]}" == "del" ]]; then
- sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'"
+ pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'"
else
echo "Not permitted"
return 1
@@ -640,12 +641,17 @@ Interfaces:
}
Teleporter() {
- local datetimestamp
- local host
- datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
- host=$(hostname)
- host="${host//./_}"
- php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-${host:-noname}-teleporter_${datetimestamp}.tar.gz"
+ local filename
+ filename="${args[2]}"
+ if [[ -z "${filename}" ]]; then
+ local datetimestamp
+ local host
+ datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
+ host=$(hostname)
+ host="${host//./_}"
+ filename="pi-hole-${host:-noname}-teleporter_${datetimestamp}.tar.gz"
+ fi
+ php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "${filename}"
}
checkDomain()
@@ -681,12 +687,12 @@ addAudit()
done
# Insert only the domain here. The date_added field will be
# filled with its default value (date_added = current timestamp)
- sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};"
+ pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};"
}
clearAudit()
{
- sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;"
+ pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;"
}
SetPrivacyLevel() {
@@ -733,7 +739,7 @@ RemoveCustomDNSAddress() {
validHost="$(checkDomain "${host}")"
if [[ -n "${validHost}" ]]; then
if valid_ip "${ip}" || valid_ip6 "${ip}" ; then
- sed -i "/^${ip} ${validHost}$/d" "${dnscustomfile}"
+ sed -i "/^${ip} ${validHost}$/Id" "${dnscustomfile}"
else
echo -e " ${CROSS} Invalid IP has been passed"
exit 1
@@ -786,7 +792,7 @@ RemoveCustomCNAMERecord() {
if [[ -n "${validDomain}" ]]; then
validTarget="$(checkDomain "${target}")"
if [[ -n "${validTarget}" ]]; then
- sed -i "/cname=${validDomain},${validTarget}$/d" "${dnscustomcnamefile}"
+ sed -i "/cname=${validDomain},${validTarget}$/Id" "${dnscustomcnamefile}"
else
echo " ${CROSS} Invalid Target Passed!"
exit 1
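
The Teleporter() change above lets the archive name be passed through from the command line, matching the new -t/teleporter help-text entry. A brief usage sketch:

    # Default: timestamped archive named after the host, e.g. pi-hole-<host>-teleporter_<datetime>.tar.gz
    pihole -a -t
    # New in v5.9: write the backup to an explicitly named archive
    pihole -a -t myname.tar.gz
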
diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql
index 4a2a9b22..3bea731d 100644
--- a/advanced/Templates/gravity_copy.sql
+++ b/advanced/Templates/gravity_copy.sql
@@ -12,14 +12,17 @@ INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group";
INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit;
INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist;
+DELETE FROM OLD.domainlist_by_group WHERE domainlist_id NOT IN (SELECT id FROM OLD.domainlist);
INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group;
INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist;
+DELETE FROM OLD.adlist_by_group WHERE adlist_id NOT IN (SELECT id FROM OLD.adlist);
INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group;
INSERT OR REPLACE INTO info SELECT * FROM OLD.info;
INSERT OR REPLACE INTO client SELECT * FROM OLD.client;
+DELETE FROM OLD.client_by_group WHERE client_id NOT IN (SELECT id FROM OLD.client);
INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group;
diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh
index 3780f7b0..1e004b8b 100755
--- a/automated install/basic-install.sh
+++ b/automated install/basic-install.sh
@@ -287,7 +287,7 @@ package_manager_detect() {
# Packages required to run this install script (stored as an array)
INSTALLER_DEPS=(git iproute2 whiptail ca-certificates)
# Packages required to run Pi-hole (stored as an array)
- PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 sqlite3 libcap2-bin dns-root-data libcap2 netcat)
+ PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 libcap2-bin dns-root-data libcap2 netcat-openbsd)
# Packages required for the Web admin interface (stored as an array)
# It's useful to separate this from Pi-hole, since the two repos are also setup separately
PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-sqlite3" "${phpVer}-xml" "${phpVer}-intl")
@@ -332,7 +332,7 @@ package_manager_detect() {
PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l"
OS_CHECK_DEPS=(grep bind-utils)
INSTALLER_DEPS=(git iproute newt procps-ng which chkconfig ca-certificates)
- PIHOLE_DEPS=(cronie curl findutils sudo unzip libidn2 psmisc sqlite libcap nmap-ncat)
+ PIHOLE_DEPS=(cronie curl findutils sudo unzip libidn2 psmisc libcap nmap-ncat)
PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json php-intl)
LIGHTTPD_USER="lighttpd"
LIGHTTPD_GROUP="lighttpd"
@@ -1371,7 +1371,12 @@ install_manpage() {
# Testing complete, copy the files & update the man db
install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8
install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8
- install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5
+
+ # remove previously installed "pihole-FTL.conf.5" man page
+ if [[ -f "/usr/local/share/man/man5/pihole-FTL.conf.5" ]]; then
+ rm /usr/local/share/man/man5/pihole-FTL.conf.5
+ fi
+
if mandb -q &>/dev/null; then
# Updated successfully
printf "%b %b man pages installed and database updated\\n" "${OVER}" "${TICK}"
@@ -1379,7 +1384,7 @@ install_manpage() {
else
# Something is wrong with the system's man installation, clean up
# our files, (leave everything how we found it).
- rm /usr/local/share/man/man8/pihole.8 /usr/local/share/man/man8/pihole-FTL.8 /usr/local/share/man/man5/pihole-FTL.conf.5
+ rm /usr/local/share/man/man8/pihole.8 /usr/local/share/man/man8/pihole-FTL.8
printf "%b %b man page db not updated, man pages not installed\\n" "${OVER}" "${CROSS}"
fi
}
diff --git a/gravity.sh b/gravity.sh
index a6ab3c86..9c11fa98 100755
--- a/gravity.sh
+++ b/gravity.sh
@@ -73,9 +73,9 @@ if [[ -r "${piholeDir}/pihole.conf" ]]; then
echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
fi
-# Generate new sqlite3 file from schema template
+# Generate new SQLite3 file from schema template
generate_gravity_database() {
- if ! sqlite3 "${gravityDBfile}" < "${gravityDBschema}"; then
+ if ! pihole-FTL sqlite3 "${gravityDBfile}" < "${gravityDBschema}"; then
echo -e " ${CROSS} Unable to create ${gravityDBfile}"
return 1
fi
@@ -85,12 +85,12 @@ generate_gravity_database() {
# Copy data from old to new database file and swap them
gravity_swap_databases() {
- local str copyGravity
+ local str copyGravity oldAvail
str="Building tree"
echo -ne " ${INFO} ${str}..."
# The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
- output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
+ output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -102,22 +102,6 @@ gravity_swap_databases() {
str="Swapping databases"
echo -ne " ${INFO} ${str}..."
- # Gravity copying SQL script
- copyGravity="$(cat "${gravityDBcopy}")"
- if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
- # Replace default gravity script location by custom location
- copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
- fi
-
- output=$( { sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
- status="$?"
-
- if [[ "${status}" -ne 0 ]]; then
- echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}"
- return 1
- fi
- echo -e "${OVER} ${TICK} ${str}"
-
# Swap databases and remove or conditionally rename old database
# Number of available blocks on disk
availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
@@ -125,18 +109,24 @@ gravity_swap_databases() {
gravityBlocks=$(stat --format "%b" ${gravityDBfile})
# Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
# Better be safe than sorry...
+ oldAvail=false
if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${gravityDBfile}" ]; then
- echo -e " ${TICK} The old database remains available."
+ oldAvail=true
mv "${gravityDBfile}" "${gravityOLDfile}"
else
rm "${gravityDBfile}"
fi
mv "${gravityTEMPfile}" "${gravityDBfile}"
+ echo -e "${OVER} ${TICK} ${str}"
+
+ if $oldAvail; then
+ echo -e " ${TICK} The old database remains available."
+ fi
}
# Update timestamp when the gravity table was last updated successfully
update_gravity_timestamp() {
- output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -177,7 +167,7 @@ database_table_from_file() {
# Get MAX(id) from domainlist when INSERTing into this table
if [[ "${table}" == "domainlist" ]]; then
- rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
+ rowid="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
if [[ -z "$rowid" ]]; then
rowid=0
fi
@@ -207,7 +197,7 @@ database_table_from_file() {
# Store domains in database table specified by ${table}
# Use printf as .mode and .import need to be on separate lines
# see https://unix.stackexchange.com/a/445615/83260
- output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -227,7 +217,7 @@ database_table_from_file() {
# Update timestamp of last update of this list. We store this in the "old" database as all values in the new database will later be overwritten
database_adlist_updated() {
- output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -238,7 +228,7 @@ database_adlist_updated() {
# Check if a column with name ${2} exists in gravity table with name ${1}
gravity_column_exists() {
- output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
if [[ "${output}" == "1" ]]; then
return 0 # Bash 0 is success
fi
@@ -253,7 +243,7 @@ database_adlist_number() {
return;
fi
- output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_source_lines}" "${num_invalid}" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_source_lines}" "${num_invalid}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -269,7 +259,7 @@ database_adlist_status() {
return;
fi
- output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -386,9 +376,9 @@ gravity_DownloadBlocklists() {
fi
# Retrieve source URLs from gravity database
- # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true)
- mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
- mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
+ # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
+ mapfile -t sources <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
+ mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
# Parse source domains from $sources
mapfile -t sourceDomains <<< "$(
@@ -402,14 +392,12 @@ gravity_DownloadBlocklists() {
)"
local str="Pulling blocklist source list into range"
+ echo -e "${OVER} ${TICK} ${str}"
- if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
- echo -e "${OVER} ${TICK} ${str}"
- else
- echo -e "${OVER} ${CROSS} ${str}"
+ if [[ -z "${sources[*]}" ]] || [[ -z "${sourceDomains[*]}" ]]; then
echo -e " ${INFO} No source list found, or it is empty"
echo ""
- return 1
+ unset sources
fi
local url domain agent cmd_ext str target compression
@@ -419,7 +407,7 @@ gravity_DownloadBlocklists() {
str="Preparing new gravity database"
echo -ne " ${INFO} ${str}..."
rm "${gravityTEMPfile}" > /dev/null 2>&1
- output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
+ output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -477,9 +465,28 @@ gravity_DownloadBlocklists() {
echo ""
done
+ str="Creating new gravity databases"
+ echo -ne " ${INFO} ${str}..."
+
+ # Gravity copying SQL script
+ copyGravity="$(cat "${gravityDBcopy}")"
+ if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
+ # Replace default gravity script location by custom location
+ copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
+ fi
+
+ output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
+ status="$?"
+
+ if [[ "${status}" -ne 0 ]]; then
+ echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}"
+ return 1
+ fi
+ echo -e "${OVER} ${TICK} ${str}"
+
str="Storing downloaded domains in new gravity database"
echo -ne " ${INFO} ${str}..."
- output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 )
+ output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -784,12 +791,12 @@ gravity_Table_Count() {
local table="${1}"
local str="${2}"
local num
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")"
+ num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")"
if [[ "${table}" == "vw_gravity" ]]; then
local unique
- unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")"
+ unique="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")"
echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
- sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
+ pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
else
echo -e " ${INFO} Number of ${str}: ${num}"
fi
diff --git a/manpages/pihole-FTL.8 b/manpages/pihole-FTL.8
index c7b69d83..c1b7550f 100644
--- a/manpages/pihole-FTL.8
+++ b/manpages/pihole-FTL.8
@@ -144,7 +144,9 @@ Command line arguments can be arbitrarily combined, e.g:
Start ftl in foreground with more verbose logging, process everything and shutdown immediately
.br
.SH "SEE ALSO"
-\fBpihole\fR(8), \fBpihole-FTL.conf\fR(5)
+\fBpihole\fR(8)
+.br
+\fBFor FTL's config options please see https://docs.pi-hole.net/ftldns/configfile/\fR
.br
.SH "COLOPHON"
diff --git a/manpages/pihole-FTL.conf.5 b/manpages/pihole-FTL.conf.5
deleted file mode 100644
index 42405436..00000000
--- a/manpages/pihole-FTL.conf.5
+++ /dev/null
@@ -1,313 +0,0 @@
-.TH "pihole-FTL.conf" "5" "pihole-FTL.conf" "pihole-FTL.conf" "November 2020"
-.SH "NAME"
-
-pihole-FTL.conf - FTL's config file
-.br
-.SH "DESCRIPTION"
-
-/etc/pihole/pihole-FTL.conf will be read by \fBpihole-FTL(8)\fR on startup.
-.br
-For each setting the option shown first is the default.
-.br
-
-\fBBLOCKINGMODE=IP|IP-AAAA-NODATA|NODATA|NXDOMAIN|NULL\fR
-.br
- How should FTL reply to blocked queries?
-
- IP - Pi-hole's IPs for blocked domains
-
- IP-AAAA-NODATA - Pi-hole's IP + NODATA-IPv6 for blocked domains
-
- NODATA - Using NODATA for blocked domains
-
- NXDOMAIN - NXDOMAIN for blocked domains
-
- NULL - Null IPs for blocked domains
-.br
-
-\fBCNAME_DEEP_INSPECT=true|false\fR
-.br
- Use this option to disable deep CNAME inspection. This might be beneficial for very low-end devices.
-.br
-
-\fBBLOCK_ESNI=true|false\fR
-.br
- Block requests to _esni.* sub-domains.
-.br
-
-\fBMAXLOGAGE=24.0\fR
-.br
- Up to how many hours of queries should be imported from the database and logs?
-.br
- Maximum is 744 (31 days)
-.br
-
-\fBPRIVACYLEVEL=0|1|2|3|4\fR
-.br
- Privacy level used to collect Pi-hole statistics.
-.br
- 0 - show everything
-.br
- 1 - hide domains
-.br
- 2 - hide domains and clients
-.br
- 3 - anonymous mode (hide everything)
-.br
- 4 - disable all statistics
-.br
-
-\fBIGNORE_LOCALHOST=no|yes\fR
-.br
- Should FTL ignore queries coming from the local machine?
-.br
-
-\fBAAAA_QUERY_ANALYSIS=yes|no\fR
-.br
- Should FTL analyze AAAA queries?
-.br
-
-\fBANALYZE_ONLY_A_AND_AAAA=false|true\fR
-.br
- Should FTL only analyze A and AAAA queries?
-.br
-
-\fBSOCKET_LISTENING=localonly|all\fR
-.br
- Listen only for local socket connections on the API port or permit all connections.
-.br
-
-\fBFTLPORT=4711\fR
-.br
- On which port should FTL be listening?
-.br
-
-\fBRESOLVE_IPV6=yes|no\fR
-.br
- Should FTL try to resolve IPv6 addresses to hostnames?
-.br
-
-\fBRESOLVE_IPV4=yes|no\fR
-.br
- Should FTL try to resolve IPv4 addresses to hostnames?
-.br
-
-\fBDELAY_STARTUP=0\fR
-.br
- Time in seconds (between 0 and 300) to delay FTL startup.
-.br
-
-\fBNICE=-10\fR
-.br
- Set the niceness of the Pi-hole FTL process.
-.br
- Can be disabled altogether by setting a value of -999.
-.br
-
-\fBNAMES_FROM_NETDB=true|false\fR
-.br
- Control whether FTL should use a fallback option and try to obtain client names from checking the network table.
-.br
- E.g. IPv6 clients without a hostname will be compared via MAC address to known clients.
-.br
-
-\fB\fBREFRESH_HOSTNAMES=IPV4|ALL|NONE\fR
-.br
- Change how (and if) hourly PTR requests are made to check for changes in client and upstream server hostnames:
-.br
- IPV4 - Do the hourly PTR lookups only for IPv4 addresses resolving issues in networks with many short-lived PE IPv6 addresses.
-.br
- ALL - Do the hourly PTR lookups for all addresses. This can create a lot of PTR queries in networks with many IPv6 addresses.
-.br
- NONE - Don't do hourly PTR lookups. Look up hostnames once (when first seeing a client) and never again. Future hostname changes may be missed.
-.br
-
-\fBMAXNETAGE=365\fR
-.br
- IP addresses (and associated host names) older than the specified number of days are removed.
-.br
- This avoids dead entries in the network overview table.
-.br
-
-\fBEDNS0_ECS=true|false\fR
-.br
- Should we overwrite the query source when client information is provided through EDNS0 client subnet (ECS) information?
-.br
-
-\fBPARSE_ARP_CACHE=true|false\fR
-.br
- Parse ARP cache to fill network overview table.
-.br
-
-\fBDBIMPORT=yes|no\fR
-.br
- Should FTL load information from the database on startup to be aware of the most recent history?
-.br
-
-\fBMAXDBDAYS=365\fR
-.br
- How long should queries be stored in the database? Setting this to 0 disables the database
-.br
-
-\fBDBINTERVAL=1.0\fR
-.br
- How often do we store queries in FTL's database [minutes]?
-.br
- Accepts value between 0.1 (6 sec) and 1440 (1 day)
-.br
-
-\fBDBFILE=/etc/pihole/pihole-FTL.db\fR
-.br
- Specify path and filename of FTL's SQLite long-term database.
-.br
- Setting this to DBFILE= disables the database altogether
-.br
-
-\fBLOGFILE=/var/log/pihole-FTL.log\fR
-.br
- The location of FTL's log file.
-.br
-
-\fBPIDFILE=/run/pihole-FTL.pid\fR
-.br
- The file which contains the PID of FTL's main process.
-.br
-
-\fBPORTFILE=/run/pihole-FTL.port\fR
-.br
- Specify path and filename where the FTL process will write its API port number.
-.br
-
-\fBSOCKETFILE=/run/pihole/FTL.sock\fR
-.br
- The file containing the socket FTL's API is listening on.
-.br
-
-\fBSETUPVARSFILE=/etc/pihole/setupVars.conf\fR
-.br
- The config file of Pi-hole containing, e.g., the current blocking status (do not change).
-.br
-
-\fBMACVENDORDB=/etc/pihole/macvendor.db\fR
-.br
- The database containing MAC -> Vendor information for the network table.
-.br
-
-\fBGRAVITYDB=/etc/pihole/gravity.db\fR
-.br
- Specify path and filename of FTL's SQLite3 gravity database. This database contains all domains relevant for Pi-hole's DNS blocking.
-.br
-
-\fBDEBUG_ALL=false|true\fR
-.br
- Enable all debug flags. If this is set to true, all other debug config options are ignored.
-.br
-
-\fBDEBUG_DATABASE=false|true\fR
-.br
- Print debugging information about database actions such as SQL statements and performance.
-.br
-
-\fBDEBUG_NETWORKING=false|true\fR
-.br
- Prints a list of the detected network interfaces on the startup of FTL.
-.br
-
-\fBDEBUG_LOCKS=false|true\fR
-.br
- Print information about shared memory locks.
-.br
- Messages will be generated when waiting, obtaining, and releasing a lock.
-.br
-
-\fBDEBUG_QUERIES=false|true\fR
-.br
- Print extensive DNS query information (domains, types, replies, etc.).
-.br
-
-\fBDEBUG_FLAGS=false|true\fR
-.br
- Print flags of queries received by the DNS hooks.
-.br
- Only effective when \fBDEBUG_QUERIES\fR is enabled as well.
-
-\fBDEBUG_SHMEM=false|true\fR
-.br
- Print information about shared memory buffers.
-.br
- Messages are either about creating or enlarging shmem objects or string injections.
-.br
-
-\fBDEBUG_GC=false|true\fR
-.br
- Print information about garbage collection (GC):
-.br
- What is to be removed, how many have been removed and how long did GC take.
-.br
-
-\fBDEBUG_ARP=false|true\fR
-.br
- Print information about ARP table processing:
-.br
- How long did parsing take, whether read MAC addresses are valid, and if the macvendor.db file exists.
-.br
-
-\fBDEBUG_REGEX=false|true\fR
-.br
- Controls if FTL should print extended details about regex matching.
-.br
-
-\fBDEBUG_API=false|true\fR
-.br
- Print extra debugging information during telnet API calls.
-.br
- Currently only used to send extra information when getting all queries.
-.br
-
-\fBDEBUG_OVERTIME=false|true\fR
-.br
- Print information about overTime memory operations, such as initializing or moving overTime slots.
-.br
-
-\fBDEBUG_EXTBLOCKED=false|true\fR
-.br
- Print information about why FTL decided that certain queries were recognized as being externally blocked.
-.br
-
-\fBDEBUG_CAPS=false|true\fR
-.br
- Print information about POSIX capabilities granted to the FTL process.
-.br
- The current capabilities are printed on receipt of SIGHUP i.e. after executing `killall -HUP pihole-FTL`.
-.br
-
-\fBDEBUG_DNSMASQ_LINES=false|true\fR
-.br
- Print file and line causing a dnsmasq event into FTL's log files.
-.br
- This is handy to implement additional hooks missing from FTL.
-.br
-
-\fBDEBUG_VECTORS=false|true\fR
-.br
- FTL uses dynamically allocated vectors for various tasks.
-.br
- This config option enables extensive debugging information such as information about allocation, referencing, deletion, and appending.
-.br
-
-\fBDEBUG_RESOLVER=false|true\fR
-.br
- Extensive information about hostname resolution like which DNS servers are used in the first and second hostname resolving tries.
-.br
-
-.SH "SEE ALSO"
-
-\fBpihole\fR(8), \fBpihole-FTL\fR(8)
-.br
-.SH "COLOPHON"
-
-Pi-hole : The Faster-Than-Light (FTL) Engine is a lightweight, purpose-built daemon used to provide statistics needed for the Pi-hole Web Interface, and its API can be easily integrated into your own projects. Although it is an optional component of the Pi-hole ecosystem, it will be installed by default to provide statistics. As the name implies, FTL does its work \fIvery quickly\fR!
-.br
-
-Get sucked into the latest news and community activity by entering Pi-hole's orbit. Information about Pi-hole, and the latest version of the software can be found at https://pi-hole.net
-.br
diff --git a/pihole b/pihole
index 8af47dc8..56d47eca 100755
--- a/pihole
+++ b/pihole
@@ -21,6 +21,9 @@ readonly FTL_PID_FILE="/run/pihole-FTL.pid"
readonly colfile="${PI_HOLE_SCRIPT_DIR}/COL_TABLE"
source "${colfile}"
+readonly utilsfile="${PI_HOLE_SCRIPT_DIR}/utils.sh"
+source "${utilsfile}"
+
webpageFunc() {
source "${PI_HOLE_SCRIPT_DIR}/webpage.sh"
main "$@"
@@ -223,8 +226,7 @@ Time:
fi
local str="Pi-hole Disabled"
- sed -i "/BLOCKING_ENABLED=/d" "${setupVars}"
- echo "BLOCKING_ENABLED=false" >> "${setupVars}"
+ addOrEditKeyValPair "BLOCKING_ENABLED" "false" "${setupVars}"
fi
else
# Enable Pi-hole
@@ -236,8 +238,7 @@ Time:
echo -e " ${INFO} Enabling blocking"
local str="Pi-hole Enabled"
- sed -i "/BLOCKING_ENABLED=/d" "${setupVars}"
- echo "BLOCKING_ENABLED=true" >> "${setupVars}"
+ addOrEditKeyValPair "BLOCKING_ENABLED" "true" "${setupVars}"
fi
restartDNS reload-lists
@@ -260,7 +261,7 @@ Options:
elif [[ "${1}" == "off" ]]; then
# Disable logging
sed -i 's/^log-queries/#log-queries/' /etc/dnsmasq.d/01-pihole.conf
- sed -i 's/^QUERY_LOGGING=true/QUERY_LOGGING=false/' /etc/pihole/setupVars.conf
+ addOrEditKeyValPair "QUERY_LOGGING" "false" "${setupVars}"
if [[ "${2}" != "noflush" ]]; then
# Flush logs
"${PI_HOLE_BIN_DIR}"/pihole -f
@@ -270,7 +271,7 @@ Options:
elif [[ "${1}" == "on" ]]; then
# Enable logging
sed -i 's/^#log-queries/log-queries/' /etc/dnsmasq.d/01-pihole.conf
- sed -i 's/^QUERY_LOGGING=false/QUERY_LOGGING=true/' /etc/pihole/setupVars.conf
+ addOrEditKeyValPair "QUERY_LOGGING" "true" "${setupVars}"
echo -e " ${INFO} Enabling logging..."
local str="Logging has been enabled!"
else
diff --git a/test/_centos_8.Dockerfile b/test/_centos_8.Dockerfile
index fddb3ed1..86e5a778 100644
--- a/test/_centos_8.Dockerfile
+++ b/test/_centos_8.Dockerfile
@@ -1,4 +1,4 @@
-FROM centos:8
+FROM quay.io/centos/centos:stream8
RUN yum install -y git
ENV GITDIR /etc/.pihole
diff --git a/test/test_automated_install.py b/test/test_any_automated_install.py
index 7959e100..b7b4ccd8 100644
--- a/test/test_automated_install.py
+++ b/test/test_any_automated_install.py
@@ -351,10 +351,6 @@ def test_installPihole_fresh_install_readableFiles(host):
'r', '/usr/local/share/man/man8/pihole-FTL.8', piholeuser)
actual_rc = host.run(check_man).rc
assert exit_status_success == actual_rc
- check_man = test_cmd.format(
- 'r', '/usr/local/share/man/man5/pihole-FTL.conf.5', piholeuser)
- actual_rc = host.run(check_man).rc
- assert exit_status_success == actual_rc
# check not readable sudoers file
check_sudo = test_cmd.format(
'r', '/etc/sudoers.d/pihole', piholeuser)
diff --git a/test/test_any_utils.py b/test/test_any_utils.py
new file mode 100644
index 00000000..ba9b2d23
--- /dev/null
+++ b/test/test_any_utils.py
@@ -0,0 +1,16 @@
+def test_key_val_replacement_works(host):
+ ''' Confirms addOrEditKeyValPair provides the expected output '''
+ host.run('''
+ setupvars=./testoutput
+ source /opt/pihole/utils.sh
+ addOrEditKeyValPair "KEY_ONE" "value1" "./testoutput"
+ addOrEditKeyValPair "KEY_TWO" "value2" "./testoutput"
+ addOrEditKeyValPair "KEY_ONE" "value3" "./testoutput"
+ addOrEditKeyValPair "KEY_FOUR" "value4" "./testoutput"
+ cat ./testoutput
+ ''')
+ output = host.run('''
+ cat ./testoutput
+ ''')
+ expected_stdout = 'KEY_ONE=value3\nKEY_TWO=value2\nKEY_FOUR=value4\n'
+ assert expected_stdout == output.stdout
diff --git a/test/tox.centos_7.ini b/test/tox.centos_7.ini
index 88940fdd..319465dd 100644
--- a/test/tox.centos_7.ini
+++ b/test/tox.centos_7.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _centos_7.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_7_support.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_7_support.py
diff --git a/test/tox.centos_8.ini b/test/tox.centos_8.ini
index 5088da16..c7926289 100644
--- a/test/tox.centos_8.ini
+++ b/test/tox.centos_8.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _centos_8.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_8_support.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_8_support.py
diff --git a/test/tox.debian_10.ini b/test/tox.debian_10.ini
index 9c2a05d1..3b182cdc 100644
--- a/test/tox.debian_10.ini
+++ b/test/tox.debian_10.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _debian_10.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.debian_11.ini b/test/tox.debian_11.ini
index f3cdbe84..c7e41a91 100644
--- a/test/tox.debian_11.ini
+++ b/test/tox.debian_11.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _debian_11.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.debian_9.ini b/test/tox.debian_9.ini
index b46e0a49..56b9d37f 100644
--- a/test/tox.debian_9.ini
+++ b/test/tox.debian_9.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _debian_9.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.fedora_33.ini b/test/tox.fedora_33.ini
index d33fbf53..b17bd563 100644
--- a/test/tox.fedora_33.ini
+++ b/test/tox.fedora_33.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _fedora_33.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_fedora_support.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py
diff --git a/test/tox.fedora_34.ini b/test/tox.fedora_34.ini
index 819291fa..26856984 100644
--- a/test/tox.fedora_34.ini
+++ b/test/tox.fedora_34.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _fedora_34.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_fedora_support.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py
diff --git a/test/tox.ubuntu_16.ini b/test/tox.ubuntu_16.ini
index bce948a2..f8f6e92a 100644
--- a/test/tox.ubuntu_16.ini
+++ b/test/tox.ubuntu_16.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _ubuntu_16.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.ubuntu_18.ini b/test/tox.ubuntu_18.ini
index cf7a3642..a2513dfd 100644
--- a/test/tox.ubuntu_18.ini
+++ b/test/tox.ubuntu_18.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _ubuntu_18.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.ubuntu_20.ini b/test/tox.ubuntu_20.ini
index 03b605ce..fb3d20d7 100644
--- a/test/tox.ubuntu_20.ini
+++ b/test/tox.ubuntu_20.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _ubuntu_20.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
diff --git a/test/tox.ubuntu_21.ini b/test/tox.ubuntu_21.ini
index 12b1ac0b..070d3a72 100644
--- a/test/tox.ubuntu_21.ini
+++ b/test/tox.ubuntu_21.ini
@@ -5,4 +5,4 @@ envlist = py38
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f _ubuntu_21.Dockerfile -t pytest_pihole:test_container ../
- pytest {posargs:-vv -n auto} ./test_automated_install.py
+ pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py
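
Each distro's tox config now runs the renamed test_any_automated_install.py plus the new test_any_utils.py. A sketch of running one environment locally, assuming tox and Docker are available and the command is issued from the test/ directory (the Docker build context is ../):

    # Build the Debian 11 test container and run the install + utils tests
    tox -c tox.debian_11.ini
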