github.com/pi-hole/pi-hole.git
author     Adam Warner <me@adamwarner.co.uk>  2020-05-10 21:07:53 +0300
committer  GitHub <noreply@github.com>  2020-05-10 21:07:53 +0300
commit     4d25f695267590b61a4061f9bb43448005d99b85 (patch)
tree       ee8bbc96efe66313c92e06b1a3376599fbf909a7 /advanced
parent     9e490775ff3b20f378acc9db7cec2ae6023fff7f (diff)
parent     e728d7f76199df35377c06547b1842101db7e0db (diff)
Merge pull request #3321 from pi-hole/release/v5.0 (v5.0)
Pi-hole core v5.0
Diffstat (limited to 'advanced')
-rw-r--r--  advanced/01-pihole.conf | 3
-rw-r--r--  advanced/Scripts/database_migration/gravity-db.sh | 113
-rw-r--r--  advanced/Scripts/database_migration/gravity/10_to_11.sql | 16
-rw-r--r--  advanced/Scripts/database_migration/gravity/11_to_12.sql | 19
-rw-r--r--  advanced/Scripts/database_migration/gravity/1_to_2.sql | 14
-rw-r--r--  advanced/Scripts/database_migration/gravity/2_to_3.sql | 65
-rw-r--r--  advanced/Scripts/database_migration/gravity/3_to_4.sql | 96
-rw-r--r--  advanced/Scripts/database_migration/gravity/4_to_5.sql | 38
-rw-r--r--  advanced/Scripts/database_migration/gravity/5_to_6.sql | 18
-rw-r--r--  advanced/Scripts/database_migration/gravity/6_to_7.sql | 35
-rw-r--r--  advanced/Scripts/database_migration/gravity/7_to_8.sql | 35
-rw-r--r--  advanced/Scripts/database_migration/gravity/8_to_9.sql | 27
-rw-r--r--  advanced/Scripts/database_migration/gravity/9_to_10.sql | 29
-rwxr-xr-x  advanced/Scripts/list.sh | 350
-rwxr-xr-x  advanced/Scripts/piholeARPTable.sh | 66
-rw-r--r--  advanced/Scripts/piholeCheckout.sh | 3
-rwxr-xr-x  advanced/Scripts/piholeDebug.sh | 128
-rwxr-xr-x  advanced/Scripts/piholeLogFlush.sh | 4
-rwxr-xr-x [-rw-r--r--]  advanced/Scripts/query.sh | 231
-rwxr-xr-x  advanced/Scripts/updatecheck.sh | 3
-rwxr-xr-x  advanced/Scripts/version.sh | 51
-rwxr-xr-x  advanced/Scripts/webpage.sh | 162
-rw-r--r--  advanced/Scripts/wildcard_regex_converter.sh | 2
-rw-r--r--  advanced/Templates/gravity.db.sql | 188
-rw-r--r--  advanced/Templates/gravity_copy.sql | 42
-rw-r--r--  advanced/Templates/pihole-FTL.service | 6
-rw-r--r--  advanced/bash-completion/pihole | 4
-rw-r--r--  advanced/index.php | 69
28 files changed, 1409 insertions, 408 deletions
diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf
index 38d2c0b5..2c8b3749 100644
--- a/advanced/01-pihole.conf
+++ b/advanced/01-pihole.conf
@@ -18,9 +18,8 @@
# WITHIN /etc/dnsmasq.d/yourname.conf #
###############################################################################
-addn-hosts=/etc/pihole/gravity.list
-addn-hosts=/etc/pihole/black.list
addn-hosts=/etc/pihole/local.list
+addn-hosts=/etc/pihole/custom.list
domain-needed
diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh
new file mode 100644
index 00000000..70090a3b
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity-db.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+# shellcheck disable=SC1090
+
+# Pi-hole: A black hole for Internet advertisements
+# (c) 2019 Pi-hole, LLC (https://pi-hole.net)
+# Network-wide ad blocking via your own hardware.
+#
+# Updates gravity.db database
+#
+# This file is copyright under the latest version of the EUPL.
+# Please see LICENSE file for your rights under this license.
+
+readonly scriptPath="/etc/.pihole/advanced/Scripts/database_migration/gravity"
+
+upgrade_gravityDB(){
+ local database piholeDir auditFile version
+ database="${1}"
+ piholeDir="${2}"
+ auditFile="${piholeDir}/auditlog.list"
+
+ # Get database version
+ version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"
+
+ if [[ "$version" == "1" ]]; then
+ # This migration script upgrades the gravity.db file by
+ # adding the domain_audit table
+ echo -e " ${INFO} Upgrading gravity database from version 1 to 2"
+ sqlite3 "${database}" < "${scriptPath}/1_to_2.sql"
+ version=2
+
+ # Store audit domains in database table
+ if [ -e "${auditFile}" ]; then
+ echo -e " ${INFO} Migrating content of ${auditFile} into new database"
+ # database_table_from_file is defined in gravity.sh
+ database_table_from_file "domain_audit" "${auditFile}"
+ fi
+ fi
+ if [[ "$version" == "2" ]]; then
+ # This migration script upgrades the gravity.db file by
+ # renaming the regex table to regex_blacklist, and
+ # creating a new regex_whitelist table + corresponding linking table and views
+ echo -e " ${INFO} Upgrading gravity database from version 2 to 3"
+ sqlite3 "${database}" < "${scriptPath}/2_to_3.sql"
+ version=3
+ fi
+ if [[ "$version" == "3" ]]; then
+	# This migration script unifies the formerly separated domain
+ # lists into a single table with a UNIQUE domain constraint
+ echo -e " ${INFO} Upgrading gravity database from version 3 to 4"
+ sqlite3 "${database}" < "${scriptPath}/3_to_4.sql"
+ version=4
+ fi
+ if [[ "$version" == "4" ]]; then
+ # This migration script upgrades the gravity and list views
+ # implementing necessary changes for per-client blocking
+ echo -e " ${INFO} Upgrading gravity database from version 4 to 5"
+ sqlite3 "${database}" < "${scriptPath}/4_to_5.sql"
+ version=5
+ fi
+ if [[ "$version" == "5" ]]; then
+ # This migration script upgrades the adlist view
+ # to return an ID used in gravity.sh
+ echo -e " ${INFO} Upgrading gravity database from version 5 to 6"
+ sqlite3 "${database}" < "${scriptPath}/5_to_6.sql"
+ version=6
+ fi
+ if [[ "$version" == "6" ]]; then
+ # This migration script adds a special group with ID 0
+ # which is automatically associated to all clients not
+ # having their own group assignments
+ echo -e " ${INFO} Upgrading gravity database from version 6 to 7"
+ sqlite3 "${database}" < "${scriptPath}/6_to_7.sql"
+ version=7
+ fi
+ if [[ "$version" == "7" ]]; then
+	# This migration script recreates the group table
+ # to ensure uniqueness on the group name
+ # We also add date_added and date_modified columns
+ echo -e " ${INFO} Upgrading gravity database from version 7 to 8"
+ sqlite3 "${database}" < "${scriptPath}/7_to_8.sql"
+ version=8
+ fi
+ if [[ "$version" == "8" ]]; then
+ # This migration fixes some issues that were introduced
+ # in the previous migration script.
+ echo -e " ${INFO} Upgrading gravity database from version 8 to 9"
+ sqlite3 "${database}" < "${scriptPath}/8_to_9.sql"
+ version=9
+ fi
+ if [[ "$version" == "9" ]]; then
+ # This migration drops unused tables and creates triggers to remove
+ # obsolete groups assignments when the linked items are deleted
+ echo -e " ${INFO} Upgrading gravity database from version 9 to 10"
+ sqlite3 "${database}" < "${scriptPath}/9_to_10.sql"
+ version=10
+ fi
+ if [[ "$version" == "10" ]]; then
+ # This adds timestamp and an optional comment field to the client table
+	# These fields are only temporary and will be replaced by the columns
+ # defined in gravity.db.sql during gravity swapping. We add them here
+ # to keep the copying process generic (needs the same columns in both the
+ # source and the destination databases).
+ echo -e " ${INFO} Upgrading gravity database from version 10 to 11"
+ sqlite3 "${database}" < "${scriptPath}/10_to_11.sql"
+ version=11
+ fi
+ if [[ "$version" == "11" ]]; then
+ # Rename group 0 from "Unassociated" to "Default"
+ echo -e " ${INFO} Upgrading gravity database from version 11 to 12"
+ sqlite3 "${database}" < "${scriptPath}/11_to_12.sql"
+ version=12
+ fi
+}
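
The function above applies each N_to_N+1.sql script in turn until the stored schema version catches up, so a database of any age is brought to the current layout in a single pass. A minimal sketch of driving it by hand is shown below; the standalone invocation and paths are assumptions for illustration only (in the repository this file is sourced by gravity.sh, which also supplies database_table_from_file and the ${INFO} glyph used in the status lines).

    #!/usr/bin/env bash
    # Hypothetical manual driver for the migration chain (not part of this PR)
    INFO="[i]"   # normally defined via /opt/pihole/COL_TABLE
    source /etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh
    # Apply every pending migration to the default gravity database
    upgrade_gravityDB "/etc/pihole/gravity.db" "/etc/pihole"
    # Confirm the resulting schema version
    sqlite3 /etc/pihole/gravity.db "SELECT value FROM info WHERE property = 'version';"
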
diff --git a/advanced/Scripts/database_migration/gravity/10_to_11.sql b/advanced/Scripts/database_migration/gravity/10_to_11.sql
new file mode 100644
index 00000000..b073f83b
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/10_to_11.sql
@@ -0,0 +1,16 @@
+.timeout 30000
+
+BEGIN TRANSACTION;
+
+ALTER TABLE client ADD COLUMN date_added INTEGER;
+ALTER TABLE client ADD COLUMN date_modified INTEGER;
+ALTER TABLE client ADD COLUMN comment TEXT;
+
+CREATE TRIGGER tr_client_update AFTER UPDATE ON client
+ BEGIN
+ UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
+ END;
+
+UPDATE info SET value = 11 WHERE property = 'version';
+
+COMMIT;
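
The tr_client_update trigger added here keeps date_modified current whenever a client row changes, mirroring the update triggers that already exist for the other tables. A quick way to observe it on a migrated database (the id and comment are example values):

    # Touch a client row; the trigger refreshes its date_modified timestamp
    sqlite3 /etc/pihole/gravity.db "UPDATE client SET comment = 'office printer' WHERE id = 1;"
    sqlite3 /etc/pihole/gravity.db "SELECT id, ip, comment, datetime(date_modified,'unixepoch') FROM client WHERE id = 1;"
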
diff --git a/advanced/Scripts/database_migration/gravity/11_to_12.sql b/advanced/Scripts/database_migration/gravity/11_to_12.sql
new file mode 100644
index 00000000..45fbc845
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/11_to_12.sql
@@ -0,0 +1,19 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+UPDATE "group" SET name = 'Default' WHERE id = 0;
+UPDATE "group" SET description = 'The default group' WHERE id = 0;
+
+DROP TRIGGER IF EXISTS tr_group_zero;
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+ BEGIN
+ INSERT OR IGNORE INTO "group" (id,enabled,name,description) VALUES (0,1,'Default','The default group');
+ END;
+
+UPDATE info SET value = 12 WHERE property = 'version';
+
+COMMIT;
\ No newline at end of file
diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql
new file mode 100644
index 00000000..6d57a6fe
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql
@@ -0,0 +1,14 @@
+.timeout 30000
+
+BEGIN TRANSACTION;
+
+CREATE TABLE domain_audit
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ domain TEXT UNIQUE NOT NULL,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int))
+);
+
+UPDATE info SET value = 2 WHERE property = 'version';
+
+COMMIT;
diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql
new file mode 100644
index 00000000..fd7c24d2
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql
@@ -0,0 +1,65 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+ALTER TABLE regex RENAME TO regex_blacklist;
+
+CREATE TABLE regex_blacklist_by_group
+(
+ regex_blacklist_id INTEGER NOT NULL REFERENCES regex_blacklist (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (regex_blacklist_id, group_id)
+);
+
+INSERT INTO regex_blacklist_by_group SELECT * FROM regex_by_group;
+DROP TABLE regex_by_group;
+DROP VIEW vw_regex;
+DROP TRIGGER tr_regex_update;
+
+CREATE VIEW vw_regex_blacklist AS SELECT DISTINCT domain
+ FROM regex_blacklist
+ LEFT JOIN regex_blacklist_by_group ON regex_blacklist_by_group.regex_blacklist_id = regex_blacklist.id
+ LEFT JOIN "group" ON "group".id = regex_blacklist_by_group.group_id
+ WHERE regex_blacklist.enabled = 1 AND (regex_blacklist_by_group.group_id IS NULL OR "group".enabled = 1)
+ ORDER BY regex_blacklist.id;
+
+CREATE TRIGGER tr_regex_blacklist_update AFTER UPDATE ON regex_blacklist
+ BEGIN
+ UPDATE regex_blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
+ END;
+
+CREATE TABLE regex_whitelist
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ domain TEXT UNIQUE NOT NULL,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ comment TEXT
+);
+
+CREATE TABLE regex_whitelist_by_group
+(
+ regex_whitelist_id INTEGER NOT NULL REFERENCES regex_whitelist (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (regex_whitelist_id, group_id)
+);
+
+CREATE VIEW vw_regex_whitelist AS SELECT DISTINCT domain
+ FROM regex_whitelist
+ LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_whitelist_id = regex_whitelist.id
+ LEFT JOIN "group" ON "group".id = regex_whitelist_by_group.group_id
+ WHERE regex_whitelist.enabled = 1 AND (regex_whitelist_by_group.group_id IS NULL OR "group".enabled = 1)
+ ORDER BY regex_whitelist.id;
+
+CREATE TRIGGER tr_regex_whitelist_update AFTER UPDATE ON regex_whitelist
+ BEGIN
+ UPDATE regex_whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
+ END;
+
+
+UPDATE info SET value = 3 WHERE property = 'version';
+
+COMMIT;
diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql
new file mode 100644
index 00000000..352b1baa
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql
@@ -0,0 +1,96 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+CREATE TABLE domainlist
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ type INTEGER NOT NULL DEFAULT 0,
+ domain TEXT UNIQUE NOT NULL,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ comment TEXT
+);
+
+ALTER TABLE whitelist ADD COLUMN type INTEGER;
+UPDATE whitelist SET type = 0;
+INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
+ SELECT type,domain,enabled,date_added,date_modified,comment FROM whitelist;
+
+ALTER TABLE blacklist ADD COLUMN type INTEGER;
+UPDATE blacklist SET type = 1;
+INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
+ SELECT type,domain,enabled,date_added,date_modified,comment FROM blacklist;
+
+ALTER TABLE regex_whitelist ADD COLUMN type INTEGER;
+UPDATE regex_whitelist SET type = 2;
+INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
+ SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_whitelist;
+
+ALTER TABLE regex_blacklist ADD COLUMN type INTEGER;
+UPDATE regex_blacklist SET type = 3;
+INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
+ SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_blacklist;
+
+DROP TABLE whitelist_by_group;
+DROP TABLE blacklist_by_group;
+DROP TABLE regex_whitelist_by_group;
+DROP TABLE regex_blacklist_by_group;
+CREATE TABLE domainlist_by_group
+(
+ domainlist_id INTEGER NOT NULL REFERENCES domainlist (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (domainlist_id, group_id)
+);
+
+DROP TRIGGER tr_whitelist_update;
+DROP TRIGGER tr_blacklist_update;
+DROP TRIGGER tr_regex_whitelist_update;
+DROP TRIGGER tr_regex_blacklist_update;
+CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist
+ BEGIN
+ UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
+ END;
+
+DROP VIEW vw_whitelist;
+CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 0
+ ORDER BY domainlist.id;
+
+DROP VIEW vw_blacklist;
+CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 1
+ ORDER BY domainlist.id;
+
+DROP VIEW vw_regex_whitelist;
+CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 2
+ ORDER BY domainlist.id;
+
+DROP VIEW vw_regex_blacklist;
+CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 3
+ ORDER BY domainlist.id;
+
+UPDATE info SET value = 4 WHERE property = 'version';
+
+COMMIT;
\ No newline at end of file
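
After this migration the four separate lists live in a single domainlist table, distinguished by the type column (0 = exact whitelist, 1 = exact blacklist, 2 = regex whitelist, 3 = regex blacklist), and the per-list views simply filter on that value. For example (database path assumed):

    # Enabled exact-blacklist entries through the rebuilt view ...
    sqlite3 /etc/pihole/gravity.db "SELECT id, domain, group_id FROM vw_blacklist;"
    # ... and the equivalent direct query against the unified table
    sqlite3 /etc/pihole/gravity.db "SELECT id, domain FROM domainlist WHERE type = 1 AND enabled = 1;"
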
diff --git a/advanced/Scripts/database_migration/gravity/4_to_5.sql b/advanced/Scripts/database_migration/gravity/4_to_5.sql
new file mode 100644
index 00000000..2ad906fc
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/4_to_5.sql
@@ -0,0 +1,38 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+DROP TABLE gravity;
+CREATE TABLE gravity
+(
+ domain TEXT NOT NULL,
+ adlist_id INTEGER NOT NULL REFERENCES adlist (id),
+ PRIMARY KEY(domain, adlist_id)
+);
+
+DROP VIEW vw_gravity;
+CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id
+ FROM gravity
+ LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id
+ LEFT JOIN adlist ON adlist.id = gravity.adlist_id
+ LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+ WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1);
+
+CREATE TABLE client
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+	ip TEXT NOT NULL UNIQUE
+);
+
+CREATE TABLE client_by_group
+(
+ client_id INTEGER NOT NULL REFERENCES client (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (client_id, group_id)
+);
+
+UPDATE info SET value = 5 WHERE property = 'version';
+
+COMMIT;
\ No newline at end of file
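
Version 5 introduces the client and client_by_group tables on which per-client blocking is built. A hedged sketch of populating them by hand (the IP address and group id are illustrative only):

    # Register a client and additionally assign it to group 1
    sqlite3 /etc/pihole/gravity.db "INSERT INTO client (ip) VALUES ('192.168.1.50');"
    sqlite3 /etc/pihole/gravity.db "INSERT INTO client_by_group (client_id, group_id) SELECT id, 1 FROM client WHERE ip = '192.168.1.50';"
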
diff --git a/advanced/Scripts/database_migration/gravity/5_to_6.sql b/advanced/Scripts/database_migration/gravity/5_to_6.sql
new file mode 100644
index 00000000..d2bb3145
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/5_to_6.sql
@@ -0,0 +1,18 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+DROP VIEW vw_adlist;
+CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id
+ FROM adlist
+ LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
+ LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+ WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ ORDER BY adlist.id;
+
+UPDATE info SET value = 6 WHERE property = 'version';
+
+COMMIT;
+
diff --git a/advanced/Scripts/database_migration/gravity/6_to_7.sql b/advanced/Scripts/database_migration/gravity/6_to_7.sql
new file mode 100644
index 00000000..22d9dfaf
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/6_to_7.sql
@@ -0,0 +1,35 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
+
+INSERT INTO domainlist_by_group (domainlist_id, group_id) SELECT id, 0 FROM domainlist;
+INSERT INTO client_by_group (client_id, group_id) SELECT id, 0 FROM client;
+INSERT INTO adlist_by_group (adlist_id, group_id) SELECT id, 0 FROM adlist;
+
+CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
+ BEGIN
+ INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_client_add AFTER INSERT ON client
+ BEGIN
+ INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
+ BEGIN
+ INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+ BEGIN
+ INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
+ END;
+
+UPDATE info SET value = 7 WHERE property = 'version';
+
+COMMIT;
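
The 6-to-7 triggers guarantee that every new domainlist, client, and adlist row is linked to the special group 0, and that group 0 is recreated immediately if it is ever deleted. The effect can be checked on a migrated database (the domain is an example):

    sqlite3 /etc/pihole/gravity.db "INSERT INTO domainlist (type, domain) VALUES (1, 'ads.example.com');"
    # tr_domainlist_add has already associated the new row with group 0
    sqlite3 /etc/pihole/gravity.db "SELECT group_id FROM domainlist_by_group WHERE domainlist_id = (SELECT id FROM domainlist WHERE domain = 'ads.example.com');"
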
diff --git a/advanced/Scripts/database_migration/gravity/7_to_8.sql b/advanced/Scripts/database_migration/gravity/7_to_8.sql
new file mode 100644
index 00000000..ccf0c148
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/7_to_8.sql
@@ -0,0 +1,35 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+ALTER TABLE "group" RENAME TO "group__";
+
+CREATE TABLE "group"
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ name TEXT UNIQUE NOT NULL,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ description TEXT
+);
+
+CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
+ BEGIN
+ UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
+ END;
+
+INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__";
+
+DROP TABLE "group__";
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+ BEGIN
+ INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
+ END;
+
+UPDATE info SET value = 8 WHERE property = 'version';
+
+COMMIT;
diff --git a/advanced/Scripts/database_migration/gravity/8_to_9.sql b/advanced/Scripts/database_migration/gravity/8_to_9.sql
new file mode 100644
index 00000000..0d873e2a
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/8_to_9.sql
@@ -0,0 +1,27 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+DROP TRIGGER IF EXISTS tr_group_update;
+DROP TRIGGER IF EXISTS tr_group_zero;
+
+PRAGMA legacy_alter_table=ON;
+ALTER TABLE "group" RENAME TO "group__";
+PRAGMA legacy_alter_table=OFF;
+ALTER TABLE "group__" RENAME TO "group";
+
+CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
+ BEGIN
+ UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
+ END;
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+ BEGIN
+ INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
+ END;
+
+UPDATE info SET value = 9 WHERE property = 'version';
+
+COMMIT;
diff --git a/advanced/Scripts/database_migration/gravity/9_to_10.sql b/advanced/Scripts/database_migration/gravity/9_to_10.sql
new file mode 100644
index 00000000..a5636a23
--- /dev/null
+++ b/advanced/Scripts/database_migration/gravity/9_to_10.sql
@@ -0,0 +1,29 @@
+.timeout 30000
+
+PRAGMA FOREIGN_KEYS=OFF;
+
+BEGIN TRANSACTION;
+
+DROP TABLE IF EXISTS whitelist;
+DROP TABLE IF EXISTS blacklist;
+DROP TABLE IF EXISTS regex_whitelist;
+DROP TABLE IF EXISTS regex_blacklist;
+
+CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist
+ BEGIN
+ DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id;
+ END;
+
+CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist
+ BEGIN
+ DELETE FROM adlist_by_group WHERE adlist_id = OLD.id;
+ END;
+
+CREATE TRIGGER tr_client_delete AFTER DELETE ON client
+ BEGIN
+ DELETE FROM client_by_group WHERE client_id = OLD.id;
+ END;
+
+UPDATE info SET value = 10 WHERE property = 'version';
+
+COMMIT;
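
The delete triggers added in 9-to-10 remove the matching *_by_group rows as soon as the parent entry disappears, so group assignments can no longer be orphaned. For instance (the adlist id is hypothetical):

    # Removing an adlist also clears its rows from adlist_by_group via tr_adlist_delete
    sqlite3 /etc/pihole/gravity.db "DELETE FROM adlist WHERE id = 42;"
    sqlite3 /etc/pihole/gravity.db "SELECT COUNT(*) FROM adlist_by_group WHERE adlist_id = 42;"   # expected: 0
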
diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh
index c1d95aae..77a5dece 100755
--- a/advanced/Scripts/list.sh
+++ b/advanced/Scripts/list.sh
@@ -11,69 +11,87 @@
# Globals
basename=pihole
piholeDir=/etc/"${basename}"
-whitelist="${piholeDir}"/whitelist.txt
-blacklist="${piholeDir}"/blacklist.txt
+gravityDBfile="${piholeDir}/gravity.db"
-readonly regexlist="/etc/pihole/regex.list"
reload=false
addmode=true
verbose=true
wildcard=false
+web=false
domList=()
-listMain=""
-listAlt=""
+typeId=""
+comment=""
+declare -i domaincount
+domaincount=0
colfile="/opt/pihole/COL_TABLE"
source ${colfile}
+# IDs are hard-wired to domain interpretation in the gravity database scheme
+# Clients (including FTL) will read them through the corresponding views
+readonly whitelist="0"
+readonly blacklist="1"
+readonly regex_whitelist="2"
+readonly regex_blacklist="3"
+
+GetListnameFromTypeId() {
+ if [[ "$1" == "${whitelist}" ]]; then
+ echo "whitelist"
+ elif [[ "$1" == "${blacklist}" ]]; then
+ echo "blacklist"
+ elif [[ "$1" == "${regex_whitelist}" ]]; then
+ echo "regex whitelist"
+ elif [[ "$1" == "${regex_blacklist}" ]]; then
+ echo "regex blacklist"
+ fi
+}
-helpFunc() {
- if [[ "${listMain}" == "${whitelist}" ]]; then
- param="w"
- type="white"
- elif [[ "${listMain}" == "${regexlist}" && "${wildcard}" == true ]]; then
- param="-wild"
- type="wildcard black"
- elif [[ "${listMain}" == "${regexlist}" ]]; then
- param="-regex"
- type="regex black"
- else
- param="b"
- type="black"
+GetListParamFromTypeId() {
+ if [[ "${typeId}" == "${whitelist}" ]]; then
+ echo "w"
+ elif [[ "${typeId}" == "${blacklist}" ]]; then
+ echo "b"
+ elif [[ "${typeId}" == "${regex_whitelist}" && "${wildcard}" == true ]]; then
+ echo "-white-wild"
+ elif [[ "${typeId}" == "${regex_whitelist}" ]]; then
+ echo "-white-regex"
+ elif [[ "${typeId}" == "${regex_blacklist}" && "${wildcard}" == true ]]; then
+ echo "-wild"
+ elif [[ "${typeId}" == "${regex_blacklist}" ]]; then
+ echo "-regex"
fi
+}
+
+helpFunc() {
+ local listname param
+
+ listname="$(GetListnameFromTypeId "${typeId}")"
+ param="$(GetListParamFromTypeId)"
echo "Usage: pihole -${param} [options] <domain> <domain2 ...>
Example: 'pihole -${param} site.com', or 'pihole -${param} site1.com site2.com'
-${type^}list one or more domains
+${listname^} one or more domains
Options:
- -d, --delmode Remove domain(s) from the ${type}list
- -nr, --noreload Update ${type}list without refreshing dnsmasq
+ -d, --delmode Remove domain(s) from the ${listname}
+ -nr, --noreload Update ${listname} without reloading the DNS server
-q, --quiet Make output less verbose
-h, --help Show this help dialog
- -l, --list Display all your ${type}listed domains
+ -l, --list Display all your ${listname} domains
--nuke Removes all entries in a list"
exit 0
}
-EscapeRegexp() {
- # This way we may safely insert an arbitrary
- # string in our regular expressions
- # This sed is intentionally executed in three steps to ease maintainability
- # The first sed removes any amount of leading dots
- echo $* | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g"
-}
-
-HandleOther() {
+ValidateDomain() {
# Convert to lowercase
domain="${1,,}"
# Check validity of domain (don't check for regex entries)
if [[ "${#domain}" -le 253 ]]; then
- if [[ "${listMain}" == "${regexlist}" && "${wildcard}" == false ]]; then
+ if [[ ( "${typeId}" == "${regex_blacklist}" || "${typeId}" == "${regex_whitelist}" ) && "${wildcard}" == false ]]; then
validDomain="${domain}"
else
validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check
@@ -82,194 +100,182 @@ HandleOther() {
fi
if [[ -n "${validDomain}" ]]; then
- domList=("${domList[@]}" ${validDomain})
+ domList=("${domList[@]}" "${validDomain}")
else
echo -e " ${CROSS} ${domain} is not a valid argument or domain name!"
fi
-}
-PoplistFile() {
- # Check whitelist file exists, and if not, create it
- if [[ ! -f "${whitelist}" ]]; then
- touch "${whitelist}"
- fi
-
- # Check blacklist file exists, and if not, create it
- if [[ ! -f "${blacklist}" ]]; then
- touch "${blacklist}"
- fi
+ domaincount=$((domaincount+1))
+}
+ProcessDomainList() {
for dom in "${domList[@]}"; do
- # Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the other
+ # Format domain into regex filter if requested
+ if [[ "${wildcard}" == true ]]; then
+ dom="(^|\\.)${dom//\./\\.}$"
+ fi
+
+ # Logic: If addmode then add to desired list and remove from the other;
+ # if delmode then remove from desired list but do not add to the other
if ${addmode}; then
- AddDomain "${dom}" "${listMain}"
- RemoveDomain "${dom}" "${listAlt}"
+ AddDomain "${dom}"
else
- RemoveDomain "${dom}" "${listMain}"
+ RemoveDomain "${dom}"
fi
done
}
AddDomain() {
- list="$2"
- domain=$(EscapeRegexp "$1")
-
- [[ "${list}" == "${whitelist}" ]] && listname="whitelist"
- [[ "${list}" == "${blacklist}" ]] && listname="blacklist"
-
- if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then
- [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only"
- [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only"
- bool=true
- # Is the domain in the list we want to add it to?
- grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false
-
- if [[ "${bool}" == false ]]; then
- # Domain not found in the whitelist file, add it!
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} Adding ${1} to ${listname}..."
- fi
- reload=true
- # Add it to the list we want to add it to
- echo "$1" >> "${list}"
- else
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!"
- fi
+ local domain num requestedListname existingTypeId existingListname
+ domain="$1"
+
+ # Is the domain in the list we want to add it to?
+ num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
+ requestedListname="$(GetListnameFromTypeId "${typeId}")"
+
+ if [[ "${num}" -ne 0 ]]; then
+ existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
+ if [[ "${existingTypeId}" == "${typeId}" ]]; then
+ if [[ "${verbose}" == true ]]; then
+ echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!"
fi
- elif [[ "${list}" == "${regexlist}" ]]; then
- [[ -z "${type}" ]] && type="--wildcard-only"
- bool=true
- domain="${1}"
-
- [[ "${wildcard}" == true ]] && domain="(^|\\.)${domain//\./\\.}$"
-
- # Is the domain in the list?
- # Search only for exactly matching lines
- grep -Fx "${domain}" "${regexlist}" > /dev/null 2>&1 || bool=false
-
- if [[ "${bool}" == false ]]; then
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} Adding ${domain} to regex list..."
- fi
- reload="restart"
- echo "$domain" >> "${regexlist}"
- else
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} ${domain} already exists in regex list, no need to add!"
- fi
+ else
+ existingListname="$(GetListnameFromTypeId "${existingTypeId}")"
+ sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
+ if [[ "${verbose}" == true ]]; then
+ echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!"
fi
+ fi
+ return
+ fi
+
+ # Domain not found in the table, add it!
+ if [[ "${verbose}" == true ]]; then
+ echo -e " ${INFO} Adding ${domain} to the ${requestedListname}..."
+ fi
+ reload=true
+ # Insert only the domain here. The enabled and date_added fields will be filled
+ # with their default values (enabled = true, date_added = current timestamp)
+ if [[ -z "${comment}" ]]; then
+ sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
+ else
+ # also add comment when variable has been set through the "--comment" option
+ sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
fi
}
RemoveDomain() {
- list="$2"
- domain=$(EscapeRegexp "$1")
-
- [[ "${list}" == "${whitelist}" ]] && listname="whitelist"
- [[ "${list}" == "${blacklist}" ]] && listname="blacklist"
-
- if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then
- bool=true
- [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only"
- [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only"
- # Is it in the list? Logic follows that if its whitelisted it should not be blacklisted and vice versa
- grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false
- if [[ "${bool}" == true ]]; then
- # Remove it from the other one
- echo -e " ${INFO} Removing $1 from ${listname}..."
- # /I flag: search case-insensitive
- sed -i "/${domain}/Id" "${list}"
- reload=true
- else
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!"
- fi
- fi
- elif [[ "${list}" == "${regexlist}" ]]; then
- [[ -z "${type}" ]] && type="--wildcard-only"
- domain="${1}"
-
- [[ "${wildcard}" == true ]] && domain="(^|\\.)${domain//\./\\.}$"
-
- bool=true
- # Is it in the list?
- grep -Fx "${domain}" "${regexlist}" > /dev/null 2>&1 || bool=false
- if [[ "${bool}" == true ]]; then
- # Remove it from the other one
- echo -e " ${INFO} Removing $domain from regex list..."
- local lineNumber
- lineNumber=$(grep -Fnx "$domain" "${list}" | cut -f1 -d:)
- sed -i "${lineNumber}d" "${list}"
- reload=true
- else
- if [[ "${verbose}" == true ]]; then
- echo -e " ${INFO} ${domain} does not exist in regex list, no need to remove!"
- fi
- fi
+ local domain num requestedListname
+ domain="$1"
+
+ # Is the domain in the list we want to remove it from?
+ num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"
+
+ requestedListname="$(GetListnameFromTypeId "${typeId}")"
+
+ if [[ "${num}" -eq 0 ]]; then
+ if [[ "${verbose}" == true ]]; then
+ echo -e " ${INFO} ${domain} does not exist in ${requestedListname}, no need to remove!"
+ fi
+ return
fi
-}
-# Update Gravity
-Reload() {
- echo ""
- pihole -g --skip-download "${type:-}"
+ # Domain found in the table, remove it!
+ if [[ "${verbose}" == true ]]; then
+ echo -e " ${INFO} Removing ${domain} from the ${requestedListname}..."
+ fi
+ reload=true
+ # Remove it from the current list
+ sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
}
Displaylist() {
- if [[ -f ${listMain} ]]; then
- if [[ "${listMain}" == "${whitelist}" ]]; then
- string="gravity resistant domains"
- else
- string="domains caught in the sinkhole"
- fi
- verbose=false
- echo -e "Displaying $string:\n"
+ local count num_pipes domain enabled status nicedate requestedListname
+
+ requestedListname="$(GetListnameFromTypeId "${typeId}")"
+ data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"
+
+ if [[ -z $data ]]; then
+ echo -e "Not showing empty list"
+ else
+ echo -e "Displaying ${requestedListname}:"
count=1
- while IFS= read -r RD || [ -n "${RD}" ]; do
- echo " ${count}: ${RD}"
+ while IFS= read -r line
+ do
+ # Count number of pipes seen in this line
+ # This is necessary because we can only detect the pipe separating the fields
+ # from the end backwards as the domain (which is the first field) may contain
+ # pipe symbols as they are perfectly valid regex filter control characters
+ num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")"
+
+ # Extract domain and enabled status based on the obtained number of pipe characters
+ domain="$(cut -d'|' -f"-$((num_pipes-1))" <<< "${line}")"
+ enabled="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")"
+ datemod="$(cut -d'|' -f"$((num_pipes+1))" <<< "${line}")"
+
+ # Translate boolean status into human readable string
+ if [[ "${enabled}" -eq 1 ]]; then
+ status="enabled"
+ else
+ status="disabled"
+ fi
+
+ # Get nice representation of numerical date stored in database
+ nicedate=$(date --rfc-2822 -d "@${datemod}")
+
+ echo " ${count}: ${domain} (${status}, last modified ${nicedate})"
count=$((count+1))
- done < "${listMain}"
- else
- echo -e " ${COL_LIGHT_RED}${listMain} does not exist!${COL_NC}"
+ done <<< "${data}"
fi
exit 0;
}
NukeList() {
- if [[ -f "${listMain}" ]]; then
- # Back up original list
- cp "${listMain}" "${listMain}.bck~"
- # Empty out file
- echo "" > "${listMain}"
+ sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
+}
+
+GetComment() {
+ comment="$1"
+ if [[ "${comment}" =~ [^a-zA-Z0-9_\#:/\.,\ -] ]]; then
+ echo " ${CROSS} Found invalid characters in domain comment!"
+ exit
fi
}
-for var in "$@"; do
- case "${var}" in
- "-w" | "whitelist" ) listMain="${whitelist}"; listAlt="${blacklist}";;
- "-b" | "blacklist" ) listMain="${blacklist}"; listAlt="${whitelist}";;
- "--wild" | "wildcard" ) listMain="${regexlist}"; wildcard=true;;
- "--regex" | "regex" ) listMain="${regexlist}";;
+while (( "$#" )); do
+ case "${1}" in
+ "-w" | "whitelist" ) typeId=0;;
+ "-b" | "blacklist" ) typeId=1;;
+ "--white-regex" | "white-regex" ) typeId=2;;
+ "--white-wild" | "white-wild" ) typeId=2; wildcard=true;;
+ "--wild" | "wildcard" ) typeId=3; wildcard=true;;
+ "--regex" | "regex" ) typeId=3;;
"-nr"| "--noreload" ) reload=false;;
"-d" | "--delmode" ) addmode=false;;
"-q" | "--quiet" ) verbose=false;;
"-h" | "--help" ) helpFunc;;
"-l" | "--list" ) Displaylist;;
"--nuke" ) NukeList;;
- * ) HandleOther "${var}";;
+ "--web" ) web=true;;
+ "--comment" ) GetComment "${2}"; shift;;
+ * ) ValidateDomain "${1}";;
esac
+ shift
done
shift
-if [[ $# = 0 ]]; then
+if [[ ${domaincount} == 0 ]]; then
helpFunc
fi
-PoplistFile
+ProcessDomainList
+
+# Used on web interface
+if $web; then
+echo "DONE"
+fi
if [[ "${reload}" != false ]]; then
- # Ensure that "restart" is used for Wildcard updates
- Reload "${reload}"
+ pihole restartdns reload-lists
fi
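
With this rewrite, list.sh no longer edits whitelist.txt, blacklist.txt, or regex.list; every add, remove, and listing becomes a query against the domainlist table in gravity.db, with the sub-command selecting typeId. A hedged illustration of the statements it ends up issuing (domain, comment, and path are example values):

    # "pihole -w example.com --comment 'testing'" roughly translates to
    sqlite3 /etc/pihole/gravity.db \
        "INSERT INTO domainlist (domain,type,comment) VALUES ('example.com',0,'testing');"
    # "pihole --wild example.com" stores the generated regex filter with type 3
    sqlite3 /etc/pihole/gravity.db \
        "INSERT INTO domainlist (domain,type) VALUES ('(^|\.)example\.com$',3);"
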
diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh
new file mode 100755
index 00000000..b6b552c9
--- /dev/null
+++ b/advanced/Scripts/piholeARPTable.sh
@@ -0,0 +1,66 @@
+#!/usr/bin/env bash
+# shellcheck disable=SC1090
+
+# Pi-hole: A black hole for Internet advertisements
+# (c) 2019 Pi-hole, LLC (https://pi-hole.net)
+# Network-wide ad blocking via your own hardware.
+#
+# ARP table interaction
+#
+# This file is copyright under the latest version of the EUPL.
+# Please see LICENSE file for your rights under this license.
+
+coltable="/opt/pihole/COL_TABLE"
+if [[ -f ${coltable} ]]; then
+ source ${coltable}
+fi
+
+# Determine database location
+# Obtain DBFILE=... setting from pihole-FTL.db
+# Constructed to return nothing when
+# a) the setting is not present in the config file, or
+# b) the setting is commented out (e.g. "#DBFILE=...")
+FTLconf="/etc/pihole/pihole-FTL.conf"
+if [ -e "$FTLconf" ]; then
+ DBFILE="$(sed -n -e 's/^\s*DBFILE\s*=\s*//p' ${FTLconf})"
+fi
+# Test for empty string. Use standard path in this case.
+if [ -z "$DBFILE" ]; then
+ DBFILE="/etc/pihole/pihole-FTL.db"
+fi
+
+
+flushARP(){
+ local output
+ if [[ "${args[1]}" != "quiet" ]]; then
+ echo -ne " ${INFO} Flushing network table ..."
+ fi
+
+ # Truncate network_addresses table in pihole-FTL.db
+ # This needs to be done before we can truncate the network table due to
+    # foreign key constraints
+ if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
+ echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table"
+ echo " Database location: ${DBFILE}"
+ echo " Output: ${output}"
+ return 1
+ fi
+
+ # Truncate network table in pihole-FTL.db
+ if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then
+ echo -e "${OVER} ${CROSS} Failed to truncate network table"
+ echo " Database location: ${DBFILE}"
+ echo " Output: ${output}"
+ return 1
+ fi
+
+ if [[ "${args[1]}" != "quiet" ]]; then
+ echo -e "${OVER} ${TICK} Flushed network table"
+ fi
+}
+
+args=("$@")
+
+case "${args[0]}" in
+ "arpflush" ) flushARP;;
+esac
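
piholeARPTable.sh empties FTL's network_addresses table first (to satisfy the foreign key constraints) and then the network table; it is wired up as the handler for the "arpflush" argument, with the dispatching changes to the main pihole script outside the 'advanced' directory shown here. Called directly it behaves the same; the install path below is an assumption:

    # Flush FTL's network tables, printing progress
    bash /opt/pihole/piholeARPTable.sh arpflush
    # Same, but without the status lines
    bash /opt/pihole/piholeARPTable.sh arpflush quiet
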
diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh
index dd57117f..31009dd9 100644
--- a/advanced/Scripts/piholeCheckout.sh
+++ b/advanced/Scripts/piholeCheckout.sh
@@ -95,6 +95,7 @@ checkout() {
local path
path="development/${binary}"
echo "development" > /etc/pihole/ftlbranch
+ chmod 644 /etc/pihole/ftlbranch
elif [[ "${1}" == "master" ]] ; then
# Shortcut to check out master branches
echo -e " ${INFO} Shortcut \"master\" detected - checking out master branches..."
@@ -108,6 +109,7 @@ checkout() {
local path
path="master/${binary}"
echo "master" > /etc/pihole/ftlbranch
+ chmod 644 /etc/pihole/ftlbranch
elif [[ "${1}" == "core" ]] ; then
str="Fetching branches from ${piholeGitUrl}"
echo -ne " ${INFO} $str"
@@ -169,6 +171,7 @@ checkout() {
if check_download_exists "$path"; then
echo " ${TICK} Branch ${2} exists"
echo "${2}" > /etc/pihole/ftlbranch
+ chmod 644 /etc/pihole/ftlbranch
FTLinstall "${binary}"
restart_service pihole-FTL
enable_service pihole-FTL
diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh
index d46944d6..d61edf08 100755
--- a/advanced/Scripts/piholeDebug.sh
+++ b/advanced/Scripts/piholeDebug.sh
@@ -89,16 +89,40 @@ PIHOLE_WILDCARD_CONFIG_FILE="${DNSMASQ_D_DIRECTORY}/03-wildcard.conf"
WEB_SERVER_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/lighttpd.conf"
#WEB_SERVER_CUSTOM_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/external.conf"
-PIHOLE_DEFAULT_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.default"
-PIHOLE_USER_DEFINED_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.list"
-PIHOLE_BLACKLIST_FILE="${PIHOLE_DIRECTORY}/blacklist.txt"
-PIHOLE_BLOCKLIST_FILE="${PIHOLE_DIRECTORY}/gravity.list"
PIHOLE_INSTALL_LOG_FILE="${PIHOLE_DIRECTORY}/install.log"
PIHOLE_RAW_BLOCKLIST_FILES="${PIHOLE_DIRECTORY}/list.*"
PIHOLE_LOCAL_HOSTS_FILE="${PIHOLE_DIRECTORY}/local.list"
PIHOLE_LOGROTATE_FILE="${PIHOLE_DIRECTORY}/logrotate"
PIHOLE_SETUP_VARS_FILE="${PIHOLE_DIRECTORY}/setupVars.conf"
-PIHOLE_WHITELIST_FILE="${PIHOLE_DIRECTORY}/whitelist.txt"
+PIHOLE_FTL_CONF_FILE="${PIHOLE_DIRECTORY}/pihole-FTL.conf"
+
+# Read the value of an FTL config key. The value is printed to stdout.
+#
+# Args:
+# 1. The key to read
+# 2. The default if the setting or config does not exist
+get_ftl_conf_value() {
+ local key=$1
+ local default=$2
+ local value
+
+ # Obtain key=... setting from pihole-FTL.conf
+ if [[ -e "$PIHOLE_FTL_CONF_FILE" ]]; then
+ # Constructed to return nothing when
+ # a) the setting is not present in the config file, or
+ # b) the setting is commented out (e.g. "#DBFILE=...")
+ value="$(sed -n -e "s/^\\s*$key=\\s*//p" ${PIHOLE_FTL_CONF_FILE})"
+ fi
+
+ # Test for missing value. Use default value in this case.
+ if [[ -z "$value" ]]; then
+ value="$default"
+ fi
+
+ echo "$value"
+}
+
+PIHOLE_GRAVITY_DB_FILE="$(get_ftl_conf_value "GRAVITYDB" "${PIHOLE_DIRECTORY}/gravity.db")"
PIHOLE_COMMAND="${BIN_DIRECTORY}/pihole"
PIHOLE_COLTABLE_FILE="${BIN_DIRECTORY}/COL_TABLE"
@@ -109,7 +133,7 @@ FTL_PORT="${RUN_DIRECTORY}/pihole-FTL.port"
PIHOLE_LOG="${LOG_DIRECTORY}/pihole.log"
PIHOLE_LOG_GZIPS="${LOG_DIRECTORY}/pihole.log.[0-9].*"
PIHOLE_DEBUG_LOG="${LOG_DIRECTORY}/pihole_debug.log"
-PIHOLE_FTL_LOG="${LOG_DIRECTORY}/pihole-FTL.log"
+PIHOLE_FTL_LOG="$(get_ftl_conf_value "LOGFILE" "${LOG_DIRECTORY}/pihole-FTL.log")"
PIHOLE_WEB_SERVER_ACCESS_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/access.log"
PIHOLE_WEB_SERVER_ERROR_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/error.log"
@@ -142,16 +166,11 @@ REQUIRED_FILES=("${PIHOLE_CRON_FILE}"
"${PIHOLE_DHCP_CONFIG_FILE}"
"${PIHOLE_WILDCARD_CONFIG_FILE}"
"${WEB_SERVER_CONFIG_FILE}"
-"${PIHOLE_DEFAULT_AD_LISTS}"
-"${PIHOLE_USER_DEFINED_AD_LISTS}"
-"${PIHOLE_BLACKLIST_FILE}"
-"${PIHOLE_BLOCKLIST_FILE}"
"${PIHOLE_INSTALL_LOG_FILE}"
"${PIHOLE_RAW_BLOCKLIST_FILES}"
"${PIHOLE_LOCAL_HOSTS_FILE}"
"${PIHOLE_LOGROTATE_FILE}"
"${PIHOLE_SETUP_VARS_FILE}"
-"${PIHOLE_WHITELIST_FILE}"
"${PIHOLE_COMMAND}"
"${PIHOLE_COLTABLE_FILE}"
"${FTL_PID}"
@@ -795,7 +814,7 @@ dig_at() {
# This helps emulate queries to different domains that a user might query
# It will also give extra assurance that Pi-hole is correctly resolving and blocking domains
local random_url
- random_url=$(shuf -n 1 "${PIHOLE_BLOCKLIST_FILE}")
+ random_url=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1")
# First, do a dig on localhost to see if Pi-hole can use itself to block a domain
if local_dig=$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @${local_address} +short "${record_type}"); then
@@ -977,8 +996,7 @@ list_files_in_dir() {
if [[ -d "${dir_to_parse}/${each_file}" ]]; then
            # If it's a directory, do nothing
:
- elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_BLOCKLIST_FILE}" ]] || \
- [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
+ elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_SETUP_VARS_FILE}" ]] || \
@@ -1063,31 +1081,71 @@ head_tail_log() {
IFS="$OLD_IFS"
}
-analyze_gravity_list() {
- echo_current_diagnostic "Gravity list"
- local head_line
- local tail_line
- # Put the current Internal Field Separator into another variable so it can be restored later
+show_db_entries() {
+ local title="${1}"
+ local query="${2}"
+ local widths="${3}"
+
+ echo_current_diagnostic "${title}"
+
OLD_IFS="$IFS"
- # Get the lines that are in the file(s) and store them in an array for parsing later
IFS=$'\r\n'
+ local entries=()
+ mapfile -t entries < <(\
+ sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \
+ -cmd ".headers on" \
+ -cmd ".mode column" \
+ -cmd ".width ${widths}" \
+ "${query}"\
+ )
+
+ for line in "${entries[@]}"; do
+ log_write " ${line}"
+ done
+
+ IFS="$OLD_IFS"
+}
+
+show_groups() {
+ show_db_entries "Groups" "SELECT id,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,name,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,description FROM \"group\"" "4 7 50 19 19 50"
+}
+
+show_adlists() {
+ show_db_entries "Adlists" "SELECT id,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,GROUP_CONCAT(adlist_by_group.group_id) group_ids,address,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM adlist LEFT JOIN adlist_by_group ON adlist.id = adlist_by_group.adlist_id GROUP BY id;" "4 7 12 100 19 19 50"
+}
+
+show_domainlist() {
+ show_db_entries "Domainlist (0/1 = exact white-/blacklist, 2/3 = regex white-/blacklist)" "SELECT id,CASE type WHEN '0' THEN '0 ' WHEN '1' THEN ' 1 ' WHEN '2' THEN ' 2 ' WHEN '3' THEN ' 3' ELSE type END type,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,GROUP_CONCAT(domainlist_by_group.group_id) group_ids,domain,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist LEFT JOIN domainlist_by_group ON domainlist.id = domainlist_by_group.domainlist_id GROUP BY id;" "4 4 7 12 100 19 19 50"
+}
+
+show_clients() {
+ show_db_entries "Clients" "SELECT id,GROUP_CONCAT(client_by_group.group_id) group_ids,ip,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM client LEFT JOIN client_by_group ON client.id = client_by_group.client_id GROUP BY id;" "4 12 100 19 19 50"
+}
+
+analyze_gravity_list() {
+ echo_current_diagnostic "Gravity List and Database"
+
local gravity_permissions
- gravity_permissions=$(ls -ld "${PIHOLE_BLOCKLIST_FILE}")
+ gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}")
log_write "${COL_GREEN}${gravity_permissions}${COL_NC}"
- local gravity_head=()
- mapfile -t gravity_head < <(head -n 4 ${PIHOLE_BLOCKLIST_FILE})
- log_write " ${COL_CYAN}-----head of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}"
- for head_line in "${gravity_head[@]}"; do
- log_write " ${head_line}"
- done
+
+ show_db_entries "Info table" "SELECT property,value FROM info" "20 40"
+ gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")"
+ gravity_updated="$(date -d @"${gravity_updated_raw}")"
+ log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}"
log_write ""
- local gravity_tail=()
- mapfile -t gravity_tail < <(tail -n 4 ${PIHOLE_BLOCKLIST_FILE})
- log_write " ${COL_CYAN}-----tail of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}"
- for tail_line in "${gravity_tail[@]}"; do
- log_write " ${tail_line}"
+
+ OLD_IFS="$IFS"
+ IFS=$'\r\n'
+ local gravity_sample=()
+ mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10")
+ log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}"
+
+ for line in "${gravity_sample[@]}"; do
+ log_write " ${line}"
done
- # Set the IFS back to what it was
+
+ log_write ""
IFS="$OLD_IFS"
}
@@ -1238,6 +1296,10 @@ process_status
parse_setup_vars
check_x_headers
analyze_gravity_list
+show_groups
+show_domainlist
+show_clients
+show_adlists
show_content_of_pihole_files
parse_locale
analyze_pihole_log
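
The debugger now resolves file locations through get_ftl_conf_value instead of hard-coding them, so a relocated gravity database or FTL log is still found. A small standalone illustration of the same sed-based lookup (the override line is hypothetical):

    PIHOLE_FTL_CONF_FILE="/etc/pihole/pihole-FTL.conf"
    # Given a line such as "GRAVITYDB=/media/usb/gravity.db" in pihole-FTL.conf, this
    # prints the override; if the key is absent or commented out, the fallback is used.
    value="$(sed -n -e "s/^\\s*GRAVITYDB=\\s*//p" "${PIHOLE_FTL_CONF_FILE}")"
    echo "${value:-/etc/pihole/gravity.db}"
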
diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh
index 561fbce7..51e94d7c 100755
--- a/advanced/Scripts/piholeLogFlush.sh
+++ b/advanced/Scripts/piholeLogFlush.sh
@@ -39,8 +39,9 @@ if [[ "$@" == *"once"* ]]; then
# Note that moving the file is not an option, as
# dnsmasq would happily continue writing into the
# moved file (it will have the same file handler)
- cp /var/log/pihole.log /var/log/pihole.log.1
+ cp -p /var/log/pihole.log /var/log/pihole.log.1
echo " " > /var/log/pihole.log
+ chmod 644 /var/log/pihole.log
fi
else
# Manual flushing
@@ -53,6 +54,7 @@ else
echo " " > /var/log/pihole.log
if [ -f /var/log/pihole.log.1 ]; then
echo " " > /var/log/pihole.log.1
+ chmod 644 /var/log/pihole.log.1
fi
fi
# Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history)
diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh
index 2dead97d..7518e6c4 100644..100755
--- a/advanced/Scripts/query.sh
+++ b/advanced/Scripts/query.sh
@@ -11,10 +11,8 @@
# Globals
piholeDir="/etc/pihole"
-adListsList="$piholeDir/adlists.list"
-wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf"
+gravityDBfile="${piholeDir}/gravity.db"
options="$*"
-adlist=""
all=""
exact=""
blockpage=""
@@ -23,27 +21,10 @@ matchType="match"
colfile="/opt/pihole/COL_TABLE"
source "${colfile}"
-# Print each subdomain
-# e.g: foo.bar.baz.com = "foo.bar.baz.com bar.baz.com baz.com com"
-processWildcards() {
- IFS="." read -r -a array <<< "${1}"
- for (( i=${#array[@]}-1; i>=0; i-- )); do
- ar=""
- for (( j=${#array[@]}-1; j>${#array[@]}-i-2; j-- )); do
- if [[ $j == $((${#array[@]}-1)) ]]; then
- ar="${array[$j]}"
- else
- ar="${array[$j]}.${ar}"
- fi
- done
- echo "${ar}"
- done
-}
-
# Scan an array of files for matching strings
scanList(){
# Escape full stops
- local domain="${1//./\\.}" lists="${2}" type="${3:-}"
+ local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" type="${3:-}"
# Prevent grep from printing file path
cd "$piholeDir" || exit 1
@@ -52,11 +33,18 @@ scanList(){
export LC_CTYPE=C
# /dev/null forces filename to be printed when only one list has been generated
- # shellcheck disable=SC2086
case "${type}" in
- "exact" ) grep -i -E "(^|\\s)${domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
- "wc" ) grep -i -o -m 1 "/${domain}/" ${lists} 2>/dev/null;;
- * ) grep -i "${domain}" ${lists} /dev/null 2>/dev/null;;
+ "exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
+ # Iterate through each regexp and check whether it matches the domainQuery
+ # If it does, print the matching regexp and continue looping
+ # Input 1 - regexps | Input 2 - domainQuery
+ "regex" )
+ for list in ${lists}; do
+ if [[ "${domain}" =~ ${list} ]]; then
+ printf "%b\n" "${list}";
+ fi
+ done;;
+ * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;;
esac
}
@@ -66,23 +54,16 @@ Example: 'pihole -q -exact domain.com'
Query the adlists for a specified domain
Options:
- -adlist Print the name of the block list URL
-exact Search the block lists for exact domain matches
-all Return all query matches within a block list
-h, --help Show this help dialog"
exit 0
fi
-if [[ ! -e "$adListsList" ]]; then
- echo -e "${COL_LIGHT_RED}The file $adListsList was not found${COL_NC}"
- exit 1
-fi
-
# Handle valid options
if [[ "${options}" == *"-bp"* ]]; then
exact="exact"; blockpage=true
else
- [[ "${options}" == *"-adlist"* ]] && adlist=true
[[ "${options}" == *"-all"* ]] && all=true
if [[ "${options}" == *"-exact"* ]]; then
exact="exact"; matchType="exact ${matchType}"
@@ -107,69 +88,115 @@ if [[ -n "${str:-}" ]]; then
exit 1
fi
-# Scan Whitelist and Blacklist
-lists="whitelist.txt blacklist.txt"
-mapfile -t results <<< "$(scanList "${domainQuery}" "${lists}" "${exact}")"
-if [[ -n "${results[*]}" ]]; then
+scanDatabaseTable() {
+ local domain table type querystr result extra
+ domain="$(printf "%q" "${1}")"
+ table="${2}"
+ type="${3:-}"
+
+ # As underscores are legitimate parts of domains, we escape them when using the LIKE operator.
+ # Underscores are SQLite wildcards matching exactly one character. We obviously want to suppress this
+ # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched
+ # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores.
+ if [[ "${table}" == "gravity" ]]; then
+ case "${exact}" in
+ "exact" ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";;
+ * ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
+ esac
+ else
+ case "${exact}" in
+ "exact" ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain = '${domain}'";;
+ * ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
+ esac
+ fi
+
+ # Send prepared query to gravity database
+ result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null
+ if [[ -z "${result}" ]]; then
+ # Return early when there are no matches in this table
+ return
+ fi
+
+ if [[ "${table}" == "gravity" ]]; then
+ echo "${result}"
+ return
+ fi
+
+ # Mark domain as having been white-/blacklist matched (global variable)
wbMatch=true
- # Loop through each result in order to print unique file title once
+
+ # Print table name
+ if [[ -z "${blockpage}" ]]; then
+ echo " ${matchType^} found in ${COL_BOLD}exact ${table}${COL_NC}"
+ fi
+
+ # Loop over results and print them
+ mapfile -t results <<< "${result}"
for result in "${results[@]}"; do
- fileName="${result%%.*}"
if [[ -n "${blockpage}" ]]; then
echo "π ${result}"
exit 0
- elif [[ -n "${exact}" ]]; then
- echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
+ fi
+ domain="${result/|*}"
+ if [[ "${result#*|}" == "0" ]]; then
+ extra=" (disabled)"
else
- # Only print filename title once per file
- if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
- echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
- fileName_prev="${fileName}"
- fi
- echo " ${result#*:}"
+ extra=""
fi
+ echo " ${domain}${extra}"
done
-fi
+}
-# Scan Wildcards
-if [[ -e "${wildcardlist}" ]]; then
- # Determine all subdomains, domain and TLDs
- mapfile -t wildcards <<< "$(processWildcards "${domainQuery}")"
- for match in "${wildcards[@]}"; do
- # Search wildcard list for matches
- mapfile -t results <<< "$(scanList "${match}" "${wildcardlist}" "wc")"
- if [[ -n "${results[*]}" ]]; then
- if [[ -z "${wcMatch:-}" ]] && [[ -z "${blockpage}" ]]; then
+scanRegexDatabaseTable() {
+ local domain list
+ domain="${1}"
+ list="${2}"
+ type="${3:-}"
+
+ # Query all regex from the corresponding database tables
+ mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null)
+
+ # If we have regexps to process
+ if [[ "${#regexList[@]}" -ne 0 ]]; then
+ # Split regexps over a new line
+ str_regexList=$(printf '%s\n' "${regexList[@]}")
+ # Check domain against regexps
+ mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex")
+ # If there were regex matches
+ if [[ "${#regexMatches[@]}" -ne 0 ]]; then
+ # Split matching regexps over a new line
+ str_regexMatches=$(printf '%s\n' "${regexMatches[@]}")
+ # Form a "matched" message
+ str_message="${matchType^} found in ${COL_BOLD}regex ${list}${COL_NC}"
+ # Form a "results" message
+ str_result="${COL_BOLD}${str_regexMatches}${COL_NC}"
+ # If we are displaying more than just the source of the block
+ if [[ -z "${blockpage}" ]]; then
+ # Set the wildcard match flag
wcMatch=true
- echo " ${matchType^} found in ${COL_BOLD}Wildcards${COL_NC}:"
+ # Echo the "matched" message, indented by one space
+ echo " ${str_message}"
+ # Echo the "results" message, each line indented by three spaces
+ # shellcheck disable=SC2001
+ echo "${str_result}" | sed 's/^/ /'
+ else
+ echo "π .wildcard"
+ exit 0
fi
- case "${blockpage}" in
- true ) echo "π ${wildcardlist##*/}"; exit 0;;
- * ) echo " *.${match}";;
- esac
fi
- done
-fi
+ fi
+}
+
+# Scan Whitelist and Blacklist
+scanDatabaseTable "${domainQuery}" "whitelist" "0"
+scanDatabaseTable "${domainQuery}" "blacklist" "1"
-# Get version sorted *.domains filenames (without dir path)
-lists=("$(cd "$piholeDir" || exit 0; printf "%s\\n" -- *.domains | sort -V)")
-
-# Query blocklists for occurences of domain
-mapfile -t results <<< "$(scanList "${domainQuery}" "${lists[*]}" "${exact}")"
-
-# Remove unwanted content from $results
-# Each line in $results is formatted as such: [fileName]:[line]
-# 1. Delete lines starting with #
-# 2. Remove comments after domain
-# 3. Remove hosts format IP address
-# 4. Remove any lines that no longer contain the queried domain name (in case the matched domain name was in a comment)
-esc_domain="${domainQuery//./\\.}"
-mapfile -t results <<< "$(IFS=$'\n'; sed \
- -e "/:#/d" \
- -e "s/[ \\t]#.*//g" \
- -e "s/:.*[ \\t]/:/g" \
- -e "/${esc_domain}/!d" \
- <<< "${results[*]}")"
+# Scan Regex table
+scanRegexDatabaseTable "${domainQuery}" "whitelist" "2"
+scanRegexDatabaseTable "${domainQuery}" "blacklist" "3"
+
+# Query block lists
+mapfile -t results <<< "$(scanDatabaseTable "${domainQuery}" "gravity")"
# Handle notices
if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then
@@ -184,15 +211,6 @@ elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then
exit 0
fi
-# Get adlist file content as array
-if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
- for adlistUrl in $(< "${adListsList}"); do
- if [[ "${adlistUrl:0:4}" =~ (http|www.) ]]; then
- adlists+=("${adlistUrl}")
- fi
- done
-fi
-
# Print "Exact matches for" title
if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then
plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es"
@@ -200,28 +218,25 @@ if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then
fi
for result in "${results[@]}"; do
- fileName="${result/:*/}"
-
- # Determine *.domains URL using filename's number
- if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
- fileNum="${fileName/list./}"; fileNum="${fileNum%%.*}"
- fileName="${adlists[$fileNum]}"
-
- # Discrepency occurs when adlists has been modified, but Gravity has not been run
- if [[ -z "${fileName}" ]]; then
- fileName="${COL_LIGHT_RED}(no associated adlists URL found)${COL_NC}"
- fi
+ match="${result/|*/}"
+ extra="${result#*|}"
+ adlistAddress="${extra/|*/}"
+ extra="${extra#*|}"
+ if [[ "${extra}" == "0" ]]; then
+ extra="(disabled)"
+ else
+ extra=""
fi
if [[ -n "${blockpage}" ]]; then
- echo "${fileNum} ${fileName}"
+ echo "0 ${adlistAddress}"
elif [[ -n "${exact}" ]]; then
- echo " ${fileName}"
+ echo " - ${adlistAddress} ${extra}"
else
- if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
+ if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then
count=""
- echo " ${matchType^} found in ${COL_BOLD}${fileName}${COL_NC}:"
- fileName_prev="${fileName}"
+ echo " ${matchType^} found in ${COL_BOLD}${adlistAddress}${COL_NC}:"
+ adlistAddress_prev="${adlistAddress}"
fi
: $((count++))
@@ -231,7 +246,7 @@ for result in "${results[@]}"; do
[[ "${count}" -gt "${max_count}" ]] && continue
echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}"
else
- echo " ${result#*:}"
+ echo " ${match} ${extra}"
fi
fi
done
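
The loop above pulls each field out of a pipe-delimited result row with bash parameter expansion. A minimal standalone sketch of that parsing, assuming the same domain|address|enabled row format produced for gravity matches (the domain and adlist URL below are hypothetical):

    #!/usr/bin/env bash
    # Hypothetical example row: matched domain | adlist address | enabled flag
    result="doubleclick.net|https://example.com/hosts.txt|0"

    match="${result%%|*}"         # everything before the first "|": the matched domain
    rest="${result#*|}"           # drop the first field
    adlistAddress="${rest%%|*}"   # adlist address
    enabled="${rest#*|}"          # enabled flag ("0" or "1")

    extra=""
    [[ "${enabled}" == "0" ]] && extra=" (disabled)"
    echo "   - ${adlistAddress}${extra}"
    echo "       ${match}"
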
diff --git a/advanced/Scripts/updatecheck.sh b/advanced/Scripts/updatecheck.sh
index 26dc2ac2..afb03ebb 100755
--- a/advanced/Scripts/updatecheck.sh
+++ b/advanced/Scripts/updatecheck.sh
@@ -51,6 +51,7 @@ if [[ "$2" == "remote" ]]; then
GITHUB_CORE_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/pi-hole/releases/latest' 2> /dev/null)")"
echo -n "${GITHUB_CORE_VERSION}" > "${GITHUB_VERSION_FILE}"
+ chmod 644 "${GITHUB_VERSION_FILE}"
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
GITHUB_WEB_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/AdminLTE/releases/latest' 2> /dev/null)")"
@@ -66,6 +67,7 @@ else
CORE_BRANCH="$(get_local_branch /etc/.pihole)"
echo -n "${CORE_BRANCH}" > "${LOCAL_BRANCH_FILE}"
+ chmod 644 "${LOCAL_BRANCH_FILE}"
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
WEB_BRANCH="$(get_local_branch /var/www/html/admin)"
@@ -79,6 +81,7 @@ else
CORE_VERSION="$(get_local_version /etc/.pihole)"
echo -n "${CORE_VERSION}" > "${LOCAL_VERSION_FILE}"
+ chmod 644 "${LOCAL_VERSION_FILE}"
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
WEB_VERSION="$(get_local_version /var/www/html/admin)"
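
The chmod 644 calls added above make the cached version and branch files world-readable so unprivileged scripts can consume them. A small sketch of reading the version cache, assuming the three space-separated fields (core, web interface, FTL) that version.sh expects further down:

    #!/usr/bin/env bash
    cachedVersions="/etc/pihole/GitHubVersions"

    if [[ -r "${cachedVersions}" ]]; then
        # File holds something like "v5.0 v5.0 v5.0": core, web interface, FTL
        IFS=' ' read -r coreVersion webVersion ftlVersion < "${cachedVersions}"
        echo "core=${coreVersion} web=${webVersion} ftl=${ftlVersion}"
    fi
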
diff --git a/advanced/Scripts/version.sh b/advanced/Scripts/version.sh
index f6d4d344..d2c41cba 100755
--- a/advanced/Scripts/version.sh
+++ b/advanced/Scripts/version.sh
@@ -84,6 +84,21 @@ getRemoteVersion(){
# Get the version from the remote origin
local daemon="${1}"
local version
+ local cachedVersions
+ local arrCache
+ cachedVersions="/etc/pihole/GitHubVersions"
+
+    # If the above file exists, we can read from it. Prevents overuse of the GitHub API
+ if [[ -f "$cachedVersions" ]]; then
+ IFS=' ' read -r -a arrCache < "$cachedVersions"
+ case $daemon in
+ "pi-hole" ) echo "${arrCache[0]}";;
+ "AdminLTE" ) echo "${arrCache[1]}";;
+ "FTL" ) echo "${arrCache[2]}";;
+ esac
+
+ return 0
+ fi
version=$(curl --silent --fail "https://api.github.com/repos/pi-hole/${daemon}/releases/latest" | \
awk -F: '$1 ~/tag_name/ { print $2 }' | \
@@ -97,22 +112,48 @@ getRemoteVersion(){
return 0
}
+getLocalBranch(){
+ # Get the checked out branch of the local directory
+ local directory="${1}"
+ local branch
+
+    # The local FTL branch is stored in /etc/pihole/ftlbranch
+ if [[ "$1" == "FTL" ]]; then
+ branch="$(pihole-FTL branch)"
+ else
+ cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
+ branch=$(git rev-parse --abbrev-ref HEAD || echo "$DEFAULT")
+ fi
+ if [[ ! "${branch}" =~ ^v ]]; then
+ if [[ "${branch}" == "master" ]]; then
+ echo ""
+ elif [[ "${branch}" == "HEAD" ]]; then
+ echo "in detached HEAD state at "
+ else
+ echo "${branch} "
+ fi
+ else
+ # Branch started in "v"
+ echo "release "
+ fi
+ return 0
+}
+
versionOutput() {
[[ "$1" == "pi-hole" ]] && GITDIR=$COREGITDIR
[[ "$1" == "AdminLTE" ]] && GITDIR=$WEBGITDIR
[[ "$1" == "FTL" ]] && GITDIR="FTL"
- [[ "$2" == "-c" ]] || [[ "$2" == "--current" ]] || [[ -z "$2" ]] && current=$(getLocalVersion $GITDIR)
+ [[ "$2" == "-c" ]] || [[ "$2" == "--current" ]] || [[ -z "$2" ]] && current=$(getLocalVersion $GITDIR) && branch=$(getLocalBranch $GITDIR)
[[ "$2" == "-l" ]] || [[ "$2" == "--latest" ]] || [[ -z "$2" ]] && latest=$(getRemoteVersion "$1")
if [[ "$2" == "-h" ]] || [[ "$2" == "--hash" ]]; then
- [[ "$3" == "-c" ]] || [[ "$3" == "--current" ]] || [[ -z "$3" ]] && curHash=$(getLocalHash "$GITDIR")
+ [[ "$3" == "-c" ]] || [[ "$3" == "--current" ]] || [[ -z "$3" ]] && curHash=$(getLocalHash "$GITDIR") && branch=$(getLocalBranch $GITDIR)
[[ "$3" == "-l" ]] || [[ "$3" == "--latest" ]] || [[ -z "$3" ]] && latHash=$(getRemoteHash "$1" "$(cd "$GITDIR" 2> /dev/null && git rev-parse --abbrev-ref HEAD)")
fi
-
if [[ -n "$current" ]] && [[ -n "$latest" ]]; then
- output="${1^} version is $current (Latest: $latest)"
+ output="${1^} version is $branch$current (Latest: $latest)"
elif [[ -n "$current" ]] && [[ -z "$latest" ]]; then
- output="Current ${1^} version is $current"
+ output="Current ${1^} version is $branch$current."
elif [[ -z "$current" ]] && [[ -n "$latest" ]]; then
output="Latest ${1^} version is $latest"
elif [[ "$curHash" == "N/A" ]] || [[ "$latHash" == "N/A" ]]; then
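
getLocalBranch prefixes the reported version with the checked-out branch unless it is master or a release tag. The prefix logic copied into a tiny standalone function, purely to illustrate the possible outputs:

    #!/usr/bin/env bash
    # Illustration only; not the script's actual entry point
    branchPrefix() {
        local branch="${1}"
        if [[ ! "${branch}" =~ ^v ]]; then
            if [[ "${branch}" == "master" ]]; then
                echo ""
            elif [[ "${branch}" == "HEAD" ]]; then
                echo "in detached HEAD state at "
            else
                echo "${branch} "
            fi
        else
            # Branch name starts with "v", i.e. a release tag
            echo "release "
        fi
    }

    branchPrefix "master"       # -> "" (no prefix)
    branchPrefix "development"  # -> "development "
    branchPrefix "v5.0"         # -> "release "
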
diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh
index 3b17b6b5..a518a428 100755
--- a/advanced/Scripts/webpage.sh
+++ b/advanced/Scripts/webpage.sh
@@ -17,6 +17,9 @@ readonly FTLconf="/etc/pihole/pihole-FTL.conf"
# 03 -> wildcards
readonly dhcpstaticconfig="/etc/dnsmasq.d/04-pihole-static-dhcp.conf"
readonly PI_HOLE_BIN_DIR="/usr/local/bin"
+readonly dnscustomfile="/etc/pihole/custom.list"
+
+readonly gravityDBfile="/etc/pihole/gravity.db"
coltable="/opt/pihole/COL_TABLE"
if [[ -f ${coltable} ]]; then
@@ -33,7 +36,6 @@ Options:
-c, celsius Set Celsius as preferred temperature unit
-f, fahrenheit Set Fahrenheit as preferred temperature unit
-k, kelvin Set Kelvin as preferred temperature unit
- -r, hostrecord Add a name to the DNS associated to an IPv4/IPv6 address
-e, email Set an administrative contact address for the Block Page
-h, --help Show this help dialog
-i, interface Specify dnsmasq's interface listening behavior
@@ -86,9 +88,9 @@ SetTemperatureUnit() {
HashPassword() {
# Compute password hash twice to avoid rainbow table vulnerability
- return=$(echo -n ${1} | sha256sum | sed 's/\s.*$//')
- return=$(echo -n ${return} | sha256sum | sed 's/\s.*$//')
- echo ${return}
+ return=$(echo -n "${1}" | sha256sum | sed 's/\s.*$//')
+ return=$(echo -n "${return}" | sha256sum | sed 's/\s.*$//')
+ echo "${return}"
}
SetWebPassword() {
@@ -142,18 +144,18 @@ ProcessDNSSettings() {
delete_dnsmasq_setting "server"
COUNTER=1
- while [[ 1 ]]; do
+ while true ; do
var=PIHOLE_DNS_${COUNTER}
if [ -z "${!var}" ]; then
break;
fi
add_dnsmasq_setting "server" "${!var}"
- let COUNTER=COUNTER+1
+ (( COUNTER++ ))
done
# The option LOCAL_DNS_PORT is deprecated
# We apply it once more, and then convert it into the current format
- if [ ! -z "${LOCAL_DNS_PORT}" ]; then
+ if [ -n "${LOCAL_DNS_PORT}" ]; then
add_dnsmasq_setting "server" "127.0.0.1#${LOCAL_DNS_PORT}"
add_setting "PIHOLE_DNS_${COUNTER}" "127.0.0.1#${LOCAL_DNS_PORT}"
delete_setting "LOCAL_DNS_PORT"
@@ -176,14 +178,13 @@ ProcessDNSSettings() {
if [[ "${DNSSEC}" == true ]]; then
echo "dnssec
-trust-anchor=.,19036,8,2,49AAC11D7B6F6446702E54A1607371607A1A41855200FD2CE1CDDE32F24E8FB5
trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC683457104237C7F8EC8D
" >> "${dnsmasqconfig}"
fi
delete_dnsmasq_setting "host-record"
- if [ ! -z "${HOSTRECORD}" ]; then
+ if [ -n "${HOSTRECORD}" ]; then
add_dnsmasq_setting "host-record" "${HOSTRECORD}"
fi
@@ -328,6 +329,7 @@ dhcp-option=option:router,${DHCP_ROUTER}
dhcp-leasefile=/etc/pihole/dhcp.leases
#quiet-dhcp
" > "${dhcpconfig}"
+ chmod 644 "${dhcpconfig}"
if [[ "${PIHOLE_DOMAIN}" != "none" ]]; then
echo "domain=${PIHOLE_DOMAIN}" >> "${dhcpconfig}"
@@ -398,22 +400,38 @@ SetWebUILayout() {
change_setting "WEBUIBOXEDLAYOUT" "${args[2]}"
}
+CheckUrl(){
+ local regex
+ # Check for characters NOT allowed in URLs
+ regex="[^a-zA-Z0-9:/?&%=~._-]"
+ if [[ "${1}" =~ ${regex} ]]; then
+ return 1
+ else
+ return 0
+ fi
+}
+
CustomizeAdLists() {
- list="/etc/pihole/adlists.list"
-
- if [[ "${args[2]}" == "enable" ]]; then
- sed -i "\\@${args[3]}@s/^#http/http/g" "${list}"
- elif [[ "${args[2]}" == "disable" ]]; then
- sed -i "\\@${args[3]}@s/^http/#http/g" "${list}"
- elif [[ "${args[2]}" == "add" ]]; then
- if [[ $(grep -c "^${args[3]}$" "${list}") -eq 0 ]] ; then
- echo "${args[3]}" >> ${list}
+ local address
+ address="${args[3]}"
+ local comment
+ comment="${args[4]}"
+
+ if CheckUrl "${address}"; then
+ if [[ "${args[2]}" == "enable" ]]; then
+ sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'"
+ elif [[ "${args[2]}" == "disable" ]]; then
+ sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'"
+ elif [[ "${args[2]}" == "add" ]]; then
+ sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')"
+ elif [[ "${args[2]}" == "del" ]]; then
+ sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'"
+ else
+ echo "Not permitted"
+ return 1
fi
- elif [[ "${args[2]}" == "del" ]]; then
- var=$(echo "${args[3]}" | sed 's/\//\\\//g')
- sed -i "/${var}/Id" "${list}"
else
- echo "Not permitted"
+        echo "Invalid URL"
return 1
fi
}
@@ -459,32 +477,6 @@ RemoveDHCPStaticAddress() {
sed -i "/dhcp-host=${mac}.*/d" "${dhcpstaticconfig}"
}
-SetHostRecord() {
- if [[ "${1}" == "-h" ]] || [[ "${1}" == "--help" ]]; then
- echo "Usage: pihole -a hostrecord <domain> [IPv4-address],[IPv6-address]
-Example: 'pihole -a hostrecord home.domain.com 192.168.1.1,2001:db8:a0b:12f0::1'
-Add a name to the DNS associated to an IPv4/IPv6 address
-
-Options:
- \"\" Empty: Remove host record
- -h, --help Show this help dialog"
- exit 0
- fi
-
- if [[ -n "${args[3]}" ]]; then
- change_setting "HOSTRECORD" "${args[2]},${args[3]}"
- echo -e " ${TICK} Setting host record for ${args[2]} to ${args[3]}"
- else
- change_setting "HOSTRECORD" ""
- echo -e " ${TICK} Removing host record"
- fi
-
- ProcessDNSSettings
-
- # Restart dnsmasq to load new configuration
- RestartDNS
-}
-
SetAdminEmail() {
if [[ "${1}" == "-h" ]] || [[ "${1}" == "--help" ]]; then
echo "Usage: pihole -a email <address>
@@ -498,6 +490,16 @@ Options:
fi
if [[ -n "${args[2]}" ]]; then
+
+ # Sanitize email address in case of security issues
+ # Regex from https://stackoverflow.com/a/2138832/4065967
+ local regex
+ regex="^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\$"
+ if [[ ! "${args[2]}" =~ ${regex} ]]; then
+ echo -e " ${CROSS} Invalid email address"
+ exit 0
+ fi
+
change_setting "ADMIN_EMAIL" "${args[2]}"
echo -e " ${TICK} Setting admin contact to ${args[2]}"
else
@@ -523,10 +525,10 @@ Interfaces:
fi
if [[ "${args[2]}" == "all" ]]; then
- echo -e " ${INFO} Listening on all interfaces, permiting all origins. Please use a firewall!"
+ echo -e " ${INFO} Listening on all interfaces, permitting all origins. Please use a firewall!"
change_setting "DNSMASQ_LISTENING" "all"
elif [[ "${args[2]}" == "local" ]]; then
- echo -e " ${INFO} Listening on all interfaces, permiting origins from one hop away (LAN)"
+ echo -e " ${INFO} Listening on all interfaces, permitting origins from one hop away (LAN)"
change_setting "DNSMASQ_LISTENING" "local"
else
echo -e " ${INFO} Listening only on interface ${PIHOLE_INTERFACE}"
@@ -543,23 +545,50 @@ Interfaces:
}
Teleporter() {
- local datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
+ local datetimestamp
+ datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.tar.gz"
}
+checkDomain()
+{
+ local domain validDomain
+ # Convert to lowercase
+ domain="${1,,}"
+ validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check
+ validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label
+ echo "${validDomain}"
+}
+
addAudit()
{
shift # skip "-a"
shift # skip "audit"
- for var in "$@"
+ local domains validDomain
+ domains=""
+ for domain in "$@"
do
- echo "${var}" >> /etc/pihole/auditlog.list
+ # Check domain to be added. Only continue if it is valid
+ validDomain="$(checkDomain "${domain}")"
+ if [[ -n "${validDomain}" ]]; then
+            # Put a comma between domains when there is
+            # more than one domain to be added
+ # SQL INSERT allows adding multiple rows at once using the format
+ ## INSERT INTO table (domain) VALUES ('abc.de'),('fgh.ij'),('klm.no'),('pqr.st');
+ if [[ -n "${domains}" ]]; then
+ domains="${domains},"
+ fi
+ domains="${domains}('${domain}')"
+ fi
done
+ # Insert only the domain here. The date_added field will be
+ # filled with its default value (date_added = current timestamp)
+ sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};"
}
clearAudit()
{
- echo -n "" > /etc/pihole/auditlog.list
+ sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;"
}
SetPrivacyLevel() {
@@ -569,6 +598,28 @@ SetPrivacyLevel() {
fi
}
+AddCustomDNSAddress() {
+ echo -e " ${TICK} Adding custom DNS entry..."
+
+ ip="${args[2]}"
+ host="${args[3]}"
+ echo "${ip} ${host}" >> "${dnscustomfile}"
+
+ # Restart dnsmasq to load new custom DNS entries
+ RestartDNS
+}
+
+RemoveCustomDNSAddress() {
+ echo -e " ${TICK} Removing custom DNS entry..."
+
+ ip="${args[2]}"
+ host="${args[3]}"
+ sed -i "/${ip} ${host}/d" "${dnscustomfile}"
+
+ # Restart dnsmasq to update removed custom DNS entries
+ RestartDNS
+}
+
main() {
args=("$@")
@@ -592,7 +643,6 @@ main() {
"resolve" ) ResolutionSettings;;
"addstaticdhcp" ) AddDHCPStaticAddress;;
"removestaticdhcp" ) RemoveDHCPStaticAddress;;
- "-r" | "hostrecord" ) SetHostRecord "$3";;
"-e" | "email" ) SetAdminEmail "$3";;
"-i" | "interface" ) SetListeningMode "$@";;
"-t" | "teleporter" ) Teleporter;;
@@ -600,6 +650,8 @@ main() {
"audit" ) addAudit "$@";;
"clearaudit" ) clearAudit;;
"-l" | "privacylevel" ) SetPrivacyLevel;;
+ "addcustomdns" ) AddCustomDNSAddress;;
+ "removecustomdns" ) RemoveCustomDNSAddress;;
* ) helpFunc;;
esac
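
addAudit above batches all valid domains into one INSERT by building a comma-separated VALUES string. The same construction in isolation, printing the statement instead of running it (the input domains are hypothetical):

    #!/usr/bin/env bash
    domains=""
    for domain in example.com ads.example.net; do
        # Separate value groups with commas after the first one
        [[ -n "${domains}" ]] && domains="${domains},"
        domains="${domains}('${domain}')"
    done
    # Prints: INSERT INTO domain_audit (domain) VALUES ('example.com'),('ads.example.net');
    echo "INSERT INTO domain_audit (domain) VALUES ${domains};"
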
diff --git a/advanced/Scripts/wildcard_regex_converter.sh b/advanced/Scripts/wildcard_regex_converter.sh
index 8c9578a3..b4b6b4a1 100644
--- a/advanced/Scripts/wildcard_regex_converter.sh
+++ b/advanced/Scripts/wildcard_regex_converter.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql
new file mode 100644
index 00000000..27b8797f
--- /dev/null
+++ b/advanced/Templates/gravity.db.sql
@@ -0,0 +1,188 @@
+PRAGMA foreign_keys=OFF;
+BEGIN TRANSACTION;
+
+CREATE TABLE "group"
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ name TEXT UNIQUE NOT NULL,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ description TEXT
+);
+INSERT INTO "group" (id,enabled,name,description) VALUES (0,1,'Default','The default group');
+
+CREATE TABLE domainlist
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ type INTEGER NOT NULL DEFAULT 0,
+ domain TEXT UNIQUE NOT NULL,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ comment TEXT
+);
+
+CREATE TABLE adlist
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ address TEXT UNIQUE NOT NULL,
+ enabled BOOLEAN NOT NULL DEFAULT 1,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ comment TEXT
+);
+
+CREATE TABLE adlist_by_group
+(
+ adlist_id INTEGER NOT NULL REFERENCES adlist (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (adlist_id, group_id)
+);
+
+CREATE TABLE gravity
+(
+ domain TEXT NOT NULL,
+ adlist_id INTEGER NOT NULL REFERENCES adlist (id)
+);
+
+CREATE TABLE info
+(
+ property TEXT PRIMARY KEY,
+ value TEXT NOT NULL
+);
+
+INSERT INTO "info" VALUES('version','12');
+
+CREATE TABLE domain_audit
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ domain TEXT UNIQUE NOT NULL,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int))
+);
+
+CREATE TABLE domainlist_by_group
+(
+ domainlist_id INTEGER NOT NULL REFERENCES domainlist (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (domainlist_id, group_id)
+);
+
+CREATE TABLE client
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ip TEXT NOT NULL UNIQUE,
+ date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+ comment TEXT
+);
+
+CREATE TABLE client_by_group
+(
+ client_id INTEGER NOT NULL REFERENCES client (id),
+ group_id INTEGER NOT NULL REFERENCES "group" (id),
+ PRIMARY KEY (client_id, group_id)
+);
+
+CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist
+ BEGIN
+ UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address;
+ END;
+
+CREATE TRIGGER tr_client_update AFTER UPDATE ON client
+ BEGIN
+ UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE ip = NEW.ip;
+ END;
+
+CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist
+ BEGIN
+ UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
+ END;
+
+CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 0
+ ORDER BY domainlist.id;
+
+CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 1
+ ORDER BY domainlist.id;
+
+CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 2
+ ORDER BY domainlist.id;
+
+CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+ FROM domainlist
+ LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+ LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+ WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ AND domainlist.type = 3
+ ORDER BY domainlist.id;
+
+CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id
+ FROM gravity
+ LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id
+ LEFT JOIN adlist ON adlist.id = gravity.adlist_id
+ LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+ WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1);
+
+CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id
+ FROM adlist
+ LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
+ LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+ WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ ORDER BY adlist.id;
+
+CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
+ BEGIN
+ INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_client_add AFTER INSERT ON client
+ BEGIN
+ INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
+ BEGIN
+ INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
+ BEGIN
+ UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
+ END;
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+ BEGIN
+ INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Default');
+ END;
+
+CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist
+ BEGIN
+ DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id;
+ END;
+
+CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist
+ BEGIN
+ DELETE FROM adlist_by_group WHERE adlist_id = OLD.id;
+ END;
+
+CREATE TRIGGER tr_client_delete AFTER DELETE ON client
+ BEGIN
+ DELETE FROM client_by_group WHERE client_id = OLD.id;
+ END;
+
+COMMIT;
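
The views above are the read surface for the rest of Pi-hole. Two read-only queries against the new schema, sketched with the sqlite3 CLI (the domain is a hypothetical example):

    #!/usr/bin/env bash
    gravityDBfile="/etc/pihole/gravity.db"

    # Number of blocked domains contributed per adlist id
    sqlite3 "${gravityDBfile}" "SELECT adlist_id, COUNT(*) FROM gravity GROUP BY adlist_id;"

    # Is a given domain on the enabled exact whitelist?
    sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_whitelist WHERE domain = 'example.com';"
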
diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql
new file mode 100644
index 00000000..4a2a9b22
--- /dev/null
+++ b/advanced/Templates/gravity_copy.sql
@@ -0,0 +1,42 @@
+.timeout 30000
+
+ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD;
+
+BEGIN TRANSACTION;
+
+DROP TRIGGER tr_domainlist_add;
+DROP TRIGGER tr_client_add;
+DROP TRIGGER tr_adlist_add;
+
+INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group";
+INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit;
+
+INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist;
+INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group;
+
+INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist;
+INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group;
+
+INSERT OR REPLACE INTO info SELECT * FROM OLD.info;
+
+INSERT OR REPLACE INTO client SELECT * FROM OLD.client;
+INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group;
+
+
+CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
+ BEGIN
+ INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_client_add AFTER INSERT ON client
+ BEGIN
+ INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
+ BEGIN
+ INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
+ END;
+
+
+COMMIT;
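
gravity_copy.sql attaches the live /etc/pihole/gravity.db and copies its rows into whatever database it is run against. One plausible way to apply it when rebuilding the schema, assuming a temporary target file and the template paths under /etc/.pihole (the exact invocation used by gravity.sh is not part of this diff):

    #!/usr/bin/env bash
    # Assumed paths, for illustration only
    newDB="/etc/pihole/gravity.db_temp"
    templates="/etc/.pihole/advanced/Templates"

    sqlite3 "${newDB}" < "${templates}/gravity.db.sql"    # create the empty v12 schema
    sqlite3 "${newDB}" < "${templates}/gravity_copy.sql"  # pull rows across from the live database
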
diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service
index 8a4c7ce6..f32a5e89 100644
--- a/advanced/Templates/pihole-FTL.service
+++ b/advanced/Templates/pihole-FTL.service
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
### BEGIN INIT INFO
# Provides: pihole-FTL
# Required-Start: $remote_fs $syslog
@@ -48,7 +48,8 @@ start() {
chown pihole:pihole /etc/pihole /etc/pihole/dhcp.leases 2> /dev/null
chown pihole:pihole /var/log/pihole-FTL.log /var/log/pihole.log
chmod 0644 /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log
- echo "nameserver 127.0.0.1" | /sbin/resolvconf -a lo.piholeFTL
+ # Chown database files to the user FTL runs as. We ignore errors as the files may not (yet) exist
+ chown pihole:pihole /etc/pihole/pihole-FTL.db /etc/pihole/gravity.db 2> /dev/null
if setcap CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN+eip "$(which pihole-FTL)"; then
su -s /bin/sh -c "/usr/bin/pihole-FTL" "$FTLUSER"
else
@@ -62,7 +63,6 @@ start() {
# Stop the service
stop() {
if is_running; then
- /sbin/resolvconf -d lo.piholeFTL
kill "$(get_pid)"
for i in {1..5}; do
if ! is_running; then
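
The start() routine now relies on setcap alone to let pihole-FTL bind privileged ports without running as root. Whether the capabilities were applied can be checked with getcap, shipped alongside setcap in libcap (output formatting varies slightly between libcap versions):

    # Show the capabilities currently set on the FTL binary
    getcap "$(which pihole-FTL)"
    # Expected to list cap_net_admin, cap_net_bind_service and cap_net_raw with the +eip flags
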
diff --git a/advanced/bash-completion/pihole b/advanced/bash-completion/pihole
index 7ba0dad8..88282b02 100644
--- a/advanced/bash-completion/pihole
+++ b/advanced/bash-completion/pihole
@@ -7,7 +7,7 @@ _pihole() {
case "${prev}" in
"pihole")
- opts="admin blacklist checkout chronometer debug disable enable flush help logging query reconfigure regex restartdns status tail uninstall updateGravity updatePihole version wildcard whitelist"
+ opts="admin blacklist checkout chronometer debug disable enable flush help logging query reconfigure regex restartdns status tail uninstall updateGravity updatePihole version wildcard whitelist arpflush"
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
;;
"whitelist"|"blacklist"|"wildcard"|"regex")
@@ -15,7 +15,7 @@ _pihole() {
COMPREPLY=( $(compgen -W "${opts_lists}" -- ${cur}) )
;;
"admin")
- opts_admin="celsius email fahrenheit hostrecord interface kelvin password privacylevel"
+ opts_admin="celsius email fahrenheit interface kelvin password privacylevel"
COMPREPLY=( $(compgen -W "${opts_admin}" -- ${cur}) )
;;
"checkout")
diff --git a/advanced/index.php b/advanced/index.php
index 78135e1a..4f2a17f7 100644
--- a/advanced/index.php
+++ b/advanced/index.php
@@ -6,8 +6,8 @@
* This file is copyright under the latest version of the EUPL.
* Please see LICENSE file for your rights under this license. */
-// Sanitise HTTP_HOST output
-$serverName = htmlspecialchars($_SERVER["HTTP_HOST"]);
+// Sanitize SERVER_NAME output
+$serverName = htmlspecialchars($_SERVER["SERVER_NAME"]);
// Remove external ipv6 brackets if any
$serverName = preg_replace('/^\[(.*)\]$/', '${1}', $serverName);
@@ -50,16 +50,24 @@ function setHeader($type = "x") {
}
// Determine block page type
-if ($serverName === "pi.hole") {
+if ($serverName === "pi.hole"
+ || (!empty($_SERVER["VIRTUAL_HOST"]) && $serverName === $_SERVER["VIRTUAL_HOST"])) {
// Redirect to Web Interface
exit(header("Location: /admin"));
} elseif (filter_var($serverName, FILTER_VALIDATE_IP) || in_array($serverName, $authorizedHosts)) {
// Set Splash Page output
$splashPage = "
- <html><head>
+ <html>
+ <head>
$viewPort
- <link rel='stylesheet' href='/pihole/blockingpage.css' type='text/css'/>
- </head><body id='splashpage'><img src='/admin/img/logo.svg'/><br/>Pi-<b>hole</b>: Your black hole for Internet advertisements<br><a href='/admin'>Did you mean to go to the admin panel?</a></body></html>
+ <link rel='stylesheet' href='pihole/blockingpage.css' type='text/css'/>
+ </head>
+ <body id='splashpage'>
+ <img src='admin/img/logo.svg'/><br/>
+ Pi-<b>hole</b>: Your black hole for Internet advertisements<br/>
+ <a href='/admin'>Did you mean to go to the admin panel?</a>
+ </body>
+ </html>
";
// Set splash/landing page based off presence of $landPage
@@ -68,7 +76,7 @@ if ($serverName === "pi.hole") {
// Unset variables so as to not be included in $landPage
unset($serverName, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort);
- // Render splash/landing page when directly browsing via IP or authorised hostname
+ // Render splash/landing page when directly browsing via IP or authorized hostname
exit($renderPage);
} elseif ($currentUrlExt === "js") {
// Serve Pi-hole Javascript for blocked domains requesting JS
@@ -96,26 +104,30 @@ if ($serverName === "pi.hole") {
// Define admin email address text based off $svEmail presence
$bpAskAdmin = !empty($svEmail) ? '<a href="mailto:'.$svEmail.'?subject=Site Blocked: '.$serverName.'"></a>' : "<span/>";
-// Determine if at least one block list has been generated
-$blocklistglob = glob("/etc/pihole/list.0.*.domains");
-if ($blocklistglob === array()) {
- die("[ERROR] There are no domain lists generated lists within <code>/etc/pihole/</code>! Please update gravity by running <code>pihole -g</code>, or repair Pi-hole using <code>pihole -r</code>.");
+// Get possible non-standard location of FTL's database
+$FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf");
+if (isset($FTLsettings["GRAVITYDB"])) {
+ $gravityDBFile = $FTLsettings["GRAVITYDB"];
+} else {
+ $gravityDBFile = "/etc/pihole/gravity.db";
}
-// Set location of adlists file
-if (is_file("/etc/pihole/adlists.list")) {
- $adLists = "/etc/pihole/adlists.list";
-} elseif (is_file("/etc/pihole/adlists.default")) {
- $adLists = "/etc/pihole/adlists.default";
-} else {
- die("[ERROR] File not found: <code>/etc/pihole/adlists.list</code>");
+// Connect to gravity.db
+try {
+ $db = new SQLite3($gravityDBFile, SQLITE3_OPEN_READONLY);
+} catch (Exception $exception) {
+ die("[ERROR]: Failed to connect to gravity.db");
}
-// Get all URLs starting with "http" or "www" from adlists and re-index array numerically
-$adlistsUrls = array_values(preg_grep("/(^http)|(^www)/i", file($adLists, FILE_IGNORE_NEW_LINES)));
+// Get all adlist addresses
+$adlistResults = $db->query("SELECT address FROM vw_adlist");
+$adlistsUrls = array();
+while ($row = $adlistResults->fetchArray()) {
+ array_push($adlistsUrls, $row[0]);
+}
if (empty($adlistsUrls))
- die("[ERROR]: There are no adlist URL's found within <code>$adLists</code>");
+ die("[ERROR]: There are no adlists enabled");
// Get total number of blocklists (Including Whitelist, Blacklist & Wildcard lists)
$adlistsCount = count($adlistsUrls) + 3;
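
The block page now reads its adlist URLs straight from gravity.db instead of adlists.list. The equivalent query from the shell, handy for checking what the page will see (read-only open, mirroring SQLITE3_OPEN_READONLY above):

    sqlite3 -readonly /etc/pihole/gravity.db "SELECT address FROM vw_adlist;"
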
@@ -127,7 +139,12 @@ ini_set("default_socket_timeout", 3);
function queryAds($serverName) {
// Determine the time it takes while querying adlists
$preQueryTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"];
- $queryAds = file("http://127.0.0.1/admin/scripts/pi-hole/php/queryads.php?domain=$serverName&bp", FILE_IGNORE_NEW_LINES);
+ $queryAdsURL = sprintf(
+ "http://127.0.0.1:%s/admin/scripts/pi-hole/php/queryads.php?domain=%s&bp",
+ $_SERVER["SERVER_PORT"],
+ $serverName
+ );
+ $queryAds = file($queryAdsURL, FILE_IGNORE_NEW_LINES);
$queryAds = array_values(array_filter(preg_replace("/data:\s+/", "", $queryAds)));
$queryTime = sprintf("%.0f", (microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]) - $preQueryTime);
@@ -205,7 +222,7 @@ $phVersion = exec("cd /etc/.pihole/ && git describe --long --tags");
if (explode("-", $phVersion)[1] != "0")
$execTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"];
-// Please Note: Text is added via CSS to allow an admin to provide a localised
+// Please Note: Text is added via CSS to allow an admin to provide a localized
// language without the need to edit this file
setHeader();
@@ -222,10 +239,10 @@ setHeader();
<?=$viewPort ?>
<meta name="robots" content="noindex,nofollow"/>
<meta http-equiv="x-dns-prefetch-control" content="off">
- <link rel="shortcut icon" href="//pi.hole/admin/img/favicon.png" type="image/x-icon"/>
- <link rel="stylesheet" href="//pi.hole/pihole/blockingpage.css" type="text/css"/>
+ <link rel="shortcut icon" href="admin/img/favicon.png" type="image/x-icon"/>
+ <link rel="stylesheet" href="pihole/blockingpage.css" type="text/css"/>
<title>● <?=$serverName ?></title>
- <script src="//pi.hole/admin/scripts/vendor/jquery.min.js"></script>
+ <script src="admin/scripts/vendor/jquery.min.js"></script>
<script>
window.onload = function () {
<?php