Merge pull request #70 from pi-hole/master
Official update v5.17.3
arevindh authored Jan 22, 2024
2 parents 243e045 + 19bfa08 commit b27737e
Showing 15 changed files with 88 additions and 76 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/codeql-analysis.yml
@@ -25,16 +25,16 @@ jobs:
steps:
-
name: Checkout repository
uses: actions/checkout@v4.1.0
uses: actions/checkout@v4.1.1
# Initializes the CodeQL tools for scanning.
-
name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: 'python'
-
name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v3
-
name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3
4 changes: 2 additions & 2 deletions .github/workflows/stale.yml
@@ -17,7 +17,7 @@ jobs:
issues: write

steps:
- uses: actions/stale@v8.0.0
- uses: actions/stale@v9.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
@@ -40,7 +40,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4.1.0
uses: actions/checkout@v4.1.1
- name: Remove 'stale' label
run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }}
env:
2 changes: 1 addition & 1 deletion .github/workflows/stale_pr.yml
@@ -17,7 +17,7 @@ jobs:
pull-requests: write

steps:
- uses: actions/stale@v8.0.0
- uses: actions/stale@v9.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
# Do not automatically mark PR/issue as stale
2 changes: 1 addition & 1 deletion .github/workflows/sync-back-to-dev.yml
@@ -33,7 +33,7 @@ jobs:
name: Syncing branches
steps:
- name: Checkout
uses: actions/checkout@v4.1.0
uses: actions/checkout@v4.1.1
- name: Opening pull request
run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal'
env:
6 changes: 3 additions & 3 deletions .github/workflows/test.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4.1.0
uses: actions/checkout@v4.1.1

- name: Check scripts in repository are executable
run: |
@@ -65,10 +65,10 @@ jobs:
DISTRO: ${{matrix.distro}}
steps:
- name: Checkout repository
uses: actions/checkout@v4.1.0
uses: actions/checkout@v4.1.1

- name: Set up Python 3.10
uses: actions/setup-python@v4.7.1
uses: actions/setup-python@v5.0.0
with:
python-version: "3.10"

30 changes: 15 additions & 15 deletions advanced/Scripts/database_migration/gravity-db.sh
@@ -19,13 +19,13 @@ upgrade_gravityDB(){
auditFile="${piholeDir}/auditlog.list"

# Get database version
version="$(pihole-FTL sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"
version="$(pihole-FTL sqlite3 -ni "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"

if [[ "$version" == "1" ]]; then
# This migration script upgrades the gravity.db file by
# adding the domain_audit table
echo -e " ${INFO} Upgrading gravity database from version 1 to 2"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/1_to_2.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/1_to_2.sql"
version=2

# Store audit domains in database table
@@ -40,58 +40,58 @@ upgrade_gravityDB(){
# renaming the regex table to regex_blacklist, and
# creating a new regex_whitelist table + corresponding linking table and views
echo -e " ${INFO} Upgrading gravity database from version 2 to 3"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/2_to_3.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/2_to_3.sql"
version=3
fi
if [[ "$version" == "3" ]]; then
# This migration script unifies the formerly separated domain
# lists into a single table with a UNIQUE domain constraint
echo -e " ${INFO} Upgrading gravity database from version 3 to 4"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/3_to_4.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/3_to_4.sql"
version=4
fi
if [[ "$version" == "4" ]]; then
# This migration script upgrades the gravity and list views
# implementing necessary changes for per-client blocking
echo -e " ${INFO} Upgrading gravity database from version 4 to 5"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/4_to_5.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/4_to_5.sql"
version=5
fi
if [[ "$version" == "5" ]]; then
# This migration script upgrades the adlist view
# to return an ID used in gravity.sh
echo -e " ${INFO} Upgrading gravity database from version 5 to 6"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/5_to_6.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/5_to_6.sql"
version=6
fi
if [[ "$version" == "6" ]]; then
# This migration script adds a special group with ID 0
# which is automatically associated to all clients not
# having their own group assignments
echo -e " ${INFO} Upgrading gravity database from version 6 to 7"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/6_to_7.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/6_to_7.sql"
version=7
fi
if [[ "$version" == "7" ]]; then
# This migration script recreated the group table
# to ensure uniqueness on the group name
# We also add date_added and date_modified columns
echo -e " ${INFO} Upgrading gravity database from version 7 to 8"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/7_to_8.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/7_to_8.sql"
version=8
fi
if [[ "$version" == "8" ]]; then
# This migration fixes some issues that were introduced
# in the previous migration script.
echo -e " ${INFO} Upgrading gravity database from version 8 to 9"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/8_to_9.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/8_to_9.sql"
version=9
fi
if [[ "$version" == "9" ]]; then
# This migration drops unused tables and creates triggers to remove
# obsolete groups assignments when the linked items are deleted
echo -e " ${INFO} Upgrading gravity database from version 9 to 10"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/9_to_10.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/9_to_10.sql"
version=10
fi
if [[ "$version" == "10" ]]; then
@@ -101,31 +101,31 @@ upgrade_gravityDB(){
# to keep the copying process generic (needs the same columns in both the
# source and the destination databases).
echo -e " ${INFO} Upgrading gravity database from version 10 to 11"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/10_to_11.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/10_to_11.sql"
version=11
fi
if [[ "$version" == "11" ]]; then
# Rename group 0 from "Unassociated" to "Default"
echo -e " ${INFO} Upgrading gravity database from version 11 to 12"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/11_to_12.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/11_to_12.sql"
version=12
fi
if [[ "$version" == "12" ]]; then
# Add column date_updated to adlist table
echo -e " ${INFO} Upgrading gravity database from version 12 to 13"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/12_to_13.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/12_to_13.sql"
version=13
fi
if [[ "$version" == "13" ]]; then
# Add columns number and status to adlist table
echo -e " ${INFO} Upgrading gravity database from version 13 to 14"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/13_to_14.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/13_to_14.sql"
version=14
fi
if [[ "$version" == "14" ]]; then
# Changes the vw_adlist created in 5_to_6
echo -e " ${INFO} Upgrading gravity database from version 14 to 15"
pihole-FTL sqlite3 "${database}" < "${scriptPath}/14_to_15.sql"
pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/14_to_15.sql"
version=15
fi
}
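
The only functional change in this file, repeated for every migration step and mirrored in the scripts below, is the new -ni flag on each pihole-FTL sqlite3 call, which appears to run FTL's embedded SQLite shell in non-interactive mode so that user init files and prompt behaviour cannot affect scripted output. A minimal sketch of the resulting pattern, with assumed paths and a single migration step for illustration:

    #!/usr/bin/env bash
    # Illustrative sketch only; paths are assumptions, not taken from this commit.
    database="/etc/pihole/gravity.db"
    scriptPath="/etc/.pihole/advanced/Scripts/database_migration/gravity"

    # Read the current schema version from the info table (non-interactive shell).
    version="$(pihole-FTL sqlite3 -ni "${database}" \
        "SELECT value FROM info WHERE property = 'version';")"

    # Apply one migration script and bump the tracked version.
    if [[ "${version}" == "14" ]]; then
        pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/14_to_15.sql"
        version=15
    fi
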
20 changes: 10 additions & 10 deletions advanced/Scripts/list.sh
@@ -150,18 +150,18 @@ AddDomain() {
domain="$1"

# Is the domain in the list we want to add it to?
num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
num="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
requestedListname="$(GetListnameFromTypeId "${typeId}")"

if [[ "${num}" -ne 0 ]]; then
existingTypeId="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
existingTypeId="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
if [[ "${existingTypeId}" == "${typeId}" ]]; then
if [[ "${verbose}" == true ]]; then
echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!"
fi
else
existingListname="$(GetListnameFromTypeId "${existingTypeId}")"
pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
pihole-FTL sqlite3 -ni "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
if [[ "${verbose}" == true ]]; then
echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!"
fi
@@ -177,10 +177,10 @@ AddDomain() {
# Insert only the domain here. The enabled and date_added fields will be filled
# with their default values (enabled = true, date_added = current timestamp)
if [[ -z "${comment}" ]]; then
pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
else
# also add comment when variable has been set through the "--comment" option
pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
fi
}

@@ -189,7 +189,7 @@ RemoveDomain() {
domain="$1"

# Is the domain in the list we want to remove it from?
num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"
num="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"

requestedListname="$(GetListnameFromTypeId "${typeId}")"

@@ -206,14 +206,14 @@ RemoveDomain() {
fi
reload=true
# Remove it from the current list
pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
}

Displaylist() {
local count num_pipes domain enabled status nicedate requestedListname

requestedListname="$(GetListnameFromTypeId "${typeId}")"
data="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"
data="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"

if [[ -z $data ]]; then
echo -e "Not showing empty list"
@@ -251,10 +251,10 @@ Displaylist() {
}

NukeList() {
count=$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};")
count=$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};")
listname="$(GetListnameFromTypeId "${typeId}")"
if [ "$count" -gt 0 ];then
pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
echo " ${TICK} Removed ${count} domain(s) from the ${listname}"
else
echo " ${INFO} ${listname} already empty. Nothing to do!"
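
For context when reading the list.sh changes: typeId selects which of the unified domainlist entries a command touches. A quick, illustrative way to inspect that table by type using the same non-interactive invocation; the 0-3 mapping in the comment is the usual Pi-hole v5 convention and is an assumption here, not taken from this diff:

    # Illustrative only: count domainlist entries per type in gravity.db.
    gravityDBfile="/etc/pihole/gravity.db"
    pihole-FTL sqlite3 -ni "${gravityDBfile}" \
        "SELECT type, COUNT(*) FROM domainlist GROUP BY type;"
    # Assumed type values: 0 exact whitelist, 1 exact blacklist,
    #                      2 regex whitelist, 3 regex blacklist
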
4 changes: 2 additions & 2 deletions advanced/Scripts/piholeARPTable.sh
@@ -39,15 +39,15 @@ flushARP(){
# Truncate network_addresses table in pihole-FTL.db
# This needs to be done before we can truncate the network table due to
# foreign key constraints
if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
if ! output=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table"
echo " Database location: ${DBFILE}"
echo " Output: ${output}"
return 1
fi

# Truncate network table in pihole-FTL.db
if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then
if ! output=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM network" 2>&1); then
echo -e "${OVER} ${CROSS} Failed to truncate network table"
echo " Database location: ${DBFILE}"
echo " Output: ${output}"
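
The comment above the first DELETE is the important detail: network_addresses holds a foreign key into network, so it must be emptied first or the second DELETE would be rejected while the constraint is enforced. As an alternative sketch (not what the script does), both tables could be flushed in one atomic statement, child table first:

    # Illustrative alternative: flush both tables atomically, child table first.
    DBFILE="/etc/pihole/pihole-FTL.db"
    pihole-FTL sqlite3 -ni "${DBFILE}" \
        "BEGIN; DELETE FROM network_addresses; DELETE FROM network; COMMIT;"
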
12 changes: 6 additions & 6 deletions advanced/Scripts/piholeDebug.sh
@@ -865,7 +865,7 @@ dig_at() {
# This helps emulate queries to different domains that a user might query
# It will also give extra assurance that Pi-hole is correctly resolving and blocking domains
local random_url
random_url=$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity WHERE domain not like '||%^' ORDER BY RANDOM() LIMIT 1")
random_url=$(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity WHERE domain not like '||%^' ORDER BY RANDOM() LIMIT 1")
# Fallback if no non-ABP style domains were found
if [ -z "${random_url}" ]; then
random_url="flurry.com"
@@ -1226,7 +1226,7 @@ show_db_entries() {
IFS=$'\r\n'
local entries=()
mapfile -t entries < <(\
pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \
pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" \
-cmd ".headers on" \
-cmd ".mode column" \
-cmd ".width ${widths}" \
@@ -1251,7 +1251,7 @@ show_FTL_db_entries() {
IFS=$'\r\n'
local entries=()
mapfile -t entries < <(\
pihole-FTL sqlite3 "${PIHOLE_FTL_DB_FILE}" \
pihole-FTL sqlite3 -ni "${PIHOLE_FTL_DB_FILE}" \
-cmd ".headers on" \
-cmd ".mode column" \
-cmd ".width ${widths}" \
@@ -1317,15 +1317,15 @@ analyze_gravity_list() {
fi

show_db_entries "Info table" "SELECT property,value FROM info" "20 40"
gravity_updated_raw="$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")"
gravity_updated_raw="$(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")"
gravity_updated="$(date -d @"${gravity_updated_raw}")"
log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}"
log_write ""

OLD_IFS="$IFS"
IFS=$'\r\n'
local gravity_sample=()
mapfile -t gravity_sample < <(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10")
mapfile -t gravity_sample < <(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10")
log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}"

for line in "${gravity_sample[@]}"; do
@@ -1357,7 +1357,7 @@ database_integrity_check(){

log_write "${INFO} Checking foreign key constraints of ${database} ... (this can take several minutes)"
unset result
result="$(pihole-FTL sqlite3 "${database}" -cmd ".headers on" -cmd ".mode column" "PRAGMA foreign_key_check" 2>&1 & spinner)"
result="$(pihole-FTL sqlite3 -ni "${database}" -cmd ".headers on" -cmd ".mode column" "PRAGMA foreign_key_check" 2>&1 & spinner)"
if [[ -z ${result} ]]; then
log_write "${TICK} No foreign key errors in ${database}"
else
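
The dig_at() change keeps the existing selection logic -- pick one random gravity domain, skip ABP-style entries such as ||example.com^, and fall back to a fixed domain when none qualifies -- it only switches to the non-interactive shell. Condensed for reference:

    # Illustrative sketch of the random test-domain selection in dig_at().
    PIHOLE_GRAVITY_DB_FILE="/etc/pihole/gravity.db"
    random_url=$(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" \
        "SELECT domain FROM vw_gravity WHERE domain NOT LIKE '||%^' ORDER BY RANDOM() LIMIT 1")
    # Fall back if gravity only contains ABP-style entries.
    random_url="${random_url:-flurry.com}"
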
2 changes: 1 addition & 1 deletion advanced/Scripts/piholeLogFlush.sh
@@ -63,7 +63,7 @@ else
fi
fi
# Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history)
deleted=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM query_storage WHERE timestamp >= strftime('%s','now')-86400; select changes() from query_storage limit 1")
deleted=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM query_storage WHERE timestamp >= strftime('%s','now')-86400; select changes() from query_storage limit 1")

# Restart pihole-FTL to force reloading history
sudo pihole restartdns
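
The flush statement removes only rows newer than now minus 86400 seconds (the most recent 24 hours), leaving older history intact, and changes() reports how many rows were deleted. Roughly, under the same assumptions about the database path:

    # Illustrative: delete the last 24 h of queries and report the row count.
    DBFILE="/etc/pihole/pihole-FTL.db"
    deleted=$(pihole-FTL sqlite3 -ni "${DBFILE}" \
        "DELETE FROM query_storage WHERE timestamp >= strftime('%s','now')-86400; SELECT changes();")
    echo "Flushed ${deleted} queries from the last 24 hours"
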
6 changes: 3 additions & 3 deletions advanced/Scripts/query.sh
@@ -96,7 +96,7 @@ scanDatabaseTable() {
# Are there ABP entries on gravity?
# Return 1 if abp_domain=1 or Zero if abp_domain=0 or not set
abpquerystr="SELECT EXISTS (SELECT 1 FROM info WHERE property='abp_domains' and value='1')"
abpfound="$(pihole-FTL sqlite3 "${gravityDBfile}" "${abpquerystr}")" 2> /dev/null
abpfound="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "${abpquerystr}")" 2> /dev/null

# Create search string for ABP entries only if needed
if [ "${abpfound}" -eq 1 ]; then
@@ -129,7 +129,7 @@ scanDatabaseTable() {
fi

# Send prepared query to gravity database
result="$(pihole-FTL sqlite3 -separator ',' "${gravityDBfile}" "${querystr}")" 2> /dev/null
result="$(pihole-FTL sqlite3 -ni -separator ',' "${gravityDBfile}" "${querystr}")" 2> /dev/null
if [[ -z "${result}" ]]; then
# Return early when there are no matches in this table
return
@@ -166,7 +166,7 @@ scanRegexDatabaseTable() {
list_type="${3:-}"

# Query all regex from the corresponding database tables
mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${list_type}" 2> /dev/null)
mapfile -t regexList < <(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${list_type}" 2> /dev/null)

# If we have regexps to process
if [[ "${#regexList[@]}" -ne 0 ]]; then
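
The ABP handling in scanDatabaseTable() first checks the gravity info table for the abp_domains flag and only then also matches Adblock-Plus-style entries. A condensed, hypothetical sketch of that two-step lookup for a single exact domain; the full script handles more match types and tables:

    # Illustrative only: search gravity for exact and ABP-style matches of one domain.
    gravityDBfile="/etc/pihole/gravity.db"
    domain="ads.example.com"

    # Does this gravity database contain any ABP-style entries at all?
    abpfound="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" \
        "SELECT EXISTS (SELECT 1 FROM info WHERE property='abp_domains' AND value='1');")"

    if [ "${abpfound}" -eq 1 ]; then
        querystr="SELECT domain FROM vw_gravity WHERE domain = '${domain}' OR domain = '||${domain}^'"
    else
        querystr="SELECT domain FROM vw_gravity WHERE domain = '${domain}'"
    fi

    pihole-FTL sqlite3 -ni -separator ',' "${gravityDBfile}" "${querystr}"
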
(Diffs for the remaining changed files are not shown.)
