"Fossies" - the Fresh Open Source Software Archive  

Source code changes of the file "gravity.sh" between
pi-hole-5.3.1.tar.gz and pi-hole-5.4.tar.gz

About: Pi-hole is a DNS sinkhole that protects your devices from unwanted content like Internet advertisements, without installing any client-side software.

gravity.sh (pi-hole-5.3.1) vs. gravity.sh (pi-hole-5.4)
Lines starting with "-" exist only in pi-hole-5.3.1 (removed); lines starting with "+" exist only in pi-hole-5.4 (added); all other lines are common to both versions.

skipping to change at line 50
 GRAVITYDB="${gravityDBfile_default}"
 gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
 gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"
 domainsExtension="domains"
 # Source setupVars from install script
 setupVars="${piholeDir}/setupVars.conf"
 if [[ -f "${setupVars}" ]];then
   source "${setupVars}"
-  # Remove CIDR mask from IPv4/6 addresses
-  IPV4_ADDRESS="${IPV4_ADDRESS%/*}"
-  IPV6_ADDRESS="${IPV6_ADDRESS%/*}"
-  # Determine if IPv4/6 addresses exist
-  if [[ -z "${IPV4_ADDRESS}" ]] && [[ -z "${IPV6_ADDRESS}" ]]; then
-    echo -e "  ${COL_LIGHT_RED}No IP addresses found! Please run 'pihole -r' to reconfigure${COL_NC}"
-    exit 1
-  fi
 else
   echo -e "  ${COL_LIGHT_RED}Installation Failure: ${setupVars} does not exist! ${COL_NC}
   Please run 'pihole -r', and choose the 'reconfigure' option to fix."
   exit 1
 fi
 # Source pihole-FTL from install script
 pihole_FTL="${piholeDir}/pihole-FTL.conf"
 if [[ -f "${pihole_FTL}" ]]; then
   source "${pihole_FTL}"
 fi
 # Set this only after sourcing pihole-FTL.conf as the gravity database path may
 # have changed
 gravityDBfile="${GRAVITYDB}"
 gravityTEMPfile="${GRAVITYDB}_temp"
+gravityDIR="$(dirname -- "${gravityDBfile}")"
+gravityOLDfile="${gravityDIR}/gravity_old.db"
 if [[ -z "${BLOCKINGMODE}" ]] ; then
   BLOCKINGMODE="NULL"
 fi
 # Determine if superseded pihole.conf exists
 if [[ -r "${piholeDir}/pihole.conf" ]]; then
   echo -e "  ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
 fi
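
Two shell idioms meet in this hunk: the removed 5.3.1 lines relied on Bash suffix stripping to drop a CIDR mask, and the added 5.4 lines derive the backup path from wherever the database lives. A standalone sketch of both, using hypothetical values rather than Pi-hole's real setupVars:

    #!/usr/bin/env bash
    # Hypothetical values for illustration; 5.3.1 read these from setupVars.conf.
    IPV4_ADDRESS="192.168.1.10/24"
    GRAVITYDB="/etc/pihole/gravity.db"

    # "%/*" strips the shortest trailing "/..." match, i.e. the CIDR mask.
    echo "${IPV4_ADDRESS%/*}"             # prints: 192.168.1.10

    # 5.4 derives the backup location from the database's own directory.
    gravityDIR="$(dirname -- "${GRAVITYDB}")"
    echo "${gravityDIR}/gravity_old.db"   # prints: /etc/pihole/gravity_old.db
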
skipping to change at line 126 (pi-hole-5.3.1) / line 118 (pi-hole-5.4)

   output=$( { sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
   status="$?"
   if [[ "${status}" -ne 0 ]]; then
     echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
     return 1
   fi
   echo -e "${OVER}  ${TICK} ${str}"
-  # Swap databases and remove old database
-  rm "${gravityDBfile}"
+  # Swap databases and remove or conditionally rename old database
+  # Number of available blocks on disk
+  availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
+  # Number of blocks, used by gravity.db
+  gravityBlocks=$(stat --format "%b" ${gravityDBfile})
+  # Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
+  # Better be safe than sorry...
+  if [ "${availableBlocks}" -gt "$(("${gravityBlocks}" * 2))" ] && [ -f "${gravityDBfile}" ]; then
+    echo -e "  ${TICK} The old database remains available."
+    mv "${gravityDBfile}" "${gravityOLDfile}"
+  else
+    rm "${gravityDBfile}"
+  fi
   mv "${gravityTEMPfile}" "${gravityDBfile}"
 }
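
The new swap logic keeps the previous database only when the filesystem looks roomy enough. A minimal sketch of the same stat-based heuristic, assuming GNU coreutils and a hypothetical database path; note that `stat -f %a` counts filesystem blocks while `stat %b` counts the file's allocated (typically 512-byte) blocks, so the factor of two is a rough safety margin rather than an exact byte comparison:

    #!/usr/bin/env bash
    db="/etc/pihole/gravity.db"          # hypothetical path for the sketch
    dir="$(dirname -- "${db}")"

    availableBlocks=$(stat -f --format "%a" "${dir}")   # free blocks on the filesystem
    gravityBlocks=$(stat --format "%b" "${db}")         # blocks allocated to the file

    if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${db}" ]; then
        echo "enough headroom: keep the old database as a backup"
    else
        echo "low on disk space: delete the old database instead"
    fi
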
 # Update timestamp when the gravity table was last updated successfully
 update_gravity_timestamp() {
   output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 )
   status="$?"
   if [[ "${status}" -ne 0 ]]; then
     echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n  ${output}"

skipping to change at line 567 (pi-hole-5.3.1) / line 570 (pi-hole-5.4)
     sha1sum "${target}" > "${target}.sha1"
     # We assume here it was changed upstream
     database_adlist_status "${adlistID}" "1"
     database_adlist_updated "${adlistID}"
   fi
 }
 # Download specified URL and perform checks on HTTP status and file content
 gravity_DownloadBlocklistFromUrl() {
   local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" compression="${7}"
-  local heisenbergCompensator="" patternBuffer str httpCode success=""
+  local heisenbergCompensator="" patternBuffer str httpCode success="" ip
   # Create temp file to store content on disk instead of RAM
   patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")
   # Determine if $saveLocation has read permission
   if [[ -r "${saveLocation}" && $url != "file"* ]]; then
     # Have curl determine if a remote file has been modified since last retrieval
     # Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls)
     # Note: Don't do this for local files, always download them
     heisenbergCompensator="-z ${saveLocation}"
   fi
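
For context on the unchanged heisenbergCompensator lines above: curl's `-z <file>` option sends an If-Modified-Since request based on that file's modification time, so an already up-to-date list is not downloaded again. A hedged standalone sketch with a hypothetical URL and cache path:

    #!/usr/bin/env bash
    url="https://example.com/hosts.txt"                # hypothetical blocklist URL
    saveLocation="/etc/pihole/list.0.example.domains"  # hypothetical cached copy

    conditional=""
    if [[ -r "${saveLocation}" ]]; then
        # Ask the server to send the file only if it is newer than our cached copy.
        conditional="-z ${saveLocation}"
    fi

    # ${conditional} is intentionally unquoted so it expands to zero or two words.
    curl -s -L ${conditional} -o "/tmp/hosts.download" "${url}"
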
str="Status:" str="Status:"
echo -ne " ${INFO} ${str} Pending..." echo -ne " ${INFO} ${str} Pending..."
blocked=false blocked=false
case $BLOCKINGMODE in case $BLOCKINGMODE in
"IP-NODATA-AAAA"|"IP") "IP-NODATA-AAAA"|"IP")
if [[ $(dig "${domain}" +short | grep "${IPV4_ADDRESS}" -c) -ge 1 ]]; th # Get IP address of this domain
en ip="$(dig "${domain}" +short)"
# Check if this IP matches any IP of the system
if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<< "$(ip a)") -gt 0 ]];
then
blocked=true blocked=true
fi;; fi;;
"NXDOMAIN") "NXDOMAIN")
if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
blocked=true blocked=true
fi;; fi;;
"NULL"|*) "NULL"|*)
if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
blocked=true blocked=true
fi;; fi;;
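
The 5.4 change no longer compares the answer against a stored IPV4_ADDRESS; it asks whether the resolved address belongs to any interface on this machine. A minimal sketch of that check, assuming `dig` and `ip` are available and using a hypothetical test domain:

    #!/usr/bin/env bash
    domain="pi.hole"                 # hypothetical test name
    ip="$(dig "${domain}" +short)"   # may be empty if the name does not resolve

    blocked=false
    # `ip a` lists local addresses as "inet <addr>" or "inet6 <addr>".
    if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<< "$(ip a)") -gt 0 ]]; then
        blocked=true
    fi
    echo "blocked=${blocked}"
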
skipping to change at line 788 (pi-hole-5.3.1) / line 794 (pi-hole-5.4)
 # Output count of blacklisted domains and regex filters
 gravity_ShowCount() {
   gravity_Table_Count "vw_gravity" "gravity domains" ""
   gravity_Table_Count "vw_blacklist" "exact blacklisted domains"
   gravity_Table_Count "vw_regex_blacklist" "regex blacklist filters"
   gravity_Table_Count "vw_whitelist" "exact whitelisted domains"
   gravity_Table_Count "vw_regex_whitelist" "regex whitelist filters"
 }
-# Parse list of domains into hosts format
-gravity_ParseDomainsIntoHosts() {
-  awk -v ipv4="$IPV4_ADDRESS" -v ipv6="$IPV6_ADDRESS" '{
-    # Remove windows CR line endings
-    sub(/\r$/, "")
-    # Parse each line as "ipaddr domain"
-    if(ipv6 && ipv4) {
-      print ipv4" "$0"\n"ipv6" "$0
-    } else if(!ipv6) {
-      print ipv4" "$0
-    } else {
-      print ipv6" "$0
-    }
-  }' >> "${2}" < "${1}"
-}
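
The removed helper turned a plain list of domains into hosts-format lines, one per configured address family; with the IPv4/IPv6 address handling gone from this script, it no longer has a caller. A standalone sketch of the same awk idea, with hypothetical addresses and an inline domain list:

    #!/usr/bin/env bash
    # Hypothetical addresses; 5.3.1 took these from setupVars.conf.
    ipv4="192.168.1.10"
    ipv6="fd00::10"

    printf 'ads.example.com\r\ntracker.example.net\n' |
      awk -v ipv4="$ipv4" -v ipv6="$ipv6" '{
        sub(/\r$/, "")                # strip Windows CR line endings
        if (ipv6 && ipv4)      print ipv4" "$0"\n"ipv6" "$0
        else if (!ipv6)        print ipv4" "$0
        else                   print ipv6" "$0
      }'
    # Prints each domain twice, once per address:
    #   192.168.1.10 ads.example.com
    #   fd00::10 ads.example.com
    #   ...
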
 # Create "localhost" entries into hosts format
 gravity_generateLocalList() {
-  local hostname
-  if [[ -s "/etc/hostname" ]]; then
-    hostname=$(< "/etc/hostname")
-  elif command -v hostname &> /dev/null; then
-    hostname=$(hostname -f)
-  else
-    echo -e "  ${CROSS} Unable to determine fully qualified domain name of host"
-    return 0
-  fi
-  echo -e "${hostname}\\npi.hole" > "${localList}.tmp"
   # Empty $localList if it already exists, otherwise, create it
-  : > "${localList}"
+  echo "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
   chmod 644 "${localList}"
-  gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}"
   # Add additional LAN hosts provided by OpenVPN (if available)
   if [[ -f "${VPNList}" ]]; then
     awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
   fi
 }
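
In 5.4 the local list starts with only a warning header; the hostname and pi.hole entries are no longer written here. The unchanged OpenVPN branch still appends one "name.vpn" entry per client, as this sketch with a hypothetical "name,ip" client file shows:

    #!/usr/bin/env bash
    localList="/tmp/local.list.demo"      # hypothetical output path
    VPNList="/tmp/openvpn-clients.demo"   # hypothetical client list, "name,assigned-ip" per line

    echo "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
    chmod 644 "${localList}"

    printf 'laptop,10.8.0.2\nphone,10.8.0.3\n' > "${VPNList}"
    awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"

    cat "${localList}"
    # ### Do not modify this file, it will be overwritten by pihole -g
    # 10.8.0.2    laptop.vpn
    # 10.8.0.3    phone.vpn
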
 # Trap Ctrl-C
 gravity_Trap() {
   trap '{ echo -e "\\n\\n  ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
 }

skipping to change at line 893 (pi-hole-5.3.1) / line 868 (pi-hole-5.4)
 }
 for var in "$@"; do
   case "${var}" in
     "-f" | "--force" ) forceDelete=true;;
     "-r" | "--recreate" ) recreate_database=true;;
     "-h" | "--help" ) helpFunc;;
   esac
 done
+# Remove OLD (backup) gravity file, if it exists
+if [[ -f "${gravityOLDfile}" ]]; then
+  rm "${gravityOLDfile}"
+fi
 # Trap Ctrl-C
 gravity_Trap
 if [[ "${recreate_database:-}" == true ]]; then
   str="Restoring from migration backup"
   echo -ne "${INFO} ${str}..."
   rm "${gravityDBfile}"
   pushd "${piholeDir}" > /dev/null || exit
   cp migration_backup/* .
   popd > /dev/null || exit
End of changes. 10 change blocks. 48 lines changed or deleted, 29 lines changed or added.
