feat: enforce _base as first fragment and warn on duplicate hostnames

This commit is contained in:
ljm42
2025-12-02 20:28:28 -07:00
parent afa09b7200
commit 17e25a91f4
2 changed files with 92 additions and 18 deletions
+3 -2
View File
@@ -229,8 +229,9 @@ fi
/bin/mkdir -p /etc/hosts.d
{
/bin/echo "# Do not edit, generated by rc.S.cont"
/bin/echo "127.0.0.1 ${NAME}"
} >/etc/hosts.d/hostname
/bin/echo "127.0.0.1 ${NAME} localhost"
/bin/echo "::1 localhost"
} >/etc/hosts.d/_base
/usr/local/sbin/rebuild_hosts
# LimeTech - restore the configured timezone
+89 -16
View File
@@ -6,10 +6,14 @@
# IPADDR hostname [alias...]
#
# This script:
# - Reads all regular files in /etc/hosts.d, in sorted order.
# - Reads all regular files in /etc/hosts.d, in sorted order,
# with a special case: a fragment named "_base" is always processed first
# (typically generated by rc.S.cont to define the hostname + loopbacks).
# - Merges entries with the same IP onto a single line.
# - Deduplicates hostnames per IP.
# - Tracks which fragments contributed to each IP and annotates with '# from ...'.
# - Ensures minimal localhost entries exist (127.0.0.1 / ::1) if no fragments exist.
# - Logs a warning if a hostname is associated with multiple IPs in the same family.
# - Writes the result atomically to /etc/hosts with perms root:root 0644.
#
# Do NOT edit /etc/hosts directly; edit /etc/hosts.d/* instead.
@@ -40,7 +44,7 @@ tmpfile=$(mktemp "${HOSTS_FILE}.XXXXXX")
done
shopt -u nullglob
echo
} > "$tmpfile"
} >"$tmpfile"
# IP -> "host1 host2 ..."
declare -A IP_TO_HOSTS
@@ -48,6 +52,10 @@ declare -A IP_TO_HOSTS
# IP -> "fragment1 fragment2 ..."
declare -A IP_TO_SOURCES
# Hostname -> "ip1 ip2 ..." (tracked separately for IPv4 and IPv6)
declare -A HOST_TO_IPS_V4
declare -A HOST_TO_IPS_V6
# Preserve IP order (first time we see an IP, record it here)
IP_ORDER=()
@@ -69,6 +77,10 @@ add_hosts_line() {
# We store hostnames as a space-separated string.
local current_hosts="${IP_TO_HOSTS[$ip]}"
# Determine IP family once (simple heuristic: ':' means IPv6)
local family
family=$([[ "$ip" == *:* ]] && echo v6 || echo v4)
# Iterate over all hostnames passed to this function
for host in "$@"; do
# Skip empty tokens (paranoia / defensive programming)
@@ -82,22 +94,36 @@ add_hosts_line() {
*" $host "*) ;; # already present, do nothing
*) current_hosts="$current_hosts $host" ;; # append new hostname
esac
# Track IPs per hostname for duplicate detection
if [[ "$family" == "v4" ]]; then
local ips_v4="${HOST_TO_IPS_V4[$host]:-}"
case " $ips_v4 " in
*" $ip "*) ;; # already recorded for this host
*) ips_v4="$ips_v4 $ip" ;;
esac
HOST_TO_IPS_V4["$host"]="${ips_v4# }"
else
local ips_v6="${HOST_TO_IPS_V6[$host]:-}"
case " $ips_v6 " in
*" $ip "*) ;;
*) ips_v6="$ips_v6 $ip" ;;
esac
HOST_TO_IPS_V6["$host"]="${ips_v6# }"
fi
done
# Strip the leading space if we added anything
IP_TO_HOSTS["$ip"]="${current_hosts# }"
# Track which fragment files contributed to this IP.
# This is just for the trailing "# from hostname avahi" comment.
# This is just for the trailing "# from _base avahi builtin" comment.
if [[ -n "$CURRENT_SOURCE" ]]; then
local current_sources="${IP_TO_SOURCES[$ip]:-}"
case " $current_sources " in
*" $CURRENT_SOURCE "*) ;; # already recorded
*" $CURRENT_SOURCE "*) ;;
*) current_sources="$current_sources $CURRENT_SOURCE" ;;
esac
# Again, strip leading space
IP_TO_SOURCES["$ip"]="${current_sources# }"
fi
}
@@ -105,6 +131,7 @@ add_hosts_line() {
process_file() {
local file="$1"
local line ip
# Remember which fragment we are processing for source tracking
CURRENT_SOURCE="$(basename "$file")"
@@ -112,9 +139,8 @@ process_file() {
# Strip comments (anything after '#')
line="${line%%#*}"
# Trim leading whitespace
# Trim leading and trailing whitespace
line="${line#"${line%%[![:space:]]*}"}"
# Trim trailing whitespace
line="${line%"${line##*[![:space:]]}"}"
# Skip empty lines
@@ -131,24 +157,69 @@ process_file() {
[[ $# -eq 0 ]] && continue
add_hosts_line "$ip" "$@"
done < "$file"
done <"$file"
}
check_duplicate_hostnames() {
  # Warn (via log) about any hostname that maps to more than one IP address
  # within the same address family.  Reads the HOST_TO_IPS_V4 and
  # HOST_TO_IPS_V6 associative maps populated by add_hosts_line(); produces
  # no output when there are no duplicates.
  #
  # Helper: scan one hostname->"ip1 ip2 ..." map and log one warning per
  # hostname that carries multiple IPs.
  #   $1 - name of the associative array to scan (bash 4.3+ nameref)
  #   $2 - family label used in the warning text ("IPv4" or "IPv6")
  _warn_dup_ips() {
    local -n _map=$1
    local _family=$2
    local host ips
    local -a ip_list
    for host in "${!_map[@]}"; do
      ips="${_map[$host]}"
      # Split on whitespace into an array; unlike 'set -- $ips' this does
      # not perform pathname expansion or clobber positional parameters.
      read -r -a ip_list <<<"$ips"
      if (( ${#ip_list[@]} > 1 )); then
        log "Warning: hostname '$host' appears on multiple $_family addresses: $ips"
      fi
    done
  }

  _warn_dup_ips HOST_TO_IPS_V4 "IPv4"
  _warn_dup_ips HOST_TO_IPS_V6 "IPv6"
}
log "Rebuilding $HOSTS_FILE..."
# Collect fragment files (simple glob) and force _base first if present
shopt -s nullglob
fragment_files=("$HOSTS_DIR"/*)
all_files=("$HOSTS_DIR"/*)
shopt -u nullglob
fragment_files=()
# Force the core base fragment first if it exists
if [[ -f "$HOSTS_DIR/_base" ]]; then
fragment_files+=("$HOSTS_DIR/_base")
fi
# Then add all other fragments in normal glob order
for f in "${all_files[@]}"; do
# Skip _base if we already added it
[[ "$f" == "$HOSTS_DIR/_base" ]] && continue
[[ -f "$f" ]] || continue
fragment_files+=("$f")
done
# Process each fragment
for f in "${fragment_files[@]}"; do
[[ -f "$f" ]] || continue
process_file "$f"
done
# Define the minimum
# If no fragments defined any IPs at all, provide a minimal fallback
CURRENT_SOURCE="builtin"
add_hosts_line "127.0.0.1" "localhost"
add_hosts_line "::1" "localhost"
if (( ${#IP_ORDER[@]} == 0 )); then
add_hosts_line "127.0.0.1" "localhost"
add_hosts_line "::1" "localhost"
fi
# Warn about hostnames bound to multiple IPs (per family)
check_duplicate_hostnames
# Emit merged lines in original IP discovery order
for ip in "${IP_ORDER[@]}"; do
@@ -156,9 +227,9 @@ for ip in "${IP_ORDER[@]}"; do
sources="${IP_TO_SOURCES[$ip]:-}"
if [[ -n "$sources" ]]; then
printf "%-15s %s # from %s\n" "$ip" "$hosts" "$sources" >> "$tmpfile"
printf "%-15s %s # from %s\n" "$ip" "$hosts" "$sources" >>"$tmpfile"
else
printf "%-15s %s\n" "$ip" "$hosts" >> "$tmpfile"
printf "%-15s %s\n" "$ip" "$hosts" >>"$tmpfile"
fi
done
@@ -167,3 +238,5 @@ chmod 0644 "$tmpfile" 2>/dev/null || true
chown root:root "$tmpfile" 2>/dev/null || true
mv "$tmpfile" "$HOSTS_FILE"
# Summary log: how many IPs and how many fragments contributed
log "${#IP_ORDER[@]} merged IP entries from ${#fragment_files[@]} contributing fragments written to $HOSTS_FILE"