#!/bin/bash

# Copyright (C) 2025 Pädagogisches Landesinstitut Rheinland-Pfalz
# Copyright (C) 2025 Daniel Teichmann <daniel.teichmann@das-netzwerkteam.de>
#
# This script is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.

# Abort on errors and on failed pipeline stages.
set -eo pipefail

# Unset proxy variables to avoid unintended network interference
unset http_proxy
unset https_proxy
unset ftp_proxy

COMMON_FILE="/usr/share/debian-edu-router/debian-edu-router.common"
# Load common functions, variables, and logging routines.
# (Presumably provides notice_log/warning_log/error_log/debug_log,
# ensure_dir and manage_unit used throughout this script — they are not
# defined anywhere else in this file.)
if [[ -s "${COMMON_FILE}" ]]; then
	source "${COMMON_FILE}"
else
	echo "Could not load common file at ${COMMON_FILE}."
	# NOTE(review): this failure path exits with status 0 — presumably
	# deliberate so cron/systemd/package hooks do not cascade-fail, but
	# confirm; otherwise this should be a non-zero exit and go to stderr.
	exit 0
fi

# EXIT handler: always runs, whether we finish cleanly or crash.
function finish() {
	# Clear the work-in-progress marker unconditionally so a crash can
	# never leave the Squid ACL watcher blocked by a stale file.
	rm -f "${FILTERLISTS_PATH}/work-in-progress"

	notice_log "$0 will exit now."
}
trap finish EXIT

###############################################################################
# Global Variables
###############################################################################
# Working area for downloading and processing the Toulouse blacklists.
WORK_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.working"
# Directory where the final list directories are published (via symlinks).
FILTERLISTS_PATH="/var/lib/debian-edu-router/filterlists.d"
# Upstream rsync module of the University of Toulouse blacklist.
RSYNC_SOURCE="rsync://ftp.ut-capitole.fr/blacklist"
# NOTE: kept as a plain string on purpose; it is word-split into separate
# rsync arguments at the call site.
RSYNC_OPTIONS="-aht --info=NAME,STATS"

# Directories for selected categories
SELECT_CATEGORIES_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.selected_categories"
TEMPLATES_DIR="/usr/share/debian-edu-router/templates"

# Directory where the repository is downloaded (do not work directly here)
ORIG_DIR="${WORK_DIR}/blacklists.orig"
# IMPORTANT: The downloaded repository holds the important files under the "dest" subdirectory.
SOURCE_DIR="${ORIG_DIR}/dest"

# Define two alternating intermediate working directories.
# A/B generation scheme: each run rebuilds into the generation that was NOT
# published last, so the currently live lists stay intact until the final
# symlink swap at the end of the script.
SQUID_DIR_A="${WORK_DIR}/blacklists.squid_A"
SQUID_DIR_B="${WORK_DIR}/blacklists.squid_B"

# Determine current SQUID directory based on a state file.
# The state file records which generation ("A" or "B") the previous run
# published; this run targets the other one. Missing state file => "A".
STATE_FILE="${WORK_DIR}/last_squid"
if [[ -f "${STATE_FILE}" ]]; then
	last_squid=$(cat "${STATE_FILE}")
	if [[ "${last_squid}" = "A" ]]; then
		CURRENT_SQUID="B"
	else
		CURRENT_SQUID="A"
	fi
else
	CURRENT_SQUID="A"
fi

if [[ "${CURRENT_SQUID}" = "A" ]]; then
	CURRENT_SQUID_DIR="${SQUID_DIR_A}"
else
	CURRENT_SQUID_DIR="${SQUID_DIR_B}"
fi

# Clear the current SQUID directory to start fresh.
rm -rf "${CURRENT_SQUID_DIR}"
ensure_dir "${CURRENT_SQUID_DIR}"

# Header template prepended to each final .toulouse file
HEADER_TEMPLATE="${TEMPLATES_DIR}/ProxyHeader.toulouse"

# Path to Squid's snippets.d/.
SNIPPETS_PATH="/etc/debian-edu-router/squid-snippets.d"

###############################################################################
# Function: update_timestamp
# Records the current UNIX epoch time in WORK_DIR/last-updated. This stamp
# is read back by perform_rsync to rate-limit downloads.
# Globals:
#   WORK_DIR
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function update_timestamp() {
	printf '%s\n' "$(date +%s)" > "${WORK_DIR}/last-updated"
}

###############################################################################
# Function: perform_rsync
# Downloads the blacklist repository via rsync if allowed (once every 12 hours),
# unless D_E_R_DEBUG is set (in which case download is forced).
# Globals:
#   WORK_DIR, ORIG_DIR, RSYNC_SOURCE, RSYNC_OPTIONS, D_E_R_DEBUG, FORCE_DOWNLOAD
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function perform_rsync() {
	if [[ -z "$FORCE_DOWNLOAD" ]]; then
		# Rate limiting: last-updated holds the epoch time of the previous
		# successful rsync run (written by update_timestamp).
		if [[ -s "${WORK_DIR}/last-updated" ]]; then
			last_update=$(cat "${WORK_DIR}/last-updated")
			current_time=$(date +%s)
			# Age of the last download, in whole hours.
			date_diff=$(( (current_time - last_update) / 3600 ))
			if [[ "${date_diff}" -lt 12 ]]; then
				if [[ -n "${D_E_R_DEBUG}" ]]; then
					notice_log "Last rsync run was ${date_diff} hours ago, but \$D_E_R_DEBUG is set, forcing rsync download."
					FORCE_DOWNLOAD="true"
				else
					notice_log "Skipping rsync download (last update ${date_diff} hours ago)"
					return
				fi
			fi
		fi
	else
		notice_log "Forcing rsync download."
	fi

	notice_log "Starting rsync download from ${RSYNC_SOURCE}"
	ensure_dir "${ORIG_DIR}"
	# RSYNC_OPTIONS is intentionally left unquoted so that its
	# whitespace-separated options word-split into separate arguments.
	rsync ${RSYNC_OPTIONS} "${RSYNC_SOURCE}" "${ORIG_DIR}"
	update_timestamp
	notice_log "Rsync download completed and timestamp updated"
}

###############################################################################
# Function: remove_blacklist_files
# Removes all blacklist artifacts produced by this script (published list
# directories, intermediate SQUID generations, state and timestamp files).
# The admin's category selection files are intentionally left in place.
# Globals:
#   FILTERLISTS_PATH, WORK_DIR, ORIG_DIR, SELECT_CATEGORIES_DIR
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function remove_blacklist_files() {
	warning_log "Removing all related blacklist files!"
	notice_log "Selection files for black-/whitelist categories won't be deleted. Please purge the content filter package to fully remove them."

	local path

	# Published list directories/symlinks under FILTERLISTS_PATH.
	for path in "${FILTERLISTS_PATH}"/ProxyBlacklistSite*.toulouse.d \
	            "${FILTERLISTS_PATH}"/ProxyWhitelistSite*.toulouse.d; do
		[[ -e "${path}" ]] || continue
		notice_log "Removing ${path}"
		rm -rf "${path}"
	done

	# Intermediate SQUID working directories (A/B generations).
	for path in "${WORK_DIR}"/blacklists.squid_*; do
		[[ -d "${path}" ]] || continue
		notice_log "Removing ${path}"
		rm -rf "${path}"
	done

	# A/B state file and rsync rate-limit timestamp.
	for path in "${WORK_DIR}/last_squid" "${WORK_DIR}/last-updated"; do
		[[ -e "${path}" ]] || continue
		notice_log "Removing ${path}"
		rm -rf "${path}"
	done

	# if [[ -d "${ORIG_DIR}" ]]; then
	# 	notice_log "Removing ${ORIG_DIR}"
	# 	rm -rf "${ORIG_DIR}"
	# fi
}

###############################################################################
# Function: create_categories_file
# Creates a new categories file listing every category found in SOURCE_DIR
# whose "usage" file matches the requested type. Categories listed in the
# optional default-categories file start enabled (true); all others start
# disabled (false).
# Globals:
#   SOURCE_DIR
# Arguments:
#   $1 - Path to the categories file to create
#   $2 - Type of categories ("black" or "white")
#   $3 - Path to the header template for this file
#   $4 - Path to the default categories file (optional, may be empty)
# Returns:
#   None
###############################################################################
function create_categories_file() {
	local categories_file="$1"
	local category_type="$2"
	local header_template="$3"
	local default_categories_file="$4"
	# Split declaration and assignment so a mktemp failure is not masked
	# by the exit status of 'local' (relevant under 'set -e').
	local temp_file
	temp_file=$(mktemp)

	notice_log "Creating new categories file: ${categories_file}"

	# Collect all current categories of the requested type from SOURCE_DIR.
	local current_categories=()
	local categorie usage cat_name

	for categorie in "${SOURCE_DIR}"/*; do
		if [[ -d "${categorie}" && -f "${categorie}/usage" ]]; then
			# The first non-comment line of the usage file declares the type.
			usage=$(grep -v '^[[:space:]]*#' "${categorie}/usage" | head -n 1 | tr -d '[:space:]')
			if [[ "${usage}" = "${category_type}" ]]; then
				cat_name=$(basename "${categorie}")
				# Validate category name (only allow alphanumeric, underscore,
				# hyphen) — the names end up in generated shell code, so be strict.
				if [[ "${cat_name}" =~ ^[a-zA-Z0-9_-]+$ ]]; then
					current_categories+=("${cat_name}")
				else
					warning_log "Invalid characters in category name '${cat_name}', skipping..."
				fi
			fi
		fi
	done

	# Build a lookup of default-enabled categories: O(1) membership checks
	# instead of a nested loop over two arrays.
	local -A default_lookup=()
	local line trimmed
	if [[ -n "${default_categories_file}" && -s "${default_categories_file}" ]]; then
		while IFS= read -r line; do
			# Skip comments and empty lines
			if [[ "${line}" =~ ^[[:space:]]*# ]] || [[ -z "${line// /}" ]]; then
				continue
			fi

			# Strip all whitespace; skip lines that end up empty (e.g. tab-only
			# lines), since an empty associative-array key is invalid in Bash.
			trimmed=$(printf '%s' "${line}" | tr -d '[:space:]')
			[[ -n "${trimmed}" ]] || continue
			default_lookup["${trimmed}"]=1
		done < "${default_categories_file}"

		notice_log "Using default categories from ${default_categories_file}"
	fi

	# Start the file with the header template
	cat "${header_template}" > "${temp_file}"

	# Add the declaration of the associative array
	echo "declare -A categories" >> "${temp_file}"

	# Emit all categories in sorted order; enabled only if in the default list.
	local category
	for category in $(printf '%s\n' "${current_categories[@]}" | sort); do
		if [[ -n "${default_lookup["${category}"]:-}" ]]; then
			echo "categories[\"${category}\"]=true" >> "${temp_file}"
		else
			echo "categories[\"${category}\"]=false" >> "${temp_file}"
		fi
	done

	# Atomically move the temp file to the final destination
	mv "${temp_file}" "${categories_file}"
	notice_log "Created ${categories_file} with all available categories"
}

###############################################################################
# Function: update_categories_file
# Updates an existing categories file with newly available categories while
# preserving the admin's current true/false selections. Categories that
# disappeared upstream are kept but annotated; new ones are appended
# disabled. If the file has an invalid format it is recreated from scratch.
# Globals:
#   SOURCE_DIR
# Arguments:
#   $1 - Path to the categories file to update
#   $2 - Type of categories ("black" or "white")
#   $3 - Path to the header template for this file
# Returns:
#   None
###############################################################################
function update_categories_file() {
	local categories_file="$1"
	local category_type="$2"
	local header_template="$3"
	# Split declaration and assignment so command failures are not masked
	# by the exit status of 'local' (relevant under 'set -e').
	local current_date temp_file
	current_date=$(date +%Y-%m-%d)
	temp_file=$(mktemp)

	notice_log "Updating categories file: ${categories_file}"

	# Collect all current categories of the requested type from SOURCE_DIR.
	local current_categories=()
	local categorie usage cat_name

	for categorie in "${SOURCE_DIR}"/*; do
		if [[ -d "${categorie}" && -f "${categorie}/usage" ]]; then
			# The first non-comment line of the usage file declares the type.
			usage=$(grep -v '^[[:space:]]*#' "${categorie}/usage" | head -n 1 | tr -d '[:space:]')
			if [[ "${usage}" = "${category_type}" ]]; then
				cat_name=$(basename "${categorie}")
				# Validate category name (only allow alphanumeric, underscore, hyphen)
				if [[ "${cat_name}" =~ ^[a-zA-Z0-9_-]+$ ]]; then
					current_categories+=("${cat_name}")
				else
					warning_log "Invalid characters in category name '${cat_name}', skipping..."
				fi
			fi
		fi
	done

	# Load existing categories file to get current settings
	local existing_categories=()
	declare -A existing_settings

	# Only source the file if it contains the "declare -A categories" line
	if grep -q "declare -A categories" "${categories_file}"; then
		# Source the file to get the categories array
		source "${categories_file}"

		# Copy the settings to our local array
		local key
		for key in "${!categories[@]}"; do
			if [[ "${categories["${key}"]}" != "true" ]] && [[ "${categories["${key}"]}" != "false" ]]; then
				error_log "Invalid value for category '${key}': '${categories["${key}"]}'. Expected 'true' or 'false'."
			fi

			existing_settings["${key}"]="${categories["${key}"]}"
			existing_categories+=("${key}")
		done

		# Unset the global categories array to avoid conflicts
		unset categories
	else
		warning_log "Categories file ${categories_file} has invalid format, file will be fully recreated..."
		# Do not leak the temp file on this early-return path.
		rm -f "${temp_file}"
		# Pass an explicit empty default-categories argument (4th parameter).
		create_categories_file "$1" "$2" "$3" ""
		return
	fi

	# Load the header from the template
	cat "${header_template}" > "${temp_file}"

	# Add the declaration of the associative array
	echo "declare -A categories" >> "${temp_file}"

	# Build a fast membership lookup of currently available categories
	# (replaces an O(n^2) nested loop).
	local -A current_lookup=()
	local current
	for current in "${current_categories[@]}"; do
		current_lookup["${current}"]=1
	done

	# First, write all previously known categories with their current
	# settings; annotate those that upstream removed.
	local category
	for category in $(printf '%s\n' "${existing_categories[@]}" | sort); do
		if [[ -n "${current_lookup["${category}"]:-}" ]]; then
			# Category still exists, keep its current setting
			echo "categories[\"${category}\"]=${existing_settings["${category}"]}" >> "${temp_file}"
		else
			# Category no longer exists, mark it as removed but keep its setting
			echo "categories[\"${category}\"]=${existing_settings["${category}"]}  # Removed by University of Toulouse on ${current_date}" >> "${temp_file}"
			warning_log "University of Toulouse removed category '${category}'..."
		fi
	done

	# Then append categories that are new upstream, disabled by default.
	# (${var+x} tests key existence, so even an empty stored value counts.)
	for category in $(printf '%s\n' "${current_categories[@]}" | sort); do
		if [[ -z "${existing_settings["${category}"]+x}" ]]; then
			echo "categories[\"${category}\"]=false  # New category - automatically added on ${current_date}" >> "${temp_file}"
			notice_log "Added new category '${category}' (disabled by default)"
		fi
	done

	# Atomically replace the original file with the updated version.
	mv "${temp_file}" "${categories_file}"
	notice_log "Updated ${categories_file} with current categories"
}

###############################################################################
# Function: handle_selected_categories_files
# Ensures the black- and whitelist category selection files exist and are up
# to date: missing files are created (seeded from optional defaults) and
# existing files are updated with newly available categories.
# Globals:
#   SELECT_CATEGORIES_DIR, TEMPLATES_DIR
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function handle_selected_categories_files() {
	ensure_dir "${SELECT_CATEGORIES_DIR}"
	BLACKLISTED_FILE="${SELECT_CATEGORIES_DIR}/blacklisted_categories"
	WHITELISTED_FILE="${SELECT_CATEGORIES_DIR}/whitelisted_categories"

	local black_header="${TEMPLATES_DIR}/header_blacklisted_categories"
	local white_header="${TEMPLATES_DIR}/header_whitelisted_categories"

	# Both header templates are mandatory; bail out if either is missing.
	if [[ ! -f "${black_header}" ]]; then
		error_log "Header template for blacklisted categories not found at ${TEMPLATES_DIR}/header_blacklisted_categories"
		exit 1
	fi
	if [[ ! -f "${white_header}" ]]; then
		error_log "Header template for whitelisted categories not found at ${TEMPLATES_DIR}/header_whitelisted_categories"
		exit 1
	fi

	# Default selections are optional; fall back to an empty path if absent.
	local default_blacklist="${SELECT_CATEGORIES_DIR}/default_selected_blacklisted_categories"
	local default_whitelist="${SELECT_CATEGORIES_DIR}/default_selected_whitelisted_categories"

	if [[ ! -f "${default_blacklist}" ]]; then
		warning_log "Default blacklist categories file not found at ${default_blacklist}"
		default_blacklist=""
	fi
	if [[ ! -f "${default_whitelist}" ]]; then
		warning_log "Default whitelist categories file not found at ${default_whitelist}"
		default_whitelist=""
	fi

	# Existing selection files are updated in place; missing ones are
	# created fresh, seeded from the default selection (if any).
	if [[ -f "${BLACKLISTED_FILE}" ]]; then
		update_categories_file "${BLACKLISTED_FILE}" "black" "${black_header}"
	else
		create_categories_file "${BLACKLISTED_FILE}" "black" "${black_header}" "${default_blacklist}"
	fi

	if [[ -f "${WHITELISTED_FILE}" ]]; then
		update_categories_file "${WHITELISTED_FILE}" "white" "${white_header}"
	else
		create_categories_file "${WHITELISTED_FILE}" "white" "${white_header}" "${default_whitelist}"
	fi
}

###############################################################################
# Function: is_category_enabled
# Checks whether a category is switched on in the matching selection file.
# Globals:
#   SELECT_CATEGORIES_DIR
# Arguments:
#   $1 - Category name
#   $2 - List type ("black" or "white")
# Returns:
#   0 if category is enabled, 1 otherwise
###############################################################################
function is_category_enabled() {
	local category="$1"
	local list_type="$2"
	local categories_file

	case "${list_type}" in
		black) categories_file="${SELECT_CATEGORIES_DIR}/blacklisted_categories" ;;
		*)     categories_file="${SELECT_CATEGORIES_DIR}/whitelisted_categories" ;;
	esac

	if [[ ! -f "${categories_file}" ]]; then
		debug_log "Categories file ${categories_file} not found"
		return 1
	fi

	# Drop any stale categories array from a previous call before sourcing.
	unset categories

	# shellcheck disable=SC1090 -- path is computed at runtime
	source "${categories_file}"

	# Enabled means the entry is literally "true"; anything else — including
	# a missing key — counts as disabled. The [[ ]] status is our return.
	[[ "${categories["${category}"]}" == "true" ]]
}

###############################################################################
# Function: process_category
# Processes one category from SOURCE_DIR: reads its usage file to decide
# whether it is a black or white list, verifies it is enabled in the
# corresponding selection file, and copies its domains/urls files (with the
# header template prepended) into the current intermediate SQUID directory.
# Globals:
#   SOURCE_DIR, CURRENT_SQUID_DIR, HEADER_TEMPLATE, WORK_DIR, SELECT_CATEGORIES_DIR
# Arguments:
#   Category name (directory name)
# Returns:
#   None
###############################################################################
function process_category() {
	local category="${1}"
	local category_path="${SOURCE_DIR}/${category}"
	local usage_file="${category_path}/usage"
	local list_type=""
	local line=""

	if [[ ! -s "${usage_file}" ]]; then
		warning_log "Usage file missing for category '${category}', skipping..."
		return
	fi

	# The first non-comment, non-blank line of the usage file is the type.
	while IFS= read -r line; do
		if [[ "${line}" =~ ^[[:space:]]*# || -z "${line// }" ]]; then
			continue
		fi
		list_type="${line}"
		break
	done < "${usage_file}"

	if [[ "${list_type}" != "black" && "${list_type}" != "white" ]]; then
		error_log "Invalid usage type '${list_type}' for category '${category}', skipping..."
		return
	fi

	# Only process categories the admin has switched on.
	if ! is_category_enabled "${category}" "${list_type}"; then
		debug_log "Category $(printf "%-28s" "\"${category}\"") is disabled in \$SELECT_CATEGORIES_DIR/${list_type}listed_categories, skipping..."
		return
	fi

	debug_log "Processing category '${category}' as a '${list_type}' list"

	# Pick destination directories based on the list type.
	local domains_target_dir urls_target_dir
	case "${list_type}" in
		black)
			domains_target_dir="${CURRENT_SQUID_DIR}/ProxyBlacklistSite.toulouse.d"
			urls_target_dir="${CURRENT_SQUID_DIR}/ProxyBlacklistSiteURL.toulouse.d"
			;;
		*)
			domains_target_dir="${CURRENT_SQUID_DIR}/ProxyWhitelistSite.toulouse.d"
			urls_target_dir="${CURRENT_SQUID_DIR}/ProxyWhitelistSiteURL.toulouse.d"
			;;
	esac

	ensure_dir "${domains_target_dir}"
	ensure_dir "${urls_target_dir}"

	local header_content=""
	if [[ -s "${HEADER_TEMPLATE}" ]]; then
		header_content=$(<"${HEADER_TEMPLATE}")
	else
		warning_log "Header template not found at ${HEADER_TEMPLATE}"
	fi

	# Handle the "domains" and "urls" source files identically, differing
	# only in their destination directory.
	local kind src_file target_dir target_file debug_target_file
	for kind in domains urls; do
		src_file="${category_path}/${kind}"
		if [[ "${kind}" = "domains" ]]; then
			target_dir="${domains_target_dir}"
		else
			target_dir="${urls_target_dir}"
		fi

		if [[ -s "${src_file}" ]]; then
			target_file="${target_dir}/${category}.toulouse"
			# Shorten the path in log output for readability.
			debug_target_file="${target_file/#${WORK_DIR}/\$WORK_DIR}"
			debug_log "Processing ${kind} file for category '${category}' into '${debug_target_file}'"
			{
				echo "${header_content}"
				cat "${src_file}"
			} > "${target_file}"
		else
			debug_log "No ${kind} file found for category '${category}'"
		fi
	done
}

###############################################################################
# Function: generate_squid_acl_file
# Generates a Squid ACL configuration file by including all relevant files
# from the previously processed black/white lists.
# Globals:
#   SNIPPETS_PATH - Path to the snippets.d directory of Squid.
#   FILTERLISTS_PATH - Path to the directory containing filter lists.
# Arguments:
#   None
# Returns:
#   None
# Output:
#   Creates a new configuration file named
#   "26_squid_acls.toulouse_d-e-r-p.c-f.conf" in the $SNIPPETS_PATH directory.
###############################################################################
function generate_squid_acl_file() {
	local squid_snippet_conf="${SNIPPETS_PATH}/26_squid_acls.toulouse_d-e-r-p.c-f.conf"
	# Split declaration and assignment so a failing 'date' is not masked
	# by the exit status of 'local' (relevant under 'set -e').
	local current_date
	current_date=$(date '+%A, %B %d, %Y, %I:%M %p %Z')

	# Map each list type to the matching Squid ACL type.
	declare -A acl_types=(
		["ProxyWhitelistSite"]="dstdomain"
		["ProxyBlacklistSite"]="dstdomain"
		["ProxyWhitelistSiteURL"]="url_regex"
		["ProxyBlacklistSiteURL"]="url_regex"
	)

	local acl_name acl_type filepath file

	# Write the whole configuration in one redirected group.
	{
		echo "# Debian Edu Router Plugin: Content Filter"
		echo "# DO NOT MODIFY THIS FILE, EXCEPT YOU KNOW WHAT YOU ARE DOING."
		echo "# Automatically generated on ${current_date}"
		echo ""
		echo "# ACLs - University of Toulouse blacklists/whitelists ACLs by"
		echo "#        $0"
		echo ""

		# Iterate in sorted order so the generated file is deterministic
		# (associative-array iteration order is unspecified in Bash).
		for acl_name in $(printf '%s\n' "${!acl_types[@]}" | sort); do
			acl_type="${acl_types[$acl_name]}"
			filepath="${FILTERLISTS_PATH}/${acl_name}.toulouse.d"

			echo "###"
			if [[ -d "${filepath}" ]]; then
				# Reference only non-empty list files.
				for file in "${filepath}"/*; do
					if [[ -s "${file}" ]]; then
						echo "acl ${acl_name} ${acl_type} \"${file}\""
					fi
				done
			else
				echo "# Directory not found: ${filepath}"
			fi
			echo "###"
			echo
		done

	} > "${squid_snippet_conf}"

	# Log the completion of the file generation
	notice_log "Squid ACL configuration file generated at: ${squid_snippet_conf}"
}

###############################################################################
# Function: ensure_symlink
# Makes sure a symlink at the target path points at the given source,
# removing whatever is currently in the way (wrong symlink, regular file,
# or directory) before recreating it.
# Globals:
#   None
# Arguments:
#   $1 - final target path
#   $2 - source path
# Returns:
#   None
###############################################################################
function ensure_symlink() {
	local target="$1"
	local source="$2"

	# Already a symlink pointing at the right place? Nothing to do.
	if [[ -L "${target}" && "$(readlink "${target}")" == "${source}" ]]; then
		return
	fi

	# rm -rf removes a wrong symlink (the link itself, not its target),
	# a regular file, or a whole directory; a no-op if nothing exists.
	rm -rf "${target}"
	ln -s "${source}" "${target}"
}

###############################################################################
# Function: print_usage
# Prints usage information for the script and exits.
# Globals:
#   None
# Arguments:
#   None
# Returns:
#   Does not return — always terminates the script with exit status 0.
###############################################################################
function print_usage() {
	notice_log "Usage: $0 [-r|--remove-blacklists]"
	notice_log "Default behavior (without arguments) is to just download and update blacklists."
	# NOTE(review): unconditional 'exit 0' — callers that want to report an
	# error after showing usage must work around this (e.g. a subshell).
	exit 0
}

###############################################################################
# Function: parse_arguments
# Parses command line arguments and sets global variables accordingly.
# Unknown arguments print the usage text and terminate with exit status 1.
# Globals:
#   REMOVE_BLACKLISTS
# Arguments:
#   Command line arguments ($@)
# Returns:
#   None (exits on -h/--help or unknown arguments)
###############################################################################
function parse_arguments() {
	# Parse command line arguments
	while [[ $# -gt 0 ]]; do
		case "$1" in
			-r|--remove-blacklists)
				REMOVE_BLACKLISTS="true"
				shift
				;;
			-h|--help)
				print_usage
				;;
			*)
				echo "$0 Unknown argument: $1" >&2
				# Run print_usage in a subshell: it ends in 'exit 0', which
				# would otherwise terminate the whole script with a SUCCESS
				# status before the 'exit 1' below is ever reached (the old
				# code had an unreachable 'exit 1' for exactly that reason).
				( print_usage )
				exit 1
				;;
		esac
	done
}


###############################################################################
# Main Script Execution
###############################################################################
parse_arguments "$@"

# If REMOVE_BLACKLISTS is set to "true", remove all related files and exit.
if [[ "${REMOVE_BLACKLISTS}" = "true" ]]; then
	remove_blacklist_files
	exit 0
fi

# Block squid_d-e-r_acl_watcher.service from restarting/reloading Squid
# while the lists are being rebuilt; finish() removes this marker on exit.
echo "BLOCKING_PID=\"$$\""              >  "${FILTERLISTS_PATH}/work-in-progress"
echo "BLOCKING_DATE=\"$(LANG=c date)\"" >> "${FILTERLISTS_PATH}/work-in-progress"

ensure_dir "${WORK_DIR}"
ensure_dir "${ORIG_DIR}"

debug_log "File paths in log messages are shortened using following variables, please copy into your terminal:"
debug_log "  - WORK_DIR=\"${WORK_DIR}\""
debug_log "  - FILTERLISTS_PATH=\"${FILTERLISTS_PATH}\""
debug_log "  - SELECT_CATEGORIES_DIR=\"$SELECT_CATEGORIES_DIR\""

# Download (rate-limited); retry once with a forced download if the
# expected source directory is still missing afterwards.
perform_rsync
if ! [[ -d "${SOURCE_DIR}" ]]; then
	warning_log "Source directory \$SOURCE_DIR does not exist. Forcing resync of blacklists files..."
	FORCE_DOWNLOAD="true" perform_rsync
fi

# Create or update selected categories files
handle_selected_categories_files

notice_log "Starting processing of categories..."
for category in "${SOURCE_DIR}"/*; do
	if [[ -d "${category}" ]]; then
		category_name="$(basename "${category}")"
		process_category "${category_name}"
	fi
done
notice_log "Category processing completed."

# Symlink stage: Create final symlinks instead of copying files.
notice_log "Creating final symlinks in ${FILTERLISTS_PATH}"

# All four published list directories follow the same naming pattern, so
# handle them in one loop instead of four copy-pasted stanzas.
for list_name in ProxyBlacklistSite ProxyBlacklistSiteURL ProxyWhitelistSite ProxyWhitelistSiteURL; do
	debug_log "Creating symlink ${FILTERLISTS_PATH}/${list_name}.toulouse.d to ${WORK_DIR}/blacklists.squid_${CURRENT_SQUID}/${list_name}.toulouse.d"
	ensure_symlink "${FILTERLISTS_PATH}/${list_name}.toulouse.d" "${CURRENT_SQUID_DIR}/${list_name}.toulouse.d"
done

notice_log "Symlink creation completed successfully."

# Squid ACL stage: Create ACL file for Squid, which includes all enabled categories.
generate_squid_acl_file

# Update state for next round.
echo "${CURRENT_SQUID}" > "${STATE_FILE}"

# Remove WIP file.
rm -f "${FILTERLISTS_PATH}/work-in-progress"

# Reload Squid.
notice_log "Reloading all Squid processes..."
# Explicit if/else instead of 'cmd && { ok } || { fail }': with the old
# pattern the failure branch would also run if the success branch itself
# returned non-zero.
if manage_unit "start" "squid_d-e-r_acl_watcher.service"; then
	notice_log "Success!"
else
	error_log "Error happened while reloading all Squid processes!"
fi

# End of script, finish() will be run now.
