diff --git a/.gitignore b/.gitignore index 906afbd..ef1203f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,6 @@ -gravity-sync.log -gravity-sync.cron -gravity-sync.conf -gravity-sync.md5 -logs/* -settings/gravity-sync.conf dev .vscode .DS_Store .nova +settings/* +logs/* \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..d988cdd --- /dev/null +++ b/Dockerfile @@ -0,0 +1,9 @@ +FROM photon:4.0 +LABEL maintainer="Michael Stanclift " + +RUN tdnf update -y \ + && tdnf install -y curl git rsync openssh + +RUN curl -sSL http://gravity.vmstan.com/beta | GS_DOCKER=1 && GS_DEV=4.0.0 bash + +CMD gravity-sync version \ No newline at end of file diff --git a/README.md b/README.md index 4cf28f7..cb9f4ff 100644 --- a/README.md +++ b/README.md @@ -14,24 +14,23 @@ What is better than a [Pi-hole](https://github.com/pi-hole/pi-hole) blocking tra - [Seriously. Why two Pi-hole?](https://github.com/vmstan/gravity-sync/wiki/Frequent-Questions#why-do-i-need-more-than-one-pi-hole) -But if you have more than one Pi-hole in your network you'll want a simple way to keep the list configurations and local DNS settings identical between the two. That's where Gravity Sync comes in. With proper preparation, it should only take a few minutes to install. Ideally you setup Gravity Sync and forget about it -- and in the long term, it would be awesome if the Pi-hole team made this entire script unnecessary. +But if you have redundant Pi-hole in your network you'll want a simple way to keep the list configurations and local DNS settings identical between the two. That's where Gravity Sync comes in. Setup should only take a few minutes. ## Features -Gravity Sync replicates the Pi-hole Domain Database (`gravity.db`), which includes: -- Blocklist settings with status and comments. +Gravity Sync replicates the core of Pi-hole's resolver settings, which includes: + +- Adlist settings with status and comments. - Domain whitelist and blacklist along with status with comments. - Custom RegEx whitelist and blacklists. -- Clients and groups along with any list assignments. - -Gravity Sync can also (optionally) replicate the Local DNS Records and CNAMEs configuration files. +- Clients and groups, along with any list assignments. +- Local DNS Records. +- Local CNAME Records. ### Limitations Gravity Sync will **not**: -- Overwrite device specific Pi-hole settings specific to the local network configuration. -- Change the Pi-hole admin/API passwords, nor does not leverage these at all. - Modify the individual Pi-hole's upstream DNS resolvers. - Sync DHCP settings or monitor device leases. - Merge long term data, query logs, or statistics. @@ -39,12 +38,10 @@ Gravity Sync will **not**: ## Setup Steps 1. [Review System Requirements](https://github.com/vmstan/gravity-sync/wiki/System-Requirements) -2. [Prepare Your Pi-hole](https://github.com/vmstan/gravity-sync/wiki/Installing#primary-pi-hole) -3. [Install Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#secondary-pi-hole) (or [Upgrade](https://github.com/vmstan/gravity-sync/wiki/Updating)) -4. [Configure Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#configuration) -5. [Execute Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#execution) -6. [Automate Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#automation) -7. [Profit](https://memory-alpha.fandom.com/wiki/Rules_of_Acquisition) +2. 
[Install Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing) +3. [Configure Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#configuration) +4. [Execute Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#execution) +5. [Automate Gravity Sync](https://github.com/vmstan/gravity-sync/wiki/Installing#automation) ## Disclaimer @@ -58,4 +55,4 @@ Please refer to the [Wiki](https://github.com/vmstan/gravity-sync/wiki) for more - [Frequently Asked Questions](https://github.com/vmstan/gravity-sync/wiki/Frequent-Questions) - [Advanced Installation Options](https://github.com/vmstan/gravity-sync/wiki/Under-The-Covers) -- [Changelog](https://github.com/vmstan/gravity-sync/wiki/Changelog) \ No newline at end of file +- [Changelog](https://github.com/vmstan/gravity-sync/wiki/Changelog) diff --git a/VERSION b/VERSION index 240bba9..0c89fc9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.7.0 \ No newline at end of file +4.0.0 \ No newline at end of file diff --git a/backup/BACKUP.md b/backup/BACKUP.md deleted file mode 100644 index 4f90019..0000000 --- a/backup/BACKUP.md +++ /dev/null @@ -1,5 +0,0 @@ -Your backup files will be stored here. -They will be wiped out by Git. -This file is required for Git to create the folder. -It serves no other purpose. -Live long and prosper. \ No newline at end of file diff --git a/gravity-sync b/gravity-sync new file mode 100755 index 0000000..9811fca --- /dev/null +++ b/gravity-sync @@ -0,0 +1,2193 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2086,SC1091 +GS_RUN_START=$SECONDS + +# GRAVITY SYNC BY VMSTAN ##################### +PROGRAM='Gravity Sync' +GS_VERSION='4.0.0' + +# For documentation or the changelog/updates visit https://github.com/vmstan/gravity-sync +# Requires Pi-Hole 5.x or higher already be installed, for help visit https://pi-hole.net + +# REQUIRED SETTINGS ########################## + +# Run 'gravity-sync config' to get started, it will customize the script for your environment +# You should NOT to change the values of any variables here, to customize your install +# Only add replacement variables to gravity-sync.conf, which will overwrite these defaults +# Gravity Sync 4.0 introduces a new configuration file format, there is no direct upgrade path + +# CUSTOM VARIABLES ########################### + +# Pi-hole Folder/File Customization - Only need to be customized when using containers +LOCAL_PIHOLE_DIRECTORY='/etc/pihole' # replace in gravity-sync.conf to overwrite +REMOTE_PIHOLE_DIRECTORY='/etc/pihole' # replace in gravity-sync.conf to overwrite +LOCAL_DNSMASQ_DIRECTORY='/etc/dnsmasq.d' # replace in gravity-sync.conf to overwrite +REMOTE_DNSMASQ_DIRECTORY='/etc/dnsmasq.d' # replace in gravity-sync.conf to overwrite +LOCAL_FILE_OWNER='pihole:pihole' # replace in gravity-sync.conf to overwrite +REMOTE_FILE_OWNER='pihole:pihole' # replace in gravity-sync.conf to overwrite + +# Pi-hole Docker/Podman container name - Docker will pattern match anything set below +LOCAL_DOCKER_CONTAINER='pihole' # replace in gravity-sync.conf to overwrite +REMOTE_DOCKER_CONTAINER='pihole' # replace in gravity-sync.conf to overwrite + +# STANDARD VARIABLES ######################### + +DEFAULT_PIHOLE_DIRECTORY='/etc/pihole' # Default Pi-hole data directory +LOCAL_PIHOLE_BINARY='/usr/local/bin/pihole' # Local Pi-hole binary directory (default) +REMOTE_PIHOLE_BINARY='/usr/local/bin/pihole' # Remote Pi-hole binary directory (default) +LOCAL_FTL_BINARY='/usr/bin/pihole-FTL' # Local FTL binary directory (default) 
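+# Example gravity-sync.conf overrides (illustration only: the address, user, port,
+# and container name below are placeholders for your environment, not shipped defaults):
+#   REMOTE_HOST='192.168.1.20'
+#   REMOTE_USER='pi'
+#   GS_SSH_PORT='2222'
+#   LOCAL_DOCKER_CONTAINER='pihole'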
+REMOTE_FTL_BINARY='/usr/bin/pihole-FTL' # Remote FTL binary directory (default) +LOCAL_DOCKER_BINARY='/usr/bin/docker' # Local Docker binary directory (default) +REMOTE_DOCKER_BINARY='/usr/bin/docker' # Remote Docker binary directory (default) +LOCAL_PODMAN_BINARY='/usr/bin/podman' # Local Podman binary directory (default) +REMOTE_PODMAN_BINARY='/usr/bin/podman' # Remote Podman binary directory (default) +PIHOLE_CONTAINER_IMAGE='pihole/pihole' # Official Pi-hole container image name + +############################################### +####### THE NEEDS OF THE MANY, OUTWEIGH ####### +############ THE NEEDS OF THE FEW ############# +############################################### + +PH_GRAVITY_FI='gravity.db' # Pi-hole database file name +PH_CUSTOM_DNS='custom.list' # Pi-hole DNS lookup filename +PH_CNAME_CONF='05-pihole-custom-cname.conf' # DNSMASQ CNAME alias file + +# Backup Customization +GS_BACKUP_TIMEOUT='240' # replace in gravity-sync.conf to overwrite +GS_BACKUP_INTEGRITY_WAIT='5' # replace in gravity-sync.conf to overwrite +GS_BACKUP_EXT='gsb' # replace in gravity-sync.conf to overwrite + +# GS Folder/File Locations +GS_FILEPATH='/usr/local/bin/gravity-sync' +GS_ETC_PATH="/etc/gravity-sync" # replace in gravity-sync.conf to overwrite +GS_CONFIG_FILE='gravity-sync.conf' # replace in gravity-sync.conf to overwrite +GS_SYNCING_LOG='gs-sync.log' # replace in gravity-sync.conf to overwrite +GS_GRAVITY_FI_MD5_LOG='gs-gravity.md5' # replace in gravity-sync.conf to overwrite +GS_CUSTOM_DNS_MD5_LOG='gs-clist.md5' # replace in gravity-sync.conf to overwrite +GS_CNAME_CONF_MD5_LOG='gs-cname.md5' # replace in gravity-sync.conf to overwrite + +# SSH Customization +GS_SSH_PORT='22' # replace in gravity-sync.conf to overwrite +GS_SSH_PKIF="${GS_ETC_PATH}/gravity-sync.rsa" # replace in gravity-sync.conf to overwrite + +# Github Customization +GS_LOCAL_REPO="${GS_ETC_PATH}/.gs" # replace in gravity-sync.conf to overwrite + +# OS Settings +OS_DAEMON_PATH='/etc/systemd/system' +OS_TMP='/tmp' +OS_SSH_CMD='ssh' + +# Interface Settings +UI_GRAVITY_NAME='Gravity Database' +UI_CUSTOM_NAME='DNS Records' +UI_CNAME_NAME='DNS CNAMEs' +# Reused UI Text +UI_CORE_LOADING='Loading' +UI_CORE_EVALUATING='Evaluating arguments' +UI_CORE_INIT="Initializing ${PROGRAM} (${GS_VERSION})" +UI_CORE_APP='Pi-hole' +UI_CORE_APP_DNS='DNSMASQ' +UI_EXIT_CALC_END='after' +UI_EXIT_ABORT='exited' +UI_EXIT_COMPLETE='completed' +UI_EXIT_CALC_TIMER='seconds' +UI_HASHING_HASHING='Hashing the remote' +UI_HASHING_COMPARING='Comparing to the local' +UI_HASHING_DIFFERENCE='Differences detected in the' +UI_HASHING_DETECTED='has been detected on the' +UI_HASHING_NOT_DETECTED='not detected on the' +UI_HASHING_REMOTE="remote ${UI_CORE_APP}" +UI_HASHING_LOCAL="local ${UI_CORE_APP}" +UI_HASHING_REHASHING='Rehashing the remote' +UI_HASHING_RECOMPARING='Recomparing to local' +UI_VALIDATING='Validating pathways to' +UI_VALIDATING_FAIL_CONTAINER='Unable to validate running container instance of' +UI_VALIDATING_FAIL_FOLDER='Unable to validate configuration folder for' +UI_VALIDATING_FAIL_BINARY='Unable to validate the availability of' +UI_SET_LOCAL_FILE_OWNERSHIP='Setting file ownership on' +UI_SET_FILE_PERMISSION='Setting file permissions on' +UI_PULL_REMOTE='Pulling the remote' +UI_PUSH_LOCAL='Pushing the local' +UI_REPLACE_LOCAL='Replacing the local' +UI_FTLDNS_CONFIG_PULL_RELOAD='Reloading local FTLDNS services' +UI_FTLDNS_CONFIG_PUSH_RELOAD='Reloading remote FTLDNS services' +UI_LOGGING_RECENT_COMPLETE='Recent complete executions of' 
+UI_BACKUP_REMOTE='Performing backup of remote' +UI_BACKUP_LOCAL='Performing backup of local' +UI_BACKUP_INTEGRITY="Checking ${UI_GRAVITY_NAME} copy integrity" +UI_BACKUP_INTEGRITY_FAILED='Integrity check has failed for the remote' +UI_BACKUP_INTEGRITY_DELETE='Removing failed copies' +UI_CONFIG_ALREADY='already exists' +UI_CONFIG_CONFIRM='Proceeding will replace your existing configuration' +UI_CONFIG_ERASING='Erasing existing' +UI_CONFIG_LOCAL='local host' +UI_CONFIG_CONTAINER_NAME='container name' +UI_CONFIG_SAVING='Saving' +UI_CONFIG_ETC_VOLUME_PATH="'etc' volume path" +UI_CONFIG_VOLUME_OWNER='volume ownership' + +## Script Colors +RED='\033[0;91m' +GREEN='\033[0;92m' +CYAN='\033[0;96m' +YELLOW='\033[0;93m' +PURPLE='\033[0;95m' +BLUE='\033[0;94m' +BOLD='\033[1m' +NC='\033[0m' + +## Message Codes +FAIL="${RED}✗${NC}" +WARN="${PURPLE}!${NC}" +GOOD="${GREEN}✓${NC}" +STAT="${CYAN}∞${NC}" +INFO="${YELLOW}»${NC}" +INF1="${CYAN}›${NC}" +NEED="${BLUE}?${NC}" +LOGO="${PURPLE}∞${NC}" + +## Echo Stack +### Informative +function echo_info { + echo -e "${INFO} ${YELLOW}${MESSAGE}${NC}" +} + +function echo_prompt { + echo -e "${INF1} ${CYAN}${MESSAGE}${NC}" +} + +### Warning +function echo_warn { + echo -e "${WARN} ${PURPLE}${MESSAGE}${NC}" +} + +### Executing +function echo_stat { + echo -en "${STAT} ${MESSAGE}" +} + +### Success +function echo_good { + echo -e "\r${GOOD} ${MESSAGE}" +} + +### Success +function echo_good_clean { + echo -e "\r${GOOD} ${MESSAGE}" +} + +### Failure +function echo_fail { + echo -e "\r${FAIL} ${MESSAGE}" +} + +### Request +function echo_need { + echo -en "${NEED} ${BOLD}${MESSAGE}:${NC} " +} + +### Indent +function echo_over { + echo -e " ${MESSAGE}" +} + +### Gravity Sync Logo +function echo_grav { + echo -e "${LOGO} ${BOLD}${MESSAGE}${NC}" +} + +### Lines +function echo_blank { + echo -e "" +} + +# Standard Output +function start_gs { + MESSAGE="${UI_CORE_INIT}" + echo_grav + + import_gs_config + detect_local_pihole + detect_remote_pihole + detect_gs_peer + set_pihole_exec + + MESSAGE="${UI_CORE_EVALUATING}" + echo_stat + + validate_sudo_status +} + +# Standard Output No Config +function start_gs_no_config { + MESSAGE="${UI_CORE_INIT}" + echo_grav + + MESSAGE="${UI_CORE_EVALUATING}" + echo_stat +} + +## Import Settings +function import_gs_config { + MESSAGE="${UI_CORE_LOADING} ${GS_CONFIG_FILE}" + echo -en "${STAT} $MESSAGE" + if [ -f ${GS_ETC_PATH}/${GS_CONFIG_FILE} ]; then + # shellcheck source=/etc/gravity-sync/gravity-sync.conf + source ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + else + echo_fail + + MESSAGE="Missing ${GS_CONFIG_FILE}" + echo_warn + + GS_TASK_TYPE='CONFIG' + config_generate + fi +} + +## Invalid Tasks +function task_invalid { + echo_fail + list_gs_arguments +} + +## Error Validation +function error_validate { + if [ "$?" 
!= "0" ]; then + echo_fail + exit 1 + else + echo_good + fi +} + +function set_pihole_exec { + if [ "$LOCAL_PIHOLE_TYPE" == "default" ]; then + PH_EXEC="${LOCAL_PIHOLE_BINARY}" + FTL_EXEC="${LOCAL_FTL_BINARY}" + elif [ "$LOCAL_PIHOLE_TYPE" == "docker" ]; then + PH_EXEC="sudo ${LOCAL_DOCKER_BINARY} exec $(sudo ${LOCAL_DOCKER_BINARY} ps -qf name=${LOCAL_DOCKER_CONTAINER}) pihole" + FTL_EXEC="sudo ${LOCAL_DOCKER_BINARY} exec $(sudo ${LOCAL_DOCKER_BINARY} ps -qf name=${LOCAL_DOCKER_CONTAINER}) pihole-FTL" + elif [ "$LOCAL_PIHOLE_TYPE" == "podman" ]; then + PH_EXEC="sudo ${LOCAL_PODMAN_BINARY} exec ${LOCAL_DOCKER_CONTAINER} pihole" + FTL_EXEC="sudo ${LOCAL_PODMAN_BINARY} exec ${LOCAL_DOCKER_CONTAINER} pihole-FTL" + fi + + if [ "$REMOTE_PIHOLE_TYPE" == "default" ]; then + RH_EXEC="${REMOTE_PIHOLE_BINARY}" + RFTL_EXEC="${REMOTE_FTL_BINARY}" + elif [ "$REMOTE_PIHOLE_TYPE" == "docker" ]; then + RH_EXEC="sudo ${REMOTE_DOCKER_BINARY} exec \$(sudo ${REMOTE_DOCKER_BINARY} ps -qf name=${REMOTE_DOCKER_CONTAINER}) pihole" + RFTL_EXEC="sudo ${REMOTE_DOCKER_BINARY} exec \$(sudo ${REMOTE_DOCKER_BINARY} ps -qf name=${REMOTE_DOCKER_CONTAINER}) pihole-FTL" + elif [ "$REMOTE_PIHOLE_TYPE" == "podman" ]; then + RH_EXEC="sudo ${REMOTE_PODMAN_BINARY} exec ${REMOTE_DOCKER_CONTAINER} pihole" + RFTL_EXEC="sudo ${REMOTE_PODMAN_BINARY} exec ${REMOTE_DOCKER_CONTAINER} pihole" + fi +} + +## Compare Task +function task_compare { + GS_TASK_TYPE='COMPARE' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + show_target + validate_ph_folders + validate_dns_folders + previous_md5 + md5_compare + exit_with_changes +} + +## Pull Task +function task_pull { + GS_TASK_TYPE='PULL' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + show_target + validate_ph_folders + validate_dns_folders + pull_gs + exit +} + +## Pull Gravity +function pull_gs_grav { + + backup_local_gravity + backup_remote_gravity + backup_remote_gravity_integrity + + MESSAGE="${UI_PULL_REMOTE} ${UI_GRAVITY_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${OS_TMP}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}" + create_rsync_cmd + + MESSAGE="${UI_REPLACE_LOCAL} ${UI_GRAVITY_NAME}" + echo_stat + sudo mv ${OS_TMP}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} >/dev/null 2>&1 + error_validate + + validate_gravity_permissions +} + +## Pull Custom +function pull_gs_custom { + if [ "$REMOTE_PH_CUSTOM_DNS" == "1" ]; then + backup_local_custom + backup_remote_custom + + MESSAGE="${UI_PULL_REMOTE} ${UI_CUSTOM_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${OS_TMP}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT}" + create_rsync_cmd + + MESSAGE="${UI_REPLACE_LOCAL} ${UI_CUSTOM_NAME}" + echo_stat + sudo mv ${OS_TMP}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT} ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} >/dev/null 2>&1 + error_validate + + validate_custom_permissions + fi +} + +## Pull CNAME +function pull_gs_cname { + if [ "$REMOTE_CNAME_DNS" == "1" ]; then + backup_local_cname + backup_remote_cname + + MESSAGE="${UI_PULL_REMOTE} ${UI_CNAME_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PIHOLE_DIRECTORY}/${PH_CNAME_CONF}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${OS_TMP}/${PH_CNAME_CONF}.${GS_BACKUP_EXT}" + create_rsync_cmd + + MESSAGE="${UI_REPLACE_LOCAL} ${UI_CNAME_NAME}" + echo_stat + 
sudo mv ${OS_TMP}/${PH_CNAME_CONF}.${GS_BACKUP_EXT} ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} >/dev/null 2>&1 + error_validate + + validate_cname_permissions + fi +} + +## Pull Reload +function pull_gs_reload { + sleep 1 + + MESSAGE="Updating local FTLDNS configuration" + echo_stat + ${PH_EXEC} restartdns reload-lists >/dev/null 2>&1 + error_validate + + if [ "${GS_TASK_TYPE}" == SMART ]; then + if [ "${REMOTE_DNS_CHANGE}" == "1" ] || [ "${LOCAL_DNS_CHANGE}" == "1" ] || [ "${REMOTE_CNAME_CHANGE}" == "1" ] || [ "${LOCAL_CNAME_CHANGE}" == "1" ]; then + MESSAGE="${UI_FTLDNS_CONFIG_PULL_RELOAD}" + echo_stat + ${PH_EXEC} restartdns >/dev/null 2>&1 + error_validate + fi + else + MESSAGE="${UI_FTLDNS_CONFIG_PULL_RELOAD}" + echo_stat + ${PH_EXEC} restartdns >/dev/null 2>&1 + error_validate + fi +} + +## Pull Function +function pull_gs { + previous_md5 + md5_compare + pull_gs_grav + pull_gs_custom + pull_gs_cname + pull_gs_reload + md5_recheck + logs_export + exit_with_changes +} + +## Push Task +function task_push { + GS_TASK_TYPE='PUSH' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + show_target + validate_ph_folders + validate_dns_folders + push_gs + exit +} + +## Push Gravity +function push_gs_grav { + backup_remote_gravity + backup_local_gravity + backup_local_gravity_integrity + + MESSAGE="${UI_PUSH_LOCAL} ${UI_GRAVITY_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}" + create_rsync_cmd + + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_GRAVITY_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chown ${REMOTE_FILE_OWNER} ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}" + create_ssh_cmd + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_GRAVITY_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chmod 664 ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}" + create_ssh_cmd +} + +## Push Custom +function push_gs_custom { + if [ "$REMOTE_PH_CUSTOM_DNS" == "1" ]; then + backup_remote_custom + backup_local_custom + + MESSAGE="${UI_PUSH_LOCAL} ${UI_CUSTOM_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}" + create_rsync_cmd + + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_CUSTOM_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chown root:root ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}" + create_ssh_cmd + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CUSTOM_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chmod 644 ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}" + create_ssh_cmd + fi +} + +## Push Custom +function push_gs_cname { + if [ "$REMOTE_CNAME_DNS" == "1" ]; then + backup_remote_cname + backup_local_cname + + MESSAGE="${UI_PUSH_LOCAL} ${UI_CNAME_NAME}" + echo_stat + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${LOCAL_PIHOLE_DIRECTORY}/${PH_CNAME_CONF}.${GS_BACKUP_EXT}" + RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}" + create_rsync_cmd + + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_CNAME_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chown root:root ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}" + create_ssh_cmd + + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CNAME_NAME}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + 
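+        # CMD_REQUESTED below is handed to create_ssh_cmd, which runs it on the peer
+        # wrapped in 'timeout ${CMD_TIMEOUT}' so a stalled remote cannot hang the sync.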
CMD_REQUESTED="sudo chmod 644 ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}" + create_ssh_cmd + fi +} + +## Push Reload +function push_gs_reload { + sleep 1 + + MESSAGE="Updating remote FTLDNS configuration" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="${RH_EXEC} restartdns reload-lists" + create_ssh_cmd + + if [ "${GS_TASK_TYPE}" == SMART ]; then + if [ "${REMOTE_DNS_CHANGE}" == "1" ] || [ "${LOCAL_DNS_CHANGE}" == "1" ] || [ "${REMOTE_CNAME_CHANGE}" == "1" ] || [ "${LOCAL_CNAME_CHANGE}" == "1" ]; then + MESSAGE="${UI_FTLDNS_CONFIG_PUSH_RELOAD}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="${RH_EXEC} restartdns" + create_ssh_cmd + fi + else + MESSAGE="${UI_FTLDNS_CONFIG_PUSH_RELOAD}" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="${RH_EXEC} restartdns" + create_ssh_cmd + fi +} + +## Push Function +function push_gs { + previous_md5 + md5_compare + push_gs_grav + push_gs_custom + push_gs_cname + push_gs_reload + md5_recheck + logs_export + exit_with_changes +} + +## Smart Task +function task_smart { + GS_TASK_TYPE='SMART' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + show_target + validate_ph_folders + validate_dns_folders + smart_gs + exit +} + +## Smart Sync Function +function smart_gs { + MESSAGE="Starting ${GS_TASK_TYPE} Analysis" + echo_info + + previous_md5 + md5_compare + + REMOTE_GRAVITY_CHANGE="0" + LOCAL_GRAVITY_CHANGE="0" + REMOTE_DNS_CHANGE="0" + LOCAL_DNS_CHANGE="0" + REMOTE_CNAME_CHANGE="0" + LOCAL_CNAME_CHANGE="0" + + if [ "${REMOTE_DB_MD5}" != "${LAST_REMOTE_DB_MD5}" ]; then + REMOTE_GRAVITY_CHANGE="1" + fi + + if [ "${LOCAL_DB_MD5}" != "${LAST_LOCAL_DB_MD5}" ]; then + LOCAL_GRAVITY_CHANGE="1" + fi + + if [ "${REMOTE_GRAVITY_CHANGE}" == "${LOCAL_GRAVITY_CHANGE}" ]; then + if [ "${REMOTE_GRAVITY_CHANGE}" != "0" ]; then + MESSAGE="Both ${UI_GRAVITY_NAME} have changed" + echo_warn + + REMOTE_GRAVITY_DATE=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}") + LOCAL_GRAVITY_DATE=$(stat -c %Y ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}) + + if (( "$REMOTE_GRAVITY_DATE" >= "$LOCAL_GRAVITY_DATE" )); then + MESSAGE="Remote ${UI_GRAVITY_NAME} was last changed" + echo_warn + + pull_gs_grav + GS_PULL_RESTART="1" + else + MESSAGE="Local ${UI_GRAVITY_NAME} was last changed" + echo_warn + + push_gs_grav + GS_PUSH_RESTART="1" + fi + fi + else + if [ "${REMOTE_GRAVITY_CHANGE}" != "0" ]; then + pull_gs_grav + GS_PULL_RESTART="1" + elif [ "${LOCAL_GRAVITY_CHANGE}" != "0" ]; then + push_gs_grav + GS_PUSH_RESTART="1" + fi + fi + + if [ "${REMOTE_CL_MD5}" != "${LAST_REMOTE_CL_MD5}" ]; then + REMOTE_DNS_CHANGE="1" + fi + + if [ "${LOCAL_CL_MD5}" != "${LAST_LOCAL_CL_MD5}" ]; then + LOCAL_DNS_CHANGE="1" + fi + + if [ -f "${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}" ]; then + if [ "${REMOTE_DNS_CHANGE}" == "${LOCAL_DNS_CHANGE}" ]; then + if [ "${REMOTE_DNS_CHANGE}" != "0" ]; then + MESSAGE="Both ${UI_CUSTOM_NAME} have changed" + echo_warn + + REMOTE_DNS_DATE=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}") + LOCAL_DNS_DATE=$(stat -c %Y ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}) + + if (( "$REMOTE_DNS_DATE" >= "$LOCAL_DNS_DATE" )); then + MESSAGE="Remote ${UI_CUSTOM_NAME} was last changed" + echo_warn + + pull_gs_custom + GS_PULL_RESTART="1" + else + MESSAGE="Local ${UI_CUSTOM_NAME} was last changed" + echo_warn + + push_gs_custom + GS_PUSH_RESTART="1" + fi + fi + 
else + if [ "${REMOTE_DNS_CHANGE}" != "0" ]; then + pull_gs_custom + GS_PULL_RESTART="1" + elif [ "${LOCAL_DNS_CHANGE}" != "0" ]; then + push_gs_custom + GS_PUSH_RESTART="1" + fi + fi + else + pull_gs_custom + GS_PULL_RESTART="1" + fi + + if [ "${REMOTE_CN_MD5}" != "${LAST_REMOTE_CN_MD5}" ]; then + REMOTE_CNAME_CHANGE="1" + fi + + if [ "${LOCAL_CN_MD5}" != "${LAST_LOCAL_CN_MD5}" ]; then + LOCAL_CNAME_CHANGE="1" + fi + + if [ -f "${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}" ]; then + if [ "${REMOTE_CNAME_CHANGE}" == "${LOCAL_CNAME_CHANGE}" ]; then + if [ "${REMOTE_CNAME_CHANGE}" != "0" ]; then + MESSAGE="Both ${UI_CNAME_NAME} have Changed" + echo_warn + + REMOTE_CNAME_DATE=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}") + LOCAL_CNAME_DATE=$(stat -c %Y ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}) + + if (( "$REMOTE_CNAME_DATE" >= "$LOCAL_CNAME_DATE" )); then + MESSAGE="Remote ${UI_CNAME_NAME} was last changed" + echo_warn + + pull_gs_cname + GS_PULL_RESTART="1" + else + MESSAGE="Local ${UI_CNAME_NAME} was last changed" + echo_warn + + push_gs_cname + GS_PUSH_RESTART="1" + fi + fi + else + if [ "${REMOTE_CNAME_CHANGE}" != "0" ]; then + pull_gs_cname + GS_PULL_RESTART="1" + elif [ "${LOCAL_CNAME_CHANGE}" != "0" ]; then + push_gs_cname + GS_PUSH_RESTART="1" + fi + fi + else + pull_gs_cname + GS_PULL_RESTART="1" + fi + + if [ "$GS_PULL_RESTART" == "1" ]; then + pull_gs_reload + fi + + if [ "$GS_PUSH_RESTART" == "1" ]; then + push_gs_reload + fi + + md5_recheck + + logs_export + exit_with_changes +} + +function backup_local_gravity { + MESSAGE="${UI_BACKUP_LOCAL} ${UI_GRAVITY_NAME}" + echo_stat + + if [ "$LOCAL_PIHOLE_TYPE" == "default" ]; then + sudo ${FTL_EXEC} sql ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} ".backup '${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'" + error_validate + elif [ "$LOCAL_PIHOLE_TYPE" == "docker" ]; then + sudo ${FTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} ".backup '${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'" + error_validate + elif [ "$LOCAL_PIHOLE_TYPE" == "podman" ]; then + sudo ${FTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} ".backup '${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'" + error_validate + fi +} + +function backup_local_gravity_integrity { + MESSAGE="${UI_BACKUP_INTEGRITY}" + echo_stat + + sleep $GS_BACKUP_INTEGRITY_WAIT + + if [ "$LOCAL_PIHOLE_TYPE" == "default" ]; then + LOCAL_INTEGRITY_CHECK=$(${FTL_EXEC} sql ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;' | sed 's/\s.*$//') + error_validate + elif [ "$LOCAL_PIHOLE_TYPE" == "docker" ]; then + LOCAL_INTEGRITY_CHECK=$(${FTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;' | sed 's/\s.*$//') + error_validate + elif [ "$LOCAL_PIHOLE_TYPE" == "podman" ]; then + LOCAL_INTEGRITY_CHECK=$(${FTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;' | sed 's/\s.*$//') + error_validate + fi + + if [ "$LOCAL_INTEGRITY_CHECK" != 'ok' ]; then + MESSAGE="${UI_BACKUP_INTEGRITY_FAILED} ${UI_GRAVITY_NAME}" + echo_fail + + MESSAGE="${UI_BACKUP_INTEGRITY_DELETE} ${UI_GRAVITY_NAME}" + echo_stat + + sudo rm ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} + error_validate + + exit_no_change + fi +} + +function backup_local_custom { + if [ -f ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} ]; then + 
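+        # The .${GS_BACKUP_EXT} copy made here is what a push sends to the peer via
+        # rsync; on a pull it is the local fallback kept before custom.list is replaced.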
MESSAGE="${UI_BACKUP_LOCAL} ${UI_CUSTOM_NAME}" + echo_stat + + sudo cp ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT} + error_validate + else + MESSAGE="No local ${PH_CUSTOM_DNS} detected" + echo_warn + fi +} + +function backup_local_cname { + if [ -f ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} ]; then + MESSAGE="${UI_BACKUP_LOCAL} ${UI_CNAME_NAME}" + echo_stat + + sudo cp ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} ${LOCAL_PIHOLE_DIRECTORY}/${PH_CNAME_CONF}.${GS_BACKUP_EXT} + error_validate + else + MESSAGE="No local ${PH_CNAME_CONF} detected" + echo_warn + fi +} + +function backup_remote_gravity { + MESSAGE="${UI_BACKUP_REMOTE} ${UI_GRAVITY_NAME}" + echo_stat + + if [ "$REMOTE_PIHOLE_TYPE" == "default" ]; then + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo ${RFTL_EXEC} sql ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} \".backup '${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'\"" + create_ssh_cmd + elif [ "$REMOTE_PIHOLE_TYPE" == "docker" ]; then + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo ${RFTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} \".backup '${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'\"" + create_ssh_cmd + elif [ "$REMOTE_PIHOLE_TYPE" == "podman" ]; then + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo ${RFTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} \".backup '${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}'\"" + create_ssh_cmd + fi +} + +function backup_remote_gravity_integrity { + MESSAGE="${UI_BACKUP_INTEGRITY}" + echo_stat + + sleep $GS_BACKUP_INTEGRITY_WAIT + + if [ "$REMOTE_PIHOLE_TYPE" == "default" ]; then + REMOTE_INTEGRITY_CHECK=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "${RFTL_EXEC} sql ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;'" | sed 's/\s.*$//') + error_validate + elif [ "$REMOTE_PIHOLE_TYPE" == "docker" ]; then + REMOTE_INTEGRITY_CHECK=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "${RFTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;'" | sed 's/\s.*$//') + error_validate + elif [ "$REMOTE_PIHOLE_TYPE" == "podman" ]; then + REMOTE_INTEGRITY_CHECK=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "${RFTL_EXEC} sql ${DEFAULT_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT} 'PRAGMA integrity_check;'" | sed 's/\s.*$//') + error_validate + fi + + if [ "$REMOTE_INTEGRITY_CHECK" != 'ok' ]; then + MESSAGE="${UI_BACKUP_INTEGRITY_FAILED} ${UI_GRAVITY_NAME}" + echo_fail + + MESSAGE="${UI_BACKUP_INTEGRITY_DELETE} ${UI_GRAVITY_NAME}" + echo_stat + + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo rm ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}.${GS_BACKUP_EXT}" + create_ssh_cmd + + exit_no_change + fi +} + +function backup_remote_custom { + MESSAGE="${UI_BACKUP_REMOTE} ${UI_CUSTOM_NAME}" + echo_stat + + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo cp ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}.${GS_BACKUP_EXT}" + create_ssh_cmd +} + +function backup_remote_cname { + MESSAGE="${UI_BACKUP_REMOTE} ${UI_CNAME_NAME}" + echo_stat + + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo cp ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} ${REMOTE_PIHOLE_DIRECTORY}/${PH_CNAME_CONF}.${GS_BACKUP_EXT}" + create_ssh_cmd +} + +function backup_cleanup { + MESSAGE="Purging backups" + 
echo_stat + # git clean -fq + sudo rm -f ${LOCAL_PIHOLE_DIRECTORY}/*.${GS_BACKUP_EXT} + error_validate + + # MESSAGE="${UI_BACKUP_PURGE} on remote" + # echo_stat + # CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + # CMD_REQUESTED="sudo rm -f ${REMOTE_PIHOLE_DIRECTORY}/*.${GS_BACKUP_EXT}" + # create_ssh_cmd +} + +function md5_compare { + GS_HASH_MARK='0' + + MESSAGE="${UI_HASHING_HASHING} ${UI_GRAVITY_NAME}" + echo_stat + REMOTE_DB_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}" | sed 's/\s.*$//') + error_validate + + MESSAGE="${UI_HASHING_COMPARING} ${UI_GRAVITY_NAME}" + echo_stat + LOCAL_DB_MD5=$(md5sum ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} | sed 's/\s.*$//') + error_validate + + if [ "$REMOTE_DB_MD5" == "$LAST_REMOTE_DB_MD5" ] && [ "$LOCAL_DB_MD5" == "$LAST_LOCAL_DB_MD5" ]; then + GS_HASH_MARK=$((GS_HASH_MARK+0)) + else + MESSAGE="${UI_HASHING_DIFFERENCE} ${UI_GRAVITY_NAME}" + echo_warn + GS_HASH_MARK=$((GS_HASH_MARK+1)) + fi + + if [ -f ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} ]; then + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}; then + REMOTE_PH_CUSTOM_DNS="1" + MESSAGE="${UI_HASHING_HASHING} ${UI_CUSTOM_NAME}" + echo_stat + + REMOTE_CL_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} | sed 's/\s.*$//'") + error_validate + + MESSAGE="${UI_HASHING_COMPARING} ${UI_CUSTOM_NAME}" + echo_stat + LOCAL_CL_MD5=$(md5sum ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} | sed 's/\s.*$//') + error_validate + + if [ "$REMOTE_CL_MD5" == "$LAST_REMOTE_CL_MD5" ] && [ "$LOCAL_CL_MD5" == "$LAST_LOCAL_CL_MD5" ]; then + GS_HASH_MARK=$((GS_HASH_MARK+0)) + else + MESSAGE="${UI_HASHING_DIFFERENCE} ${UI_CUSTOM_NAME}" + echo_warn + GS_HASH_MARK=$((GS_HASH_MARK+1)) + fi + else + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + else + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}; then + REMOTE_PH_CUSTOM_DNS="1" + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_REMOTE}" + GS_HASH_MARK=$((GS_HASH_MARK+1)) + echo_warn + fi + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_LOCAL}" + echo_warn + fi + + if [ -f ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} ]; then + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}; then + REMOTE_CNAME_DNS="1" + MESSAGE="${UI_HASHING_HASHING} ${UI_CNAME_NAME}" + echo_stat + + REMOTE_CN_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} | sed 's/\s.*$//'") + error_validate + + MESSAGE="${UI_HASHING_COMPARING} ${UI_CNAME_NAME}" + echo_stat + LOCAL_CN_MD5=$(md5sum ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} | sed 's/\s.*$//') + error_validate + + if [ "$REMOTE_CN_MD5" == "$LAST_REMOTE_CN_MD5" ] && [ "$LOCAL_CN_MD5" == "$LAST_LOCAL_CN_MD5" ]; then + GS_HASH_MARK=$((GS_HASH_MARK+0)) + else + MESSAGE="${UI_HASHING_DIFFERENCE} ${UI_CNAME_NAME}" + echo_warn + GS_HASH_MARK=$((GS_HASH_MARK+1)) + fi + else + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + else + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test 
-e ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}; then + REMOTE_CNAME_DNS="1" + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_REMOTE}" + GS_HASH_MARK=$((GS_HASH_MARK+1)) + echo_warn + fi + + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_LOCAL}" + echo_warn + fi + + if [ "$GS_HASH_MARK" != "0" ]; then + MESSAGE="Replication of ${UI_CORE_APP} settings is required" + echo_warn + GS_HASH_MARK=$((GS_HASH_MARK+0)) + else + MESSAGE="No replication is required at this time" + echo_warn + exit_no_change + fi +} + +function previous_md5 { + if [ -f "${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG}" ]; then + LAST_REMOTE_DB_MD5=$(sed "1q;d" ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG}) + LAST_LOCAL_DB_MD5=$(sed "2q;d" ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG}) + else + LAST_REMOTE_DB_MD5="0" + LAST_LOCAL_DB_MD5="0" + fi + + if [ -f "${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG}" ]; then + LAST_REMOTE_CL_MD5=$(sed "1q;d" ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG}) + LAST_LOCAL_CL_MD5=$(sed "2q;d" ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG}) + else + LAST_REMOTE_CL_MD5="0" + LAST_LOCAL_CL_MD5="0" + fi + + if [ -f "${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG}" ]; then + LAST_REMOTE_CN_MD5=$(sed "1q;d" ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG}) + LAST_LOCAL_CN_MD5=$(sed "2q;d" ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG}) + else + LAST_REMOTE_CN_MD5="0" + LAST_LOCAL_CN_MD5="0" + fi +} + +function md5_recheck { + MESSAGE="Performing replicator diagnostics" + echo_prompt + + GS_HASH_MARK='0' + + MESSAGE="${UI_HASHING_REHASHING} ${UI_GRAVITY_NAME}" + echo_stat + REMOTE_DB_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI}" | sed 's/\s.*$//') + error_validate + + MESSAGE="${UI_HASHING_RECOMPARING} ${UI_GRAVITY_NAME}" + echo_stat + LOCAL_DB_MD5=$(md5sum ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} | sed 's/\s.*$//') + error_validate + + if [ -f ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} ]; then + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}; then + REMOTE_PH_CUSTOM_DNS="1" + MESSAGE="${UI_HASHING_REHASHING} ${UI_CUSTOM_NAME}" + echo_stat + + REMOTE_CL_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} | sed 's/\s.*$//'") + error_validate + + MESSAGE="${UI_HASHING_RECOMPARING} ${UI_CUSTOM_NAME}" + echo_stat + LOCAL_CL_MD5=$(md5sum ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} | sed 's/\s.*$//') + error_validate + else + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + else + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS}; then + REMOTE_PH_CUSTOM_DNS="1" + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_LOCAL}" + echo_warn + fi + + if [ -f ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} ]; then + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}; then + REMOTE_CNAME_DNS="1" + MESSAGE="${UI_HASHING_REHASHING} ${UI_CNAME_NAME}" + echo_stat + + REMOTE_CN_MD5=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} | sed 's/\s.*$//'") + error_validate + + 
MESSAGE="${UI_HASHING_RECOMPARING} ${UI_CNAME_NAME}" + echo_stat + LOCAL_CN_MD5=$(md5sum ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} | sed 's/\s.*$//') + error_validate + else + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + else + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF}; then + REMOTE_CNAME_DNS="1" + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_REMOTE}" + echo_warn + fi + + MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOT_DETECTED} ${UI_HASHING_LOCAL}" + echo_warn + fi +} + +## Determine SSH Pathways +function create_ssh_cmd { + timeout --preserve-status ${CMD_TIMEOUT} ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i ${GS_SSH_PKIF} -o StrictHostKeyChecking=no ${REMOTE_USER}@${REMOTE_HOST} "${CMD_REQUESTED}" + error_validate +} + +## Determine SSH Pathways +function create_rsync_cmd { + rsync --rsync-path="${RSYNC_REPATH}" -e "${OS_SSH_CMD} -p ${GS_SSH_PORT} -i ${GS_SSH_PKIF}" ${RSYNC_SOURCE} ${RSYNC_TARGET} >/dev/null 2>&1 + error_validate +} + +function generate_ssh_key { + if [ -z $INPUT_REMOTE_PASS ]; then + if [ -f ${GS_SSH_PKIF} ]; then + MESSAGE="Using existing SSH key" + echo_good_clean + else + if hash ssh-keygen >/dev/null 2>&1; then + MESSAGE="Generating new SSH key" + echo_stat + + ssh-keygen -q -P "" -t rsa -f ${OS_TMP}/gravity-sync.rsa >/dev/null 2>&1 + error_validate + + MESSAGE="Moving private key to ${GS_SSH_PKIF}" + sudo mv ${OS_TMP}/gravity-sync.rsa ${GS_SSH_PKIF} + error_validate + + MESSAGE="Moving public key to ${GS_SSH_PKIF}.pub" + sudo mv ${OS_TMP}/gravity-sync.rsa.pub ${GS_SSH_PKIF}.pub + error_validate + else + MESSAGE="No SSH-KEYGEN available" + echo_warn + exit_no_change + fi + fi + fi +} + +function export_ssh_key { + if [ -z $REMOTE_PASS ]; then + if [ -f ${GS_SSH_PKIF} ]; then + MESSAGE="Registering SSH key to ${REMOTE_HOST}" + echo_prompt + + ssh-copy-id -f -p ${GS_SSH_PORT} -i ${GS_SSH_PKIF}.pub ${REMOTE_USER}@${REMOTE_HOST} + else + MESSAGE="Error registering SSH key to ${REMOTE_HOST}" + echo_warn + fi + fi +} + +function show_target { + MESSAGE="Remote ${UI_CORE_APP}: ${REMOTE_USER}@${REMOTE_HOST}" + echo_info +} + +## Logs Task +function task_logs { + GS_TASK_TYPE='LOGS' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + logs_gs +} + +## Core Logging +### Write Logs Out +function logs_export { + MESSAGE="Saving updated data hashes" + echo_stat + sudo rm -f ${GS_ETC_PATH}/*.md5 + echo -e ${REMOTE_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + + if [ "${GS_PEERLESS_MODE}" != "1" ]; then + sudo rm -f ${OS_TMP}/*.md5 + echo -e ${LOCAL_DB_MD5} | sudo tee -a ${OS_TMP}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_DB_MD5} | sudo tee -a ${OS_TMP}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CL_MD5} | sudo tee -a ${OS_TMP}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CL_MD5} | sudo tee -a ${OS_TMP}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CN_MD5} | sudo tee -a 
${OS_TMP}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CN_MD5} | sudo tee -a ${OS_TMP}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + error_validate + + MESSAGE="Sending hashes to ${PROGRAM} peer" + echo_stat + + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${OS_TMP}/*.md5" + RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${GS_ETC_PATH}/" + create_rsync_cmd + + MESSAGE="Setting permissions on remote hashing files" + echo_stat + CMD_TIMEOUT=$GS_BACKUP_TIMEOUT + CMD_REQUESTED="sudo chmod 644 ${GS_ETC_PATH}/*.md5" + create_ssh_cmd + + sudo rm -f ${OS_TMP}/*.md5 + fi + + MESSAGE="Logging successful ${GS_TASK_TYPE}" + echo_stat + echo -e "$(date) [${GS_TASK_TYPE}]" | sudo tee -a ${GS_ETC_PATH}/${GS_SYNCING_LOG} 1> /dev/null + error_validate +} + +### Output Sync Logs +function logs_gs { + MESSAGE="Displaying output of previous jobs" + echo_info + + echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}SMART${NC}" + tail -n 7 "${GS_ETC_PATH}/${GS_SYNCING_LOG}" | grep SMART + echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}PULL${NC}" + tail -n 7 "${GS_ETC_PATH}/${GS_SYNCING_LOG}" | grep PULL + echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}PUSH${NC}" + tail -n 7 "${GS_ETC_PATH}/${GS_SYNCING_LOG}" | grep PUSH + + exit_no_change +} + +## Validate Pi-hole Folders +function validate_ph_folders { + MESSAGE="${UI_VALIDATING} ${UI_CORE_APP}" + echo_stat + + if [ "$LOCAL_PIHOLE_TYPE" == "default" ]; then + if [ ! -f ${LOCAL_PIHOLE_BINARY} ]; then + MESSAGE="${UI_VALIDATING_FAIL_BINARY} ${UI_CORE_APP}" + echo_fail + exit_no_change + fi + elif [ "$LOCAL_PIHOLE_TYPE" == "docker" ]; then + PH_FTL_CHECK=$(sudo docker container ls | grep "${PIHOLE_CONTAINER_IMAGE}") + if [ "$PH_FTL_CHECK" == "" ]; then + MESSAGE="${UI_VALIDATING_FAIL_CONTAINER} ${UI_CORE_APP}" + echo_fail + exit_no_change + fi + elif [ "$LOCAL_PIHOLE_TYPE" == "podman" ]; then + PH_FTL_CHECK=$(sudo podman container ls | grep "${PIHOLE_CONTAINER_IMAGE}") + if [ "$PH_FTL_CHECK" == "" ]; then + MESSAGE="${UI_VALIDATING_FAIL_CONTAINER} ${UI_CORE_APP}" + echo_fail + exit_no_change + fi + fi + + if [ ! 
-d ${LOCAL_PIHOLE_DIRECTORY} ]; then + MESSAGE="${UI_VALIDATING_FAIL_FOLDER} ${UI_CORE_APP}" + echo_fail + exit_no_change + fi + + echo_good +} + +function detect_local_pihole { + MESSAGE="Detecting local ${UI_CORE_APP} installation" + echo_stat + + if hash pihole 2>/dev/null; then + LOCAL_PIHOLE_TYPE="default" + echo_good + elif hash docker 2>/dev/null; then + PH_FTL_CHECK=$(sudo docker container ls | grep 'pihole/pihole') + if [ "$PH_FTL_CHECK" != "" ]; then + LOCAL_PIHOLE_TYPE="docker" + echo_good + else + LOCAL_PIHOLE_TYPE="none" + echo_fail + fi + elif hash podman 2>/dev/null; then + PH_FTL_CHECK=$(sudo podman container ls | grep 'pihole/pihole') + if [ "$PH_FTL_CHECK" != "" ]; then + LOCAL_PIHOLE_TYPE="podman" + echo_good + else + LOCAL_PIHOLE_TYPE="none" + echo_fail + fi + else + LOCAL_PIHOLE_TYPE="none" + echo_fail + fi +} + +function detect_remote_pihole { + MESSAGE="Detecting remote ${UI_CORE_APP} installation" + echo_stat + + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${REMOTE_PIHOLE_BINARY}; then + REMOTE_PIHOLE_TYPE="default" + echo_good + else + REMOTE_DETECT_DOCKER=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo docker container ls | grep 'pihole/pihole'" 2>/dev/null) + REMOTE_DETECT_PODMAN=$(${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo podman container ls | grep 'pihole/pihole'" 2>/dev/null) + + if [ "${REMOTE_DETECT_DOCKER}" != "" ]; then + REMOTE_PIHOLE_TYPE="docker" + echo_good + elif [ "${REMOTE_DETECT_PODMAN}" != "" ]; then + REMOTE_PIHOLE_TYPE="podman" + echo_good + else + REMOTE_PIHOLE_TYPE="none" + echo_fail + fi + fi +} + +function detect_gs_peer { + MESSAGE="Checking on peer" + echo_stat + + if ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${GS_ETC_PATH}/${GS_CONFIG_FILE}; then + MESSAGE="${PROGRAM} remote peer is configured" + echo_good + else + GS_PEERLESS_MODE="1" + MESSAGE="${PROGRAM} falling back to peerless mode" + echo_good + + MESSAGE="Please configure ${PROGRAM} on remote host" + echo_warn + fi +} + +## Validate DNSMASQ Folders +function validate_dns_folders { + MESSAGE="${UI_VALIDATING} ${UI_CORE_APP_DNS}" + echo_stat + + if [ ! 
-d ${LOCAL_DNSMASQ_DIRECTORY} ]; then + MESSAGE="${UI_VALIDATING_FAIL_FOLDER} ${UI_CORE_APP_DNS}" + echo_fail + exit_no_change + fi + echo_good +} + +## Validate Domain Database Permissions +function validate_gravity_permissions { + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_GRAVITY_NAME}" + echo_stat + sudo chown ${LOCAL_FILE_OWNER} ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} >/dev/null 2>&1 + error_validate + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_GRAVITY_NAME}" + echo_stat + sudo chmod 664 ${LOCAL_PIHOLE_DIRECTORY}/${PH_GRAVITY_FI} >/dev/null 2>&1 + error_validate +} + +## Validate Local DNS Records Permissions +function validate_custom_permissions { + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_CUSTOM_NAME}" + echo_stat + sudo chown root:root ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} >/dev/null 2>&1 + error_validate + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CUSTOM_NAME}" + echo_stat + sudo chmod 644 ${LOCAL_PIHOLE_DIRECTORY}/${PH_CUSTOM_DNS} >/dev/null 2>&1 + error_validate +} + +## Validate Local DNS CNAME Permissions +function validate_cname_permissions { + MESSAGE="${UI_SET_LOCAL_FILE_OWNERSHIP} ${UI_CNAME_NAME}" + echo_stat + sudo chown root:root ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} >/dev/null 2>&1 + error_validate + + MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CNAME_NAME}" + echo_stat + sudo chmod 644 ${LOCAL_DNSMASQ_DIRECTORY}/${PH_CNAME_CONF} >/dev/null 2>&1 + error_validate +} + +## Validate Intent +function intent_validate { + PHASER=$((( RANDOM % 4 ) + 1 )) + if [ "$PHASER" = "1" ]; then + INTENT="FIRE PHOTON TORPEDOES" + elif [ "$PHASER" = "2" ]; then + INTENT="FIRE ALL PHASERS" + elif [ "$PHASER" = "3" ]; then + INTENT="EJECT THE WARP CORE" + elif [ "$PHASER" = "4" ]; then + INTENT="ENGAGE TRACTOR BEAM" + fi + + MESSAGE="Type ${INTENT} to confirm" + echo_need + + read -r INPUT_INTENT + + if [ "${INPUT_INTENT}" != "${INTENT}" ]; then + MESSAGE="${GS_TASK_TYPE} excited" + echo_info + exit_no_change + fi +} + +## Sudo Creation Task +function task_sudo { + GS_TASK_TYPE='SUDO' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + MESSAGE="Creating sudoers.d template file" + echo_stat + + NEW_SUDO_USER=$(whoami) + echo -e "${NEW_SUDO_USER} ALL=(ALL) NOPASSWD: ALL" | sudo tee ${GS_LOCAL_REPO}/templates/gs-nopasswd.sudo 1> /dev/null + error_validate + + MESSAGE="Installing sudoers.d file on $HOSTNAME" + echo_stat + + sudo install -m 0440 ${GS_LOCAL_REPO}/templates/gs-nopasswd.sudo /etc/sudoers.d/gs-nopasswd + error_validate + + exit_with_changes +} + +function validate_sudo_status { + OS_CURRENT_USER=$(whoami) + if [ ! 
"$EUID" -ne 0 ]; then + OS_LOCAL_ADMIN="" + else + OS_SUDO_CHECK=$(groups ${OS_CURRENT_USER} | grep -e 'sudo' -e 'wheel') + if [ "$OS_SUDO_CHECK" == "" ]; then + OS_LOCAL_ADMIN="nosudo" + else + OS_LOCAL_ADMIN="sudo" + fi + fi + + if [ "$OS_LOCAL_ADMIN" == "nosudo" ]; then + GS_TASK_TYPE='ROOT' + MESSAGE="${MESSAGE} ${GS_TASK_TYPE}" + echo_fail + + MESSAGE="${OS_CURRENT_USER} has insufficient user rights for ${PROGRAM}" + echo_warn + + exit_no_change + fi +} + +## Configure Task +function task_configure { + GS_TASK_TYPE='CONFIGURE' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + if [ -f ${GS_ETC_PATH}/${GS_CONFIG_FILE} ]; then + config_delete + else + config_generate + fi + + exit_with_changes +} + +## Generate New Configuration +function config_generate { + MESSAGE="Creating new ${GS_CONFIG_FILE}" + echo_stat + sudo cp ${GS_LOCAL_REPO}/templates/${GS_CONFIG_FILE}.example ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + echo_blank + echo -e " Welcome to the ${PURPLE}${PROGRAM}${NC} Configuration Wizard" + echo -e " Please read through ${BLUE}https://github.com/vmstan/gravity-sync/wiki${NC} before you continue" + echo -e " Make sure that ${UI_CORE_APP} is running on this system before your configure ${PROGRAM}" + echo_blank + + MESSAGE="${PROGRAM} Remote Host Settings" + echo_info + + MESSAGE="Remote ${UI_CORE_APP} host address" + echo_prompt + + MESSAGE="IP" + echo_need + read -r INPUT_REMOTE_HOST + + MESSAGE="${UI_CONFIG_SAVING} ${INPUT_REMOTE_HOST} host to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/REMOTE_HOST=''/c\REMOTE_HOST='${INPUT_REMOTE_HOST}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="Remote ${UI_CORE_APP} host username" + echo_prompt + + MESSAGE="User" + echo_need + read -r INPUT_REMOTE_USER + + MESSAGE="${UI_CONFIG_SAVING} ${INPUT_REMOTE_USER}@${INPUT_REMOTE_HOST} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/REMOTE_USER=''/c\REMOTE_USER='${INPUT_REMOTE_USER}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="${PROGRAM} SSH Key Settings" + echo_info + + generate_ssh_key + + MESSAGE="${UI_CORE_LOADING} ${GS_CONFIG_FILE}" + echo_stat + # shellcheck source=/etc/gravity-sync/gravity-sync.conf + source ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + export_ssh_key + + MESSAGE="SSH key registered to ${INPUT_REMOTE_HOST}" + echo_good_clean + + MESSAGE="${UI_CORE_APP} Installation Settings" + echo_info + + detect_local_pihole + if [ "${LOCAL_PIHOLE_TYPE}" == "default" ]; then + MESSAGE="Default install of ${UI_CORE_APP} detected" + echo_good_clean + elif [ "${LOCAL_PIHOLE_TYPE}" == "docker" ]; then + MESSAGE="Docker container ${UI_CORE_APP} install detected" + echo_good_clean + elif [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + MESSAGE="Podman container ${UI_CORE_APP} install detected" + echo_good_clean + elif [ "${LOCAL_PIHOLE_TYPE}" == "none" ]; then + MESSAGE="No local ${UI_CORE_APP} installed detected" + echo_warn + end_config_no_pi + fi + + detect_remote_pihole + if [ "${REMOTE_PIHOLE_TYPE}" == "default" ]; then + MESSAGE="Remote install of ${UI_CORE_APP} detected" + echo_good_clean + elif [ "${REMOTE_PIHOLE_TYPE}" == "docker" ]; then + MESSAGE="Remote Docker container of ${UI_CORE_APP} detected" + echo_good_clean + elif [ "${REMOTE_PIHOLE_TYPE}" == "podman" ]; then + MESSAGE="Remote Podman container of ${UI_CORE_APP} detected" + echo_good_clean + elif [ "${LOCAL_PIHOLE_TYPE}" == "none" ]; then + MESSAGE="No remote ${UI_CORE_APP} installed detected" + echo_warn + end_config_no_pi + fi + + if [ "${LOCAL_PIHOLE_TYPE}" == 
"default" ] && [ "${REMOTE_PIHOLE_TYPE}" == "default" ]; then + end_config + else + advanced_config_generate + fi +} + +function end_config { + echo_blank + echo -e " Configuration has been completed successfully, once ${PROGRAM} has been installed your other" + echo -e " node, your next step is to push all of the of data from the currently authoritative" + echo -e " ${UI_CORE_APP} instance to the other." + echo -e " ex: ${YELLOW}gravity-sync push${NC}" + echo_blank + echo -e " If that completes successfully you can automate future sync jobs to run at a regular interval on" + echo -e " both of your ${PROGRAM} peers." + echo -e " ex: ${YELLOW}gravity-sync auto${NC}" + echo_blank +} + +function end_config_no_pi { + echo_blank + echo -e " Configuration could not be completed successfully, as no instances of ${UI_CORE_APP} could be detected" + echo -e " on one or more of your systems. Please make sure they are running on both peers and try again." + echo_blank +} + +## Advanced Configuration Options +function advanced_config_generate { + if [ "${LOCAL_PIHOLE_TYPE}" == "docker" ] || [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + MESSAGE="Local Container Image Configuration" + echo_info + + MESSAGE="Displaying running containers on $HOSTNAME" + echo_good_clean + + if [ "${LOCAL_PIHOLE_TYPE}" == "docker" ]; then + sudo docker ps -a --format 'table {{.ID}}\t{{.Names}}' + elif [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + sudo podman container ls + fi + + MESSAGE="Enter local ${UI_CORE_APP} container name" + echo_prompt + MESSAGE="ex, 'pihole'" + echo_need + read -r INPUT_LOCAL_DOCKER_CONTAINER + + MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CONFIG_CONTAINER_NAME} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# LOCAL_DOCKER_CONTAINER=''/c\LOCAL_DOCKER_CONTAINER='${INPUT_LOCAL_DOCKER_CONTAINER}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="Examining local container configuration" + echo_stat + + if [ "${LOCAL_PIHOLE_TYPE}" == "docker" ]; then + sudo docker container inspect ${INPUT_LOCAL_DOCKER_CONTAINER} | grep -i -B 1 '"Destination": "/etc/pihole"' > ${OS_TMP}/local_container_pihole_etc.log; sed -i '$d' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/",//' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/ //g' ${OS_TMP}/local_container_pihole_etc.log + sudo docker container inspect ${INPUT_LOCAL_DOCKER_CONTAINER} | grep -i -B 1 '"Destination": "/etc/dnsmasq.d"' > ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i '$d' ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/",//' ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/ //g' ${OS_TMP}/local_container_dnsmasq_etc.log + elif [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + sudo podman container inspect ${INPUT_LOCAL_DOCKER_CONTAINER} | grep -i -B 1 '"Destination": "/etc/pihole"' > ${OS_TMP}/local_container_pihole_etc.log; sed -i '$d' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/",//' ${OS_TMP}/local_container_pihole_etc.log; sed -i 's/ //g' ${OS_TMP}/local_container_pihole_etc.log + sudo podman container inspect ${INPUT_LOCAL_DOCKER_CONTAINER} | grep -i -B 1 '"Destination": "/etc/dnsmasq.d"' > ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i '$d' ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/",//' 
${OS_TMP}/local_container_dnsmasq_etc.log; sed -i 's/ //g' ${OS_TMP}/local_container_dnsmasq_etc.log + fi + + INPUT_LOCAL_PIHOLE_DIRECTORY=$(cat ${OS_TMP}/local_container_pihole_etc.log) + INPUT_LOCAL_DNSMASQ_DIRECTORY=$(cat ${OS_TMP}/local_container_dnsmasq_etc.log) + + echo_good + + MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLUME_PATH} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# LOCAL_PIHOLE_DIRECTORY=''/c\LOCAL_PIHOLE_DIRECTORY='${INPUT_LOCAL_PIHOLE_DIRECTORY}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLUME_PATH} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# LOCAL_DNSMASQ_DIRECTORY=''/c\LOCAL_DNSMASQ_DIRECTORY='${INPUT_LOCAL_DNSMASQ_DIRECTORY}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CONFIG_VOLUME_OWNER} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# LOCAL_FILE_OWNER=''/c\LOCAL_FILE_OWNER='999:999'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + fi + + if [ "${REMOTE_PIHOLE_TYPE}" == "docker" ] || [ "${REMOTE_PIHOLE_TYPE}" == "podman" ]; then + MESSAGE="Remote Container Image Configuration" + echo_info + + MESSAGE="Querying running containers on ${REMOTE_HOST}" + echo_stat + if [ "${REMOTE_PIHOLE_TYPE}" == "docker" ]; then + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo docker ps -a --format 'table {{.Image}}\t{{.Names}}' > /tmp/gs_local_container.log" + error_validate + elif [ "${REMOTE_PIHOLE_TYPE}" == "podman" ]; then + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo podman container ls > /tmp/gs_local_container.log" + error_validate + fi + + MESSAGE="Retrieving container list from ${REMOTE_HOST}" + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${OS_TMP}/gs_local_container.log" + RSYNC_TARGET="${OS_TMP}/gs_remote_container.log" + create_rsync_cmd + + MESSAGE="Displaying running containers on ${REMOTE_HOST}" + echo_good_clean + + cat ${OS_TMP}/gs_remote_container.log + + MESSAGE="Enter remote ${UI_CORE_APP} container name" + echo_prompt + MESSAGE="ex, 'pihole'" + echo_need + read -r INPUT_REMOTE_DOCKER_CONTAINER + + MESSAGE="${UI_CONFIG_SAVING} remote host ${UI_CONFIG_CONTAINER_NAME} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# REMOTE_DOCKER_CONTAINER=''/c\REMOTE_DOCKER_CONTAINER='${INPUT_REMOTE_DOCKER_CONTAINER}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="Examining remote container configuration" + echo_stat + + if [ "${REMOTE_PIHOLE_TYPE}" == "docker" ]; then + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo docker container inspect ${INPUT_REMOTE_DOCKER_CONTAINER} | grep -i -B 1 '\"Destination\": \"/etc/pihole\"' > ${OS_TMP}/local_container_pihole_etc.log" + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo docker container inspect ${INPUT_REMOTE_DOCKER_CONTAINER} | grep -i -B 1 '\"Destination\": \"/etc/dnsmasq.d\"' > ${OS_TMP}/local_container_dnsmasq_etc.log" + error_validate + elif [ "${REMOTE_PIHOLE_TYPE}" == "podman" ]; then + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sudo podman container inspect ${INPUT_REMOTE_DOCKER_CONTAINER} | grep -i -B 1 '\"Destination\": \"/etc/pihole\"' > ${OS_TMP}/local_container_pihole_etc.log" + ${OS_SSH_CMD} -p ${GS_SSH_PORT} -i "${GS_SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} 
"sudo podman container inspect ${INPUT_REMOTE_DOCKER_CONTAINER} | grep -i -B 1 '\"Destination\": \"/etc/dnsmasq.d\"' > ${OS_TMP}/local_container_dnsmasq_etc.log" + error_validate + fi + + MESSAGE="Retrieving remote ${UI_CORE_APP} configuration settings" + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${OS_TMP}/local_container_pihole_etc.log" + RSYNC_TARGET="${OS_TMP}/remote_container_pihole_etc.log" + create_rsync_cmd + + MESSAGE="Retrieving remote ${UI_CORE_APP_DNS} configuration settings" + RSYNC_REPATH="sudo rsync" + RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${OS_TMP}/local_container_dnsmasq_etc.log" + RSYNC_TARGET="${OS_TMP}/remote_container_dnsmasq_etc.log" + create_rsync_cmd + + sed -i '$d' ${OS_TMP}/remote_container_pihole_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/remote_container_pihole_etc.log; sed -i 's/",//' ${OS_TMP}/remote_container_pihole_etc.log; sed -i 's/ //g' ${OS_TMP}/remote_container_pihole_etc.log + sed -i '$d' ${OS_TMP}/remote_container_dnsmasq_etc.log; sed -i 's/"Source": "//' ${OS_TMP}/remote_container_dnsmasq_etc.log; sed -i 's/",//' ${OS_TMP}/remote_container_dnsmasq_etc.log; sed -i 's/ //g' ${OS_TMP}/remote_container_dnsmasq_etc.log + + INPUT_REMOTE_PIHOLE_DIRECTORY=$(cat ${OS_TMP}/remote_container_pihole_etc.log) + INPUT_REMOTE_DNSMASQ_DIRECTORY=$(cat ${OS_TMP}/remote_container_dnsmasq_etc.log) + + echo_good + + MESSAGE="${UI_CONFIG_SAVING} remote host ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLUME_PATH} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# REMOTE_PIHOLE_DIRECTORY=''/c\REMOTE_PIHOLE_DIRECTORY='${INPUT_REMOTE_PIHOLE_DIRECTORY}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="${UI_CONFIG_SAVING} remote host ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLUME_PATH} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# REMOTE_DNSMASQ_DIRECTORY=''/c\REMOTE_DNSMASQ_DIRECTORY='${INPUT_REMOTE_DNSMASQ_DIRECTORY}'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + + MESSAGE="${UI_CONFIG_SAVING} remote host ${UI_CONFIG_VOLUME_OWNER} to ${GS_CONFIG_FILE}" + echo_stat + sudo sed -i "/# REMOTE_FILE_OWNER=''/c\REMOTE_FILE_OWNER='999:999'" ${GS_ETC_PATH}/${GS_CONFIG_FILE} + error_validate + fi + + end_config +} + +## Delete Existing Configuration +function config_delete { + # shellcheck source=/etc/gravity-sync/gravity-sync.conf + source ${GS_ETC_PATH}/${GS_CONFIG_FILE} + MESSAGE="${GS_CONFIG_FILE} ${UI_CONFIG_ALREADY}" + echo_warn + + MESSAGE="${UI_CONFIG_CONFIRM}" + echo_prompt + + intent_validate + + MESSAGE="${UI_CONFIG_ERASING} ${GS_CONFIG_FILE}" + echo_stat + sudo mv ${GS_ETC_PATH}/${GS_CONFIG_FILE} ${GS_ETC_PATH}/${GS_CONFIG_FILE}.${GS_BACKUP_EXT} + error_validate + + config_generate +} + +## Master Branch +function update_gs { + bash ${GS_LOCAL_REPO}/update.sh +} + +## Show Version +function show_version { + if [ -f ${GS_LOCAL_REPO}/dev ]; then + GS_DEV_VERSION="-dev" + else + GS_DEV_VERSION="" + fi + + MESSAGE="Running version: ${GREEN}${GS_VERSION}${NC}${GS_DEV_VERSION}" + echo_info + + GS_GIT_VERSION=$(curl -sf https://raw.githubusercontent.com/vmstan/gravity-sync/master/VERSION) + if [ -z "$GS_GIT_VERSION" ]; then + MESSAGE="Latest version: ${RED}Unknown${NC}" + else + if [ "$GS_GIT_VERSION" != "GS_VERSION" ]; then + MESSAGE="Update available: ${RED}${GS_GIT_VERSION}${NC}" + else + MESSAGE="Latest version: ${GREEN}${GS_GIT_VERSION}${NC}" + fi + fi + echo_info +} + +function show_info { + echo -e "${YELLOW}Local Software Versions${NC}" + echo -e "${BLUE}${UI_CORE_APP}${NC}" + if [ "${LOCAL_PIHOLE_TYPE}" == "default" ]; then + 
pihole version + elif [ "${LOCAL_PIHOLE_TYPE}" == "docker" ]; then + sudo docker exec -it pihole pihole -v + elif [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + sudo podman exec -it pihole pihole -v + fi + + if [ -f /etc/os-release ]; then + . /etc/os-release + OS_OS=$NAME + OS_VER=$VERSION_ID + echo -e "${BLUE}${OS_OS} ${OS_VER}${NC}" + fi + + uname -srm + echo -e "bash $BASH_VERSION" + ssh -V + rsync --version | grep version + sudo --version | grep "Sudo version" + git --version + + if hash docker 2>/dev/null; then + docker --version + fi + + if hash podman 2>/dev/null; then + podman --version + fi + + echo -e "" + + echo -e "${YELLOW}Global Instance Settings${NC}" + if [ ${GS_SSH_PORT} == '22' ]; then + echo -e "SSH Port: 22 (default)" + else + echo -e "SSH Port: ${GS_SSH_PORT} (custom)" + fi + + echo -e "SSH Key: ${GS_SSH_PKIF}" + + if systemctl is-active --quiet gravity-sync.timer; then + echo -e "Automated Replication: Enabled" + else + echo -e "Automated Replication: Disabled" + fi + + echo -e "" + + echo -e "${YELLOW}Local Instance Settings${NC}" + echo -e "Local Hostname: $HOSTNAME" + echo -e "Local ${UI_CORE_APP} Type: ${LOCAL_PIHOLE_TYPE}" + echo -e "Local ${UI_CORE_APP} Config Directory: ${LOCAL_PIHOLE_DIRECTORY}" + echo -e "Local ${UI_CORE_APP_DNS} Config Directory: ${LOCAL_DNSMASQ_DIRECTORY}" + echo -e "Local ${PROGRAM} Binary: ${GS_FILEPATH}" + echo -e "Local ${PROGRAM} Config Directory: ${GS_ETC_PATH}" + + if [ "${LOCAL_PIHOLE_TYPE}" == "default" ]; then + echo -e "Local ${UI_CORE_APP} Binary Directory: ${LOCAL_PIHOLE_BINARY}" + elif [ "${LOCAL_PIHOLE_TYPE}" == "docker" ]; then + echo -e "Local ${UI_CORE_APP} Container Name: ${LOCAL_DOCKER_CONTAINER}" + echo -e "Local Docker Binary Directory: ${LOCAL_DOCKER_BINARY}" + elif [ "${LOCAL_PIHOLE_TYPE}" == "podman" ]; then + echo -e "Local ${UI_CORE_APP} Container Name: ${LOCAL_DOCKER_CONTAINER}" + echo -e "Local Podman Binary Directory: ${LOCAL_PODMAN_BINARY}" + fi + + echo -e "Local File Owner Settings: ${LOCAL_FILE_OWNER}" + + echo -e "" + echo -e "${YELLOW}Remote Instance Settings${NC}" + echo -e "Remote Hostname/IP: ${REMOTE_HOST}" + echo -e "Remote Username: ${REMOTE_USER}" + echo -e "Remote ${UI_CORE_APP} Type: ${REMOTE_PIHOLE_TYPE}" + echo -e "Remote ${UI_CORE_APP} Config Directory: ${REMOTE_PIHOLE_DIRECTORY}" + echo -e "Remote ${UI_CORE_APP_DNS} Config Directory: ${REMOTE_DNSMASQ_DIRECTORY}" + + if [ "${REMOTE_PIHOLE_TYPE}" == "default" ]; then + echo -e "Remote ${UI_CORE_APP} Binary Directory: ${REMOTE_PIHOLE_BINARY}" + elif [ "${REMOTE_PIHOLE_TYPE}" == "docker" ]; then + echo -e "Remote ${UI_CORE_APP} Container Name: ${REMOTE_DOCKER_CONTAINER}" + echo -e "Remote Docker Binary Directory: ${REMOTE_DOCKER_BINARY}" + elif [ "${REMOTE_PIHOLE_TYPE}" == "podman" ]; then + echo -e "Remote ${UI_CORE_APP} Container Name: ${REMOTE_DOCKER_CONTAINER}" + echo -e "Remote Podman Binary Directory: ${REMOTE_PODMAN_BINARY}" + fi + + echo -e "Remote File Owner Settings: ${REMOTE_FILE_OWNER}" +} + +## Dev Task +function task_dev { + GS_TASK_TYPE='DEV' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + if [ -f ${GS_LOCAL_REPO}/dev ]; then + MESSAGE="Disabling ${GS_TASK_TYPE}" + echo_stat + sudo rm -f ${GS_LOCAL_REPO}/dev + error_validate + else + MESSAGE="Enabling ${GS_TASK_TYPE}" + echo_stat + touch ${GS_LOCAL_REPO}/dev + error_validate + + MESSAGE="Checking available branches" + echo_stat + (cd ${GS_LOCAL_REPO} || exit; git fetch --all >/dev/null 2>&1) + error_validate + + (cd ${GS_LOCAL_REPO} || exit; git branch -r) + + 
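# Sketch only: show_info above shells into a container literally named 'pihole' to read the
# version, while the rest of the script takes the name from LOCAL_DOCKER_CONTAINER. Reusing
# the configured value keeps the info output correct for renamed containers; -t is not
# needed just to capture a version string, and podman takes the same form.
sudo docker exec "${LOCAL_DOCKER_CONTAINER}" pihole -v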
MESSAGE="Select GitHub branch to update against" + echo_need + read -r INPUT_BRANCH + + echo -e "BRANCH='${INPUT_BRANCH}'" | sudo tee ${GS_LOCAL_REPO}/dev 1> /dev/null + fi + + update_gs + exit_with_changes +} + +## Update Task +function task_update { + GS_TASK_TYPE='UPDATE' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + update_gs + exit_with_changes +} + +## Version Task +function task_version { + GS_TASK_TYPE='VERSION' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + show_version + exit_no_change +} + +## Info Task + +function task_info { + GS_TASK_TYPE='INFO' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + show_info + exit_no_change +} + +## Automate Task +function task_automate { + GS_TASK_TYPE='AUTOMATE' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + MESSAGE="Customizing service file username" + OS_CURRENT_USER=$(whoami) + sudo sed -i "/User=unknown/c\User=${OS_CURRENT_USER}" ${GS_LOCAL_REPO}/templates/gravity-sync.service + error_validate + + MESSAGE="Customizing service file executable path" + sudo sed -i "/ExecStart=/c\ExecStart=${GS_FILEPATH}" ${GS_LOCAL_REPO}/templates/gravity-sync.service + error_validate + + MESSAGE="Randomizing service timers" + ACTIVE_REP=$((( RANDOM % 4 ) + 1 )) + RANDOM_REP=$((( RANDOM % 8 ) + 2 )) + sudo sed -i "/OnUnitInactiveSec=5m/c\OnUnitInactiveSec=${ACTIVE_REP}m" ${GS_LOCAL_REPO}/templates/gravity-sync.timer + sudo sed -i "/RandomizedDelaySec=5m/c\RandomizedDelaySec=${RANDOM_REP}m" ${GS_LOCAL_REPO}/templates/gravity-sync.timer + error_validate + + if systemctl is-active --quiet gravity-sync; then + MESSAGE="Stopping existing systemd service" + sudo systemctl stop gravity-sync + error_validate + fi + + MESSAGE="Moving systemd timer into place" + sudo cp ${GS_LOCAL_REPO}/templates/gravity-sync.timer ${OS_DAEMON_PATH} + error_validate + + MESSAGE="Moving systemd service into place" + sudo cp ${GS_LOCAL_REPO}/templates/gravity-sync.service ${OS_DAEMON_PATH} + error_validate + + MESSAGE="Reloading systemd daemon" + sudo systemctl daemon-reload --quiet + error_validate + + MESSAGE="Enabling ${PROGRAM} timer" + sudo systemctl enable gravity-sync.timer --quiet + error_validate + + MESSAGE="Starting ${PROGRAM} service" + sudo systemctl start gravity-sync --quiet + error_validate + + exit_with_changes +} + +function task_disable_automate { + GS_TASK_TYPE='DISABLE' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + kill_automation_service + exit_with_changes +} + +function task_monitor { + GS_TASK_TYPE='MONITOR' + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + MESSAGE="Press CTRL+Z to exit MONITOR mode" + echo_warn + + sudo journalctl -fu gravity-sync +} + +function kill_automation_service { + if systemctl is-active --quiet gravity-sync; then + MESSAGE="Stopping ${PROGRAM} timer" + echo_stat + sudo systemctl stop gravity-sync + error_validate + fi + + MESSAGE="Disabling ${PROGRAM} automation service" + echo_stat + sudo systemctl disable gravity-sync --quiet + error_validate + + MESSAGE="Removing systemd timer" + echo_stat + sudo rm -f ${OS_DAEMON_PATH}/gravity-sync.timer + error_validate + + MESSAGE="Removing systemd service" + echo_stat + sudo rm -f ${OS_DAEMON_PATH}/gravity-sync.service + error_validate + + MESSAGE="Reloading systemd daemon" + echo_stat + sudo systemctl daemon-reload --quiet + error_validate +} + +## Purge Task +function task_purge { + GS_TASK_TYPE="PURGE" + MESSAGE="${MESSAGE}: ${GS_TASK_TYPE}" + echo_good + + echo_blank + echo -e " THIS WILL REMOVE YOUR ENTIRE GRAVITY SYNC INSTALLATION" + echo -e " 
${UI_CORE_APP} binaries, configuration and services ARE NOT impacted!" + echo -e " Your devices will continue to resolve and block DNS requests," + echo -e " but your ${UI_GRAVITY_NAME} and ${UI_CUSTOM_NAME} WILL NOT sync anymore," + echo -e " until you reconfigure ${PROGRAM} on this device." + echo_blank + echo -e " In order to fully remove ${PROGRAM} from your infrastructure, you will also" + echo -e " need to run this same command from the peer instance as well." + echo_blank + + intent_validate + + kill_automation_service + + MESSAGE="Removing ${PROGRAM} backup files" + echo_stat + sudo rm -f ${OS_TMP}/*.${GS_BACKUP_EXT} + error_validate + + MESSAGE="Removing ${PROGRAM} configuration and logs" + echo_stat + sudo rm -fr ${GS_ETC_PATH} + error_validate + + MESSAGE="Removing ${PROGRAM} binary" + echo_stat + sudo rm ${GS_FILEPATH} + error_validate + + exit_with_changes +} + +## No Changes Made +function exit_no_change { + GS_RUN_END=$SECONDS + ((GS_RUNTIME=GS_RUN_END-GS_RUN_START)) + + if [ "${GS_TASK_TYPE}" == "" ]; then + MESSAGE="${PROGRAM} ${UI_EXIT_ABORT} ${UI_EXIT_CALC_END} ${GS_RUNTIME} ${UI_EXIT_CALC_TIMER}" + else + MESSAGE="${PROGRAM} ${GS_TASK_TYPE} ${UI_EXIT_ABORT} ${UI_EXIT_CALC_END} ${GS_RUNTIME} ${UI_EXIT_CALC_TIMER}" + fi + + echo_grav + exit 0 +} + +## Changes Made +function exit_with_changes { + GS_RUN_END=$SECONDS + ((GS_RUNTIME=GS_RUN_END-GS_RUN_START)) + + if [ "${GS_TASK_TYPE}" == "" ]; then + MESSAGE="${PROGRAM} ${UI_EXIT_COMPLETE} ${UI_EXIT_CALC_END} ${GS_RUNTIME} ${UI_EXIT_CALC_TIMER}" + else + MESSAGE="${PROGRAM} ${GS_TASK_TYPE} ${UI_EXIT_COMPLETE} ${UI_EXIT_CALC_END} ${GS_RUNTIME} ${UI_EXIT_CALC_TIMER}" + fi + + echo_grav + exit 0 +} + +## List GS Arguments +function list_gs_arguments { + echo -e "Usage: $0 [options]" + echo -e "Example: '$0 pull'" + echo_blank + echo -e "Setup Options:" + echo -e " ${YELLOW}config${NC} Creates a new ${PROGRAM} configuration file" + echo -e " ${YELLOW}version${NC} Lists the installed version of ${PROGRAM} and checks for updates" + echo -e " ${YELLOW}upgrade${NC} Upgrades ${PROGRAM} to the latest available version on GitHub" + echo -e " ${YELLOW}dev${NC} Sets upgrade command to use a development version of ${PROGRAM} (toggle on/off)" + echo -e " ${YELLOW}sudo${NC} Enables password-less sudo for current user" + echo -e " ${YELLOW}purge${NC} Uninstalls ${PROGRAM} from this system" + echo_blank + echo -e "Replication Options:" + echo -e " ${YELLOW}smart${NC} Reviews all ${UI_CORE_APP} changes syncs them (default)" + echo -e " ${YELLOW}pull${NC} Syncs only the remote ${UI_CORE_APP} configuration to this server" + echo -e " ${YELLOW}push${NC} Syncs only the local ${UI_CORE_APP} configuration to the remote" + echo -e " ${YELLOW}compare${NC} Checks for ${UI_CORE_APP} differences without making changes" + echo_blank + echo -e "Automation Options:" + echo -e " ${YELLOW}auto${NC} Schedules ${PROGRAM} replication tasks using systemd timers" + echo -e " ${YELLOW}monitor${NC} Monitors the ${PROGRAM} replication job in real time" + echo -e " ${YELLOW}disable${NC} Disables the ${PROGRAM} automated replication task" + echo_blank + echo -e "Debug Options:" + echo -e " ${YELLOW}logs${NC} Shows the recent successful replication jobs/times" + echo -e " ${YELLOW}info${NC} Shows information about the current configuration" + echo_blank + exit_no_change +} + +# SCRIPT EXECUTION ########################### + +case $# in + 0) + start_gs + task_smart ;; + 1) + case $1 in + smart|sync) + start_gs + task_smart ;; + pull) + start_gs + task_pull ;; + push) + 
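# Minimal sketch of the runtime bookkeeping used by the exit functions above: bash's
# builtin $SECONDS counts seconds since the shell started, so sampling it at launch
# (GS_RUN_START at the top of the script) and again at exit gives the elapsed run time
# without calling date(1).
GS_RUN_START=$SECONDS
sleep 2                                  # stand-in for the replication work
GS_RUN_END=$SECONDS
((GS_RUNTIME = GS_RUN_END - GS_RUN_START))
echo "completed in ${GS_RUNTIME} seconds"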
start_gs + task_push ;; + version) + start_gs_no_config + task_version ;; + update|upgrade) + start_gs_no_config + task_update ;; + dev|development|develop) + start_gs_no_config + task_dev ;; + logs|log) + start_gs_no_config + task_logs ;; + compare) + start_gs + task_compare ;; + config|configure) + start_gs_no_config + task_configure ;; + auto|automate) + start_gs + task_automate ;; + disable) + start_gs_no_config + task_disable_automate ;; + monitor|follow) + start_gs_no_config + task_monitor ;; + purge|uninstall|remove) + start_gs_no_config + task_purge ;; + sudo) + start_gs_no_config + task_sudo ;; + info) + start_gs + task_info ;; + *) + start_gs_no_config + task_invalid ;; + esac + ;; + + *) + start_gs_no_config + task_invalid ;; +esac + +# END OF SCRIPT ############################## diff --git a/gravity-sync.sh b/gravity-sync.sh index de9f9f4..6f78b81 100755 --- a/gravity-sync.sh +++ b/gravity-sync.sh @@ -1,198 +1,430 @@ -#!/bin/bash -SCRIPT_START=$SECONDS +#!/usr/bin/env bash # GRAVITY SYNC BY VMSTAN ##################### +# GS 3.x to 4.0 Upgrade Utility ############## + +# Run only to upgrade your existing Gravity Sync 3.x installation to 4.0 format PROGRAM='Gravity Sync' -VERSION='3.7.0' -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# Requires Pi-Hole 5.x or higher already be installed, for help visit https://pi-hole.net +GS_FILEPATH=$(realpath $0) +LOCAL_FOLDR=$(dirname $GS_FILEPATH) -# REQUIRED SETTINGS ########################## +GS_ETC_PATH="/etc/gravity-sync" +GS_GRAVITY_FI_MD5_LOG='gs-gravity.md5' +GS_CUSTOM_DNS_MD5_LOG='gs-clist.md5' +GS_CNAME_CONF_MD5_LOG='gs-cname.md5' -# Run './gravity-sync.sh config' to get started, it will customize the script for your environment -# You should not to change the values of any variables here here to customize your install -# Add replacement variables to gravity-sync.conf, which will overwrite these defaults. 
+OS_DAEMON_PATH='/etc/systemd/system' -# STANDARD VARIABLES ######################### +## Script Colors +RED='\033[0;91m' +GREEN='\033[0;92m' +CYAN='\033[0;96m' +YELLOW='\033[0;93m' +PURPLE='\033[0;95m' +BLUE='\033[0;94m' +BOLD='\033[1m' +NC='\033[0m' -# Installation Types -PH_IN_TYPE='default' # Pi-hole install type, `default`, `docker`, or `podman` (local) -RH_IN_TYPE='default' # Pi-hole install type, `default`, `docker`, or `podman` (remote) +## Message Codes +FAIL="${RED}✗${NC}" +WARN="${PURPLE}!${NC}" +GOOD="${GREEN}✓${NC}" +STAT="${CYAN}∞${NC}" +INFO="${YELLOW}»${NC}" +INF1="${CYAN}›${NC}" +NEED="${BLUE}?${NC}" +LOGO="${PURPLE}∞${NC}" -# Pi-hole Folder/File Customization -PIHOLE_DIR='/etc/pihole' # default Pi-hole data directory (local) -RIHOLE_DIR='/etc/pihole' # default Pi-hole data directory (remote) -DNSMAQ_DIR='/etc/dnsmasq.d' # default DNSMASQ data directory (local) -RNSMAQ_DIR='/etc/dnsmasq.d' # default DNSMASQ data directory (remote) -PIHOLE_BIN='/usr/local/bin/pihole' # default Pi-hole binary directory (local) -RIHOLE_BIN='/usr/local/bin/pihole' # default Pi-hole binary directory (remote) -DOCKER_BIN='/usr/bin/docker' # default Docker binary directory (local) -ROCKER_BIN='/usr/bin/docker' # default Docker binary directory (remote) -PODMAN_BIN='/usr/bin/podman' # default Podman binary directory (local) -RODMAN_BIN='/usr/bin/podman' # default Podman binary directory (remote) -FILE_OWNER='pihole:pihole' # default Pi-hole file owner and group (local) -RILE_OWNER='pihole:pihole' # default Pi-hole file owner and group (remote) -DOCKER_CON='pihole' # default Pi-hole container name (local) -ROCKER_CON='pihole' # default Pi-hole container name (remote) -CONTAIMAGE='pihole/pihole' # official Pi-hole container image +## Echo Stack +### Informative +function echo_info { + echo -e "${INFO} ${YELLOW}${MESSAGE}${NC}" +} -GRAVITY_FI='gravity.db' # default Pi-hole database file -CUSTOM_DNS='custom.list' # default Pi-hole local DNS lookups -CNAME_CONF='05-pihole-custom-cname.conf' # default DNSMASQ CNAME alias file -GSLAN_CONF='08-gs-lan.conf' # default DNSMASQ GS managed file +function echo_prompt { + echo -e "${INF1} ${CYAN}${MESSAGE}${NC}" +} -# Interaction Customization -VERIFY_PASS='0' # replace in gravity-sync.conf to overwrite -SKIP_CUSTOM='0' # replace in gravity-sync.conf to overwrite -INCLUDE_CNAME='0' # replace in gravity-sync.conf to overwrite -DATE_OUTPUT='0' # replace in gravity-sync.conf to overwrite -PING_AVOID='0' # replace in gravity-sync.conf to overwrite -ROOT_CHECK_AVOID='0' # replace in gravity-sync.conf to overwrite +### Warning +function echo_warn { + echo -e "${WARN} ${PURPLE}${MESSAGE}${NC}" +} -# Backup Customization -BACKUP_TIMEOUT='240' # replace in gravity-sync.conf to overwrite -BACKUP_INTEGRITY_WAIT='5' # replace in gravity-sync.conf to overwrite +### Executing +function echo_stat { + echo -en "${STAT} ${MESSAGE}" +} -# SSH Customization -SSH_PORT='22' # default SSH port -SSH_PKIF='.ssh/id_rsa' # default local SSH key +### Success +function echo_good { + echo -e "\r${GOOD} ${MESSAGE}" +} -# GS Folder/File Locations -GS_FILEPATH=$(realpath $0) # auto determined - do not change! -LOCAL_FOLDR=$(dirname $GS_FILEPATH) # auto determined - do not change! 
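# Sketch of the status-line pattern behind echo_stat/echo_good/echo_fail above: the pending
# marker is printed without a trailing newline, and the result line begins with a carriage
# return so it overwrites the same terminal row once the guarded command has finished.
MESSAGE="Copying configuration"
echo -en "∞ ${MESSAGE}"      # pending marker, cursor stays on this line
sleep 1                      # stand-in for the real work
echo -e "\r✓ ${MESSAGE}"     # rewrite the row with the success marker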
-CONFIG_FILE='gravity-sync.conf' # must exist with primary host/user configured -GS_FILENAME='gravity-sync.sh' # must exist because it's this script -BACKUP_FOLD='backup' # must exist as subdirectory in LOCAL_FOLDR -LOG_PATH="${LOCAL_FOLDR}/logs" # replace in gravity-sync.conf to overwrite -SYNCING_LOG='gravity-sync.log' # replace in gravity-sync.conf to overwrite -CRONJOB_LOG='gravity-sync.cron' # replace in gravity-sync.conf to overwrite -HISTORY_MD5='gravity-sync.md5' # replace in gravity-sync.conf to overwrite +### Success +function echo_good_clean { + echo -e "\r${GOOD} ${MESSAGE}" +} -# OS Settings -BASH_PATH='/bin/bash' # default OS bash path -DAEMON_PATH='/etc/systemd/system' # systemd timer/service folder +### Failure +function echo_fail { + echo -e "\r${FAIL} ${MESSAGE}" +} -############################################## -### NEVER CHANGE ANYTHING BELOW THIS LINE! ### -############################################## +### Request +function echo_need { + echo -en "${NEED} ${BOLD}${MESSAGE}:${NC} " +} -# Import UI Fields -source ${LOCAL_FOLDR}/includes/gs-ui.sh +### Indent +function echo_over { + echo -e " ${MESSAGE}" +} -# Import Color/Message Includes -source ${LOCAL_FOLDR}/includes/gs-colors.sh +### Gravity Sync Logo +function echo_grav { + echo -e "${LOGO} ${BOLD}${MESSAGE}${NC}" +} -# FUNCTION DEFINITIONS ####################### +### Lines +function echo_blank { + echo -e "" +} -# Core Functions -source ${LOCAL_FOLDR}/includes/gs-core.sh +## Error Validation +function error_validate { + if [ "$?" != "0" ]; then + echo_fail + exit 1 + else + echo_good + fi +} -# Gravity Replication Functions -source ${LOCAL_FOLDR}/includes/gs-pull.sh -source ${LOCAL_FOLDR}/includes/gs-push.sh -source ${LOCAL_FOLDR}/includes/gs-smart.sh -source ${LOCAL_FOLDR}/includes/gs-backup.sh +function start_gs_no_config { + MESSAGE="Gravity Sync 3.x to 4.0 Migration Utility" + echo_grav +} -# Hashing & SSH Functions -source ${LOCAL_FOLDR}/includes/gs-hashing.sh -source ${LOCAL_FOLDR}/includes/gs-ssh.sh +function check_old_version { + MESSAGE="Checking for 3.x Configuration File" + echo_stat -# Logging Functions -source ${LOCAL_FOLDR}/includes/gs-logging.sh + if [ -f settings/gravity-sync.conf ]; then + echo_good + else + echo_fail + exit 1 + fi -# Validation Functions -source ${LOCAL_FOLDR}/includes/gs-validate.sh -source ${LOCAL_FOLDR}/includes/gs-intent.sh -source ${LOCAL_FOLDR}/includes/gs-root.sh +} -# Configuration Management -source ${LOCAL_FOLDR}/includes/gs-config.sh -source ${LOCAL_FOLDR}/includes/gs-update.sh -source ${LOCAL_FOLDR}/includes/gs-automate.sh -source ${LOCAL_FOLDR}/includes/gs-purge.sh +function install_new_gravity { + MESSAGE="Installing Gravity Sync 4.0" + echo_info -# Exit Codes -source ${LOCAL_FOLDR}/includes/gs-exit.sh + if [ -d /etc/gravity-sync/.gs ]; then + MESSAGE="Removing existing GitHub cache" + echo_stat + sudo rm -fr /etc/gravity-sync/.gs + error_validate + fi + + if [ ! 
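# Usage sketch for error_validate above (assuming the echo_* helpers are in scope): it
# inspects $?, which only reflects the command that ran immediately before it, so the call
# has to directly follow the command it is guarding, exactly as the migration steps below do.
MESSAGE="Installing the new gravity-sync binary"
echo_stat
sudo cp /etc/gravity-sync/.gs/gravity-sync /usr/local/bin
error_validate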
-d /etc/gravity-sync ]; then + MESSAGE="Creating new configuration directory" + echo_stat + sudo mkdir /etc/gravity-sync + error_validate + fi + + MESSAGE="Validating configuration directory permissions" + echo_stat + sudo chmod 775 /etc/gravity-sync + error_validate + + if [ -f /usr/local/bin/gravity-sync ]; then + MESSAGE="Removing old Gravity Sync binary" + echo_stat + sudo rm -f /usr/local/bin/gravity-sync + error_validate + fi + + MESSAGE="Creating new GitHub cache" + echo_prompt + + sudo git clone https://github.com/vmstan/gravity-sync.git /etc/gravity-sync/.gs + + # MESSAGE="Enabling beta updates" + # echo_stat + # sudo touch /etc/gravity-sync/.gs/dev + # echo -e "BRANCH='origin/4.0.0'" | sudo tee /etc/gravity-sync/.gs/dev 1> /dev/null + # error_validate + + sudo cp /etc/gravity-sync/.gs/gravity-sync /usr/local/bin +} + +function upgrade_to_4 { + MESSAGE="Migrating Previous Configuration" + echo_info + + MESSAGE="Transferring SSH keys" + sudo cp $HOME/.ssh/id_rsa /etc/gravity-sync/gravity-sync.rsa + sudo cp $HOME/.ssh/id_rsa.pub /etc/gravity-sync/gravity-sync.rsa.pub + error_validate + + REMOTE_HOST='' + REMOTE_USER='' + + PIHOLE_DIR='' + RIHOLE_DIR='' + DNSMAQ_DIR='' + RNSMAQ_DIR='' + FILE_OWNER='' + RILE_OWNER='' + DOCKER_CON='' + ROCKER_CON='' + + MESSAGE="Reviewing old configuration file settings" + echo_stat + source settings/gravity-sync.conf + error_validate + + MESSAGE="Creating new configuration file from template" + echo_stat + sudo cp /etc/gravity-sync/.gs/templates/gravity-sync.conf.example /etc/gravity-sync/gravity-sync.conf + error_validate + + LOCAL_PIHOLE_DIRECTORY=${PIHOLE_DIR} + REMOTE_PIHOLE_DIRECTORY=${RIHOLE_DIR} + LOCAL_DNSMASQ_DIRECTORY=${DNSMAQ_DIR} + REMOTE_DNSMASQ_DIRECTORY=${RNSMAQ_DIR} + LOCAL_FILE_OWNER=${FILE_OWNER} + REMOTE_FILE_OWNER=${RILE_OWNER} + LOCAL_DOCKER_CONTAINER=${DOCKER_CON} + REMOTE_DOCKER_CONTAINER=${ROCKER_CON} + + MESSAGE="Migrating remote host settings" + echo_stat + sudo sed -i "/REMOTE_HOST=''/c\REMOTE_HOST='${REMOTE_HOST}'" /etc/gravity-sync/gravity-sync.conf + error_validate + + MESSAGE="Migrating remote user settings" + echo_stat + sudo sed -i "/REMOTE_USER=''/c\REMOTE_USER='${REMOTE_USER}'" /etc/gravity-sync/gravity-sync.conf + error_validate + + if [ "${LOCAL_PIHOLE_DIRECTORY}" == '' ] || [ "${LOCAL_PIHOLE_DIRECTORY}" == '/etc/pihole' ]; then + MESSAGE="Defaulting local Pi-hole directory setting" + echo_good_clean + else + MESSAGE="Migrating local Pi-hole directory setting" + echo_stat + sudo sed -i "/LOCAL_PIHOLE_DIRECTORY=''/c\LOCAL_PIHOLE_DIRECTORY='${LOCAL_PIHOLE_DIRECTORY}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${REMOTE_PIHOLE_DIRECTORY}" == '' ] || [ "${REMOTE_PIHOLE_DIRECTORY}" == '/etc/pihole' ]; then + MESSAGE="Defaulting remote Pi-hole directory setting" + echo_good_clean + else + MESSAGE="Migrating remote Pi-hole directory setting" + echo_stat + sudo sed -i "/REMOTE_PIHOLE_DIRECTORY=''/c\REMOTE_PIHOLE_DIRECTORY='${REMOTE_PIHOLE_DIRECTORY}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${LOCAL_DNSMASQ_DIRECTORY}" == '' ] || [ "${LOCAL_DNSMASQ_DIRECTORY}" == '/etc/dnsmasq.d' ]; then + MESSAGE="Defaulting local DNSMASQ directory setting" + echo_good_clean + else + MESSAGE="Migrating local DNSMASQ directory setting" + echo_stat + sudo sed -i "/LOCAL_DNSMASQ_DIRECTORY=''/c\LOCAL_DNSMASQ_DIRECTORY='${LOCAL_DNSMASQ_DIRECTORY}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${REMOTE_DNSMASQ_DIRECTORY}" == '' ] || [ "${REMOTE_DNSMASQ_DIRECTORY}" == 
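# Sketch of the sed 'c\' (change line) idiom the migration relies on to fill in the new
# configuration file: the address matches the placeholder line from the template and c\
# swaps the whole line for the populated value. Demonstrated on a scratch file so nothing
# real is modified.
TMP_CONF=$(mktemp)
echo "REMOTE_HOST=''" > "${TMP_CONF}"
sed -i "/REMOTE_HOST=''/c\REMOTE_HOST='192.168.1.10'" "${TMP_CONF}"
cat "${TMP_CONF}"    # prints: REMOTE_HOST='192.168.1.10'
rm -f "${TMP_CONF}"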
'/etc/dnsmasq.d' ]; then + MESSAGE="Defaulting remote DNSMASQ directory setting" + echo_good_clean + else + MESSAGE="Migrating remote DNSMASQ directory setting" + echo_stat + sudo sed -i "/REMOTE_DNSMASQ_DIRECTORY=''/c\REMOTE_DNSMASQ_DIRECTORY='${REMOTE_DNSMASQ_DIRECTORY}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${LOCAL_FILE_OWNER}" == '' ]; then + MESSAGE="Defaulting local file owner setting" + echo_good_clean + else + MESSAGE="Migrating local file owner setting" + echo_stat + sudo sed -i "/LOCAL_FILE_OWNER=''/c\LOCAL_FILE_OWNER='${LOCAL_FILE_OWNER}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${REMOTE_FILE_OWNER}" == '' ]; then + MESSAGE="Defaulting remote file owner setting" + echo_good_clean + else + MESSAGE="Migrating remote file owner setting" + echo_stat + sudo sed -i "/REMOTE_FILE_OWNER=''/c\REMOTE_FILE_OWNER='${REMOTE_FILE_OWNER}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${LOCAL_DOCKER_CONTAINER}" == '' ] || [ "${LOCAL_DOCKER_CONTAINER}" == 'pihole' ]; then + MESSAGE="Defaulting local Pi-hole container setting" + echo_good_clean + else + MESSAGE="Migrating local Pi-hole container setting" + echo_stat + sudo sed -i "/LOCAL_DOCKER_CONTAINER=''/c\LOCAL_DOCKER_CONTAINER='${LOCAL_DOCKER_CONTAINER}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + if [ "${REMOTE_DOCKER_CONTAINER}" == '' ] || [ "${REMOTE_DOCKER_CONTAINER}" == 'pihole' ]; then + MESSAGE="Defaulting remote Pi-hole container setting" + echo_good_clean + else + MESSAGE="Migrating local Pi-hole container setting" + echo_stat + sudo sed -i "/REMOTE_DOCKER_CONTAINER=''/c\REMOTE_DOCKER_CONTAINER='${REMOTE_DOCKER_CONTAINER}'" /etc/gravity-sync/gravity-sync.conf + error_validate + fi + + MESSAGE="Migrating task history" + echo_stat + sudo cp logs/gravity-sync.log /etc/gravity-sync/gs-sync.log + error_validate + + MESSAGE="Migrating hashing tables" + echo_stat + if [ -f "logs/gravity-sync.md5" ]; then + REMOTE_DB_MD5=$(sed "1q;d" logs/gravity-sync.md5) + LOCAL_DB_MD5=$(sed "2q;d" logs/gravity-sync.md5) + REMOTE_CL_MD5=$(sed "3q;d" logs/gravity-sync.md5) + LOCAL_CL_MD5=$(sed "4q;d" logs/gravity-sync.md5) + REMOTE_CN_MD5=$(sed "5q;d" logs/gravity-sync.md5) + LOCAL_CN_MD5=$(sed "6q;d" logs/gravity-sync.md5) + + echo -e ${REMOTE_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + else + REMOTE_DB_MD5="0" + LOCAL_DB_MD5="0" + REMOTE_CL_MD5="0" + LOCAL_CL_MD5="0" + REMOTE_CN_MD5="0" + LOCAL_CN_MD5="0" + + echo -e ${REMOTE_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_DB_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_GRAVITY_FI_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CL_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CUSTOM_DNS_MD5_LOG} 1> /dev/null + echo -e ${REMOTE_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + echo -e ${LOCAL_CN_MD5} | sudo tee -a ${GS_ETC_PATH}/${GS_CNAME_CONF_MD5_LOG} 1> /dev/null + fi + 
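# Sketch of the line-extraction idiom used for the hash table migration above: sed "Nq;d"
# prints only line N (quit with auto-print at line N, delete every other line), which is how
# the six MD5 values are pulled out of the old single-file gravity-sync.md5 before being
# split into the new per-file logs.
printf 'row-one\nrow-two\nrow-three\n' > /tmp/gs-md5-demo
sed "2q;d" /tmp/gs-md5-demo    # prints: row-two
rm -f /tmp/gs-md5-demo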
error_validate +} + +function remove_old_version { + MESSAGE="Removing Old Version & Settings" + echo_info + + if hash crontab 2>/dev/null; then + MESSAGE="Clearing automation from crontab" + echo_stat + crontab -l > cronjob-old.tmp + sed "/gravity-sync.sh/d" cronjob-old.tmp > cronjob-new.tmp + crontab cronjob-new.tmp + error_validate + fi + + kill_automation_service + + if [ -f /etc/bash.bashrc ]; then + MESSAGE="Cleaning up bash.bashrc" + echo_info + sudo sed -i "/gravity-sync.sh/d" /etc/bash.bashrc + error_validate + fi + + MESSAGE="Removing old Gravity Sync folder" + echo_stat + sudo rm -fr ${LOCAL_FOLDR} + error_validate +} + +function kill_automation_service { + if systemctl is-active --quiet gravity-sync; then + MESSAGE="Stopping ${PROGRAM} timer" + echo_stat + sudo systemctl stop gravity-sync + error_validate + + MESSAGE="Disabling ${PROGRAM} automation service" + echo_stat + sudo systemctl disable gravity-sync --quiet + error_validate + + MESSAGE="Removing systemd timer" + echo_stat + sudo rm -f ${OS_DAEMON_PATH}/gravity-sync.timer + error_validate + + MESSAGE="Removing systemd service" + echo_stat + sudo rm -f ${OS_DAEMON_PATH}/gravity-sync.service + error_validate + + MESSAGE="Reloading systemd daemon" + echo_stat + sudo systemctl daemon-reload --quiet + error_validate + fi +} + +function end_migration { + MESSAGE="Migration Complete" + echo_info + + +} # SCRIPT EXECUTION ########################### case $# in 0) - start_gs - task_smart ;; + start_gs_no_config + check_old_version + install_new_gravity + upgrade_to_4 + remove_old_version + end_migration ;; 1) case $1 in - smart|sync) - start_gs - task_smart ;; - pull) - start_gs - task_pull ;; - push) - start_gs - task_push ;; - version) - start_gs_noconfig - task_version ;; - update|upgrade) - start_gs_noconfig - task_update ;; - dev|devmode|development|develop) - start_gs_noconfig - task_devmode ;; - logs|log) - start_gs - task_logs ;; - compare) - start_gs - task_compare ;; - cron) - start_gs - task_autocron ;; - config|configure) - start_gs_noconfig - task_configure ;; - auto|automate) - start_gs - task_automate ;; - purge) - start_gs - task_purge ;; - sudo) - start_gs - task_sudo ;; - info) - start_gs - task_info ;; - cname) - start_gs - task_cname ;; *) - start_gs - task_invalid ;; - esac - ;; - - 2) - case $1 in - auto|automate) - start_gs - task_automate ;; - esac - ;; - - 3) - case $1 in - auto|automate) - start_gs - task_automate $2 ;; + start_gs_no_config + check_old_version + install_new_gravity + upgrade_to_4 + remove_old_version + end_migration ;; esac ;; *) - start_gs - task_invalid ;; + start_gs_no_config + check_old_version + install_new_gravity + upgrade_to_4 + remove_old_version + end_migration ;; esac -# END OF SCRIPT ############################## +# END OF SCRIPT ############################## \ No newline at end of file diff --git a/includes/gs-automate.sh b/includes/gs-automate.sh deleted file mode 100644 index 0aa632c..0000000 --- a/includes/gs-automate.sh +++ /dev/null @@ -1,148 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-automate.sh ############################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
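# Sketch: remove_old_version above round-trips the crontab through temporary files to drop
# the legacy gravity-sync.sh job. Functionally the cleanup reduces to the pipeline below;
# previewing first makes it easy to confirm what will remain before reinstalling it.
crontab -l 2>/dev/null | sed "/gravity-sync.sh/d"              # preview the cleaned crontab
crontab -l 2>/dev/null | sed "/gravity-sync.sh/d" | crontab -  # reinstall it without the old job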
- -## Automate Task -function task_automate { - TASKTYPE='AUTOMATE' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - CRON_EXIST='0' - CRON_CHECK=$(crontab -l | grep -q "${GS_FILENAME}" && echo '1' || echo '0') - if [ ${CRON_CHECK} == 1 ] - then - MESSAGE="${UI_AUTO_CRON_EXISTS}" - echo_warn - clear_cron - fi - - MESSAGE="Customizing service file username" - sed -i "/User=unknown/c\User=$USER" ${LOCAL_FOLDR}/templates/gravity-sync.service - error_validate - - MESSAGE="Customizing service file executable" - sed -i "/ExecStart=/c\ExecStart=${LOCAL_FOLDR}/${GS_FILENAME}" ${LOCAL_FOLDR}/templates/gravity-sync.service - error_validate - - if systemctl is-active --quiet gravity-sync - then - MESSAGE="Stopping existing systemd service" - sudo systemctl stop gravity-sync - error_validate - fi - - MESSAGE="Moving systemd timer into place" - sudo cp ${LOCAL_FOLDR}/templates/gravity-sync.timer ${DAEMON_PATH} - error_validate - - MESSAGE="Moving systemd service into place" - sudo cp ${LOCAL_FOLDR}/templates/gravity-sync.service ${DAEMON_PATH} - error_validate - - MESSAGE="Reloading systemd daemon" - sudo systemctl daemon-reload --quiet - error_validate - - MESSAGE="Enabling Gravity Sync timer" - sudo systemctl enable gravity-sync.timer --quiet - error_validate - - MESSAGE="Starting Gravity Sync service" - sudo systemctl start gravity-sync --quiet - error_validate - - exit_withchange -} - -function task_autocron { - TASKTYPE='AUTOCRON' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - MESSAGE="Crontab automation is deprecated and will be removed in a future release" - echo_warn - - CRON_EXIST='0' - CRON_CHECK=$(crontab -l | grep -q "${GS_FILENAME}" && echo '1' || echo '0') - if [ ${CRON_CHECK} == 1 ] - then - MESSAGE="${UI_AUTO_CRON_EXISTS}" - echo_warn - CRON_EXIST='1' - fi - - MESSAGE="${UI_AUTO_CRON_DISPLAY_FREQ}" - echo_info - - if [[ $1 =~ ^[0-9][0-9]?$ ]] - then - INPUT_AUTO_FREQ=$1 - else - MESSAGE="${UI_AUTO_CRON_SELECT_FREQ}" - echo_need - read INPUT_AUTO_FREQ - INPUT_AUTO_FREQ="${INPUT_AUTO_FREQ:-15}" - fi - - if [ $INPUT_AUTO_FREQ == 5 ] || [ $INPUT_AUTO_FREQ == 10 ] || [ $INPUT_AUTO_FREQ == 15 ] || [ $INPUT_AUTO_FREQ == 30 ] - then - if [ $CRON_EXIST == 1 ] - then - clear_cron - fi - - path_fix - - MESSAGE="${UI_AUTO_CRON_SAVING}" - echo_stat - (crontab -l 2>/dev/null; echo "*/${INPUT_AUTO_FREQ} * * * * ${BASH_PATH} ${LOCAL_FOLDR}/${GS_FILENAME} smart > ${LOG_PATH}/${CRONJOB_LOG}") | crontab - - error_validate - elif [ $INPUT_AUTO_FREQ == 0 ] - then - if [ $CRON_EXIST == 1 ] - then - clear_cron - else - MESSAGE="${UI_AUTO_CRON_DISABLED}" - echo_warn - fi - else - MESSAGE="${UI_INVALID_SELECTION}" - echo_fail - exit_nochange - fi - - exit_withchange -} - -## Clear Existing Automation Settings -function clear_cron { - MESSAGE="${UI_AUTO_CRON_DISABLED}" - echo_stat - - crontab -l > cronjob-old.tmp - sed "/${GS_FILENAME}/d" cronjob-old.tmp > cronjob-new.tmp - crontab cronjob-new.tmp 2>/dev/null - error_validate - rm cronjob-old.tmp - rm cronjob-new.tmp -} - -## Cron Task -function task_cron { - TASKTYPE='CRON' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - show_crontab -} - -function path_fix { - MESSAGE="Adding user path to Crontab" - echo_stat - (crontab -l 2>/dev/null; echo "PATH=$PATH") | crontab - - error_validate -} \ No newline at end of file diff --git a/includes/gs-backup.sh b/includes/gs-backup.sh deleted file mode 100644 index a234467..0000000 --- a/includes/gs-backup.sh +++ /dev/null @@ -1,139 +0,0 @@ - -# GRAVITY SYNC BY VMSTAN ##################### -# gs-backup.sh 
############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -function backup_settime() { - BACKUPTIMESTAMP=$(date +%F-%H%M%S) -} - -function backup_local_gravity() { - MESSAGE="${UI_BACKUP_SECONDARY} ${UI_GRAVITY_NAME}" - echo_stat - - sqlite3 ${PIHOLE_DIR}/${GRAVITY_FI} ".backup '${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${GRAVITY_FI}.backup'" - error_validate -} - -function backup_local_gravity_integrity() { - MESSAGE="${UI_BACKUP_INTEGRITY}" - echo_stat - - sleep $BACKUP_INTEGRITY_WAIT - secondaryIntegrity=$(sqlite3 ${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${GRAVITY_FI}.backup 'PRAGMA integrity_check;' | sed 's/\s.*$//') - error_validate - - if [ "$secondaryIntegrity" != 'ok' ] - then - MESSAGE="${UI_BACKUP_INTEGRITY_FAILED} ${UI_GRAVITY_NAME}" - echo_fail - - MESSAGE="${UI_BACKUP_INTEGRITY_DELETE} ${UI_GRAVITY_NAME}" - echo_stat - - sudo rm ${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${GRAVITY_FI}.backup - error_validate - - exit_nochange - fi -} - -function backup_local_custom() { - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ -f ${PIHOLE_DIR}/${CUSTOM_DNS} ] - then - MESSAGE="${UI_BACKUP_SECONDARY} ${UI_CUSTOM_NAME}" - echo_stat - - cp ${PIHOLE_DIR}/${CUSTOM_DNS} ${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${CUSTOM_DNS}.backup - error_validate - else - MESSAGE="No local ${CUSTOM_DNS} detected" - echo_info - fi - fi -} - -function backup_local_cname() { - if [ "${INCLUDE_CNAME}" == '1' ] - then - if [ -f ${DNSMAQ_DIR}/${CNAME_CONF} ] - then - MESSAGE="${UI_BACKUP_SECONDARY} ${UI_CNAME_NAME}" - echo_stat - - cp ${DNSMAQ_DIR}/${CNAME_CONF} ${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${CNAME_CONF}.backup - error_validate - else - MESSAGE="No local ${CNAME_CONF} detected" - echo_info - fi - fi -} - -function backup_remote_gravity() { - MESSAGE="${UI_BACKUP_PRIMARY} ${UI_GRAVITY_NAME}" - echo_stat - - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo sqlite3 ${RIHOLE_DIR}/${GRAVITY_FI} \".backup '${RIHOLE_DIR}/${GRAVITY_FI}.backup'\"" - create_sshcmd -} - -function backup_remote_gravity_integrity() { - MESSAGE="${UI_BACKUP_INTEGRITY}" - echo_stat - - sleep $BACKUP_INTEGRITY_WAIT - primaryIntegrity=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "sqlite3 ${RIHOLE_DIR}/${GRAVITY_FI}.backup 'PRAGMA integrity_check;'" | sed 's/\s.*$//') - error_validate - - if [ "$primaryIntegrity" != 'ok' ] - then - MESSAGE="${UI_BACKUP_INTEGRITY_FAILED} ${UI_GRAVITY_NAME}" - echo_fail - - MESSAGE="{UI_BACKUP_INTEGRITY_DELETE} ${UI_GRAVITY_NAME}" - echo_stat - - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo rm ${RIHOLE_DIR}/${GRAVITY_FI}.backup" - create_sshcmd - - exit_nochange - fi -} - -function backup_remote_custom() { - if [ "$SKIP_CUSTOM" != '1' ] - then - MESSAGE="${UI_BACKUP_PRIMARY} ${UI_CUSTOM_NAME}" - echo_stat - - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo cp ${RIHOLE_DIR}/${CUSTOM_DNS} ${RIHOLE_DIR}/${CUSTOM_DNS}.backup" - create_sshcmd - fi -} - -function backup_remote_cname() { - if [ "$INCLUDE_CNAME" == '1' ] - then - MESSAGE="${UI_BACKUP_PRIMARY} ${UI_CNAME_NAME}" - echo_stat - - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo cp ${RNSMAQ_DIR}/${CNAME_CONF} ${RIHOLE_DIR}/dnsmasq.d-${CNAME_CONF}.backup" - create_sshcmd - fi -} - -function backup_cleanup() { - MESSAGE="${UI_BACKUP_PURGE}" - echo_stat - git clean -fq - error_validate -} \ No newline at 
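# Sketch of the database handling the (now removed) gs-backup.sh was built around, still the
# safe way to copy gravity.db by hand: sqlite3's online .backup takes a consistent snapshot
# while pihole-FTL has the file open, and PRAGMA integrity_check verifies the copy before it
# is trusted. Paths are the stock Pi-hole locations used throughout this patch.
sudo sqlite3 /etc/pihole/gravity.db ".backup '/tmp/gravity.db.backup'"
CHECK=$(sudo sqlite3 /tmp/gravity.db.backup 'PRAGMA integrity_check;')
if [ "${CHECK}" != "ok" ]; then
    echo "backup failed integrity check: ${CHECK}"
    sudo rm -f /tmp/gravity.db.backup
fi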
end of file diff --git a/includes/gs-colors.sh b/includes/gs-colors.sh deleted file mode 100644 index dfe877e..0000000 --- a/includes/gs-colors.sh +++ /dev/null @@ -1,88 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-colors.sh ############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Script Colors -RED='\033[0;91m' -GREEN='\033[0;92m' -CYAN='\033[0;96m' -YELLOW='\033[0;93m' -PURPLE='\033[0;95m' -BLUE='\033[0;94m' -BOLD='\033[1m' -NC='\033[0m' - -## Message Codes -FAIL="${RED}✗${NC}" -WARN="${PURPLE}!${NC}" -GOOD="${GREEN}✓${NC}" -STAT="${CYAN}∞${NC}" -INFO="${YELLOW}»${NC}" -INF1="${CYAN}›${NC}" -NEED="${BLUE}?${NC}" -LOGO="${PURPLE}∞${NC}" - -## Echo Stack -### Informative -function echo_info { - echo -e "${INFO} ${YELLOW}${MESSAGE}${NC}" -} - -function echo_inf1 { - echo -e "${INF1} ${CYAN}${MESSAGE}${NC}" -} - -### Warning -function echo_warn { - echo -e "${WARN} ${PURPLE}${MESSAGE}${NC}" -} - -### Executing -function echo_stat { - echo -en "${STAT} ${MESSAGE}" -} - -### Success -function echo_good { - echo -e "\r${GOOD} ${MESSAGE}" -} - -### Success -function echo_good_clean { - echo -e "\r${GOOD} ${MESSAGE}" -} - -### Success -function echo_sameline { - echo -en " " - echo -en "\r" -} - -### Failure -function echo_fail { - echo -e "\r${FAIL} ${MESSAGE}" -} - -### Request -function echo_need { - echo -en "${NEED} ${BOLD}${MESSAGE}:${NC} " -} - -### Gravity Sync Logo -function echo_grav { - echo -e "${LOGO} ${BOLD}${MESSAGE}${NC}" -} - -### Lines -function echo_lines { - echo -e "========================================================" -} - -### Lines -function echo_blank { - echo -e "" -} - - diff --git a/includes/gs-config.sh b/includes/gs-config.sh deleted file mode 100644 index bbf6c6a..0000000 --- a/includes/gs-config.sh +++ /dev/null @@ -1,494 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-config.sh ############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Configure Task -function task_configure { - TASKTYPE='CONFIGURE' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - relocate_config_gs - - if [ -f ${LOCAL_FOLDR}/settings/${CONFIG_FILE} ] - then - config_delete - else - config_generate - fi - - create_alias - exit_withchange -} - -## Generate New Configuration -function config_generate { - MESSAGE="${UI_CONFIG_CREATING} ${CONFIG_FILE}" - echo_stat - cp ${LOCAL_FOLDR}/templates/${CONFIG_FILE}.example ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - echo_lines - echo -e "Welcome to the ${PURPLE}Gravity Sync${NC} Configuration Wizard" - echo -e "Please read through ${BLUE}https://github.com/vmstan/gravity-sync/wiki${NC} before you continue!" - echo_blank - echo -e "If the installer detects that you have a supported container engine (Docker or Podman) installed" - echo -e "on your local Pi-hole, you will be directed to the advanced installation options. If you using " - echo -e "containers on your remote Pi-hole, you'll need to select this option manually to adjust settings" - echo -e "such as custom Pi-hole binary or configuration directories, CNAME replication, etc." 
- echo_blank - echo -e "Gravity Sync uses a primary/secondary model for replication, and normally syncs changes from the " - echo -e "primary to the secondary. The LOCAL Pi-hole where you are running this configuration script is" - echo -e "considered the SECONDARY Pi-hole! The REMOTE Pi-hole where you normally make Gravity Database" - echo -e "changes, and is considered the PRIMARY Pi-hole." - echo_blank - echo -e "Confused? Please refer back to the documentation." - echo_lines - - MESSAGE="${PROGRAM} ${UI_CONFIG_REQUIRED}" - echo_info - - MESSAGE="${UI_CORE_APP} ${UI_CONFIG_REMOTE} ${UI_CONFIG_HOSTREQ}" - echo_inf1 - - MESSAGE="IP" - echo_need - read INPUT_REMOTE_HOST - - MESSAGE="${UI_CONFIG_ICMP_TEST} ${INPUT_REMOTE_HOST}" - echo_stat - ping -c 3 ${INPUT_REMOTE_HOST} >/dev/null 2>&1 - error_validate - - MESSAGE="${UI_CONFIG_SAVING} ${INPUT_REMOTE_HOST} host to ${CONFIG_FILE}" - echo_stat - sed -i "/REMOTE_HOST='192.168.1.10'/c\REMOTE_HOST='${INPUT_REMOTE_HOST}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - MESSAGE="${UI_CORE_APP} ${UI_CONFIG_REMOTE} ${UI_CONFIG_USERREQ} for ${INPUT_REMOTE_HOST}" - echo_inf1 - - MESSAGE="User" - echo_need - read INPUT_REMOTE_USER - - MESSAGE="${UI_CONFIG_SAVING} ${INPUT_REMOTE_USER}@${INPUT_REMOTE_HOST} user to ${CONFIG_FILE}" - echo_stat - sed -i "/REMOTE_USER='pi'/c\REMOTE_USER='${INPUT_REMOTE_USER}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - generate_sshkey - - MESSAGE="${UI_CORE_LOADING} ${CONFIG_FILE}" - echo_stat - source ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - echo_lines - export_sshkey - echo_lines - - MESSAGE="${UI_CONFIG_SSH_KEYPAIR} ${INPUT_REMOTE_HOST}" - echo_good_clean - - MESSAGE="${UI_CONFIG_CONT_LOOKUP}" - echo_stat - - docker_detect - podman_detect - - if [ "${DOCKERREADY}" == "1" ] || [ "${PODMANREADY}" == "1" ] - then - MESSAGE="${UI_CONFIG_CONT_DETECT} ${UI_CONFIG_CONT_DETECTED}" - echo_good - MESSAGE="${UI_CORE_LOADING} ${UI_CONFIG_ADVANCED}" - echo_info - advanced_config_generate - else - MESSAGE="${UI_CONFIG_CONT_DETECT} ${UI_CONFIG_CONT_DETECTNA}" - echo_good - MESSAGE="${UI_CONFIG_DOADVANCED}" - echo_inf1 - MESSAGE="${UI_CONFIG_YESNON}" - echo_need - read INPUT_ADVANCED_INSTALL - INPUT_ADVANCED_INSTALL="${INPUT_ADVANCED_INSTALL:-N}" - - if [ "${INPUT_ADVANCED_INSTALL}" == "Yes" ] || [ "${INPUT_ADVANCED_INSTALL}" == "yes" ] || [ "${INPUT_ADVANCED_INSTALL}" == "Y" ] || [ "${INPUT_ADVANCED_INSTALL}" == "y" ] - then - MESSAGE="${UI_CORE_LOADING} ${UI_CONFIG_ADVANCED}" - echo_info - - advanced_config_generate - else - end_config - fi - fi -} - -function end_config(){ - echo_lines - echo -e "Configuration has been completed successfully, if you've still not read the instructions" - echo -e "please read through ${BLUE}https://github.com/vmstan/gravity-sync/wiki${NC} before you continue!" - echo_blank - echo -e "Your next step is to complete a sync of data from your remote Pi-hole to this local Pi-hole." - echo -e "ex: gravity-sync pull" - echo_blank - echo -e "If this completes successfully you can automate future sync jobs to run at a regular interval." - echo -e "ex: gravity-sync automate" - echo_blank - echo -e "Still confused? Please refer back to the documentation." 
- echo_lines -} - -## Advanced Configuration Options -function advanced_config_generate { - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP} ${UI_CONFIG_INSTANCEREQ}" - echo_inf1 - MESSAGE="${UI_CONFIG_INSTANCETYPE}" - echo_need - read INPUT_PH_IN_TYPE - INPUT_PH_IN_TYPE="${INPUT_PH_IN_TYPE:-default}" - - if [ "${INPUT_PH_IN_TYPE}" != "default" ] - then - if [ "${INPUT_PH_IN_TYPE}" != "docker" ] && [ "${INPUT_PH_IN_TYPE}" != "podman" ] - then - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CONFIG_INSTANCE_ERROR}" - echo_warn - exit_withchange - fi - - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CONFIG_CONTAINER_TYPE} to ${CONFIG_FILE}" - echo_stat - sed -i "/# PH_IN_TYPE=''/c\PH_IN_TYPE='${INPUT_PH_IN_TYPE}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - MESSAGE="${UI_CONFIG_CONT_DETECT} ${UI_CONFIG_IMAGES}" - echo_info - echo_lines - if [ "${INPUT_PH_IN_TYPE}" == "docker" ] - then - sudo docker container ls - elif [ "${INPUT_PH_IN_TYPE}" == "podman" ] - then - sudo podman container ls - fi - echo_lines - - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP} ${UI_CONFIG_INSTANCENAME}" - echo_inf1 - MESSAGE="${UI_CONFIG_PIHOLE_DEFAULT}" - echo_need - read INPUT_DOCKER_CON - INPUT_DOCKER_CON="${INPUT_DOCKER_CON:-pihole}" - - if [ "${INPUT_DOCKER_CON}" != "pihole" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CONFIG_CONTAINER_NAME} to ${CONFIG_FILE}" - echo_stat - sed -i "/# DOCKER_CON=''/c\DOCKER_CON='${INPUT_DOCKER_CON}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLPATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_ETC_VOLPATH_EXAMPLE}" - echo_need - read INPUT_PIHOLE_DIR - - if [ "${INPUT_PIHOLE_DIR}" != "" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLPATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# PIHOLE_DIR=''/c\PIHOLE_DIR='${INPUT_PIHOLE_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - SKIP_PIHOLE_DIR="1" - else - MESSAGE="${UI_CONFIG_SETTING_REQUIRED}" - echo_warn - exit_withchange - fi - - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLPATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_ETC_VOLDNSQ_EXAMPLE}" - echo_need - read INPUT_DNSMAQ_DIR - - if [ "${INPUT_DNSMAQ_DIR}" != "" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLPATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# DNSMAQ_DIR=''/c\DNSMAQ_DIR='${INPUT_DNSMAQ_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - SKIP_DNSMASQ_DIR="1" - else - MESSAGE="${UI_CONFIG_SETTING_REQUIRED}" - echo_warn - exit_withchange - fi - - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CONFIG_VOLUME_OWNER} to ${CONFIG_FILE}" - echo_stat - sed -i "/# FILE_OWNER=''/c\FILE_OWNER='999:999'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CORE_APP} ${UI_CONFIG_INSTANCEREQ}" - echo_inf1 - MESSAGE="${UI_CONFIG_INSTANCETYPE}" - echo_need - read INPUT_RH_IN_TYPE - INPUT_RH_IN_TYPE="${INPUT_RH_IN_TYPE:-default}" - - if [ "${INPUT_RH_IN_TYPE}" != "default" ] - then - if [ "${INPUT_RH_IN_TYPE}" != "docker" ] && [ "${INPUT_RH_IN_TYPE}" != "podman" ] - then - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CONFIG_INSTANCE_ERROR}" - echo_warn - exit_withchange - fi - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CONFIG_CONTAINER_TYPE} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RH_IN_TYPE=''/c\RH_IN_TYPE='${INPUT_RH_IN_TYPE}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - 
error_validate - - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CONFIG_CONTAINER_NAME}" - echo_inf1 - MESSAGE="${UI_CONFIG_DEFAULT_LEAVE} 'pihole'" - echo_need - read INPUT_ROCKER_CON - INPUT_ROCKER_CON="${INPUT_ROCKER_CON:-pihole}" - - if [ "${INPUT_ROCKER_CON}" != "pihole" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CONFIG_CONTAINER_NAME} to ${CONFIG_FILE}" - echo_stat - sed -i "/# ROCKER_CON=''/c\ROCKER_CON='${INPUT_ROCKER_CON}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLPATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_ETC_VOLPATH_EXAMPLE}" - echo_need - read INPUT_RIHOLE_DIR - - if [ "${INPUT_RIHOLE_DIR}" != "" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CORE_APP} ${UI_CONFIG_ETC_VOLPATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RIHOLE_DIR=''/c\RIHOLE_DIR='${INPUT_RIHOLE_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - SKIP_RIHOLE_DIR="1" - else - MESSAGE="${UI_CONFIG_SETTING_REQUIRED}" - echo_warn - exit_withchange - fi - - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLPATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_ETC_VOLDNSQ_EXAMPLE}" - echo_need - read INPUT_RNSMAQ_DIR - - if [ "${INPUT_RNSMAQ_DIR}" != "" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CORE_APP_DNS} ${UI_CONFIG_ETC_VOLPATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RNSMAQ_DIR=''/c\RNSMAQ_DIR='${INPUT_RNSMAQ_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - SKIP_RNSMASQ_DIR="1" - else - MESSAGE="${UI_CONFIG_SETTING_REQUIRED}" - echo_warn - exit_withchange - fi - - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CONFIG_VOLUME_OWNER} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RILE_OWNER=''/c\RILE_OWNER='999:999'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - - if [ "$SKIP_PIHOLE_DIR" != "1" ] - then - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP} ${UI_CONFIG_SETTING_DIR_PATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_DEFAULT_LEAVE} ${UI_CONFIG_DEFAULT_PIHOLE_ETC}" - echo_need - read INPUT_PIHOLE_DIR - INPUT_PIHOLE_DIR="${INPUT_PIHOLE_DIR:-/etc/pihole}" - - if [ "${INPUT_PIHOLE_DIR}" != "/etc/pihole" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP} ${UI_CONFIG_SETTING_DIR_PATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# PIHOLE_DIR=''/c\PIHOLE_DIR='${INPUT_PIHOLE_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - fi - - if [ "$SKIP_RIHOLE_DIR" != "1" ] - then - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CORE_APP} ${UI_CONFIG_SETTING_DIR_PATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_DEFAULT_LEAVE} ${UI_CONFIG_DEFAULT_PIHOLE_ETC}" - echo_need - read INPUT_RIHOLE_DIR - INPUT_RIHOLE_DIR="${INPUT_RIHOLE_DIR:-/etc/pihole}" - - if [ "${INPUT_RIHOLE_DIR}" != "/etc/pihole" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CORE_APP} ${UI_CONFIG_SETTING_DIR_PATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RIHOLE_DIR=''/c\RIHOLE_DIR='${INPUT_RIHOLE_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - fi - - if [ "$SKIP_DNSMASQ_DIR" != "1" ] - then - MESSAGE="${UI_CONFIG_LOCALSEC} ${UI_CORE_APP_DNS} ${UI_CONFIG_SETTING_DIR_PATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_DEFAULT_LEAVE} '/etc/dnsmasq.d'" - echo_need - read INPUT_DNSMASQ_DIR - INPUT_DNSMASQ_DIR="${INPUT_DNSMASQ_DIR:-/etc/dnsmasq.d}" - - if [ "${INPUT_DNSMASQ_DIR}" != "/etc/dnsmasq.d" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_LOCAL} ${UI_CORE_APP_DNS} ${UI_CONFIG_SETTING_DIR_PATH} to 
${CONFIG_FILE}" - echo_stat - sed -i "/# DNSMASQ_DIR=''/c\DNSMASQ_DIR='${INPUT_DNSMASQ_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - fi - - if [ "$SKIP_RNSMASQ_DIR" != "1" ] - then - MESSAGE="${UI_CONFIG_REMOTEPRI} ${UI_CORE_APP_DNS} ${UI_CONFIG_SETTING_DIR_PATH}" - echo_inf1 - MESSAGE="${UI_CONFIG_DEFAULT_LEAVE} ${UI_CONFIG_DEFAULT_DNSMASQ_ETC}" - echo_need - read INPUT_RNSMASQ_DIR - INPUT_RNSMASQ_DIR="${INPUT_RNSMASQ_DIR:-/etc/dnsmasq.d}" - - if [ "${INPUT_RNSMASQ_DIR}" != "/etc/dnsmasq.d" ] - then - MESSAGE="${UI_CONFIG_SAVING} ${UI_CONFIG_REMOTE} ${UI_CORE_APP_DNS} ${UI_CONFIG_SETTING_DIR_PATH} to ${CONFIG_FILE}" - echo_stat - sed -i "/# RNSMASQ_DIR=''/c\RNSMASQ_DIR='${INPUT_RNSMASQ_DIR}'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - fi - - MESSAGE="${UI_ENABLE_REPLICATION_QUEST} ${UI_CUSTOM_NAME}" - echo_inf1 - MESSAGE="${UI_CONFIG_YESNOY}" - echo_need - read INPUT_SKIP_CUSTOM - INPUT_SKIP_CUSTOM="${INPUT_SKIP_CUSTOM:-Y}" - - if [ "${INPUT_SKIP_CUSTOM}" != "Y" ] - then - MESSAGE="${UI_DNS_NAME} ${UI_CONFIG_PREF_SAVED} ${CONFIG_FILE}" - echo_stat - sed -i "/# SKIP_CUSTOM=''/c\SKIP_CUSTOM='1'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - if [ "${INPUT_SKIP_CUSTOM}" == "Y" ] - then - MESSAGE="${UI_ENABLE_REPLICATION_QUEST} ${UI_CNAME_NAME}" - echo_inf1 - MESSAGE="${UI_CONFIG_YESNON}" - echo_need - read INPUT_INCLUDE_CNAME - INPUT_INCLUDE_CNAME="${INPUT_INCLUDE_CNAME:-N}" - - if [ "${INPUT_INCLUDE_CNAME}" == "Y" ] - then - config_enablecname - fi - fi - - end_config -} - -function task_cname { - TASKTYPE='CNAME' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - config_enablecname - exit_withchange -} - -function config_enablecname { - MESSAGE="${UI_CNAME_NAME} ${UI_CONFIG_PREF_SAVED} ${CONFIG_FILE}" - echo_stat - sed -i "/# INCLUDE_CNAME=''/c\INCLUDE_CNAME='1'" ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate -} - -## Delete Existing Configuration -function config_delete { - source ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - MESSAGE="${CONFIG_FILE} ${UI_CONFIG_ALREADY}" - echo_info - - MESSAGE="${UI_CONFIG_AREYOUSURE}" - echo_inf1 - - intent_validate - - MESSAGE="${UI_CONFIG_ERASING} ${CONFIG_FILE}" - echo_stat - rm -f ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - - config_generate -} - -## Detect Docker -function docker_detect { - if hash docker 2>/dev/null - then - FTLCHECK=$(sudo docker container ls | grep 'pihole/pihole') - if [ "$FTLCHECK" != "" ] - then - DOCKERREADY="1" - fi - fi -} - -## Detect Podman -function podman_detect { - if hash podman 2>/dev/null - then - FTLCHECK=$(sudo podman container ls | grep 'pihole/pihole') - if [ "$FTLCHECK" != "" ] - then - PODMANREADY="1" - fi - fi -} - -## Create Bash Alias -function create_alias { - MESSAGE="${UI_CONFIG_BASH}" - echo_stat - - echo -e "alias gravity-sync='${GS_FILEPATH}'" | sudo tee -a /etc/bash.bashrc > /dev/null - error_validate - - MESSAGE="${UI_CONFIG_ALIAS}" - echo_info -} \ No newline at end of file diff --git a/includes/gs-core.sh b/includes/gs-core.sh deleted file mode 100644 index a12afba..0000000 --- a/includes/gs-core.sh +++ /dev/null @@ -1,176 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-core.sh ################################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
- -# Standard Output -function start_gs { - MESSAGE="${UI_CORE_INIT}" - echo_grav - cd ${LOCAL_FOLDR} - - import_gs - ph_type - - MESSAGE="${UI_CORE_EVALUATING}" - echo_stat - - if [ "${ROOT_CHECK_AVOID}" != "1" ] - then - new_root_check - fi - - if [ "${INCLUDE_CNAME}" == "1" ] && [ "${SKIP_CUSTOM}" == "1" ] - then - MESSAGE="${UI_INVALID_DNS_CONFIG} ${CONFIG_FILE}" - echo_fail - - exit_nochange - fi -} - -# Standard Output No Config -function start_gs_noconfig { - MESSAGE="${UI_CORE_INIT}" - echo_grav - cd ${LOCAL_FOLDR} - - MESSAGE="${UI_CORE_EVALUATING}" - echo_stat -} - -## Import Settings -function import_gs { - relocate_config_gs - - MESSAGE="${UI_CORE_LOADING} ${CONFIG_FILE}" - echo -en "${STAT} $MESSAGE" - if [ -f ${LOCAL_FOLDR}/settings/${CONFIG_FILE} ] - then - source ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - else - echo_fail - - MESSAGE="${UI_CORE_MISSING} ${CONFIG_FILE}" - echo_info - - TASKTYPE='CONFIG' - config_generate - fi -} - -function relocate_config_gs { - if [ -f ${LOCAL_FOLDR}/${CONFIG_FILE} ] - then - MESSAGE="${UI_CORE_RELOCATING} ${CONFIG_FILE}" - echo -en "${STAT} $MESSAGE" - - mv ${LOCAL_FOLDR}/${CONFIG_FILE} ${LOCAL_FOLDR}/settings/${CONFIG_FILE} - error_validate - fi - - if [ -f ${LOCAL_FOLDR}/${SYNCING_LOG} ] - then - MESSAGE="${UI_CORE_RELOCATING} ${SYNCING_LOG}" - echo -en "${STAT} $MESSAGE" - - mv ${LOCAL_FOLDR}/${SYNCING_LOG} ${LOG_PATH}/${SYNCING_LOG} - error_validate - fi - - if [ -f ${LOCAL_FOLDR}/${CRONJOB_LOG} ] - then - MESSAGE="${UI_CORE_RELOCATING} ${CRONJOB_LOG}" - echo -en "${STAT} $MESSAGE" - - mv ${LOCAL_FOLDR}/${CRONJOB_LOG} ${LOG_PATH}/${CRONJOB_LOG} - error_validate - fi - - if [ -f ${LOCAL_FOLDR}/${HISTORY_MD5} ] - then - MESSAGE="${UI_CORE_RELOCATING} ${HISTORY_MD5}" - echo -en "${STAT} $MESSAGE" - - mv ${LOCAL_FOLDR}/${HISTORY_MD5} ${LOG_PATH}/${HISTORY_MD5} - error_validate - fi -} - -## Invalid Tasks -function task_invalid { - echo_fail - list_gs_arguments -} - -## Error Validation -function error_validate { - if [ "$?" != "0" ] - then - echo_fail - exit 1 - else - echo_good - fi -} - -## Error Validation -function silent_error_validate { - if [ "$?" 
!= "0" ] - then - echo_fail - exit 1 - else - echo_sameline - fi -} - -function ph_type { - if [ "$PH_IN_TYPE" == "default" ] - then - PH_EXEC="${PIHOLE_BIN}" - elif [ "$PH_IN_TYPE" == "docker" ] - then - PH_EXEC="sudo ${DOCKER_BIN} exec $(sudo ${DOCKER_BIN} ps -qf name=${DOCKER_CON}) pihole" - elif [ "$PH_IN_TYPE" == "podman" ] - then - PH_EXEC="sudo ${PODMAN_BIN} exec ${DOCKER_CON} pihole" - fi - - if [ "$RH_IN_TYPE" == "default" ] - then - RH_EXEC="${RIHOLE_BIN}" - elif [ "$RH_IN_TYPE" == "docker" ] - then - RH_EXEC="sudo ${ROCKER_BIN} exec \$(sudo ${ROCKER_BIN} ps -qf name=${ROCKER_CON}) pihole" - elif [ "$RH_IN_TYPE" == "podman" ] - then - RH_EXEC="sudo ${RODMAN_BIN} exec ${ROCKER_CON} pihole" - fi -} - -## Compare Task -function task_compare { - TASKTYPE='COMPARE' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - show_target - validate_gs_folders - validate_ph_folders - - if [ "${INCLUDE_CNAME}" == "1" ] - then - validate_dns_folders - fi - - validate_os_sshpass - - previous_md5 - md5_compare - backup_cleanup - - exit_withchange -} \ No newline at end of file diff --git a/includes/gs-exit.sh b/includes/gs-exit.sh deleted file mode 100644 index 5d11416..0000000 --- a/includes/gs-exit.sh +++ /dev/null @@ -1,64 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-exit.sh ################################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## No Changes Made -function exit_nochange { - SCRIPT_END=$SECONDS - let SCRIPT_RUN=SCRIPT_END-SCRIPT_START - - if [ "${TASKTYPE}" == "" ] - then - MESSAGE="${PROGRAM} ${UI_EXIT_ABORT} ${UI_EXIT_CALC_END} ${SCRIPT_RUN} ${UI_EXIT_CALC_TIMER}" - else - MESSAGE="${PROGRAM} ${TASKTYPE} ${UI_EXIT_ABORT} ${UI_EXIT_CALC_END} ${SCRIPT_RUN} ${UI_EXIT_CALC_TIMER}" - fi - - echo_grav - exit 0 -} - -## Changes Made -function exit_withchange { - SCRIPT_END=$SECONDS - let SCRIPT_RUN=SCRIPT_END-SCRIPT_START - - if [ "${TASKTYPE}" == "" ] - then - MESSAGE="${PROGRAM} ${UI_EXIT_COMPLETE} ${UI_EXIT_CALC_END} ${SCRIPT_RUN} ${UI_EXIT_CALC_TIMER}" - else - MESSAGE="${PROGRAM} ${TASKTYPE} ${UI_EXIT_COMPLETE} ${UI_EXIT_CALC_END} ${SCRIPT_RUN} ${UI_EXIT_CALC_TIMER}" - fi - - echo_grav - exit 0 -} - -## List GS Arguments -function list_gs_arguments { - echo -e "Usage: $0 [options]" - echo -e "Example: '$0 pull'" - echo_lines - echo -e "Setup Options:" - echo -e " ${YELLOW}config${NC} Creates a new ${PROGRAM} configuration file" - echo -e " ${YELLOW}automate${NC} Schedules the ${PROGRAM} replication task using systemd" - echo -e " ${YELLOW}cron${NC} Schedules the ${PROGRAM} replication task using crontab (legacy)" - echo -e " ${YELLOW}version${NC} Shows the installed version of ${PROGRAM} and check for updates" - echo -e " ${YELLOW}update${NC} Upgrades ${PROGRAM} to the latest available version using Git" - echo -e " ${YELLOW}dev${NC} Sets update command to use a development version of ${PROGRAM}" - echo -e " ${YELLOW}sudo${NC} Configures passwordless sudo for current user" - echo_blank - echo -e "Replication Options:" - echo -e " ${YELLOW}smart${NC} Detects Pi-hole changes on primary and secondary and then combines them" - echo -e " ${YELLOW}pull${NC} Brings the remote Pi-hole configuration to this server" - echo -e " ${YELLOW}push${NC} Sends the local Pi-hole configuration to the primary" - echo -e " ${YELLOW}compare${NC} Just checks for Pi-hole differences at each side without making changes" - echo_blank - echo -e 
"Debug Options:" - echo -e " ${YELLOW}logs${NC} Shows the recent successful replication jobs/times" - echo -e " ${YELLOW}info${NC} Shows information about the current configuration" - echo_lines - exit_nochange -} \ No newline at end of file diff --git a/includes/gs-hashing.sh b/includes/gs-hashing.sh deleted file mode 100644 index 808e61c..0000000 --- a/includes/gs-hashing.sh +++ /dev/null @@ -1,237 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-hasing.sh ############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Validate Sync Required -function md5_compare { - HASHMARK='0' - - MESSAGE="${UI_HASHING_HASHING} ${UI_GRAVITY_NAME}" - echo_stat - primaryDBMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RIHOLE_DIR}/${GRAVITY_FI}" | sed 's/\s.*$//') - error_validate - - MESSAGE="${UI_HASHING_COMPARING} ${UI_GRAVITY_NAME}" - echo_stat - secondDBMD5=$(md5sum ${PIHOLE_DIR}/${GRAVITY_FI} | sed 's/\s.*$//') - error_validate - - if [ "$primaryDBMD5" == "$last_primaryDBMD5" ] && [ "$secondDBMD5" == "$last_secondDBMD5" ] - then - HASHMARK=$((HASHMARK+0)) - else - MESSAGE="${UI_HASHING_DIFFERNCE} ${UI_GRAVITY_NAME}" - echo_warn - HASHMARK=$((HASHMARK+1)) - fi - - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ -f ${PIHOLE_DIR}/${CUSTOM_DNS} ] - then - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RIHOLE_DIR}/${CUSTOM_DNS} - then - REMOTE_CUSTOM_DNS="1" - MESSAGE="${UI_HASHING_HASHING} ${UI_CUSTOM_NAME}" - echo_stat - - primaryCLMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RIHOLE_DIR}/${CUSTOM_DNS} | sed 's/\s.*$//'") - error_validate - - MESSAGE="${UI_HASHING_COMPARING} ${UI_CUSTOM_NAME}" - echo_stat - secondCLMD5=$(md5sum ${PIHOLE_DIR}/${CUSTOM_DNS} | sed 's/\s.*$//') - error_validate - - if [ "$primaryCLMD5" == "$last_primaryCLMD5" ] && [ "$secondCLMD5" == "$last_secondCLMD5" ] - then - HASHMARK=$((HASHMARK+0)) - else - MESSAGE="${UI_HASHING_DIFFERNCE} ${UI_CUSTOM_NAME}" - echo_warn - HASHMARK=$((HASHMARK+1)) - fi - else - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - else - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RIHOLE_DIR}/${CUSTOM_DNS} - then - REMOTE_CUSTOM_DNS="1" - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_PRIMARY}" - HASHMARK=$((HASHMARK+1)) - echo_info - fi - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_SECONDARY}" - echo_info - fi - fi - - if [ "${SKIP_CUSTOM}" != '1' ] - then - if [ "${INCLUDE_CNAME}" == "1" ] - then - if [ -f ${DNSMAQ_DIR}/${CNAME_CONF} ] - then - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RNSMAQ_DIR}/${CNAME_CONF} - then - REMOTE_CNAME_DNS="1" - MESSAGE="${UI_HASHING_HASHING} ${UI_CNAME_NAME}" - echo_stat - - primaryCNMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RNSMAQ_DIR}/${CNAME_CONF} | sed 's/\s.*$//'") - error_validate - - MESSAGE="${UI_HASHING_COMPARING} ${UI_CNAME_NAME}" - echo_stat - secondCNMD5=$(md5sum ${DNSMAQ_DIR}/${CNAME_CONF} | sed 's/\s.*$//') - error_validate - - if [ "$primaryCNMD5" == "$last_primaryCNMD5" ] && [ 
"$secondCNMD5" == "$last_secondCNMD5" ] - then - HASHMARK=$((HASHMARK+0)) - else - MESSAGE="${UI_HASHING_DIFFERNCE} ${UI_CNAME_NAME}" - echo_warn - HASHMARK=$((HASHMARK+1)) - fi - else - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - else - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RNSMAQ_DIR}/${CNAME_CONF} - then - REMOTE_CNAME_DNS="1" - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_PRIMARY}" - HASHMARK=$((HASHMARK+1)) - echo_info - fi - - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_SECONDARY}" - echo_info - fi - fi - fi - - if [ "$HASHMARK" != "0" ] - then - MESSAGE="${UI_HASHING_REQUIRED}" - echo_warn - HASHMARK=$((HASHMARK+0)) - else - MESSAGE="${UI_HASHING_NOREP}" - echo_info - backup_cleanup - exit_nochange - fi -} - -function previous_md5 { - if [ -f "${LOG_PATH}/${HISTORY_MD5}" ] - then - last_primaryDBMD5=$(sed "1q;d" ${LOG_PATH}/${HISTORY_MD5}) - last_secondDBMD5=$(sed "2q;d" ${LOG_PATH}/${HISTORY_MD5}) - last_primaryCLMD5=$(sed "3q;d" ${LOG_PATH}/${HISTORY_MD5}) - last_secondCLMD5=$(sed "4q;d" ${LOG_PATH}/${HISTORY_MD5}) - last_primaryCNMD5=$(sed "5q;d" ${LOG_PATH}/${HISTORY_MD5}) - last_secondCNMD5=$(sed "6q;d" ${LOG_PATH}/${HISTORY_MD5}) - else - last_primaryDBMD5="0" - last_secondDBMD5="0" - last_primaryCLMD5="0" - last_secondCLMD5="0" - last_primaryCNMD5="0" - last_secondCNMD5="0" - fi -} - -function md5_recheck { - MESSAGE="${UI_HASHING_DIAGNOSTICS}" - echo_info - - HASHMARK='0' - - MESSAGE="${UI_HASHING_REHASHING} ${UI_GRAVITY_NAME}" - echo_stat - primaryDBMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RIHOLE_DIR}/${GRAVITY_FI}" | sed 's/\s.*$//') - error_validate - - MESSAGE="${UI_HASHING_RECOMPARING} ${UI_GRAVITY_NAME}" - echo_stat - secondDBMD5=$(md5sum ${PIHOLE_DIR}/${GRAVITY_FI} | sed 's/\s.*$//') - error_validate - - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ -f ${PIHOLE_DIR}/${CUSTOM_DNS} ] - then - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RIHOLE_DIR}/${CUSTOM_DNS} - then - REMOTE_CUSTOM_DNS="1" - MESSAGE="${UI_HASHING_REHASHING} ${UI_CUSTOM_NAME}" - echo_stat - - primaryCLMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RIHOLE_DIR}/${CUSTOM_DNS} | sed 's/\s.*$//'") - error_validate - - MESSAGE="${UI_HASHING_RECOMPARING} ${UI_CUSTOM_NAME}" - echo_stat - secondCLMD5=$(md5sum ${PIHOLE_DIR}/${CUSTOM_DNS} | sed 's/\s.*$//') - error_validate - else - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - else - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RIHOLE_DIR}/${CUSTOM_DNS} - then - REMOTE_CUSTOM_DNS="1" - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_DETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - MESSAGE="${UI_CUSTOM_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_SECONDARY}" - echo_info - fi - fi - - if [ "${SKIP_CUSTOM}" != '1' ] - then - if [ "${INCLUDE_CNAME}" == "1" ] - then - if [ -f ${DNSMAQ_DIR}/${CNAME_CONF} ] - then - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RNSMAQ_DIR}/${CNAME_CONF} - then - REMOTE_CNAME_DNS="1" - MESSAGE="${UI_HASHING_REHASHING} ${UI_CNAME_NAME}" - echo_stat - - primaryCNMD5=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" 
${REMOTE_USER}@${REMOTE_HOST} "md5sum ${RNSMAQ_DIR}/${CNAME_CONF} | sed 's/\s.*$//'") - error_validate - - MESSAGE="${UI_HASHING_RECOMPARING} ${UI_CNAME_NAME}" - echo_stat - secondCNMD5=$(md5sum ${DNSMAQ_DIR}/${CNAME_CONF} | sed 's/\s.*$//') - error_validate - else - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - else - if ${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} test -e ${RNSMAQ_DIR}/${CNAME_CONF} - then - REMOTE_CNAME_DNS="1" - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_PRIMARY}" - echo_info - fi - - MESSAGE="${UI_CNAME_NAME} ${UI_HASHING_NOTDETECTED} ${UI_HASHING_SECONDARY}" - echo_info - fi - fi - fi -} \ No newline at end of file diff --git a/includes/gs-intent.sh b/includes/gs-intent.sh deleted file mode 100644 index 131e74a..0000000 --- a/includes/gs-intent.sh +++ /dev/null @@ -1,41 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-intent.sh ############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Validate Intent -function intent_validate { - if [ "$VERIFY_PASS" == "0" ] - then - PHASER=$((( RANDOM % 4 ) + 1 )) - if [ "$PHASER" = "1" ] - then - INTENT="FIRE PHOTON TORPEDOS" - elif [ "$PHASER" = "2" ] - then - INTENT="FIRE ALL PHASERS" - elif [ "$PHASER" = "3" ] - then - INTENT="EJECT THE WARPCORE" - elif [ "$PHASER" = "4" ] - then - INTENT="ENGAGE TRACTOR BEAM" - fi - - MESSAGE="Type ${INTENT} to confirm" - echo_need - - read INPUT_INTENT - - if [ "${INPUT_INTENT}" != "${INTENT}" ] - then - MESSAGE="${TASKTYPE} excited" - echo_info - exit_nochange - fi - else - MESSAGE="Verification bypassed" - echo_warn - fi -} \ No newline at end of file diff --git a/includes/gs-logging.sh b/includes/gs-logging.sh deleted file mode 100644 index 405b0e3..0000000 --- a/includes/gs-logging.sh +++ /dev/null @@ -1,88 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-logging.sh ############################## - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
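For reference, a condensed sketch of the drift check the removed gs-hashing.sh performed: hash the primary's copy over SSH, hash the local copy, and flag replication only when either value differs from the hashes recorded after the previous run. The host, key handling, and file paths below are examples rather than the script's configuration.

# Sketch of the md5 drift check (example host and paths)
REMOTE='pi@192.168.1.10'
DB='/etc/pihole/gravity.db'
HISTORY="$HOME/gravity-sync.md5"   # the script keeps this under LOG_PATH/HISTORY_MD5

primaryDBMD5=$(ssh "${REMOTE}" "md5sum ${DB}" | sed 's/\s.*$//')
secondDBMD5=$(md5sum "${DB}" | sed 's/\s.*$//')

last_primaryDBMD5=$(sed '1q;d' "${HISTORY}" 2>/dev/null)
last_secondDBMD5=$(sed '2q;d' "${HISTORY}" 2>/dev/null)

if [ "${primaryDBMD5}" != "${last_primaryDBMD5}" ] || [ "${secondDBMD5}" != "${last_secondDBMD5}" ]; then
    echo "Differences detected in the Domain Database, replication is required"
fi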
- -## Logs Task -function task_logs { - TASKTYPE='LOGS' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - logs_gs -} - -## Core Logging -### Write Logs Out -function logs_export { - if [ "${TASKTYPE}" != "BACKUP" ] - then - MESSAGE="${UI_LOGGING_HASHES}" - echo_stat - rm -f ${LOG_PATH}/${HISTORY_MD5} - echo -e ${primaryDBMD5} >> ${LOG_PATH}/${HISTORY_MD5} - echo -e ${secondDBMD5} >> ${LOG_PATH}/${HISTORY_MD5} - echo -e ${primaryCLMD5} >> ${LOG_PATH}/${HISTORY_MD5} - echo -e ${secondCLMD5} >> ${LOG_PATH}/${HISTORY_MD5} - echo -e ${primaryCNMD5} >> ${LOG_PATH}/${HISTORY_MD5} - echo -e ${secondCNMD5} >> ${LOG_PATH}/${HISTORY_MD5} - error_validate - fi - - MESSAGE="${UI_LOGGING_SUCCESS} ${TASKTYPE}" - echo_stat - echo -e $(date) "[${TASKTYPE}]" >> ${LOG_PATH}/${SYNCING_LOG} - error_validate -} - -### Output Sync Logs -function logs_gs { - MESSAGE="${UI_LOGGING_DISPLAY}" - echo_info - - echo_lines - echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}SMART${NC}" - tail -n 7 "${LOG_PATH}/${SYNCING_LOG}" | grep SMART - echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}PULL${NC}" - tail -n 7 "${LOG_PATH}/${SYNCING_LOG}" | grep PULL - echo -e "${UI_LOGGING_RECENT_COMPLETE} ${YELLOW}PUSH${NC}" - tail -n 7 "${LOG_PATH}/${SYNCING_LOG}" | grep PUSH - echo_lines - - exit_nochange -} - -## Crontab Logs -### Core Crontab Logs -function show_crontab { - MESSAGE="${UI_LOGGING_DISPLAY}" - echo_stat - - if [ -f ${LOG_PATH}/${CRONJOB_LOG} ] - then - if [ -s ${LOG_PATH}/${CRONJOB_LOG} ] - echo_good - - echo_lines - date -r ${LOG_PATH}/${CRONJOB_LOG} - cat ${LOG_PATH}/${CRONJOB_LOG} - echo_lines - - exit_nochange - then - echo_fail - - MESSAGE="${LOG_PATH}/${CRONJOB_LOG} ${UI_LOGGING_EMPTY}" - echo_info - - exit_nochange - fi - else - echo_fail - - MESSAGE="${LOG_PATH}/${CRONJOB_LOG} ${UI_LOGGING_MISSING}" - echo_info - - exit_nochange - fi -} diff --git a/includes/gs-pull.sh b/includes/gs-pull.sh deleted file mode 100644 index f423bab..0000000 --- a/includes/gs-pull.sh +++ /dev/null @@ -1,146 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-pull.sh ################################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
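After a successful job, the removed logs_export rewrote the hash history and appended a timestamped entry that the logs task later displays. A minimal sketch of that bookkeeping, with example paths standing in for LOG_PATH, HISTORY_MD5, and SYNCING_LOG:

# Rebuild the six-line hash history: primary/secondary hashes for gravity.db,
# custom.list and the CNAME conf, in that order (example paths only)
HISTORY="$HOME/gravity-sync.md5"
SYNCLOG="$HOME/gravity-sync.log"

rm -f "${HISTORY}"
printf '%s\n' "${primaryDBMD5}" "${secondDBMD5}" \
              "${primaryCLMD5}" "${secondCLMD5}" \
              "${primaryCNMD5}" "${secondCNMD5}" >> "${HISTORY}"

# One timestamped line per completed job, surfaced later by 'gravity-sync logs'
echo "$(date) [PULL]" >> "${SYNCLOG}"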
- -## Pull Task -function task_pull { - TASKTYPE='PULL' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - show_target - validate_gs_folders - validate_ph_folders - - if [ "${INCLUDE_CNAME}" == "1" ] - then - validate_dns_folders - fi - - validate_sqlite3 - validate_os_sshpass - - pull_gs - exit -} - -## Pull Gravity -function pull_gs_grav { - - backup_local_gravity - backup_remote_gravity - backup_remote_gravity_integrity - - MESSAGE="${UI_PULL_PRIMARY} ${UI_GRAVITY_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${GRAVITY_FI}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${GRAVITY_FI}.pull" - create_rsynccmd - - MESSAGE="${UI_REPLACE_SECONDARY} ${UI_GRAVITY_NAME}" - echo_stat - sudo cp ${LOCAL_FOLDR}/${BACKUP_FOLD}/${GRAVITY_FI}.pull ${PIHOLE_DIR}/${GRAVITY_FI} >/dev/null 2>&1 - error_validate - - validate_gravity_permissions -} - -## Pull Custom -function pull_gs_cust { - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ "$REMOTE_CUSTOM_DNS" == "1" ] - then - backup_local_custom - backup_remote_custom - - MESSAGE="${UI_PULL_PRIMARY} ${UI_CUSTOM_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${CUSTOM_DNS}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${CUSTOM_DNS}.pull" - create_rsynccmd - - MESSAGE="${UI_REPLACE_SECONDARY} ${UI_CUSTOM_NAME}" - echo_stat - sudo cp ${LOCAL_FOLDR}/${BACKUP_FOLD}/${CUSTOM_DNS}.pull ${PIHOLE_DIR}/${CUSTOM_DNS} >/dev/null 2>&1 - error_validate - - validate_custom_permissions - fi - fi -} - -## Pull CNAME -function pull_gs_cname { - if [ "${INCLUDE_CNAME}" == '1' ] - then - if [ "$REMOTE_CNAME_DNS" == "1" ] - then - backup_local_cname - backup_remote_cname - - MESSAGE="${UI_PULL_PRIMARY} ${UI_CNAME_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/dnsmasq.d-${CNAME_CONF}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${CNAME_CONF}.pull" - create_rsynccmd - - MESSAGE="${UI_REPLACE_SECONDARY} ${UI_CNAME_NAME}" - echo_stat - sudo cp ${LOCAL_FOLDR}/${BACKUP_FOLD}/${CNAME_CONF}.pull ${DNSMAQ_DIR}/${CNAME_CONF} >/dev/null 2>&1 - error_validate - - validate_cname_permissions - fi - fi -} - -## Pull Reload -function pull_gs_reload { - MESSAGE="${UI_PULL_RELOAD_WAIT}" - echo_info - sleep 1 - - MESSAGE="${UI_FTLDNS_CONFIG_UPDATE}" - echo_stat - ${PH_EXEC} restartdns reload-lists >/dev/null 2>&1 - error_validate - - if [ "${TASKTYPE}" == SMART ] - then - if [ "${PRICLCHANGE}" == "1" ] || [ "${SECCLCHANGE}" == "1" ] || [ "${PRICNCHANGE}" == "1" ] || [ "${SECCNCHANGE}" == "1" ] - then - MESSAGE="${UI_FTLDNS_CONFIG_RELOAD}" - echo_stat - ${PH_EXEC} restartdns >/dev/null 2>&1 - error_validate - fi - else - MESSAGE="${UI_FTLDNS_CONFIG_RELOAD}" - echo_stat - ${PH_EXEC} restartdns >/dev/null 2>&1 - error_validate - fi -} - -## Pull Function -function pull_gs { - previous_md5 - md5_compare - - backup_settime - pull_gs_grav - pull_gs_cust - pull_gs_cname - pull_gs_reload - md5_recheck - backup_cleanup - - logs_export - exit_withchange -} \ No newline at end of file diff --git a/includes/gs-purge.sh b/includes/gs-purge.sh deleted file mode 100644 index 4b4d562..0000000 --- a/includes/gs-purge.sh +++ /dev/null @@ -1,62 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-purge.sh ################################ - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
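A condensed sketch of the pull path implemented above: the primary's backup copy is staged locally with rsync over SSH, copied into place, re-permissioned, and FTL is told to reload its lists. The host, key, and paths below are examples, not the script's configuration.

# Sketch of the pull flow (example host, key, and paths)
REMOTE='pi@192.168.1.10'
SRC='/etc/pihole/gravity.db.backup'
STAGE='/etc/gravity-sync/backup/gravity.db.pull'

# Stage the primary's backup copy locally
rsync --rsync-path="rsync" -e "ssh -p 22 -i $HOME/.ssh/id_rsa" "${REMOTE}:${SRC}" "${STAGE}"

# Replace the secondary database, then fix ownership and permissions
sudo cp "${STAGE}" /etc/pihole/gravity.db
sudo chown pihole:pihole /etc/pihole/gravity.db   # example owner; the script uses FILE_OWNER
sudo chmod 664 /etc/pihole/gravity.db

# Have FTL pick up the new lists without a full restart
pihole restartdns reload-lists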
- -## Purge Task -function task_purge { - TASKTYPE="THE-PURGE" - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - echo_lines - echo -e "THIS WILL RESET YOUR ENTIRE GRAVITY SYNC INSTALLATION" - echo -e "This will remove:" - echo -e "- Your ${CONFIG_FILE} file." - - if [ -f "${LOCAL_FOLDR}/dev" ] - then - echo -e "- Your development branch updater." - elif [ -f "${LOCAL_FOLDR}/beta" ] - then - echo -e "- Your beta branch updater." - fi - - echo -e "- All cronjob/automation tasks." - echo -e "- All job history/logs." - echo -e "- Associated SSH id_rsa keys." - echo -e "" - echo -e "This function cannot be undone!" - echo -e "" - echo -e "YOU WILL NEED TO REBUILD GRAVITY SYNC AFTER EXECUTION" - echo -e "Pi-hole binaries, configuration and services ARE NOT impacted!" - echo -e "Your device will continue to resolve and block DNS requests," - echo -e "but your ${UI_GRAVITY_NAME} and ${UI_CUSTOM_NAME} WILL NOT sync anymore," - echo -e "until you reconfigure Gravity Sync on this device." - echo_lines - - intent_validate - - MESSAGE="${UI_PURGE_CLEANING_DIR}" - echo_stat - - git clean -f -X -d >/dev/null 2>&1 - error_validate - - clear_cron - - MESSAGE="${UI_PURGE_DELETE_SSH_KEYS}" - echo_stat - - rm -f $HOME/${SSH_PKIF} >/dev/null 2>&1 - rm -f $HOME/${SSH_PKIF}.pub >/dev/null 2>&1 - error_validate - - MESSAGE="${UI_PURGE_MATRIX_ALIGNMENT}" - echo_info - - sleep 1 - - update_gs -} \ No newline at end of file diff --git a/includes/gs-push.sh b/includes/gs-push.sh deleted file mode 100644 index 58ffb2a..0000000 --- a/includes/gs-push.sh +++ /dev/null @@ -1,187 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-push.sh ################################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
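The purge task above is deliberately destructive; in effect it performs the following before re-running the updater. Paths are illustrative, and the key file removed is whatever SSH_PKIF points at.

# Reset the installation: remove everything Git treats as ignored (config, logs, backups)
cd /etc/gravity-sync            # example install path
git clean -f -X -d

# Remove the replication key-pair (example path)
rm -f "$HOME/.ssh/id_rsa" "$HOME/.ssh/id_rsa.pub"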
- -## Push Task -function task_push { - TASKTYPE='PUSH' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - show_target - validate_gs_folders - validate_ph_folders - - if [ "${INCLUDE_CNAME}" == "1" ] - then - validate_dns_folders - fi - - validate_sqlite3 - validate_os_sshpass - - push_gs - exit -} - -## Push Gravity -function push_gs_grav { - backup_remote_gravity - backup_local_gravity - backup_local_gravity_integrity - - MESSAGE="${UI_BACKUP_COPY} ${UI_GRAVITY_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${GRAVITY_FI}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${GRAVITY_FI}.push" - create_rsynccmd - - MESSAGE="${UI_PUSH_SECONDARY} ${UI_GRAVITY_NAME}" - echo_stat - RSYNC_REPATH="sudo rsync" - RSYNC_SOURCE="${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${GRAVITY_FI}.backup" - RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${GRAVITY_FI}" - create_rsynccmd - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_GRAVITY_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chown ${RILE_OWNER} ${RIHOLE_DIR}/${GRAVITY_FI}" - create_sshcmd - - MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_GRAVITY_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chmod 664 ${RIHOLE_DIR}/${GRAVITY_FI}" - create_sshcmd -} - -## Push Custom -function push_gs_cust { - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ "$REMOTE_CUSTOM_DNS" == "1" ] - then - backup_remote_custom - backup_local_custom - - MESSAGE="${UI_BACKUP_COPY} ${UI_CUSTOM_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${CUSTOM_DNS}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${CUSTOM_DNS}.push" - create_rsynccmd - - MESSAGE="${UI_PUSH_SECONDARY} ${UI_CUSTOM_NAME}" - echo_stat - RSYNC_REPATH="sudo rsync" - RSYNC_SOURCE="${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${CUSTOM_DNS}.backup" - RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/${CUSTOM_DNS}" - create_rsynccmd - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_CUSTOM_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chown root:root ${RIHOLE_DIR}/${CUSTOM_DNS}" - create_sshcmd - - MESSAGE="${UI_SET_FILE_PERMISSIONS} ${UI_CUSTOM_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chmod 644 ${RIHOLE_DIR}/${CUSTOM_DNS}" - create_sshcmd - fi - fi -} - -## Push Custom -function push_gs_cname { - if [ "${INCLUDE_CNAME}" == '1' ] - then - if [ "$REMOTE_CNAME_DNS" == "1" ] - then - backup_remote_cname - backup_local_cname - - MESSAGE="${UI_BACKUP_COPY} ${UI_CNAME_NAME}" - echo_stat - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:${RIHOLE_DIR}/dnsmasq.d-${CNAME_CONF}.backup" - RSYNC_TARGET="${LOCAL_FOLDR}/${BACKUP_FOLD}/${CNAME_CONF}.push" - create_rsynccmd - - MESSAGE="${UI_PUSH_SECONDARY} ${UI_CNAME_NAME}" - echo_stat - RSYNC_REPATH="sudo rsync" - RSYNC_SOURCE="${LOCAL_FOLDR}/${BACKUP_FOLD}/${BACKUPTIMESTAMP}-${CNAME_CONF}.backup" - RSYNC_TARGET="${REMOTE_USER}@${REMOTE_HOST}:${RNSMAQ_DIR}/${CNAME_CONF}" - create_rsynccmd - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_CNAME_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chown root:root ${RNSMAQ_DIR}/${CNAME_CONF}" - create_sshcmd - - - MESSAGE="${UI_SET_FILE_PERMISSIONS} ${UI_CNAME_NAME}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="sudo chmod 644 ${RNSMAQ_DIR}/${CNAME_CONF}" - create_sshcmd - fi - fi -} - -## Push Reload -function push_gs_reload { - MESSAGE="${UI_PUSH_RELOAD_WAIT}" - echo_info 
- sleep 1 - - MESSAGE="${UI_FTLDNS_CONFIG_PUSH_UPDATE}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="${RH_EXEC} restartdns reload-lists" - create_sshcmd - - if [ "${TASKTYPE}" == SMART ] - then - if [ "${PRICLCHANGE}" == "1" ] || [ "${SECCLCHANGE}" == "1" ] || [ "${PRICNCHANGE}" == "1" ] || [ "${SECCNCHANGE}" == "1" ] - then - MESSAGE="${UI_FTLDNS_CONFIG_PUSH_RELOAD}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="${RH_EXEC} restartdns" - create_sshcmd - fi - else - MESSAGE="${UI_FTLDNS_CONFIG_PUSH_RELOAD}" - echo_stat - CMD_TIMEOUT=$BACKUP_TIMEOUT - CMD_REQUESTED="${RH_EXEC} restartdns" - create_sshcmd - fi -} - -## Push Function -function push_gs { - previous_md5 - md5_compare - backup_settime - - intent_validate - - push_gs_grav - push_gs_cust - push_gs_cname - push_gs_reload - md5_recheck - backup_cleanup - - logs_export - exit_withchange -} \ No newline at end of file diff --git a/includes/gs-root.sh b/includes/gs-root.sh deleted file mode 100644 index 342dbd0..0000000 --- a/includes/gs-root.sh +++ /dev/null @@ -1,71 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-root.sh ################################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Sudo Creation Task -function task_sudo { - TASKTYPE='SUDO' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - MESSAGE="Creating sudoers.d template file" - echo_stat - - NEW_SUDO_USER=$(whoami) - echo -e "${NEW_SUDO_USER} ALL=(ALL) NOPASSWD: ALL" > ${LOCAL_FOLDR}/templates/gs-nopasswd.sudo - error_validate - - MESSAGE="Installing sudoers.d file on $HOSTNAME" - echo_stat - - sudo install -m 0440 ${LOCAL_FOLDR}/templates/gs-nopasswd.sudo /etc/sudoers.d/gs-nopasswd - error_validate - - exit_withchange -} - -## Root Check -function root_check { - if [ ! "$EUID" -ne 0 ] - then - TASKTYPE='ROOT' - MESSAGE="${MESSAGE} ${TASKTYPE}" - echo_fail - - MESSAGE="${PROGRAM} should not run as 'root'" - echo_warn - - exit_nochange - fi -} - -function new_root_check { - CURRENTUSER=$(whoami) - if [ ! "$EUID" -ne 0 ] - then - LOCALADMIN="" - else - # Check Sudo - SUDOCHECK=$(groups ${CURRENTUSER} | grep -e 'sudo' -e 'wheel') - if [ "$SUDOCHECK" == "" ] - then - LOCALADMIN="nosudo" - else - LOCALADMIN="sudo" - fi - fi - - if [ "$LOCALADMIN" == "nosudo" ] - then - TASKTYPE='ROOT' - MESSAGE="${MESSAGE} ${TASKTYPE}" - echo_fail - - MESSAGE="${CURRENTUSER} has insufficent user rights for ${PROGRAM}" - echo_warn - - exit_nochange - fi -} \ No newline at end of file diff --git a/includes/gs-smart.sh b/includes/gs-smart.sh deleted file mode 100644 index b0dd02f..0000000 --- a/includes/gs-smart.sh +++ /dev/null @@ -1,219 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-smart.sh ################################ - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! 
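A minimal sketch of what the removed task_sudo sets up so the remote chown, chmod, and restartdns calls above can run non-interactively: a NOPASSWD rule for the replication user, installed with the 0440 mode that sudo requires. The temporary file path is illustrative.

# Grant the current user passwordless sudo (sketch of task_sudo)
NEW_SUDO_USER=$(whoami)
echo "${NEW_SUDO_USER} ALL=(ALL) NOPASSWD: ALL" > /tmp/gs-nopasswd.sudo

# Files under /etc/sudoers.d must be mode 0440
sudo install -m 0440 /tmp/gs-nopasswd.sudo /etc/sudoers.d/gs-nopasswd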
- -## Smart Task -function task_smart { - TASKTYPE='SMART' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - show_target - validate_gs_folders - validate_ph_folders - - if [ "${INCLUDE_CNAME}" == "1" ] - then - validate_dns_folders - fi - - validate_sqlite3 - validate_os_sshpass - - smart_gs - exit -} - -## Smart Sync Function -function smart_gs { - MESSAGE="Starting ${TASKTYPE} Analysis" - echo_info - - previous_md5 - md5_compare - backup_settime - - PRIDBCHANGE="0" - SECDBCHANGE="0" - PRICLCHANGE="0" - SECCLCHANGE="0" - PRICNCHANGE="0" - SECCNCHANGE="0" - - if [ "${primaryDBMD5}" != "${last_primaryDBMD5}" ] - then - PRIDBCHANGE="1" - fi - - if [ "${secondDBMD5}" != "${last_secondDBMD5}" ] - then - SECDBCHANGE="1" - fi - - if [ "${PRIDBCHANGE}" == "${SECDBCHANGE}" ] - then - if [ "${PRIDBCHANGE}" != "0" ] - then - MESSAGE="Both ${GRAVITY_FI} Have Changed" - echo_warn - - PRIDBDATE=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${RIHOLE_DIR}/${GRAVITY_FI}") - SECDBDATE=$(stat -c %Y ${PIHOLE_DIR}/${GRAVITY_FI}) - - if (( "$PRIDBDATE" >= "$SECDBDATE" )) - then - MESSAGE="Primary ${GRAVITY_FI} Last Changed" - echo_warn - - pull_gs_grav - PULLRESTART="1" - else - MESSAGE="Secondary ${GRAVITY_FI} Last Changed" - echo_warn - - push_gs_grav - PUSHRESTART="1" - fi - fi - else - if [ "${PRIDBCHANGE}" != "0" ] - then - pull_gs_grav - PULLRESTART="1" - elif [ "${SECDBCHANGE}" != "0" ] - then - push_gs_grav - PUSHRESTART="1" - fi - fi - - if [ "${primaryCLMD5}" != "${last_primaryCLMD5}" ] - then - PRICLCHANGE="1" - fi - - if [ "${secondCLMD5}" != "${last_secondCLMD5}" ] - then - SECCLCHANGE="1" - fi - - if [ "$SKIP_CUSTOM" != '1' ] - then - if [ -f "${PIHOLE_DIR}/${CUSTOM_DNS}" ] - then - if [ "${PRICLCHANGE}" == "${SECCLCHANGE}" ] - then - if [ "${PRICLCHANGE}" != "0" ] - then - MESSAGE="Both ${CUSTOM_DNS} Have Changed" - echo_warn - - PRICLDATE=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${RIHOLE_DIR}/${CUSTOM_DNS}") - SECCLDATE=$(stat -c %Y ${PIHOLE_DIR}/${CUSTOM_DNS}) - - if (( "$PRICLDATE" >= "$SECCLDATE" )) - then - MESSAGE="Primary ${CUSTOM_DNS} Last Changed" - echo_warn - - pull_gs_cust - PULLRESTART="1" - else - MESSAGE="Secondary ${CUSTOM_DNS} Last Changed" - echo_warn - - push_gs_cust - PUSHRESTART="1" - fi - fi - else - if [ "${PRICLCHANGE}" != "0" ] - then - pull_gs_cust - PULLRESTART="1" - elif [ "${SECCLCHANGE}" != "0" ] - then - push_gs_cust - PUSHRESTART="1" - fi - fi - else - pull_gs_cust - PULLRESTART="1" - fi - fi - - if [ "${primaryCNMD5}" != "${last_primaryCNMD5}" ] - then - PRICNCHANGE="1" - fi - - if [ "${secondCNMD5}" != "${last_secondCNMD5}" ] - then - SECCNCHANGE="1" - fi - - if [ "$INCLUDE_CNAME" == "1" ] - then - if [ -f "${DNSMAQ_DIR}/${CNAME_CONF}" ] - then - if [ "${PRICNCHANGE}" == "${SECCNCHANGE}" ] - then - if [ "${PRICNCHANGE}" != "0" ] - then - MESSAGE="Both ${CNAME_CONF} Have Changed" - echo_warn - - PRICNDATE=$(${SSHPASSWORD} ${SSH_CMD} -p ${SSH_PORT} -i "$HOME/${SSH_PKIF}" ${REMOTE_USER}@${REMOTE_HOST} "stat -c %Y ${RNSMAQ_DIR}/${CNAME_CONF}") - SECCNDATE=$(stat -c %Y ${DNSMAQ_DIR}/${CNAME_CONF}) - - if (( "$PRICNDATE" >= "$SECCNDATE" )) - then - MESSAGE="Primary ${CNAME_CONF} Last Changed" - echo_warn - - pull_gs_cname - PULLRESTART="1" - else - MESSAGE="Secondary ${CNAME_CONF} Last Changed" - echo_warn - - push_gs_cname - PUSHRESTART="1" - fi - fi - else - if [ "${PRICNCHANGE}" != "0" ] - then - pull_gs_cname - PULLRESTART="1" - elif [ 
"${SECCNCHANGE}" != "0" ] - then - push_gs_cname - PUSHRESTART="1" - fi - fi - else - pull_gs_cname - PULLRESTART="1" - fi - fi - - if [ "$PULLRESTART" == "1" ] - then - pull_gs_reload - fi - - if [ "$PUSHRESTART" == "1" ] - then - push_gs_reload - fi - - md5_recheck - backup_cleanup - - logs_export - exit_withchange -} \ No newline at end of file diff --git a/includes/gs-ssh.sh b/includes/gs-ssh.sh deleted file mode 100644 index 2fd7da5..0000000 --- a/includes/gs-ssh.sh +++ /dev/null @@ -1,149 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-ssh.sh ################################## - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Determine SSH Pathways -function create_sshcmd { - timeout --preserve-status ${CMD_TIMEOUT} ${SSH_CMD} -p ${SSH_PORT} -i $HOME/${SSH_PKIF} -o StrictHostKeyChecking=no ${REMOTE_USER}@${REMOTE_HOST} "${CMD_REQUESTED}" - error_validate -} - -## Determine SSH Pathways -function create_rsynccmd { - rsync --rsync-path="${RSYNC_REPATH}" -e "${SSH_CMD} -p ${SSH_PORT} -i $HOME/${SSH_PKIF}" ${RSYNC_SOURCE} ${RSYNC_TARGET} >/dev/null 2>&1 - error_validate -} - -## Detect SSH-KEYGEN -function detect_sshkeygen { - MESSAGE="Validating SSH-KEYGEN install" - echo_stat - - if hash ssh-keygen >/dev/null 2>&1 - then - echo_good - else - echo_fail - MESSAGE="SSH-KEYGEN is required on $HOSTNAME" - echo_info - - exit_nochange - fi -} - -function generate_sshkey { - if [ -z $INPUT_REMOTE_PASS ] - then - if [ -f $HOME/${SSH_PKIF} ] - then - MESSAGE="Using existing ~/${SSH_PKIF} file" - echo_good_clean - else - if hash ssh-keygen >/dev/null 2>&1 - then - MESSAGE="Generating ~/${SSH_PKIF} file" - echo_stat - - ssh-keygen -q -P "" -t rsa -f $HOME/${SSH_PKIF} >/dev/null 2>&1 - error_validate - else - MESSAGE="No SSH-KEYGEN available" - echo_warn - exit_nochange - fi - fi - fi -} - -function export_sshkey { - if [ -z $REMOTE_PASS ] - then - if [ -f $HOME/${SSH_PKIF} ] - then - MESSAGE="Registering key-pair to ${REMOTE_HOST}" - echo_info - - ssh-copy-id -f -p ${SSH_PORT} -i $HOME/${SSH_PKIF}.pub ${REMOTE_USER}@${REMOTE_HOST} - else - MESSAGE="Error registering key-pair to ${REMOTE_HOST}" - echo_warn - fi - fi -} - -## Detect SSH & RSYNC -function detect_ssh { - MESSAGE="${UI_VALIDATING} ${UI_CORE_OPENSSH}" - echo_stat - - if hash ssh 2>/dev/null - then - echo_good - SSH_CMD='ssh' - elif hash dbclient 2>/dev/null - then - echo_fail - - MESSAGE="${UI_DROPBEAR_DEP}" - echo_warn - exit_nochange - else - echo_fail - exit_nochange - fi - - MESSAGE="${UI_VALIDATING} ${UI_CORE_RSYNC}" - echo_stat - - if hash rsync 2>/dev/null - then - echo_good - else - echo_fail - - echo_warn - exit_nochange - fi -} - -function detect_remotersync { - MESSAGE="Creating test file on ${REMOTE_HOST}" - echo_stat - - CMD_TIMEOUT='15' - CMD_REQUESTED="touch ~/gs.test" - create_sshcmd - - MESSAGE="If pull test fails ensure RSYNC is installed on ${REMOTE_HOST}" - echo_warn - - MESSAGE="Pulling test file to $HOSTNAME" - echo_stat - - RSYNC_REPATH="rsync" - RSYNC_SOURCE="${REMOTE_USER}@${REMOTE_HOST}:~/gs.test" - RSYNC_TARGET="${LOCAL_FOLDR}/gs.test" - create_rsynccmd - - MESSAGE="Cleaning up local test file" - echo_stat - - rm ${LOCAL_FOLDR}/gs.test - error_validate - - MESSAGE="Cleaning up remote test file" - echo_stat - - CMD_TIMEOUT='15' - CMD_REQUESTED="rm ~/gs.test" - create_sshcmd -} - -function show_target { - MESSAGE="${UI_CORE_UC_PRIMARY} ${UI_CORE_APP}: 
${REMOTE_USER}@${REMOTE_HOST}" - echo_info - - detect_ssh -} \ No newline at end of file diff --git a/includes/gs-ui.sh b/includes/gs-ui.sh deleted file mode 100644 index 8946f7f..0000000 --- a/includes/gs-ui.sh +++ /dev/null @@ -1,164 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-ui.sh ################################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -# Interface Settings -UI_GRAVITY_NAME='Domain Database' -UI_CUSTOM_NAME='Local DNS Records' -UI_CNAME_NAME='Local DNS CNAMEs' - -# Core -UI_INVALID_SELECTION='Invalid selection' -UI_INVALID_DNS_CONFIG='Invalid DNS replication settings in' -UI_CORE_MISSING='Missing' -UI_CORE_LOADING='Loading' -UI_CORE_EVALUATING='Evaluating arguments' -UI_CORE_INIT="Initalizing ${PROGRAM} (${VERSION})" -UI_CORE_RELOCATING='Relocating' -UI_CORE_APP='Pi-hole' -UI_CORE_APP_DNS='DNSMASQ' -UI_CORE_APP_SQL='SQLITE3' -UI_CORE_OPENSSH='OpenSSH' -UI_CORE_RSYNC='RSYNC' -UI_CORE_DOCKER='Docker' -UI_CORE_PODMAN='Podman' -UI_CORE_UC_PRIMARY='Primary' -UI_CORE_UC_SECONDARY='Secondary' - -# Exit -UI_EXIT_CALC_END='after' -UI_EXIT_ABORT='exited' -UI_EXIT_COMPLETE='completed' -UI_EXIT_CALC_TIMER='seconds' - -# Hashing -UI_HASHING_HASHING='Hashing the primary' -UI_HASHING_COMPARING='Comparing to the secondary' -UI_HASHING_DIFFERNCE='Differences detected in the' -UI_HASHING_DETECTED='has been detected on the' -UI_HASHING_NOTDETECTED='not detected on the' -UI_HASHING_PRIMARY='primary host' -UI_HASHING_SECONDARY='secondary host' -UI_HASHING_REQUIRED='Replication of Pi-hole settings is required' -UI_HASHING_NOREP='No replication is required at this time' -UI_HASHING_DIAGNOSTICS='Performing replicator diagnostics' -UI_HASHING_REHASHING='Rehashing the primary' -UI_HASHING_RECOMPARING='Recomparing to the secondary' - -# Validation -UI_VALIDATING='Validating configuration of' -UI_VALIDATING_FAIL_CONTAINER='Unable to validate running container instance of' -UI_VALIDATING_FAIL_FOLDER='Unable to validate configuration folder for' -UI_VALIDATING_FAIL_BINARY='Unable to validate the availibility of' -UI_COMPENSATE='Attempting to compensate' -UI_SET_FILE_OWNERSHIP='Setting file ownership on' -UI_SET_FILE_PERMISSION='Setting file permissions on' -UI_VAL_FILE_OWNERSHIP='Validating file ownership on' -UI_VAL_FILE_PERMISSION='Validating file permissions on' -UI_VAL_GS_FOLDERS="Validating ${PROGRAM} folders on $HOSTNAME" -UI_VAL_GS_FOLDERS_FAIL="Unable to validate ${PROGRAM} folders on $HOSTNAME" - -# Configuration -UI_DROPBEAR_DEP='Dropbear support has been deprecated' - -# Pull/Push -UI_PULL_PRIMARY='Pulling the primary' -UI_PUSH_SECONDARY='Pushing the secondary' -UI_REPLACE_SECONDARY='Replacing the secondary' -UI_PULL_RELOAD_WAIT='Isolating regeneration pathways' -UI_PUSH_RELOAD_WAIT='Inverting tachyon pulses' -UI_FTLDNS_CONFIG_UPDATE='Updating secondary FTLDNS configuration' -UI_FTLDNS_CONFIG_RELOAD='Reloading secondary FTLDNS services' -UI_FTLDNS_CONFIG_PUSH_UPDATE='Updating primary FTLDNS configuration' -UI_FTLDNS_CONFIG_PUSH_RELOAD='Reloading primary FTLDNS services' - -# Logging -UI_LOGGING_SUCCESS='Logging successful' -UI_LOGGING_HASHES='Saving the updated hashes from this replication' -UI_LOGGING_DISPLAY='Displaying output of previous jobs' -UI_LOGGING_EMPTY='is empty' -UI_LOGGING_MISSING='is missing' -UI_LOGGING_RECENT_COMPLETE='Recent complete executions of' - -# Backup -UI_BACKUP_PRIMARY='Performing copy of primary' 
-UI_BACKUP_COPY='Pulling copy of primary' -UI_BACKUP_SECONDARY='Performing copy of secondary' -UI_BACKUP_PURGE='Purging unused data files from this Pi-hole instance' -UI_BACKUP_REMAIN='days of backups remain' -UI_BACKUP_INTEGRITY="Checking ${UI_GRAVITY_NAME} copy integrity" -UI_BACKUP_INTEGRITY_FAILED='Integrity check has failed for the primary' -UI_BACKUP_INTEGRITY_DELETE='Removing failed copies' -UI_BACKUP_DELETE_ALL='All unused data files purged' - -# Restore -UI_RESTORE_WARNING="This will overwrite your current Pi-hole settings on $HOSTNAME with a previous version!" -UI_RESTORE_INVALID='Invalid restoration request' -UI_RESTORE_SELECT_DATE='Select backup date from which to restore the' -UI_RESTORE_SKIPPING='Skipping restore of' -UI_RESTORE_BACKUP_SELECTED='backup selected for restoration' -UI_RESTORE_BACKUP_UNAVAILABLE='backups are unavailable' -UI_RESTORE_FROM='restoring from' -UI_RESTORE_TIME_TRAVEL='Preparing calculations for time travel' -UI_RESTORE_SECONDARY='Restoring the secondary' -UI_RESTORE_PUSH_PROMPT='Do you want to push the restored configuration to the primary Pi-hole? (Y/N)' -UI_RESTORE_PUSH_NOPUSH="Configuration will not be pushed to the primaryp Pi-hole" - -# Purge -UI_PURGE_MATRIX_ALIGNMENT='Realigning dilithium crystal matrix' -UI_PURGE_DELETE_SSH_KEYS='Deleting SSH key-files' -UI_PURGE_CLEANING_DIR="Purging ${PROGRAM} directory" - -# Automation -UI_AUTO_CRON_EXISTS='Automation task exists in crontab' -UI_AUTO_CRON_DISPLAY_FREQ='Select synchronization frequency (in minutes)' -UI_AUTO_CRON_SELECT_FREQ='Valid options are 5, 10, 15, 30 or 0 to disable (default: 15)' -UI_AUTO_CRON_SAVING='Saving new synchronization task to crontab' -UI_AUTO_CRON_DISABLED='Automation with crontab has been disabled' - -# Configuration -UI_CONFIG_YESNOY="'Yes' or 'No', blank is default 'Yes'" -UI_CONFIG_YESNON="'Yes' or 'No', blank is default 'No'" -UI_CONFIG_ALREADY='already exists' -UI_ENABLE_REPLICATION_QUEST='Enable replication of' -UI_CONFIG_PREF_SAVED='preference saved to' -UI_CONFIG_AREYOUSURE='Proceeding will wipe out your existing configuration' -UI_CONFIG_DOADVANCED='Do you want to enable advanced installation options' -UI_CONFIG_ERASING='Erasing existing' -UI_CONFIG_CREATING='Creating new' -UI_CONFIG_BASH='Creating gravity-sync alias in Bash config' -UI_CONFIG_ALIAS="You must reload your session for 'gravity-sync' alias to function" -UI_CONFIG_ICMP_TEST='Testing network connection to' -UI_CONFIG_REQUIRED='required settings' -UI_CONFIG_ADVANCED='advanced settings' -UI_CONFIG_REMOTE='remote host' -UI_CONFIG_LOCAL='local host' -UI_CONFIG_HOSTREQ='address required' -UI_CONFIG_USERREQ='SSH user required' -UI_CONFIG_CONT_LOOKUP='Looking for container engines' -UI_CONFIG_CONT_DETECT='Docker or Podman container engine' -UI_CONFIG_CONT_DETECTED='detected' -UI_CONFIG_CONT_DETECTNA='not detected' -UI_CONFIG_SSH_KEYPAIR='Key-pair registered to' -UI_CONFIG_LOCALSEC='Local/secondary' -UI_CONFIG_REMOTEPRI='Remote/primary' -UI_CONFIG_INSTANCEREQ='instance type required' -UI_CONFIG_INSTANCENAME='instance name required' -UI_CONFIG_INSTANCETYPE="'docker' or 'podman' container, or blank for default install" -UI_CONFIG_CONTAINER_TYPE='container type' -UI_CONFIG_CONTAINER_NAME='container name' -UI_CONFIG_INSTANCE_ERROR="${UI_CONFIG_CONTAINER_TYPE} must either be 'docker' or 'podman'" -UI_CONFIG_IMAGES='running instances' -UI_CONFIG_SAVING='Saving' -UI_CONFIG_PIHOLE_DEFAULT="Container name or blank for default 'pihole'" -UI_CONFIG_ETC_VOLPATH="'etc' volume path" -UI_CONFIG_ETC_VOLPATH_EXAMPLE="Example, 
'/opt/pihole/etc-pihole'" -UI_CONFIG_ETC_VOLDNSQ_EXAMPLE="Example, '/opt/pihole/etc-dnsmasq.d'" -UI_CONFIG_SETTING_REQUIRED='This setting is required!' -UI_CONFIG_SETTING_DIR_PATH='settings directory path' -UI_CONFIG_VOLUME_OWNER='volume ownership' -UI_CONFIG_DEFAULT_LEAVE="Leave blank for default" -UI_CONFIG_DEFAULT_DNSMASQ_ETC="'/etc/dnsmasq.d'" -UI_CONFIG_DEFAULT_PIHOLE_ETC="'/etc/pihole'" \ No newline at end of file diff --git a/includes/gs-update.sh b/includes/gs-update.sh deleted file mode 100644 index 9edb19f..0000000 --- a/includes/gs-update.sh +++ /dev/null @@ -1,284 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-update.sh ############################### - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Master Branch -function update_gs { - if [ -f "${LOCAL_FOLDR}/dev" ] - then - source ${LOCAL_FOLDR}/dev - else - BRANCH='origin/master' - fi - - if [ "$BRANCH" != "origin/master" ] - then - MESSAGE="Pulling from ${BRANCH}" - echo_info - fi - - GIT_CHECK=$(git status | awk '{print $1}') - if [ "$GIT_CHECK" == "fatal:" ] - then - MESSAGE="Updater usage requires GitHub installation" - echo_warn - exit_nochange - else - MESSAGE="Downloading updates via GitHub" - echo_stat - git fetch --all >/dev/null 2>&1 - error_validate - MESSAGE="Deploying the latest ${PROGRAM} code" - echo_stat - git reset --hard ${BRANCH} >/dev/null 2>&1 - error_validate - MESSAGE="Cleaning things up" - echo_stat - git clean -fq - error_validate - fi -} - -## Show Version -function show_version { - if [ -f ${LOCAL_FOLDR}/dev ] - then - DEVVERSION="-dev" - elif [ -f ${LOCAL_FOLDR}/beta ] - then - DEVVERSION="-beta" - else - DEVVERSION="" - fi - - MESSAGE="Running version: ${GREEN}${VERSION}${NC}${DEVVERSION}" - echo_info - - GITVERSION=$(curl -sf https://raw.githubusercontent.com/vmstan/gravity-sync/master/VERSION) - if [ -z "$GITVERSION" ] - then - MESSAGE="Latest version: ${RED}Unknown${NC}" - else - if [ "$GITVERSION" != "$VERSION" ] - then - MESSAGE="Update available: ${RED}${GITVERSION}${NC}" - else - MESSAGE="Latest version: ${GREEN}${GITVERSION}${NC}" - fi - fi - echo_info -} - -function show_info() { - if [ -f ${LOCAL_FOLDR}/dev ] - then - DEVVERSION="-dev" - elif [ -f ${LOCAL_FOLDR}/beta ] - then - DEVVERSION="-beta" - else - DEVVERSION="" - fi - - echo_lines - echo -e "${YELLOW}Local Software Versions${NC}" - echo -e "${PURPLE}Gravity Sync${NC} ${VERSION}${DEVVERSION}" - echo -e "${BLUE}Pi-hole${NC}" - if [ "${PH_IN_TYPE}" == "default" ] - then - pihole version - elif [ "${PH_IN_TYPE}" == "docker" ] - then - sudo docker exec -it pihole pihole -v - elif [ "${PH_IN_TYPE}" == "podman" ] - then - sudo podman exec -it pihole pihole -v - fi - - uname -srm - echo -e "bash $BASH_VERSION" - ssh -V - rsync --version | grep version - SQLITE3_VERSION=$(sqlite3 --version) - echo -e "sqlite3 ${SQLITE3_VERSION}" - sudo --version | grep "Sudo version" - git --version - - if hash docker 2>/dev/null - then - docker --version - fi - - if hash podman 2>/dev/null - then - podman --version - fi - - echo -e "" - - echo -e "${YELLOW}Local/Secondary Instance Settings${NC}" - echo -e "Local Hostname: $HOSTNAME" - echo -e "Local Pi-hole Type: ${PH_IN_TYPE}" - echo -e "Local Pi-hole Config Directory: ${PIHOLE_DIR}" - echo -e "Local DNSMASQ Config Directory: ${DNSMAQ_DIR}" - echo -e "Local Gravity Sync Directory: ${LOCAL_FOLDR}" - - if [ "${PH_IN_TYPE}" == "default" ] - then - echo 
-e "Local Pi-hole Binary Directory: ${PIHOLE_BIN}" - elif [ "${PH_IN_TYPE}" == "docker" ] - then - echo -e "Local Pi-hole Container Name: ${DOCKER_CON}" - echo -e "Local Docker Binary Directory: ${DOCKER_BIN}" - elif [ "${PH_IN_TYPE}" == "podman" ] - then - echo -e "Local Pi-hole Container Name: ${DOCKER_CON}" - echo -e "Local Podman Binary Directory: ${PODMAN_BIN}" - fi - - echo -e "Local File Owner Settings: ${FILE_OWNER}" - - if [ ${SKIP_CUSTOM} == '0' ] - then - echo -e "DNS Replication: Enabled (default)" - elif [ ${SKIP_CUSTOM} == '1' ] - then - echo -e "DNS Replication: Disabled (custom)" - else - echo -e "DNS Replication: Invalid Configuration" - fi - - if [ ${INCLUDE_CNAME} == '1' ] - then - echo -e "CNAME Replication: Enabled (custom)" - elif [ ${INCLUDE_CNAME} == '0' ] - then - echo -e "CNAME Replication: Disabled (default)" - else - echo -e "CNAME Replication: Invalid Configuration" - fi - - if [ ${VERIFY_PASS} == '1' ] - then - echo -e "Verify Operations: Enabled (default)" - elif [ ${VERIFY_PASS} == '0' ] - then - echo -e "Verify Operations: Disabled (custom)" - else - echo -e "Verify Operations: Invalid Configuration" - fi - - if [ ${PING_AVOID} == '0' ] - then - echo -e "Ping Test: Enabled (default)" - elif [ ${PING_AVOID} == '1' ] - then - echo -e "Ping Test: Disabled (custom)" - else - echo -e "Ping Test: Invalid Configuration" - fi - - BACKUP_FOLDER_SIZE=$(du -h ${LOCAL_FOLDR}/${BACKUP_FOLD} | sed 's/\s.*$//') - echo -e "Backup Folder Size: ${BACKUP_FOLDER_SIZE}" - - echo -e "" - echo -e "${YELLOW}Remote/Primary Instance Settings${NC}" - echo -e "Remote Hostname/IP: ${REMOTE_HOST}" - echo -e "Remote Username: ${REMOTE_USER}" - echo -e "Remote Pi-hole Type: ${RH_IN_TYPE}" - echo -e "Remote Pi-hole Config Directory: ${RIHOLE_DIR}" - echo -e "Remote DNSMASQ Config Directory: ${RNSMAQ_DIR}" - - if [ "${RH_IN_TYPE}" == "default" ] - then - echo -e "Remote Pi-hole Binary Directory: ${RIHOLE_BIN}" - elif [ "${RH_IN_TYPE}" == "docker" ] - then - echo -e "Remote Pi-hole Container Name: ${ROCKER_CON}" - echo -e "Remote Docker Binary Directory: ${ROCKER_BIN}" - elif [ "${RH_IN_TYPE}" == "podman" ] - then - echo -e "Remote Pi-hole Container Name: ${ROCKER_CON}" - echo -e "Remote Podman Binary Directory: ${RODMAN_BIN}" - fi - - echo -e "Remote File Owner Settings: ${RILE_OWNER}" - echo_lines -} - -## Devmode Task -function task_devmode { - TASKTYPE='DEV' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - - if [ -f ${LOCAL_FOLDR}/dev ] - then - MESSAGE="Disabling ${TASKTYPE}" - echo_stat - rm -f ${LOCAL_FOLDR}/dev - error_validate - elif [ -f ${LOCAL_FOLDR}/beta ] - then - MESSAGE="Disabling BETA" - echo_stat - rm -f ${LOCAL_FOLDR}/beta - error_validate - - MESSAGE="Enabling ${TASKTYPE}" - echo_stat - touch ${LOCAL_FOLDR}/dev - error_validate - else - MESSAGE="Enabling ${TASKTYPE}" - echo_stat - touch ${LOCAL_FOLDR}/dev - error_validate - - MESSAGE="Checking available branches" - echo_stat - git fetch --all >/dev/null 2>&1 - error_validate - - git branch -r - - MESSAGE="Select GitHub branch to update against" - echo_need - read INPUT_BRANCH - - echo -e "BRANCH='${INPUT_BRANCH}'" >> ${LOCAL_FOLDR}/dev - fi - - update_gs - exit_withchange -} - -## Update Task -function task_update { - TASKTYPE='UPDATE' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - update_gs - exit_withchange -} - -## Version Task -function task_version { - TASKTYPE='VERSION' - MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - show_version - exit_nochange -} - -## Info Task - -function task_info() { - TASKTYPE='INFO' - 
MESSAGE="${MESSAGE}: ${TASKTYPE}" - echo_good - show_info - exit_nochange -} \ No newline at end of file diff --git a/includes/gs-validate.sh b/includes/gs-validate.sh deleted file mode 100644 index 800f2a4..0000000 --- a/includes/gs-validate.sh +++ /dev/null @@ -1,264 +0,0 @@ -# GRAVITY SYNC BY VMSTAN ##################### -# gs-validate.sh ############################# - -# For documentation or downloading updates visit https://github.com/vmstan/gravity-sync -# This code is called from the main gravity-sync.sh file and should not execute directly! - -## Validate GS Folders -function validate_gs_folders { - MESSAGE="${UI_VAL_GS_FOLDERS}" - echo_stat - - if [ ! -d ${LOCAL_FOLDR} ] || [ ! -d ${LOCAL_FOLDR}/${BACKUP_FOLD} ] || [ ! -d ${LOCAL_FOLDR}/settings ] || [ ! -d ${LOG_PATH} ] - then - MESSAGE="${UI_VAL_GS_FOLDERS_FAIL}" - echo_fail - exit_nochange - fi - - echo_good -} - -## Validate Pi-hole Folders -function validate_ph_folders { - MESSAGE="${UI_VALIDATING} ${UI_CORE_APP}" - echo_stat - - if [ "$PH_IN_TYPE" == "default" ] - then - if [ ! -f ${PIHOLE_BIN} ] - then - MESSAGE="${UI_VALIDATING_FAIL_BINARY} ${UI_CORE_APP}" - echo_fail - exit_nochange - fi - elif [ "$PH_IN_TYPE" == "docker" ] - then - FTLCHECK=$(sudo docker container ls | grep "${CONTAIMAGE}") - if [ "$FTLCHECK" == "" ] - then - MESSAGE="${UI_VALIDATING_FAIL_CONTAINER} ${UI_CORE_APP}" - echo_fail - exit_nochange - fi - elif [ "$PH_IN_TYPE" == "podman" ] - then - FTLCHECK=$(sudo podman container ls | grep "${CONTAIMAGE}") - if [ "$FTLCHECK" == "" ] - then - MESSAGE="${UI_VALIDATING_FAIL_CONTAINER} ${UI_CORE_APP}" - echo_fail - exit_nochange - fi - fi - - if [ ! -d ${PIHOLE_DIR} ] - then - MESSAGE="${UI_VALIDATING_FAIL_FOLDER} ${UI_CORE_APP}" - echo_fail - exit_nochange - fi - - echo_good -} - -## Validate DNSMASQ Folders -function validate_dns_folders { - MESSAGE="${UI_VALIDATING} ${UI_CORE_APP_DNS}" - echo_stat - - if [ ! 
-d ${DNSMAQ_DIR} ] - then - MESSAGE="${UI_VALIDATING_FAIL_FOLDER} ${UI_CORE_APP_DNS}" - echo_fail - exit_nochange - fi - echo_good -} - -## Validate SQLite3 -function validate_sqlite3 { - MESSAGE="${UI_VALIDATING} ${UI_CORE_APP_SQL}" - echo_stat - if hash sqlite3 2>/dev/null - then - echo_good - else - MESSAGE="${UI_VALIDATING_FAIL_BINARY} ${UI_CORE_APP_SQL}" - echo_warn - fi -} - -## Validate SSHPASS -function validate_os_sshpass { - MESSAGE="Connecting to ${REMOTE_HOST}" - echo_stat - - CMD_TIMEOUT='5' - CMD_REQUESTED="exit" - create_sshcmd -} - -## Detect Package Manager -function distro_check { - if hash apt-get 2>/dev/null - then - PKG_MANAGER="apt-get" - PKG_INSTALL="sudo apt-get --yes --no-install-recommends --quiet install" - elif hash rpm 2>/dev/null - then - if hash dnf 2>/dev/null - then - PKG_MANAGER="dnf" - elif hash yum 2>/dev/null - then - PKG_MANAGER="yum" - else - MESSAGE="Unable to find OS Package Manager" - echo_info - exit_nochange - fi - PKG_INSTALL="sudo ${PKG_MANAGER} install -y" - else - MESSAGE="Unable to find OS Package Manager" - echo_info - exit_nochange - fi -} - -## Dropbear Warning -function dbclient_warning { - if hash dbclient 2>/dev/null - then - if hash ssh 2>/dev/null - then - NOEMPTYBASHIF="1" - else - MESSAGE="${UI_DROPBEAR_DEP}" - echo_warn - fi - fi -} - -## Validate Domain Database Permissions -function validate_gravity_permissions() { - MESSAGE="${UI_VAL_FILE_OWNERSHIP} ${UI_GRAVITY_NAME}" - echo_stat - - GRAVDB_OWN=$(ls -ld ${PIHOLE_DIR}/${GRAVITY_FI} | awk 'OFS=":" {print $3,$4}') - if [ "$GRAVDB_OWN" == "$FILE_OWNER" ] - then - echo_good - else - echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_GRAVITY_NAME}" - echo_stat - sudo chown ${FILE_OWNER} ${PIHOLE_DIR}/${GRAVITY_FI} >/dev/null 2>&1 - error_validate - fi - - MESSAGE="${UI_VAL_FILE_PERMISSION} of ${UI_GRAVITY_NAME}" - echo_stat - - GRAVDB_RWE=$(namei -m ${PIHOLE_DIR}/${GRAVITY_FI} | grep -v f: | grep ${GRAVITY_FI} | awk '{print $1}') - if [ "$GRAVDB_RWE" = "-rw-rw-r--" ] - then - echo_good - else - echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_GRAVITY_NAME}" - echo_stat - sudo chmod 664 ${PIHOLE_DIR}/${GRAVITY_FI} >/dev/null 2>&1 - error_validate - fi -} - -## Validate Local DNS Records Permissions -function validate_custom_permissions() { - MESSAGE="${UI_VAL_FILE_OWNERSHIP} ${UI_CUSTOM_NAME}" - echo_stat - - CUSTOMLS_OWN=$(ls -ld ${PIHOLE_DIR}/${CUSTOM_DNS} | awk '{print $3 $4}') - if [ "$CUSTOMLS_OWN" == "rootroot" ] - then - echo_good - else - echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_CUSTOM_NAME}" - echo_stat - sudo chown root:root ${PIHOLE_DIR}/${CUSTOM_DNS} >/dev/null 2>&1 - error_validate - fi - - MESSAGE="${UI_VAL_FILE_PERMISSION} ${UI_CUSTOM_NAME}" - echo_stat - - CUSTOMLS_RWE=$(namei -m ${PIHOLE_DIR}/${CUSTOM_DNS} | grep -v f: | grep ${CUSTOM_DNS} | awk '{print $1}') - if [ "$CUSTOMLS_RWE" == "-rw-r--r--" ] - then - echo_good - else - echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CUSTOM_NAME}" - echo_stat - sudo chmod 644 ${PIHOLE_DIR}/${CUSTOM_DNS} >/dev/null 2>&1 - error_validate - fi -} - -## Validate Local DNS CNAME Permissions -function validate_cname_permissions() { - MESSAGE="${UI_VAL_FILE_OWNERSHIP} ${UI_CNAME_NAME}" - echo_stat - - CNAMELS_OWN=$(ls -ld ${DNSMAQ_DIR}/${CNAME_CONF} | awk '{print $3 $4}') - if [ "$CNAMELS_OWN" == "rootroot" ] - then - echo_good - else - 
echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_OWNERSHIP} ${UI_CNAME_NAME}" - echo_stat - sudo chown root:root ${DNSMAQ_DIR}/${CNAME_CONF} >/dev/null 2>&1 - error_validate - fi - - MESSAGE="${UI_VAL_FILE_PERMISSION} ${UI_CNAME_NAME}" - echo_stat - - CNAMELS_RWE=$(namei -m ${DNSMAQ_DIR}/${CNAME_CONF} | grep -v f: | grep ${CNAME_CONF} | awk '{print $1}') - if [ "$CNAMELS_RWE" == "-rw-r--r--" ] - then - echo_good - else - echo_fail - - MESSAGE="${UI_COMPENSATE}" - echo_warn - - MESSAGE="${UI_SET_FILE_PERMISSION} ${UI_CNAME_NAME}" - echo_stat - sudo chmod 644 ${DNSMAQ_DIR}/${CNAME_CONF} >/dev/null 2>&1 - error_validate - fi -} \ No newline at end of file diff --git a/logs/README.md b/logs/README.md deleted file mode 100644 index 97a2637..0000000 --- a/logs/README.md +++ /dev/null @@ -1,5 +0,0 @@ -Your log files will be stored here. -They will be ignored by any git updates. -This file is required for Git to create the folder. -It serves no other purpose. -The needs of the many, outweigh the needs of the few. \ No newline at end of file diff --git a/settings/README.md b/settings/README.md deleted file mode 100644 index 66ba9c6..0000000 --- a/settings/README.md +++ /dev/null @@ -1,5 +0,0 @@ -Your configuration file will be stored here. -It will be ignored by any git updates. -This file is required for Git to create the folder. -It serves no other purpose. -I have been, and always shall be, your friend. \ No newline at end of file diff --git a/templates/gravity-sync.conf.example b/templates/gravity-sync.conf.example index c83d073..e4a2ae9 100644 --- a/templates/gravity-sync.conf.example +++ b/templates/gravity-sync.conf.example @@ -1,64 +1,21 @@ -# REQUIRED SETTINGS ###################### +# REQUIRED SETTINGS ########################## -# Primary Pi-hole host (in IP or DNS name) -REMOTE_HOST='192.168.1.10' +REMOTE_HOST='' +REMOTE_USER='' -# User account on primary with SUDO rights -REMOTE_USER='pi' +# CUSTOM VARIABLES ########################### -# STANDARD VARIABLES ######################### +# Pi-hole Folder/File Customization - Only need to be customized when using containers +# LOCAL_PIHOLE_DIRECTORY='' # Local Pi-hole data directory +# REMOTE_PIHOLE_DIRECTORY='' # Remote Pi-hole data directory +# LOCAL_DNSMASQ_DIRECTORY='' # Local DNSMASQ/FTL data directory +# REMOTE_DNSMASQ_DIRECTORY='' # Remote DNSMASQ/FTL data directory +# LOCAL_FILE_OWNER='' # Local file owner for Pi-hole +# REMOTE_FILE_OWNER='' # Remote file owner for Pi-hole -### Installation Types -# PH_IN_TYPE='' # Pi-hole install type, `default`, `docker`, or `podman` (local) -# RH_IN_TYPE='' # Pi-hole install type, `default`, `docker`, or `podman` (remote) +# Pi-hole Docker/Podman container name - Docker will pattern match anything set below +# LOCAL_DOCKER_CONTAINER='' # Local Pi-hole container name +# REMOTE_DOCKER_CONTAINER='' # Remote Pi-hole container name -### Pi-hole Folder/File Customization -# PIHOLE_DIR='' # default Pi-hole data directory (local) -# RIHOLE_DIR='' # default Pi-hole data directory (remote) -# DNSMAQ_DIR='' # default DNSMASQ data directory (local) -# RNSMAQ_DIR='' # default DNSMASQ data directory (remote) -# PIHOLE_BIN='' # default Pi-hole binary directory (local) -# RIHOLE_BIN='' # default Pi-hole binary directory (remote) -# DOCKER_BIN='' # default Docker binary directory (local) -# ROCKER_BIN='' # default Docker binary directory (remote) -# PODMAN_BIN='' # default Podman binary directory (local) -# RODMAN_BIN='' # default Podman binary directory (remote) -# FILE_OWNER='' # 
default Pi-hole file owner and group (local) -# RILE_OWNER='' # default Pi-hole file owner and group (remote) -# DOCKER_CON='' # default Pi-hole container name (local) -# ROCKER_CON='' # default Pi-hole container name (remote) -# CONTAIMAGE='' # official Pi-hole container image - -# GRAVITY_FI='' # default Pi-hole database file -# CUSTOM_DNS='' # default Pi-hole local DNS lookups -# CNAME_CONF='' # default DNSMASQ CNAME alias file -# GSLAN_CONF='' # default DNSMASQ GS managed file - -### Interaction Customization -# VERIFY_PASS='' # replace in gravity-sync.conf to overwrite -# SKIP_CUSTOM='' # replace in gravity-sync.conf to overwrite -# INCLUDE_CNAME='' # replace in gravity-sync.conf to overwrite -# DATE_OUTPUT='' # replace in gravity-sync.conf to overwrite -# PING_AVOID='' # replace in gravity-sync.conf to overwrite -# ROOT_CHECK_AVOID='' # replace in gravity-sync.conf to overwrite - -### Backup Customization -# BACKUP_TIMEOUT='' # replace in gravity-sync.conf to overwrite -# BACKUP_INTEGRITY_WAIT='' # time to wait after backup for integrity check - -### SSH Customization -# SSH_PORT='' # default SSH port -# SSH_PKIF='' # default local SSH key - -### GS Folder/File Locations -# CONFIG_FILE='' # must exist with primary host/user configured -# GS_FILENAME='' # must exist because it's this script -# BACKUP_FOLD='' # must exist as subdirectory in LOCAL_FOLDR -# LOG_PATH='' # replace in gravity-sync.conf to overwrite -# SYNCING_LOG='' # replace in gravity-sync.conf to overwrite -# CRONJOB_LOG='' # replace in gravity-sync.conf to overwrite -# HISTORY_MD5='' # replace in gravity-sync.conf to overwrite - -### OS Settings -# BASH_PATH='' # default OS bash path -# DAEMON_PATH='' # systemd timer/service folder \ No newline at end of file +# HIDDEN FIGURES ############################# +# See https://github.com/vmstan/gravity-sync/wiki/Hidden-Figures diff --git a/update.sh b/update.sh new file mode 100644 index 0000000..981ce79 --- /dev/null +++ b/update.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +GS_LOCAL_REPO='/etc/gravity-sync/.gs' + + if [ -f "${GS_LOCAL_REPO}/dev" ]; then + source ${GS_LOCAL_REPO}/dev + else + BRANCH='origin/master' + fi + + if [ "$BRANCH" != "origin/master" ]; then + echo -e "Pulling from ${BRANCH}" + fi + + (cd ${GS_LOCAL_REPO}; sudo git fetch --all; sudo git reset --hard ${BRANCH}; sudo cp gravity-sync /usr/local/bin; sudo git clean -fq) +
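The new update.sh replaces the in-script Git updater removed above. A likely invocation, assuming the repository was cloned to /etc/gravity-sync/.gs as the script expects and the caller has sudo rights:

# Fast-forward the local repository and refresh the installed binary
bash /etc/gravity-sync/.gs/update.sh

# To track a different branch, drop a 'dev' file in the repo root first
# (the branch name below is only an example)
echo "BRANCH='origin/development'" | sudo tee /etc/gravity-sync/.gs/dev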