#!/bin/bash
#
# https://github.com/edoardottt/awesome-hacker-search-engines
#
# This script checks whether any section of the README file contains
# duplicate link entries.
#
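# A README section is assumed to look like this (made-up entries shown
# purely for illustration):
#
#   ### Some Section
#   - [Example Tool](https://example.com/)
#   - [Example Tool](https://example.com/)   <- would be flagged
#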

readme="README.md"

# Allow running the script either from the repository root or from
# inside the scripts/ directory: "${pwd: -7}" takes the last seven
# characters of the current working directory path.
pwd=$(pwd)

if [[ "${pwd: -7}" == "scripts" ]]; then
    readme="../README.md"
fi
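
# Assumed repository layout: the script sits in scripts/ next to the
# top-level README.md, so it can be started from either directory, e.g.
# (hypothetical filename):
#   cd scripts && bash ./check_duplicates.sh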

# Function to extract links from a section and check for duplicates
check_section() {
    section=$1
    # Print every line between the "### $section" heading and the next
    # "### " heading: flag p is cleared at every heading and set again
    # at the heading that matches this section.
    section_content=$(awk '/^### / {p=0} {if(p)print} /^### '"$section"'/ {p=1}' "$readme")
    # Pull the URL out of every markdown link "[text](url)"; \K discards
    # the matched prefix (-P requires GNU grep), and uniq -d keeps only
    # URLs that occur more than once.
    duplicate_links=$(echo "$section_content" | grep -oP '\[.*?\]\(\K[^)]+' | sort | uniq -d)
    if [[ -n $duplicate_links ]]; then
        echo "[ ERR ] DUPLICATE LINKS FOUND"
        echo "$duplicate_links"
        exit 1
    fi
}
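
# Example (hypothetical section name): check_section "Exploits" prints
# the duplicated URLs and exits with status 1 if the "### Exploits"
# section lists the same link twice; otherwise it returns silently.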

# Collect all section headings from the README; escape "/" and "&" so
# the names are safe inside the awk pattern used by check_section.
sections=$(grep '^### ' "$readme" | sed 's/^### //' | sed 's/[\/&]/\\&/g')

# Check every section, reading one heading per line so that names
# containing spaces stay intact (a plain for loop would split them).
while IFS= read -r section; do
    check_section "$section"
done <<< "$sections"

echo "[ OK! ] NO DUPLICATES FOUND."