#!/usr/bin/env bash

# Base URL
url="https://ct.tlscc.ru/json?identity=%25&iCAID=2"

# Initial page and items per page
page=1
n=1000
all_items=()

while true; do
    # Fetch the current page
    response=$(curl -ks "${url}&p=${page}&n=${n}")
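    # Note: -k disables TLS certificate verification (the log's certificate is
    # presumably issued by its own CA and not in the default trust store);
    # -s silences curl's progress output.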

    # Extract CN values from the response and keep anything that looks like a
    # domain name (optionally with a leading wildcard label)
    items=$(echo "$response" | grep -o 'CN=[^,]*' | grep -oE '(\*\.)?[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}')
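    # If the endpoint's JSON schema were known, jq would parse more robustly
    # than grep; a sketch, assuming a hypothetical .subject field on each
    # entry (field name not confirmed for this endpoint):
    #   items=$(echo "$response" | jq -r '.[].subject' \
    #       | grep -oE '(\*\.)?[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}')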

    # Count non-empty extracted lines ("echo | wc -l" reports 1 even for an
    # empty result, so use grep -c instead)
    count=$(echo "$items" | grep -c .)
    echo "Page ${page}: ${count} items"

    # If no items were returned, we are past the last page: stop
    if [ "$count" -eq 0 ]; then
        break
    fi

    # Quote each domain and collect it for the Nix list
    while IFS= read -r line; do
        if [ -n "$line" ]; then
            all_items+=("\"$line\"")
        fi
    done <<< "$items"

    # Break if fewer items than requested per page (last page)
    # if [ "$count" -lt "$n" ]; then
    #     break
    # fi

    # Increment page number
    ((page++))
done

# Write the collected items to a temp file, one per line
printf "%s\n" "${all_items[@]}" > result.nix.temp

lines=$(wc -l < result.nix.temp)
echo "Total domains: ${lines}"

# Wrap the deduplicated list in Nix list brackets
echo '[' > result.nix
sort -u result.nix.temp >> result.nix
echo ']' >> result.nix
lines=$(wc -l < result.nix)
# Subtract the two bracket lines from the line count
echo "Unique domains: $((lines-2))"

sleep 3

rm result.nix.temp
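
# result.nix is now a Nix list of quoted domain strings and could be consumed
# from a Nix expression, e.g. (a sketch; the import path depends on where the
# script is run):
#   domains = import ./result.nix;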