first commit
commit b3d3c853b8
3 changed files with 16482 additions and 0 deletions
create.sh (new file, 45 additions)
@@ -0,0 +1,45 @@
# Base URL
url="https://ct.tlscc.ru/json?identity=%25&iCAID=2"

# Initial page and items per page
page=1
n=1000
all_items=()

while true; do
    # Fetch the current page
    response=$(curl -ks "${url}&p=${page}&n=${n}")

    # Extract domain names from the CN= fields of the response
    items=$(echo "$response" | grep -o 'CN=[^,]*' | grep -oE '(\*\.)?[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}')

    # Count the number of items
    count=$(echo "$items" | wc -l)
    echo "$count"

    # If no items were found, break the loop (an empty $items still counts as one line)
    if [ "$count" -eq 1 ]; then
        break
    fi

    # Quote each non-empty item and collect it
    while IFS= read -r line; do
        if [ -n "$line" ]; then
            all_items+=("\"$line\"")
        fi
    done <<< "$items"

    # Break if fewer items than requested per page (last page)
    # if [ "$count" -lt "$n" ]; then
    #     break
    # fi

    # Increment page number
    ((page++))
done

mapfile -t all_items < <(printf "%s\n" "${all_items[@]}" | sort -u)  # deduplicate the collected items

echo '[' > result.nix  # write the collected domains out as a Nix list
printf "%s\n" "${all_items[@]}" >> result.nix
echo ']' >> result.nix
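The result.nix that the script writes is a flat Nix list of quoted hostnames, one entry per line. A minimal sketch of that shape, using placeholder domains rather than entries from the actual log:

[
# placeholder entries for illustration, not taken from the real result.nix
"example.ru"
"*.example.org"
"mail.example.net"
]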
flake.nix (new file, 9 additions)
@@ -0,0 +1,9 @@
{
  description = "List of domain names that have Russian certificates.";

  inputs = {};

  outputs = { self, ... }: {
    list = import ./result.nix;
  };
}
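Since the flake has no inputs and exposes only a `list` output, a consumer reads the data either by importing result.nix directly or by pulling this flake in as an input. A minimal sketch of a consuming flake, assuming a hypothetical hosting URL for this repository:

{
  # Hypothetical URL; point this at wherever the repository is actually hosted.
  inputs.russian-certs.url = "git+https://example.com/russian-certs.git";

  outputs = { self, russian-certs, ... }: {
    # russian-certs.list is the list of domain strings imported from result.nix.
    domainCount = builtins.length russian-certs.list;
  };
}

From a checkout of this repository itself, `nix eval .#list` should print the same list directly.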
result.nix (new file, 16428 additions)
File diff suppressed because it is too large