From 3a7a97867b3d53d1131a76b95a7e1750df3e5f85 Mon Sep 17 00:00:00 2001 From: Aetherinox Date: Sun, 3 Nov 2024 22:06:15 -0700 Subject: [PATCH] ci: update ipset generation workflow --- .github/blocks/isp/aol.ipset | 3 + .github/blocks/privacy/01.ipset | 1 + ..._highrisk_ipset.txt => highrisk_ipset.txt} | 0 .github/categories/isp_aol_ipset.txt | 1 + .github/categories/isp_att_ipset.txt | 1 + .github/categories/isp_cablevision_ipset.txt | 1 + ...charter_spectrum_timewarnercable_ipset.txt | 1 + .../{01_master_ipset.txt => master_ipset.txt} | 0 ...efs_ipset.txt => privacy_ahrefs_ipset.txt} | 0 ...ipset.txt => privacy_amazon_aws_ipset.txt} | 0 ...ipset.txt => privacy_amazon_ec2_ipset.txt} | 0 ...t_ipset.txt => privacy_applebot_ipset.txt} | 0 ..._bing_ipset.txt => privacy_bing_ipset.txt} | 0 ...n_ipset.txt => privacy_bunnycdn_ipset.txt} | 0 ...et.txt => privacy_cloudflarecdn_ipset.txt} | 0 ...ipset.txt => privacy_cloudfront_ipset.txt} | 0 ...ipset.txt => privacy_duckduckgo_ipset.txt} | 0 ...k_ipset.txt => privacy_facebook_ipset.txt} | 0 ...tly_ipset.txt => privacy_fastly_ipset.txt} | 0 ...al_ipset.txt => privacy_general_ipset.txt} | 0 ...gle_ipset.txt => privacy_google_ipset.txt} | 0 ...om_ipset.txt => privacy_pingdom_ipset.txt} | 0 ...api_ipset.txt => privacy_rssapi_ipset.txt} | 0 ...ipset.txt => privacy_stripe_api_ipset.txt} | 0 ... => privacy_stripe_armada_gator_ipset.txt} | 0 ....txt => privacy_stripe_webhooks_ipset.txt} | 0 ...m_ipset.txt => privacy_telegram_ipset.txt} | 0 ...pset.txt => privacy_uptimerobot_ipset.txt} | 0 ...pset.txt => privacy_webpagetest_ipset.txt} | 0 ...dex_ipset.txt => privacy_yandex_ipset.txt} | 0 ...forums_ipset.txt => spam_forums_ipset.txt} | 0 ...haus_ipset.txt => spam_spamhaus_ipset.txt} | 0 ..._highrisk_ipset.txt => highrisk_ipset.txt} | 0 .github/descriptions/isp_aol_ipset.txt | 12 + .github/descriptions/isp_att_ipset.txt | 10 + .../descriptions/isp_cablevision_ipset.txt | 17 + ...charter_spectrum_timewarnercable_ipset.txt | 10 + .../{01_master_ipset.txt => master_ipset.txt} | 0 ...efs_ipset.txt => privacy_ahrefs_ipset.txt} | 0 ...ipset.txt => privacy_amazon_aws_ipset.txt} | 0 ...ipset.txt => privacy_amazon_ec2_ipset.txt} | 0 ..._ipset.txt => privacy_apple_bot_ipset.txt} | 0 ..._bing_ipset.txt => privacy_bing_ipset.txt} | 0 ...n_ipset.txt => privacy_bunnycdn_ipset.txt} | 0 ...et.txt => privacy_cloudflarecdn_ipset.txt} | 0 ...ipset.txt => privacy_cloudfront_ipset.txt} | 0 ...ipset.txt => privacy_duckduckgo_ipset.txt} | 0 ...k_ipset.txt => privacy_facebook_ipset.txt} | 0 ...tly_ipset.txt => privacy_fastly_ipset.txt} | 0 ...al_ipset.txt => privacy_general_ipset.txt} | 0 ...gle_ipset.txt => privacy_google_ipset.txt} | 0 ...om_ipset.txt => privacy_pingdom_ipset.txt} | 0 ...api_ipset.txt => privacy_rssapi_ipset.txt} | 0 ...ipset.txt => privacy_stripe_api_ipset.txt} | 0 ... 
=> privacy_stripe_armada_gator_ipset.txt} | 0 ....txt => privacy_stripe_webhooks_ipset.txt} | 0 ...m_ipset.txt => privacy_telegram_ipset.txt} | 0 ...pset.txt => privacy_uptimerobot_ipset.txt} | 0 ...pset.txt => privacy_webpagetest_ipset.txt} | 0 ...dex_ipset.txt => privacy_yandex_ipset.txt} | 0 ...forums_ipset.txt => spam_forums_ipset.txt} | 0 ...haus_ipset.txt => spam_spamhaus_ipset.txt} | 0 ..._highrisk_ipset.txt => highrisk_ipset.txt} | 0 .github/expires/isp_aol_ipset.txt | 1 + .github/expires/isp_att_ipset.txt | 1 + .github/expires/isp_cablevision_ipset.txt | 1 + ...charter_spectrum_timewarnercable_ipset.txt | 1 + .../expires/isp_suddenlink_optimum_ipset.txt | 1 + .../{01_master_ipset.txt => master_ipset.txt} | 0 ...efs_ipset.txt => privacy_ahrefs_ipset.txt} | 0 ...ipset.txt => privacy_amazon_aws_ipset.txt} | 0 ...ipset.txt => privacy_amazon_ec2_ipset.txt} | 0 ..._ipset.txt => privacy_apple_bot_ipset.txt} | 0 ..._bing_ipset.txt => privacy_bing_ipset.txt} | 0 ...n_ipset.txt => privacy_bunnycdn_ipset.txt} | 0 ...et.txt => privacy_cloudflarecdn_ipset.txt} | 0 ...ipset.txt => privacy_cloudfront_ipset.txt} | 0 ...ipset.txt => privacy_duckduckgo_ipset.txt} | 0 ...k_ipset.txt => privacy_facebook_ipset.txt} | 0 ...tly_ipset.txt => privacy_fastly_ipset.txt} | 0 ...al_ipset.txt => privacy_general_ipset.txt} | 0 ...gle_ipset.txt => privacy_google_ipset.txt} | 0 ...om_ipset.txt => privacy_pingdom_ipset.txt} | 0 ...api_ipset.txt => privacy_rssapi_ipset.txt} | 0 ...ipset.txt => privacy_stripe_api_ipset.txt} | 0 ... => privacy_stripe_armada_gator_ipset.txt} | 0 ....txt => privacy_stripe_webhooks_ipset.txt} | 0 ...m_ipset.txt => privacy_telegram_ipset.txt} | 0 ...pset.txt => privacy_uptimerobot_ipset.txt} | 0 ...pset.txt => privacy_webpagetest_ipset.txt} | 0 ...dex_ipset.txt => privacy_yandex_ipset.txt} | 0 ...forums_ipset.txt => spam_forums_ipset.txt} | 0 ...haus_ipset.txt => spam_spamhaus_ipset.txt} | 0 .github/scripts/bl-block.sh | 70 +- .github/scripts/bl-format.sh | 41 +- .github/scripts/bl-geolite2.sh | 741 +++++++++++++++--- .github/scripts/bl-html.sh | 41 +- .github/scripts/bl-htmlip.sh | 40 +- .github/scripts/bl-json.sh | 39 +- .github/scripts/bl-master.sh | 39 +- .github/scripts/bl-plain.sh | 39 +- .github/scripts/bl-spf.sh | 17 +- .github/scripts/bl-whois.sh | 480 ++++++++++++ .github/scripts/bt-transmission.sh | 64 +- .github/scripts/tool-count-ip.sh | 85 +- .github/scripts/tool-range-ipcalc.sh | 149 ++++ .github/scripts/tool-range-iprange.sh | 357 +++++++++ .github/scripts/update-readme.sh | 42 +- ..._highrisk_ipset.txt => highrisk_ipset.txt} | 0 .github/url-source/isp_aol_ipset.txt | 1 + .github/url-source/isp_att_ipset.txt | 1 + .github/url-source/isp_cablevision_ipset.txt | 1 + ...charter_spectrum_timewarnercable_ipset.txt | 1 + .../{01_master_ipset.txt => master_ipset.txt} | 0 ...efs_ipset.txt => privacy_ahrefs_ipset.txt} | 0 ...ipset.txt => privacy_amazon_aws_ipset.txt} | 0 ...ipset.txt => privacy_amazon_ec2_ipset.txt} | 0 ...t_ipset.txt => privacy_applebot_ipset.txt} | 0 ..._bing_ipset.txt => privacy_bing_ipset.txt} | 0 ...n_ipset.txt => privacy_bunnycdn_ipset.txt} | 0 ...et.txt => privacy_cloudflarecdn_ipset.txt} | 0 ...ipset.txt => privacy_cloudfront_ipset.txt} | 0 ...ipset.txt => privacy_duckduckgo_ipset.txt} | 0 ...k_ipset.txt => privacy_facebook_ipset.txt} | 0 ...tly_ipset.txt => privacy_fastly_ipset.txt} | 0 ...al_ipset.txt => privacy_general_ipset.txt} | 0 ...gle_ipset.txt => privacy_google_ipset.txt} | 0 ...om_ipset.txt => privacy_pingdom_ipset.txt} | 0 ...api_ipset.txt => 
privacy_rssapi_ipset.txt} | 0 ...ipset.txt => privacy_stripe_api_ipset.txt} | 0 ... => privacy_stripe_armada_gator_ipset.txt} | 0 ....txt => privacy_stripe_webhooks_ipset.txt} | 0 ...m_ipset.txt => privacy_telegram_ipset.txt} | 0 ...pset.txt => privacy_uptimerobot_ipset.txt} | 0 ...pset.txt => privacy_webpagetest_ipset.txt} | 0 ...dex_ipset.txt => privacy_yandex_ipset.txt} | 0 ...haus_ipset.txt => spam_spamhaus_ipset.txt} | 0 .github/workflows/blocklist-generate.yml | 273 ++++++- 138 files changed, 2384 insertions(+), 199 deletions(-) create mode 100644 .github/blocks/isp/aol.ipset rename .github/categories/{01_highrisk_ipset.txt => highrisk_ipset.txt} (100%) create mode 100644 .github/categories/isp_aol_ipset.txt create mode 100644 .github/categories/isp_att_ipset.txt create mode 100644 .github/categories/isp_cablevision_ipset.txt create mode 100644 .github/categories/isp_charter_spectrum_timewarnercable_ipset.txt rename .github/categories/{01_master_ipset.txt => master_ipset.txt} (100%) rename .github/categories/{02_privacy_ahrefs_ipset.txt => privacy_ahrefs_ipset.txt} (100%) rename .github/categories/{02_privacy_amazon_aws_ipset.txt => privacy_amazon_aws_ipset.txt} (100%) rename .github/categories/{02_privacy_amazon_ec2_ipset.txt => privacy_amazon_ec2_ipset.txt} (100%) rename .github/categories/{02_privacy_applebot_ipset.txt => privacy_applebot_ipset.txt} (100%) rename .github/categories/{02_privacy_bing_ipset.txt => privacy_bing_ipset.txt} (100%) rename .github/categories/{02_privacy_bunnycdn_ipset.txt => privacy_bunnycdn_ipset.txt} (100%) rename .github/categories/{02_privacy_cloudflarecdn_ipset.txt => privacy_cloudflarecdn_ipset.txt} (100%) rename .github/categories/{02_privacy_cloudfront_ipset.txt => privacy_cloudfront_ipset.txt} (100%) rename .github/categories/{02_privacy_duckduckgo_ipset.txt => privacy_duckduckgo_ipset.txt} (100%) rename .github/categories/{02_privacy_facebook_ipset.txt => privacy_facebook_ipset.txt} (100%) rename .github/categories/{02_privacy_fastly_ipset.txt => privacy_fastly_ipset.txt} (100%) rename .github/categories/{02_privacy_general_ipset.txt => privacy_general_ipset.txt} (100%) rename .github/categories/{02_privacy_google_ipset.txt => privacy_google_ipset.txt} (100%) rename .github/categories/{02_privacy_pingdom_ipset.txt => privacy_pingdom_ipset.txt} (100%) rename .github/categories/{02_privacy_rssapi_ipset.txt => privacy_rssapi_ipset.txt} (100%) rename .github/categories/{02_privacy_stripe_api_ipset.txt => privacy_stripe_api_ipset.txt} (100%) rename .github/categories/{02_privacy_stripe_armada_gator_ipset.txt => privacy_stripe_armada_gator_ipset.txt} (100%) rename .github/categories/{02_privacy_stripe_webhooks_ipset.txt => privacy_stripe_webhooks_ipset.txt} (100%) rename .github/categories/{02_privacy_telegram_ipset.txt => privacy_telegram_ipset.txt} (100%) rename .github/categories/{02_privacy_uptimerobot_ipset.txt => privacy_uptimerobot_ipset.txt} (100%) rename .github/categories/{02_privacy_webpagetest_ipset.txt => privacy_webpagetest_ipset.txt} (100%) rename .github/categories/{02_privacy_yandex_ipset.txt => privacy_yandex_ipset.txt} (100%) rename .github/categories/{03_spam_forums_ipset.txt => spam_forums_ipset.txt} (100%) rename .github/categories/{03_spam_spamhaus_ipset.txt => spam_spamhaus_ipset.txt} (100%) rename .github/descriptions/{01_highrisk_ipset.txt => highrisk_ipset.txt} (100%) create mode 100644 .github/descriptions/isp_aol_ipset.txt create mode 100644 .github/descriptions/isp_att_ipset.txt create mode 100644 
.github/descriptions/isp_cablevision_ipset.txt create mode 100644 .github/descriptions/isp_charter_spectrum_timewarnercable_ipset.txt rename .github/descriptions/{01_master_ipset.txt => master_ipset.txt} (100%) rename .github/descriptions/{02_privacy_ahrefs_ipset.txt => privacy_ahrefs_ipset.txt} (100%) rename .github/descriptions/{02_privacy_amazon_aws_ipset.txt => privacy_amazon_aws_ipset.txt} (100%) rename .github/descriptions/{02_privacy_amazon_ec2_ipset.txt => privacy_amazon_ec2_ipset.txt} (100%) rename .github/descriptions/{02_privacy_apple_bot_ipset.txt => privacy_apple_bot_ipset.txt} (100%) rename .github/descriptions/{02_privacy_bing_ipset.txt => privacy_bing_ipset.txt} (100%) rename .github/descriptions/{02_privacy_bunnycdn_ipset.txt => privacy_bunnycdn_ipset.txt} (100%) rename .github/descriptions/{02_privacy_cloudflarecdn_ipset.txt => privacy_cloudflarecdn_ipset.txt} (100%) rename .github/descriptions/{02_privacy_cloudfront_ipset.txt => privacy_cloudfront_ipset.txt} (100%) rename .github/descriptions/{02_privacy_duckduckgo_ipset.txt => privacy_duckduckgo_ipset.txt} (100%) rename .github/descriptions/{02_privacy_facebook_ipset.txt => privacy_facebook_ipset.txt} (100%) rename .github/descriptions/{02_privacy_fastly_ipset.txt => privacy_fastly_ipset.txt} (100%) rename .github/descriptions/{02_privacy_general_ipset.txt => privacy_general_ipset.txt} (100%) rename .github/descriptions/{02_privacy_google_ipset.txt => privacy_google_ipset.txt} (100%) rename .github/descriptions/{02_privacy_pingdom_ipset.txt => privacy_pingdom_ipset.txt} (100%) rename .github/descriptions/{02_privacy_rssapi_ipset.txt => privacy_rssapi_ipset.txt} (100%) rename .github/descriptions/{02_privacy_stripe_api_ipset.txt => privacy_stripe_api_ipset.txt} (100%) rename .github/descriptions/{02_privacy_stripe_armada_gator_ipset.txt => privacy_stripe_armada_gator_ipset.txt} (100%) rename .github/descriptions/{02_privacy_stripe_webhooks_ipset.txt => privacy_stripe_webhooks_ipset.txt} (100%) rename .github/descriptions/{02_privacy_telegram_ipset.txt => privacy_telegram_ipset.txt} (100%) rename .github/descriptions/{02_privacy_uptimerobot_ipset.txt => privacy_uptimerobot_ipset.txt} (100%) rename .github/descriptions/{02_privacy_webpagetest_ipset.txt => privacy_webpagetest_ipset.txt} (100%) rename .github/descriptions/{02_privacy_yandex_ipset.txt => privacy_yandex_ipset.txt} (100%) rename .github/descriptions/{03_spam_forums_ipset.txt => spam_forums_ipset.txt} (100%) rename .github/descriptions/{03_spam_spamhaus_ipset.txt => spam_spamhaus_ipset.txt} (100%) rename .github/expires/{01_highrisk_ipset.txt => highrisk_ipset.txt} (100%) create mode 100644 .github/expires/isp_aol_ipset.txt create mode 100644 .github/expires/isp_att_ipset.txt create mode 100644 .github/expires/isp_cablevision_ipset.txt create mode 100644 .github/expires/isp_charter_spectrum_timewarnercable_ipset.txt create mode 100644 .github/expires/isp_suddenlink_optimum_ipset.txt rename .github/expires/{01_master_ipset.txt => master_ipset.txt} (100%) rename .github/expires/{02_privacy_ahrefs_ipset.txt => privacy_ahrefs_ipset.txt} (100%) rename .github/expires/{02_privacy_amazon_aws_ipset.txt => privacy_amazon_aws_ipset.txt} (100%) rename .github/expires/{02_privacy_amazon_ec2_ipset.txt => privacy_amazon_ec2_ipset.txt} (100%) rename .github/expires/{02_privacy_apple_bot_ipset.txt => privacy_apple_bot_ipset.txt} (100%) rename .github/expires/{02_privacy_bing_ipset.txt => privacy_bing_ipset.txt} (100%) rename .github/expires/{02_privacy_bunnycdn_ipset.txt => 
privacy_bunnycdn_ipset.txt} (100%) rename .github/expires/{02_privacy_cloudflarecdn_ipset.txt => privacy_cloudflarecdn_ipset.txt} (100%) rename .github/expires/{02_privacy_cloudfront_ipset.txt => privacy_cloudfront_ipset.txt} (100%) rename .github/expires/{02_privacy_duckduckgo_ipset.txt => privacy_duckduckgo_ipset.txt} (100%) rename .github/expires/{02_privacy_facebook_ipset.txt => privacy_facebook_ipset.txt} (100%) rename .github/expires/{02_privacy_fastly_ipset.txt => privacy_fastly_ipset.txt} (100%) rename .github/expires/{02_privacy_general_ipset.txt => privacy_general_ipset.txt} (100%) rename .github/expires/{02_privacy_google_ipset.txt => privacy_google_ipset.txt} (100%) rename .github/expires/{02_privacy_pingdom_ipset.txt => privacy_pingdom_ipset.txt} (100%) rename .github/expires/{02_privacy_rssapi_ipset.txt => privacy_rssapi_ipset.txt} (100%) rename .github/expires/{02_privacy_stripe_api_ipset.txt => privacy_stripe_api_ipset.txt} (100%) rename .github/expires/{02_privacy_stripe_armada_gator_ipset.txt => privacy_stripe_armada_gator_ipset.txt} (100%) rename .github/expires/{02_privacy_stripe_webhooks_ipset.txt => privacy_stripe_webhooks_ipset.txt} (100%) rename .github/expires/{02_privacy_telegram_ipset.txt => privacy_telegram_ipset.txt} (100%) rename .github/expires/{02_privacy_uptimerobot_ipset.txt => privacy_uptimerobot_ipset.txt} (100%) rename .github/expires/{02_privacy_webpagetest_ipset.txt => privacy_webpagetest_ipset.txt} (100%) rename .github/expires/{02_privacy_yandex_ipset.txt => privacy_yandex_ipset.txt} (100%) rename .github/expires/{03_spam_forums_ipset.txt => spam_forums_ipset.txt} (100%) rename .github/expires/{03_spam_spamhaus_ipset.txt => spam_spamhaus_ipset.txt} (100%) create mode 100644 .github/scripts/bl-whois.sh create mode 100644 .github/scripts/tool-range-ipcalc.sh create mode 100644 .github/scripts/tool-range-iprange.sh rename .github/url-source/{01_highrisk_ipset.txt => highrisk_ipset.txt} (100%) create mode 100644 .github/url-source/isp_aol_ipset.txt create mode 100644 .github/url-source/isp_att_ipset.txt create mode 100644 .github/url-source/isp_cablevision_ipset.txt create mode 100644 .github/url-source/isp_charter_spectrum_timewarnercable_ipset.txt rename .github/url-source/{01_master_ipset.txt => master_ipset.txt} (100%) rename .github/url-source/{02_privacy_ahrefs_ipset.txt => privacy_ahrefs_ipset.txt} (100%) rename .github/url-source/{02_privacy_amazon_aws_ipset.txt => privacy_amazon_aws_ipset.txt} (100%) rename .github/url-source/{02_privacy_amazon_ec2_ipset.txt => privacy_amazon_ec2_ipset.txt} (100%) rename .github/url-source/{02_privacy_applebot_ipset.txt => privacy_applebot_ipset.txt} (100%) rename .github/url-source/{02_privacy_bing_ipset.txt => privacy_bing_ipset.txt} (100%) rename .github/url-source/{02_privacy_bunnycdn_ipset.txt => privacy_bunnycdn_ipset.txt} (100%) rename .github/url-source/{02_privacy_cloudflarecdn_ipset.txt => privacy_cloudflarecdn_ipset.txt} (100%) rename .github/url-source/{02_privacy_cloudfront_ipset.txt => privacy_cloudfront_ipset.txt} (100%) rename .github/url-source/{02_privacy_duckduckgo_ipset.txt => privacy_duckduckgo_ipset.txt} (100%) rename .github/url-source/{02_privacy_facebook_ipset.txt => privacy_facebook_ipset.txt} (100%) rename .github/url-source/{02_privacy_fastly_ipset.txt => privacy_fastly_ipset.txt} (100%) rename .github/url-source/{02_privacy_general_ipset.txt => privacy_general_ipset.txt} (100%) rename .github/url-source/{02_privacy_google_ipset.txt => privacy_google_ipset.txt} (100%) rename 
.github/url-source/{02_privacy_pingdom_ipset.txt => privacy_pingdom_ipset.txt} (100%) rename .github/url-source/{02_privacy_rssapi_ipset.txt => privacy_rssapi_ipset.txt} (100%) rename .github/url-source/{02_privacy_stripe_api_ipset.txt => privacy_stripe_api_ipset.txt} (100%) rename .github/url-source/{02_privacy_stripe_armada_gator_ipset.txt => privacy_stripe_armada_gator_ipset.txt} (100%) rename .github/url-source/{02_privacy_stripe_webhooks_ipset.txt => privacy_stripe_webhooks_ipset.txt} (100%) rename .github/url-source/{02_privacy_telegram_ipset.txt => privacy_telegram_ipset.txt} (100%) rename .github/url-source/{02_privacy_uptimerobot_ipset.txt => privacy_uptimerobot_ipset.txt} (100%) rename .github/url-source/{02_privacy_webpagetest_ipset.txt => privacy_webpagetest_ipset.txt} (100%) rename .github/url-source/{02_privacy_yandex_ipset.txt => privacy_yandex_ipset.txt} (100%) rename .github/url-source/{03_spam_spamhaus_ipset.txt => spam_spamhaus_ipset.txt} (100%) diff --git a/.github/blocks/isp/aol.ipset b/.github/blocks/isp/aol.ipset new file mode 100644 index 000000000..03e54275b --- /dev/null +++ b/.github/blocks/isp/aol.ipset @@ -0,0 +1,3 @@ +62.51.0.0/16 +62.78.0.0/19 +195.93.0.0/17 \ No newline at end of file diff --git a/.github/blocks/privacy/01.ipset b/.github/blocks/privacy/01.ipset index dc663e49d..76e01b397 100644 --- a/.github/blocks/privacy/01.ipset +++ b/.github/blocks/privacy/01.ipset @@ -22,6 +22,7 @@ # a wide variety of bots come from this company (or customers of the company), # including scrapers, brute-forcing, email spam, data collection, etc. # +# https://raw.githubusercontent.com/cbuijs/ipasn/master/asn/as10439.list # # 135.84.216.0/24 diff --git a/.github/categories/01_highrisk_ipset.txt b/.github/categories/highrisk_ipset.txt similarity index 100% rename from .github/categories/01_highrisk_ipset.txt rename to .github/categories/highrisk_ipset.txt diff --git a/.github/categories/isp_aol_ipset.txt b/.github/categories/isp_aol_ipset.txt new file mode 100644 index 000000000..fc1cce23f --- /dev/null +++ b/.github/categories/isp_aol_ipset.txt @@ -0,0 +1 @@ +Internet Service Provider (ISP) \ No newline at end of file diff --git a/.github/categories/isp_att_ipset.txt b/.github/categories/isp_att_ipset.txt new file mode 100644 index 000000000..fc1cce23f --- /dev/null +++ b/.github/categories/isp_att_ipset.txt @@ -0,0 +1 @@ +Internet Service Provider (ISP) \ No newline at end of file diff --git a/.github/categories/isp_cablevision_ipset.txt b/.github/categories/isp_cablevision_ipset.txt new file mode 100644 index 000000000..fc1cce23f --- /dev/null +++ b/.github/categories/isp_cablevision_ipset.txt @@ -0,0 +1 @@ +Internet Service Provider (ISP) \ No newline at end of file diff --git a/.github/categories/isp_charter_spectrum_timewarnercable_ipset.txt b/.github/categories/isp_charter_spectrum_timewarnercable_ipset.txt new file mode 100644 index 000000000..fc1cce23f --- /dev/null +++ b/.github/categories/isp_charter_spectrum_timewarnercable_ipset.txt @@ -0,0 +1 @@ +Internet Service Provider (ISP) \ No newline at end of file diff --git a/.github/categories/01_master_ipset.txt b/.github/categories/master_ipset.txt similarity index 100% rename from .github/categories/01_master_ipset.txt rename to .github/categories/master_ipset.txt diff --git a/.github/categories/02_privacy_ahrefs_ipset.txt b/.github/categories/privacy_ahrefs_ipset.txt similarity index 100% rename from .github/categories/02_privacy_ahrefs_ipset.txt rename to .github/categories/privacy_ahrefs_ipset.txt diff --git 
a/.github/categories/02_privacy_amazon_aws_ipset.txt b/.github/categories/privacy_amazon_aws_ipset.txt similarity index 100% rename from .github/categories/02_privacy_amazon_aws_ipset.txt rename to .github/categories/privacy_amazon_aws_ipset.txt diff --git a/.github/categories/02_privacy_amazon_ec2_ipset.txt b/.github/categories/privacy_amazon_ec2_ipset.txt similarity index 100% rename from .github/categories/02_privacy_amazon_ec2_ipset.txt rename to .github/categories/privacy_amazon_ec2_ipset.txt diff --git a/.github/categories/02_privacy_applebot_ipset.txt b/.github/categories/privacy_applebot_ipset.txt similarity index 100% rename from .github/categories/02_privacy_applebot_ipset.txt rename to .github/categories/privacy_applebot_ipset.txt diff --git a/.github/categories/02_privacy_bing_ipset.txt b/.github/categories/privacy_bing_ipset.txt similarity index 100% rename from .github/categories/02_privacy_bing_ipset.txt rename to .github/categories/privacy_bing_ipset.txt diff --git a/.github/categories/02_privacy_bunnycdn_ipset.txt b/.github/categories/privacy_bunnycdn_ipset.txt similarity index 100% rename from .github/categories/02_privacy_bunnycdn_ipset.txt rename to .github/categories/privacy_bunnycdn_ipset.txt diff --git a/.github/categories/02_privacy_cloudflarecdn_ipset.txt b/.github/categories/privacy_cloudflarecdn_ipset.txt similarity index 100% rename from .github/categories/02_privacy_cloudflarecdn_ipset.txt rename to .github/categories/privacy_cloudflarecdn_ipset.txt diff --git a/.github/categories/02_privacy_cloudfront_ipset.txt b/.github/categories/privacy_cloudfront_ipset.txt similarity index 100% rename from .github/categories/02_privacy_cloudfront_ipset.txt rename to .github/categories/privacy_cloudfront_ipset.txt diff --git a/.github/categories/02_privacy_duckduckgo_ipset.txt b/.github/categories/privacy_duckduckgo_ipset.txt similarity index 100% rename from .github/categories/02_privacy_duckduckgo_ipset.txt rename to .github/categories/privacy_duckduckgo_ipset.txt diff --git a/.github/categories/02_privacy_facebook_ipset.txt b/.github/categories/privacy_facebook_ipset.txt similarity index 100% rename from .github/categories/02_privacy_facebook_ipset.txt rename to .github/categories/privacy_facebook_ipset.txt diff --git a/.github/categories/02_privacy_fastly_ipset.txt b/.github/categories/privacy_fastly_ipset.txt similarity index 100% rename from .github/categories/02_privacy_fastly_ipset.txt rename to .github/categories/privacy_fastly_ipset.txt diff --git a/.github/categories/02_privacy_general_ipset.txt b/.github/categories/privacy_general_ipset.txt similarity index 100% rename from .github/categories/02_privacy_general_ipset.txt rename to .github/categories/privacy_general_ipset.txt diff --git a/.github/categories/02_privacy_google_ipset.txt b/.github/categories/privacy_google_ipset.txt similarity index 100% rename from .github/categories/02_privacy_google_ipset.txt rename to .github/categories/privacy_google_ipset.txt diff --git a/.github/categories/02_privacy_pingdom_ipset.txt b/.github/categories/privacy_pingdom_ipset.txt similarity index 100% rename from .github/categories/02_privacy_pingdom_ipset.txt rename to .github/categories/privacy_pingdom_ipset.txt diff --git a/.github/categories/02_privacy_rssapi_ipset.txt b/.github/categories/privacy_rssapi_ipset.txt similarity index 100% rename from .github/categories/02_privacy_rssapi_ipset.txt rename to .github/categories/privacy_rssapi_ipset.txt diff --git a/.github/categories/02_privacy_stripe_api_ipset.txt 
b/.github/categories/privacy_stripe_api_ipset.txt similarity index 100% rename from .github/categories/02_privacy_stripe_api_ipset.txt rename to .github/categories/privacy_stripe_api_ipset.txt diff --git a/.github/categories/02_privacy_stripe_armada_gator_ipset.txt b/.github/categories/privacy_stripe_armada_gator_ipset.txt similarity index 100% rename from .github/categories/02_privacy_stripe_armada_gator_ipset.txt rename to .github/categories/privacy_stripe_armada_gator_ipset.txt diff --git a/.github/categories/02_privacy_stripe_webhooks_ipset.txt b/.github/categories/privacy_stripe_webhooks_ipset.txt similarity index 100% rename from .github/categories/02_privacy_stripe_webhooks_ipset.txt rename to .github/categories/privacy_stripe_webhooks_ipset.txt diff --git a/.github/categories/02_privacy_telegram_ipset.txt b/.github/categories/privacy_telegram_ipset.txt similarity index 100% rename from .github/categories/02_privacy_telegram_ipset.txt rename to .github/categories/privacy_telegram_ipset.txt diff --git a/.github/categories/02_privacy_uptimerobot_ipset.txt b/.github/categories/privacy_uptimerobot_ipset.txt similarity index 100% rename from .github/categories/02_privacy_uptimerobot_ipset.txt rename to .github/categories/privacy_uptimerobot_ipset.txt diff --git a/.github/categories/02_privacy_webpagetest_ipset.txt b/.github/categories/privacy_webpagetest_ipset.txt similarity index 100% rename from .github/categories/02_privacy_webpagetest_ipset.txt rename to .github/categories/privacy_webpagetest_ipset.txt diff --git a/.github/categories/02_privacy_yandex_ipset.txt b/.github/categories/privacy_yandex_ipset.txt similarity index 100% rename from .github/categories/02_privacy_yandex_ipset.txt rename to .github/categories/privacy_yandex_ipset.txt diff --git a/.github/categories/03_spam_forums_ipset.txt b/.github/categories/spam_forums_ipset.txt similarity index 100% rename from .github/categories/03_spam_forums_ipset.txt rename to .github/categories/spam_forums_ipset.txt diff --git a/.github/categories/03_spam_spamhaus_ipset.txt b/.github/categories/spam_spamhaus_ipset.txt similarity index 100% rename from .github/categories/03_spam_spamhaus_ipset.txt rename to .github/categories/spam_spamhaus_ipset.txt diff --git a/.github/descriptions/01_highrisk_ipset.txt b/.github/descriptions/highrisk_ipset.txt similarity index 100% rename from .github/descriptions/01_highrisk_ipset.txt rename to .github/descriptions/highrisk_ipset.txt diff --git a/.github/descriptions/isp_aol_ipset.txt b/.github/descriptions/isp_aol_ipset.txt new file mode 100644 index 000000000..0aee5e5a3 --- /dev/null +++ b/.github/descriptions/isp_aol_ipset.txt @@ -0,0 +1,12 @@ +# Historic AOL LLC (Internet Service Provider) +# Oath Holdings Inc. +# +# Website https://aol.com +# Parent organization: Warner Bros. Discovery +# Founded 1985 +# Registry RIPE +# +# This ISP has slowly been winding down over the years, with less and less servers. This is +# especially true since the messenger service ICQ was shut down on June 26, 2024. +# +# Be aware that this ASN may disappear in the future as less IP blocks are assigned. 
\ No newline at end of file diff --git a/.github/descriptions/isp_att_ipset.txt b/.github/descriptions/isp_att_ipset.txt new file mode 100644 index 000000000..180da64f8 --- /dev/null +++ b/.github/descriptions/isp_att_ipset.txt @@ -0,0 +1,10 @@ +# AT&T Internet (Internet Service Provider) +# +# Website https://att.com/internet/ +# Parent organization: AT&T Communications +# Founded September 19, 2016 +# +# AT&T Internet is an AT&T brand of broadband internet service. Previously, AT&T Internet was +# branded as U-verse Internet and bundled with U-verse TV, which was spun off into the newly +# independent DirecTV in 2021. AT&T Internet plans powered by fiber-optic cable use the AT&T +# Fiber brand. \ No newline at end of file diff --git a/.github/descriptions/isp_cablevision_ipset.txt b/.github/descriptions/isp_cablevision_ipset.txt new file mode 100644 index 000000000..af4d92437 --- /dev/null +++ b/.github/descriptions/isp_cablevision_ipset.txt @@ -0,0 +1,17 @@ +# Cablevision (Internet Service Provider) +# +# Website https://cablevision.com +# Founded 1973 +# Status Acquired by Altice (June 21, 2016) +# +# If you wish to block this internet service provider, it is recommended that you use both this blocklist, +# as well as the blocklist for Suddenlink / Optimum. +# - https://raw.githubusercontent.com/Aetherinox/blocklists/main/blocklists/isp/isp_suddenlink_optimum.ipset +# +# After Altice acquired Cablevision Systems Corporation on November 30, 2016, Suddenlink was combined with +# Cablevision. Together with Optimum, the name used by Cablevision for its products, Altice USA became the +# United States' fourth largest cable operator with 4.6 million subscribers, and the sixth largest Pay TV +# service provider with 3.5 million subscribers. On August 1, 2022, Suddenlink rebranded into Optimum. +# +# This blocklist contains mostly Cablevision IP addresses, however, it also includes some blocks which +# carried over from the acquisition of Suddenlink and Optimum which are registered as the same ASN. \ No newline at end of file diff --git a/.github/descriptions/isp_charter_spectrum_timewarnercable_ipset.txt b/.github/descriptions/isp_charter_spectrum_timewarnercable_ipset.txt new file mode 100644 index 000000000..fc82ee752 --- /dev/null +++ b/.github/descriptions/isp_charter_spectrum_timewarnercable_ipset.txt @@ -0,0 +1,10 @@ +# Time Warner Cable, Charter Communications, Spectrum (Internet Service Provider) +# +# Website https://spectrum.com +# Founded Time Warner Cable (1973), Spectrum (2014) +# Status Merged into Charter Spectrum as of 2016. +# +# Time Warner Cable, Inc. was acquired by Charter Communications on May 18, 2016, which formed Charter Spectrum. +# - https://ir.charter.com/news-releases/news-release-details/charter-communications-merge-time-warner-cable-and-acquire +# +# This list merges all three companies together to block the overall network. 
\ No newline at end of file diff --git a/.github/descriptions/01_master_ipset.txt b/.github/descriptions/master_ipset.txt similarity index 100% rename from .github/descriptions/01_master_ipset.txt rename to .github/descriptions/master_ipset.txt diff --git a/.github/descriptions/02_privacy_ahrefs_ipset.txt b/.github/descriptions/privacy_ahrefs_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_ahrefs_ipset.txt rename to .github/descriptions/privacy_ahrefs_ipset.txt diff --git a/.github/descriptions/02_privacy_amazon_aws_ipset.txt b/.github/descriptions/privacy_amazon_aws_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_amazon_aws_ipset.txt rename to .github/descriptions/privacy_amazon_aws_ipset.txt diff --git a/.github/descriptions/02_privacy_amazon_ec2_ipset.txt b/.github/descriptions/privacy_amazon_ec2_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_amazon_ec2_ipset.txt rename to .github/descriptions/privacy_amazon_ec2_ipset.txt diff --git a/.github/descriptions/02_privacy_apple_bot_ipset.txt b/.github/descriptions/privacy_apple_bot_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_apple_bot_ipset.txt rename to .github/descriptions/privacy_apple_bot_ipset.txt diff --git a/.github/descriptions/02_privacy_bing_ipset.txt b/.github/descriptions/privacy_bing_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_bing_ipset.txt rename to .github/descriptions/privacy_bing_ipset.txt diff --git a/.github/descriptions/02_privacy_bunnycdn_ipset.txt b/.github/descriptions/privacy_bunnycdn_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_bunnycdn_ipset.txt rename to .github/descriptions/privacy_bunnycdn_ipset.txt diff --git a/.github/descriptions/02_privacy_cloudflarecdn_ipset.txt b/.github/descriptions/privacy_cloudflarecdn_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_cloudflarecdn_ipset.txt rename to .github/descriptions/privacy_cloudflarecdn_ipset.txt diff --git a/.github/descriptions/02_privacy_cloudfront_ipset.txt b/.github/descriptions/privacy_cloudfront_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_cloudfront_ipset.txt rename to .github/descriptions/privacy_cloudfront_ipset.txt diff --git a/.github/descriptions/02_privacy_duckduckgo_ipset.txt b/.github/descriptions/privacy_duckduckgo_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_duckduckgo_ipset.txt rename to .github/descriptions/privacy_duckduckgo_ipset.txt diff --git a/.github/descriptions/02_privacy_facebook_ipset.txt b/.github/descriptions/privacy_facebook_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_facebook_ipset.txt rename to .github/descriptions/privacy_facebook_ipset.txt diff --git a/.github/descriptions/02_privacy_fastly_ipset.txt b/.github/descriptions/privacy_fastly_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_fastly_ipset.txt rename to .github/descriptions/privacy_fastly_ipset.txt diff --git a/.github/descriptions/02_privacy_general_ipset.txt b/.github/descriptions/privacy_general_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_general_ipset.txt rename to .github/descriptions/privacy_general_ipset.txt diff --git a/.github/descriptions/02_privacy_google_ipset.txt b/.github/descriptions/privacy_google_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_google_ipset.txt rename to 
.github/descriptions/privacy_google_ipset.txt diff --git a/.github/descriptions/02_privacy_pingdom_ipset.txt b/.github/descriptions/privacy_pingdom_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_pingdom_ipset.txt rename to .github/descriptions/privacy_pingdom_ipset.txt diff --git a/.github/descriptions/02_privacy_rssapi_ipset.txt b/.github/descriptions/privacy_rssapi_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_rssapi_ipset.txt rename to .github/descriptions/privacy_rssapi_ipset.txt diff --git a/.github/descriptions/02_privacy_stripe_api_ipset.txt b/.github/descriptions/privacy_stripe_api_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_stripe_api_ipset.txt rename to .github/descriptions/privacy_stripe_api_ipset.txt diff --git a/.github/descriptions/02_privacy_stripe_armada_gator_ipset.txt b/.github/descriptions/privacy_stripe_armada_gator_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_stripe_armada_gator_ipset.txt rename to .github/descriptions/privacy_stripe_armada_gator_ipset.txt diff --git a/.github/descriptions/02_privacy_stripe_webhooks_ipset.txt b/.github/descriptions/privacy_stripe_webhooks_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_stripe_webhooks_ipset.txt rename to .github/descriptions/privacy_stripe_webhooks_ipset.txt diff --git a/.github/descriptions/02_privacy_telegram_ipset.txt b/.github/descriptions/privacy_telegram_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_telegram_ipset.txt rename to .github/descriptions/privacy_telegram_ipset.txt diff --git a/.github/descriptions/02_privacy_uptimerobot_ipset.txt b/.github/descriptions/privacy_uptimerobot_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_uptimerobot_ipset.txt rename to .github/descriptions/privacy_uptimerobot_ipset.txt diff --git a/.github/descriptions/02_privacy_webpagetest_ipset.txt b/.github/descriptions/privacy_webpagetest_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_webpagetest_ipset.txt rename to .github/descriptions/privacy_webpagetest_ipset.txt diff --git a/.github/descriptions/02_privacy_yandex_ipset.txt b/.github/descriptions/privacy_yandex_ipset.txt similarity index 100% rename from .github/descriptions/02_privacy_yandex_ipset.txt rename to .github/descriptions/privacy_yandex_ipset.txt diff --git a/.github/descriptions/03_spam_forums_ipset.txt b/.github/descriptions/spam_forums_ipset.txt similarity index 100% rename from .github/descriptions/03_spam_forums_ipset.txt rename to .github/descriptions/spam_forums_ipset.txt diff --git a/.github/descriptions/03_spam_spamhaus_ipset.txt b/.github/descriptions/spam_spamhaus_ipset.txt similarity index 100% rename from .github/descriptions/03_spam_spamhaus_ipset.txt rename to .github/descriptions/spam_spamhaus_ipset.txt diff --git a/.github/expires/01_highrisk_ipset.txt b/.github/expires/highrisk_ipset.txt similarity index 100% rename from .github/expires/01_highrisk_ipset.txt rename to .github/expires/highrisk_ipset.txt diff --git a/.github/expires/isp_aol_ipset.txt b/.github/expires/isp_aol_ipset.txt new file mode 100644 index 000000000..fb971a6f7 --- /dev/null +++ b/.github/expires/isp_aol_ipset.txt @@ -0,0 +1 @@ +7 days \ No newline at end of file diff --git a/.github/expires/isp_att_ipset.txt b/.github/expires/isp_att_ipset.txt new file mode 100644 index 000000000..fb971a6f7 --- /dev/null +++ b/.github/expires/isp_att_ipset.txt @@ 
-0,0 +1 @@ +7 days \ No newline at end of file diff --git a/.github/expires/isp_cablevision_ipset.txt b/.github/expires/isp_cablevision_ipset.txt new file mode 100644 index 000000000..fb971a6f7 --- /dev/null +++ b/.github/expires/isp_cablevision_ipset.txt @@ -0,0 +1 @@ +7 days \ No newline at end of file diff --git a/.github/expires/isp_charter_spectrum_timewarnercable_ipset.txt b/.github/expires/isp_charter_spectrum_timewarnercable_ipset.txt new file mode 100644 index 000000000..fb971a6f7 --- /dev/null +++ b/.github/expires/isp_charter_spectrum_timewarnercable_ipset.txt @@ -0,0 +1 @@ +7 days \ No newline at end of file diff --git a/.github/expires/isp_suddenlink_optimum_ipset.txt b/.github/expires/isp_suddenlink_optimum_ipset.txt new file mode 100644 index 000000000..fb971a6f7 --- /dev/null +++ b/.github/expires/isp_suddenlink_optimum_ipset.txt @@ -0,0 +1 @@ +7 days \ No newline at end of file diff --git a/.github/expires/01_master_ipset.txt b/.github/expires/master_ipset.txt similarity index 100% rename from .github/expires/01_master_ipset.txt rename to .github/expires/master_ipset.txt diff --git a/.github/expires/02_privacy_ahrefs_ipset.txt b/.github/expires/privacy_ahrefs_ipset.txt similarity index 100% rename from .github/expires/02_privacy_ahrefs_ipset.txt rename to .github/expires/privacy_ahrefs_ipset.txt diff --git a/.github/expires/02_privacy_amazon_aws_ipset.txt b/.github/expires/privacy_amazon_aws_ipset.txt similarity index 100% rename from .github/expires/02_privacy_amazon_aws_ipset.txt rename to .github/expires/privacy_amazon_aws_ipset.txt diff --git a/.github/expires/02_privacy_amazon_ec2_ipset.txt b/.github/expires/privacy_amazon_ec2_ipset.txt similarity index 100% rename from .github/expires/02_privacy_amazon_ec2_ipset.txt rename to .github/expires/privacy_amazon_ec2_ipset.txt diff --git a/.github/expires/02_privacy_apple_bot_ipset.txt b/.github/expires/privacy_apple_bot_ipset.txt similarity index 100% rename from .github/expires/02_privacy_apple_bot_ipset.txt rename to .github/expires/privacy_apple_bot_ipset.txt diff --git a/.github/expires/02_privacy_bing_ipset.txt b/.github/expires/privacy_bing_ipset.txt similarity index 100% rename from .github/expires/02_privacy_bing_ipset.txt rename to .github/expires/privacy_bing_ipset.txt diff --git a/.github/expires/02_privacy_bunnycdn_ipset.txt b/.github/expires/privacy_bunnycdn_ipset.txt similarity index 100% rename from .github/expires/02_privacy_bunnycdn_ipset.txt rename to .github/expires/privacy_bunnycdn_ipset.txt diff --git a/.github/expires/02_privacy_cloudflarecdn_ipset.txt b/.github/expires/privacy_cloudflarecdn_ipset.txt similarity index 100% rename from .github/expires/02_privacy_cloudflarecdn_ipset.txt rename to .github/expires/privacy_cloudflarecdn_ipset.txt diff --git a/.github/expires/02_privacy_cloudfront_ipset.txt b/.github/expires/privacy_cloudfront_ipset.txt similarity index 100% rename from .github/expires/02_privacy_cloudfront_ipset.txt rename to .github/expires/privacy_cloudfront_ipset.txt diff --git a/.github/expires/02_privacy_duckduckgo_ipset.txt b/.github/expires/privacy_duckduckgo_ipset.txt similarity index 100% rename from .github/expires/02_privacy_duckduckgo_ipset.txt rename to .github/expires/privacy_duckduckgo_ipset.txt diff --git a/.github/expires/02_privacy_facebook_ipset.txt b/.github/expires/privacy_facebook_ipset.txt similarity index 100% rename from .github/expires/02_privacy_facebook_ipset.txt rename to .github/expires/privacy_facebook_ipset.txt diff --git 
a/.github/expires/02_privacy_fastly_ipset.txt b/.github/expires/privacy_fastly_ipset.txt similarity index 100% rename from .github/expires/02_privacy_fastly_ipset.txt rename to .github/expires/privacy_fastly_ipset.txt diff --git a/.github/expires/02_privacy_general_ipset.txt b/.github/expires/privacy_general_ipset.txt similarity index 100% rename from .github/expires/02_privacy_general_ipset.txt rename to .github/expires/privacy_general_ipset.txt diff --git a/.github/expires/02_privacy_google_ipset.txt b/.github/expires/privacy_google_ipset.txt similarity index 100% rename from .github/expires/02_privacy_google_ipset.txt rename to .github/expires/privacy_google_ipset.txt diff --git a/.github/expires/02_privacy_pingdom_ipset.txt b/.github/expires/privacy_pingdom_ipset.txt similarity index 100% rename from .github/expires/02_privacy_pingdom_ipset.txt rename to .github/expires/privacy_pingdom_ipset.txt diff --git a/.github/expires/02_privacy_rssapi_ipset.txt b/.github/expires/privacy_rssapi_ipset.txt similarity index 100% rename from .github/expires/02_privacy_rssapi_ipset.txt rename to .github/expires/privacy_rssapi_ipset.txt diff --git a/.github/expires/02_privacy_stripe_api_ipset.txt b/.github/expires/privacy_stripe_api_ipset.txt similarity index 100% rename from .github/expires/02_privacy_stripe_api_ipset.txt rename to .github/expires/privacy_stripe_api_ipset.txt diff --git a/.github/expires/02_privacy_stripe_armada_gator_ipset.txt b/.github/expires/privacy_stripe_armada_gator_ipset.txt similarity index 100% rename from .github/expires/02_privacy_stripe_armada_gator_ipset.txt rename to .github/expires/privacy_stripe_armada_gator_ipset.txt diff --git a/.github/expires/02_privacy_stripe_webhooks_ipset.txt b/.github/expires/privacy_stripe_webhooks_ipset.txt similarity index 100% rename from .github/expires/02_privacy_stripe_webhooks_ipset.txt rename to .github/expires/privacy_stripe_webhooks_ipset.txt diff --git a/.github/expires/02_privacy_telegram_ipset.txt b/.github/expires/privacy_telegram_ipset.txt similarity index 100% rename from .github/expires/02_privacy_telegram_ipset.txt rename to .github/expires/privacy_telegram_ipset.txt diff --git a/.github/expires/02_privacy_uptimerobot_ipset.txt b/.github/expires/privacy_uptimerobot_ipset.txt similarity index 100% rename from .github/expires/02_privacy_uptimerobot_ipset.txt rename to .github/expires/privacy_uptimerobot_ipset.txt diff --git a/.github/expires/02_privacy_webpagetest_ipset.txt b/.github/expires/privacy_webpagetest_ipset.txt similarity index 100% rename from .github/expires/02_privacy_webpagetest_ipset.txt rename to .github/expires/privacy_webpagetest_ipset.txt diff --git a/.github/expires/02_privacy_yandex_ipset.txt b/.github/expires/privacy_yandex_ipset.txt similarity index 100% rename from .github/expires/02_privacy_yandex_ipset.txt rename to .github/expires/privacy_yandex_ipset.txt diff --git a/.github/expires/03_spam_forums_ipset.txt b/.github/expires/spam_forums_ipset.txt similarity index 100% rename from .github/expires/03_spam_forums_ipset.txt rename to .github/expires/spam_forums_ipset.txt diff --git a/.github/expires/03_spam_spamhaus_ipset.txt b/.github/expires/spam_spamhaus_ipset.txt similarity index 100% rename from .github/expires/03_spam_spamhaus_ipset.txt rename to .github/expires/spam_spamhaus_ipset.txt diff --git a/.github/scripts/bl-block.sh b/.github/scripts/bl-block.sh index 199fef155..48b22ac85 100644 --- a/.github/scripts/bl-block.sh +++ b/.github/scripts/bl-block.sh @@ -8,9 +8,13 @@ # copies local ipsets 
from .github/blocks/${ARG_BLOCKS_CAT}/*.ipset # # @terminal .github/scripts/bl-block.sh \ -# blocklists/02_privacy_general.ipset \ +# blocklists/privacy/privacy_general.ipset \ # privacy # +# @terminal .github/scripts/bl-block.sh \ +# blocklists/isp/isp_aol.ipset \ +# isp/aol.ipset +# # @workflow # Privacy › General # chmod +x ".github/scripts/bl-block.sh" # run_general=".github/scripts/bl-block.sh 02_privacy_general.ipset privacy" @@ -31,7 +35,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -68,6 +73,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. -k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -189,8 +228,27 @@ fi # # if [ -d .github/blocks/ ]; then - for APP_FILE_TEMP in .github/blocks/${ARG_BLOCKS_CAT}/*.ipset; do - echo -e " 📒 Reading static block ${ORANGE2}${APP_FILE_TEMP}${RESET}" + + # # + # Determines if the category provided is either a folder, or a file ending with `.ipset`. + # + # if a folder is provided, all files in the folder will be looped and loaded. + # if a file is provided, only that one file will be loaded. + # # + + APP_BLOCK_TARGET=".github/blocks/${ARG_BLOCKS_CAT}/*.ipset" + if [[ "$ARG_BLOCKS_CAT" == *ipset ]]; then + APP_BLOCK_TARGET=".github/blocks/${ARG_BLOCKS_CAT}" + fi + + # # + # Block folder specified. Each file in folder will be loaded. 
does not have .ipset at the end + # + # @usage .github/scripts/bl-block.sh blocklists/isp/isp_aol.ipset isp/aol + # # + + for APP_FILE_TEMP in ${APP_BLOCK_TARGET}; do + echo -e " 📒 Reading static block ${ORANGE2}${APP_FILE_TEMP}${RESET}" # # # calculate how many IPs are in a subnet @@ -261,7 +319,7 @@ if [ -d .github/blocks/ ]; then echo -e " ➕ Added ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_SUBNET} Subnets${RESET} to ${BLUE2}${APP_FILE_PERM}${RESET}" echo -e - done + done fi # # @@ -271,7 +329,7 @@ fi # - remove .sort temp file # # -APP_OUT=$(cat ${APP_FILE_PERM} | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) +APP_OUT=$(cat ${APP_FILE_PERM} | grep -vi "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) sed -i 's/[[:blank:]]*$//' ${APP_FILE_PERM}.sort > ${APP_FILE_PERM} cat ${APP_FILE_PERM}.sort >> ${APP_FILE_PERM} diff --git a/.github/scripts/bl-format.sh b/.github/scripts/bl-format.sh index 0583c9c72..2acf69a47 100644 --- a/.github/scripts/bl-format.sh +++ b/.github/scripts/bl-format.sh @@ -27,9 +27,10 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder -# # +## # # vars > colors # # Use the color table at: @@ -64,6 +65,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. -k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -244,7 +279,7 @@ done # Get IP list # # -list_ips=$(echo "${APP_OUT}" | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) +list_ips=$(echo "${APP_OUT}" | grep -vi "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${APP_FILE_TEMP} # remove # and ; comments sed -i 's/\-.*//' ${APP_FILE_TEMP} # remove hyphens for ip ranges sed -i 's/[[:blank:]]*$//' ${APP_FILE_TEMP} # remove space / tab from EOL diff --git a/.github/scripts/bl-geolite2.sh b/.github/scripts/bl-geolite2.sh index b91217510..b8ff367d3 100644 --- a/.github/scripts/bl-geolite2.sh +++ b/.github/scripts/bl-geolite2.sh @@ -7,17 +7,23 @@ # @summary Aetherx Blocklists > GeoLite2 Country IPsets # generates a set of IPSET files by reading the GeoLite2 csv file and splitting the IPs up into their associated country. 
# -# @terminal .github/scripts/bl-geolite2.sh \ -# -p +# @terminal .github/scripts/bl-geolite2.sh -l +# .github/scripts/bl-geolite2.sh --local +# .github/scripts/bl-geolite2.sh --local --dev +# .github/scripts/bl-geolite2.sh --dry # -# @command bl-geolite2.sh [ -p ] -# bl-geolite2.sh -p ABCDEF123456789 +# @command bl-geolite2.sh -l ] +# bl-geolite2.sh --local +# bl-geolite2.sh --dev +# bl-geolite2.sh --dry # # # # -# Download / License Key +# LICENSE KEY / DOWNLOAD MODE +# .github/scripts/bl-geolite2.sh -l +# .github/scripts/bl-geolite2.sh --license # -# If you are not running local mode (see below), you will need to download the GeoLite2 database .csv files when the script starts. +# If you are not running LOCAL MODE (see below), you will need to download the GeoLite2 database .csv files when the script starts. # You must specify a license key from the MaxMind website. Ensure you set up a Github workflow secret if running this script on Github. # # To specify a license key, you can: @@ -25,21 +31,114 @@ # Add LICENSE_KEY=YOUR_LICENSE_KEY # # - Provide the license key as a parameter when running the script -# bl-geolite2.sh -p ABCDEF123456789 +# bl-geolite2.sh --license ABCDEF123456789 +# bl-geolite2.sh -l ABCDEF123456789 # # # # -# Local Mode +# LOCAL MODE +# .github/scripts/bl-geolite2.sh -o +# .github/scripts/bl-geolite2.sh --local # -# allows you to use a local copy of the .CSV files instead of downloading them each and every time you run the script. -# - Create a new folder `/local` in the parent folder -# - Download the location files from: -# http://geolite.maxmind.com/download/geoip/database/GeoLite2-Country-CSV.zip -# - Place the .csv files in the `/local` folder -# - Enable local mode in the settings below -# APP_SOURCE_LOCAL_ENABLED=true +# PLACE FILES IN +# `.github/local` +# +# Local mode allows you to use GeoLite2 database from a local copy on your server, instead of downloading a fresh zip. +# +# Local files must be placed in the `.github/local` folder. This method supports either the zipped files, OR each CSV. +# +# If providing the ZIP, you must have the following files: +# .github/local/GeoLite2-Country-CSV.zip +# .github/local/GeoLite2-Country-CSV.zip.md5 +# +# OR +# +# If providing each CSV file, you must have the files: +# .github/local/GeoLite2-Country-Locations-en.csv +# .github/local/GeoLite2-Country-Blocks-IPv4.csv +# .github/local/GeoLite2-Country-Blocks-IPv6.csv +# +# If you are providing the ZIP files, you can get the zip and the md5 hash files from +# - CSV URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip +# - MD5 URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip.md5 +# +# The files MUST be named: +# - GeoLite2-Country-CSV.zip +# - GeoLite2-Country-CSV.zip.md5 # # +# # +# DRY-RUN MODE +# .github/scripts/bl-geolite2.sh -d +# .github/scripts/bl-geolite2.sh --dry +# +# PLACE FILES IN +# `.github/local` +# +# This parameter runs the script as if it were downloading the files from the MaxMind official website, except the CURL calls are skipped. +# the .ZIP and .ZIP.MD5 files are required to be in the .temp folder. 
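# #
# For reference, a hedged sketch of fetching the two files described here by hand before they are
# placed into the local / .temp folder. LICENSE_KEY below is a placeholder for your own MaxMind key,
# and the checksum comparison assumes the .zip.md5 endpoint returns a bare hex digest.
# #

LICENSE_KEY="YOUR_LICENSE_KEY"
BASE_URL="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=${LICENSE_KEY}"

curl -fsSL "${BASE_URL}&suffix=zip"     -o "GeoLite2-Country-CSV.zip"
curl -fsSL "${BASE_URL}&suffix=zip.md5" -o "GeoLite2-Country-CSV.zip.md5"

# compare the local digest against the published one
[ "$(md5sum GeoLite2-Country-CSV.zip | awk '{print $1}')" = "$(cat GeoLite2-Country-CSV.zip.md5)" ] \
    && echo "checksum OK" \
    || echo "checksum mismatch"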
+# +# The files MUST be named: +# - GeoLite2-Country-CSV.zip +# - GeoLite2-Country-CSV.zip.md5 +# +# Download the .zip and .zip.md5 from: +# - CSV URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip +# - MD5 URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip.md5 +# # + +APP_THIS_FILE=$(basename "$0") # current script file +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder + +# # +# vars > colors +# +# Use the color table at: +# - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 +# # + +RESET=$'\e[0m' +WHITE=$'\e[97m' +BOLD=$'\e[1m' +DIM=$'\e[2m' +UNDERLINE=$'\e[4m' +BLINK=$'\e[5m' +INVERTED=$'\e[7m' +HIDDEN=$'\e[8m' +BLACK=$'\e[38;5;0m' +FUCHSIA1=$'\e[38;5;125m' +FUCHSIA2=$'\e[38;5;198m' +RED1=$'\e[38;5;160m' +RED2=$'\e[38;5;196m' +RED3=$'\e[38;5;166m' +ORANGE1=$'\e[38;5;202m' +ORANGE2=$'\e[38;5;208m' +MAGENTA=$'\e[38;5;5m' +BLUE1=$'\e[38;5;033m' +BLUE2=$'\e[38;5;39m' +CYAN=$'\e[38;5;6m' +GREEN1=$'\e[38;5;2m' +GREEN2=$'\e[38;5;76m' +YELLOW1=$'\e[38;5;184m' +YELLOW2=$'\e[38;5;190m' +YELLOW3=$'\e[38;5;193m' +GREY1=$'\e[38;5;240m' +GREY2=$'\e[38;5;244m' +GREY3=$'\e[38;5;250m' + +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED1}Error${RESET}: ${RESET}$1" + echo -e + exit 0 +} + # # # Debug Mode # @@ -50,30 +149,312 @@ # in production mode. # # +SECONDS=0 # set seconds count for beginning of script +APP_NAME="GeoLite2 Database Script" # name of app APP_VER=("1" "1" "0" "0") # current script version APP_DEBUG=false # debug mode -APP_REPO="Aetherinox/dev-kw" # repository +APP_REPO="Aetherinox/blocklists" # repository APP_REPO_BRANCH="main" # repository branch -APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory APP_CFG_FILE="aetherx.conf" # Optional config file for license key / settings APP_TARGET_DIR="blocklists/country/geolite" # path to save ipsets APP_TARGET_EXT_TMP="tmp" # temp extension for ipsets before work is done APP_TARGET_EXT_PROD="ipset" # extension for ipsets APP_SOURCE_LOCAL_ENABLED=false # True = loads from ./local, False = download from MaxMind -APP_SOURCE_LOCAL="local" # where to fetch local csv from if local mode enabled +APP_SOURCE_LOCAL="local" # local mode enabled: where to fetch local csv from +APP_SOURCE_TEMP=".temp" # local mode disabled: where csv will be downloaded to +APP_SOURCE_CACHE="cache" # location where countries and continents are stored as array to file APP_DIR_IPV4="./${APP_TARGET_DIR}/ipv4" # folder to store ipv4 APP_DIR_IPV6="./${APP_TARGET_DIR}/ipv6" # folder to store ipv6 APP_GEO_LOCS_CSV="GeoLite2-Country-Locations-en.csv" # Geolite2 Country Locations CSV APP_GEO_IPV4_CSV="GeoLite2-Country-Blocks-IPv4.csv" # Geolite2 Country CSV IPv4 APP_GEO_IPV6_CSV="GeoLite2-Country-Blocks-IPv6.csv" # Geolite2 Country CSV IPv6 APP_GEO_ZIP="GeoLite2-Country-CSV.zip" # Geolite2 Country CSV Zip -APP_CURL_AGENT="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36" +APP_GEO_ZIP_MD5="${APP_GEO_ZIP}.md5" # Geolite2 Country CSV Zip MD5 hash file COUNT_LINES=0 # number of lines in doc COUNT_TOTAL_SUBNET=0 # number of IPs in all subnets combined COUNT_TOTAL_IP=0 # number of single IPs (counts each line) BLOCKS_COUNT_TOTAL_IP=0 # number of ips for one 
particular file BLOCKS_COUNT_TOTAL_SUBNET=0 # number of subnets for one particular file +APP_AGENT="Mozilla/5.0 (Windows NT 10.0; WOW64) "\ +"AppleWebKit/537.36 (KHTML, like Gecko) "\ +"Chrome/51.0.2704.103 Safari/537.36" # user agent used with curl + +# # +# Define > Help Vars +# # + +APP_DESC="This script downloads the geographical databases from the MaxMind GeoLite2 servers. \n\n They are then broken up into their respective continent and country files. Duplicates are removed, IPs\n are re-sorted, and then all files are pushed to the repository." + +APP_USAGE="🗔 Usage: ./${APP_THIS_FILE} ${BLUE2}[-l ]${RESET} + ${GREY2}./${APP_THIS_FILE} ${BLUE2}-?${RESET} + ${GREY2}./${APP_THIS_FILE} ${BLUE2}clr${RESET} + ${GREY2}./${APP_THIS_FILE} ${BLUE2}chart${RESET} +" + +# # +# Color Code Test +# +# @usage .github/scripts/bt-transmission.sh clr +# # + +function debug_ColorTest() +{ + echo -e + echo -e "RESET ${GREY1}................ ${RESET}This is test text ███████████████${RESET}" + echo -e "WHITE ${GREY1}................ ${WHITE}This is test text ███████████████${RESET}" + echo -e "BOLD ${GREY1}................. ${BOLD}This is test text ███████████████${RESET}" + echo -e "DIM ${GREY1}.................. ${DIM}This is test text ███████████████${RESET}" + echo -e "UNDERLINE ${GREY1}............ ${UNDERLINE}This is test text ███████████████${RESET}" + echo -e "BLINK ${GREY1}................ ${BLINK}This is test text ███████████████${RESET}" + echo -e "INVERTED ${GREY1}............. ${INVERTED}This is test text ███████████████${RESET}" + echo -e "HIDDEN ${GREY1}............... ${HIDDEN}This is test text ███████████████${RESET}" + echo -e "BLACK ${GREY1}................ ${BLACK}This is test text ███████████████${RESET}" + echo -e "FUCHSIA1 ${GREY1}............. ${FUCHSIA1}This is test text ███████████████${RESET}" + echo -e "FUCHSIA2 ${GREY1}............. ${FUCHSIA2}This is test text ███████████████${RESET}" + echo -e "RED1 ${GREY1}................. ${RED1}This is test text ███████████████${RESET}" + echo -e "RED2 ${GREY1}................. ${RED2}This is test text ███████████████${RESET}" + echo -e "RED3 ${GREY1}................. ${RED3}This is test text ███████████████${RESET}" + echo -e "ORANGE1 ${GREY1}.............. ${ORANGE1}This is test text ███████████████${RESET}" + echo -e "ORANGE2 ${GREY1}.............. ${ORANGE2}This is test text ███████████████${RESET}" + echo -e "MAGENTA ${GREY1}.............. ${MAGENTA}This is test text ███████████████${RESET}" + echo -e "BLUE1 ${GREY1}................ ${BLUE1}This is test text ███████████████${RESET}" + echo -e "BLUE2 ${GREY1}................ ${BLUE2}This is test text ███████████████${RESET}" + echo -e "CYAN ${GREY1}................. ${CYAN}This is test text ███████████████${RESET}" + echo -e "GREEN1 ${GREY1}............... ${GREEN1}This is test text ███████████████${RESET}" + echo -e "GREEN2 ${GREY1}............... ${GREEN2}This is test text ███████████████${RESET}" + echo -e "YELLOW1 ${GREY1}.............. ${YELLOW1}This is test text ███████████████${RESET}" + echo -e "YELLOW2 ${GREY1}.............. ${YELLOW2}This is test text ███████████████${RESET}" + echo -e "YELLOW3 ${GREY1}.............. ${YELLOW3}This is test text ███████████████${RESET}" + echo -e "GREY1 ${GREY1}................ ${GREY1}This is test text ███████████████${RESET}" + echo -e "GREY2 ${GREY1}................ ${GREY2}This is test text ███████████████${RESET}" + echo -e "GREY3 ${GREY1}................ 
${GREY3}This is test text ███████████████${RESET}" + echo -e + + exit 1 +} + +# # +# Helper > Show Color Chart +# Shows a complete color charge which can be used with the color declarations in this script. +# +# @usage .github/scripts/bt-transmission.sh chart +# # + +function debug_ColorChart() +{ + # foreground / background + for fgbg in 38 48 ; do + # colors + for clr in {0..255} ; do + # show color + printf "\e[${fgbg};5;%sm %3s \e[0m" $clr $clr + # show 6 colors per lines + if [ $((($clr + 1) % 6)) == 4 ] ; then + echo -e + fi + done + + echo -e + done + + exit 1 +} + +# # +# func > get version +# +# returns current version of app +# converts to human string. +# e.g. "1" "2" "4" "0" +# 1.2.4.0 +# # + +get_version() +{ + ver_join=${APP_VER[*]} + ver_str=${ver_join// /.} + echo ${ver_str} +} + +# # +# Usage +# # + +opt_usage() +{ + echo -e + printf " ${BLUE1}${APP_NAME}${RESET}\n" 1>&2 + printf " ${DIM}${APP_DESC}${RESET}\n" 1>&2 + echo -e + printf ' %-5s %-40s\n' "Usage:" "" 1>&2 + printf ' %-5s %-40s\n' " " "${APP_THIS_FILE} [ ${GREY2} options${RESET} ]" 1>&2 + printf ' %-5s %-40s\n\n' " " "${APP_THIS_FILE} [ ${GREY2}--help${RESET} ] [ ${GREY2}--dry${RESET} ] [ ${GREY2}--local${RESET} ] [ ${GREY2}--license LICENSE_KEY${RESET} ] [ ${GREY2}--version${RESET} ]" 1>&2 + printf ' %-5s %-40s\n' "Options:" "" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-l, --license" "specifies your MaxMind license key" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-o, --local" "enables local mode, geo database must be provided locally." 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}does not require MaxMind license key${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}local geo .csv files OR .zip must be placed in folder ${BLUE2}${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-d, --dry" "runs a dry run of loading csv files from ${BLUE2}${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}${RESET} folder" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}requires you place ${GREEN1}${APP_GEO_ZIP}${RESET} and ${GREEN1}${APP_GEO_ZIP_MD5}${RESET} files in ${BLUE2}${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}${RESET} folder${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-c, --color" "displays a demo of the available colors" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}only needed by developer${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-g, --graph" "displays a demo bash color graph" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}only needed by developer${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-d, --dev" "dev mode" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-p, --path" "list of paths associated to script" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-h, --help" "show help menu" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "" " ${GREY2}not required when using local mode${RESET}" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-u, --usage" "how to use this script" 1>&2 + printf ' %-5s %-18s %-40s\n' " " "-v, --version" "current version of ${APP_THIS_FILE}" 1>&2 + echo + echo + exit 1 +} + +# # +# Display help text if command not complete +# # + +while [ $# -gt 0 ]; do + case "$1" in + -u|--usage) + echo -e + echo -e " ${WHITE}To use this script, use one of the following methods:\n" + echo -e " ${GREEN1}${BOLD} License Key / Normal Mode${RESET}" + echo -e " ${GREY3}${BOLD} This method requires no files to be added. 
The geographical files will be downloaded from the${RESET}" + echo -e " ${GREY3}${BOLD} MaxMind website / servers.${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} -l ABCDEF1234567-01234${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} -l ABCDEF1234567-01234${RESET}" + echo -e + echo -e + echo -e " ${GREEN1}${BOLD} Local Mode .................................................................................................. ${DIM}[ Option 1 ]${RESET}" + echo -e " ${GREY3} This mode allows you to use local copies of the GeoLite2 database files to generate an IP list instead of${RESET}" + echo -e " ${GREY3} downloading a fresh copy of the .CSV / .ZIP files from the MaxMind website. This method requires you to${RESET}" + echo -e " ${GREY3} place the .ZIP, and .ZIP.MD5 file in the folder ${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Download the following files from the MaxMind website: ${RESET}" + echo -e " ${BLUE2} https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip${RESET}" + echo -e " ${BLUE2} https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip.md5${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Place the ${GREEN2}.ZIP${RESET} and ${GREEN2}.ZIP.MD5${RESET} files in: ${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} The filenames MUST be: ${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}/GeoLite2-Country-CSV.zip${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}/GeoLite2-Country-CSV.zip.md5${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Run the following command: ${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} --local${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} -o${RESET}" + echo -e + echo -e + echo -e " ${GREEN1}${BOLD} Local Mode .................................................................................................. ${DIM}[ Option 2 ]${RESET}" + echo -e " ${GREY3} This mode allows you to use local copies of the GeoLite2 database files to generate an IP list instead of${RESET}" + echo -e " ${GREY3} downloading a fresh copy of the .ZIP files from the MaxMind website. 
This method requires you to extract${RESET}" + echo -e " ${GREY3} the .ZIP and place the .CSV files in the folder ${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Download the following file from the MaxMind website: ${RESET}" + echo -e " ${BLUE2} https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Open the .ZIP and extract the following files to the folder ${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}/GeoLite2-Country-Locations-en.csv${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}/GeoLite2-Country-Blocks-IPv4.csv${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_LOCAL}/GeoLite2-Country-Blocks-IPv6.csv${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Run the following command: ${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} --local${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} -o${RESET}" + echo -e + echo -e + echo -e " ${GREEN1}${BOLD} Dry Run .....................................................................................................${RESET}" + echo -e " ${GREY3} This mode allows you to simulate downloading the .ZIP files from the MaxMind website. However, the CURL${RESET}" + echo -e " ${GREY3} commands will not actually be ran. Instead, the script will look for the needed database files in the ${RESET}" + echo -e " ${GREY3} ${APP_SOURCE_TEMP} folder. This method requires you to place either the .ZIP & .ZIP.MD5 files, or extracted CSV files${RESET}" + echo -e " ${GREY3} in the folder ${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_TEMP}${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Place the .ZIP & .ZIP.MD5 file, OR the .CSV files in the folder ${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_TEMP}${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_TEMP}/GeoLite2-Country-Locations-en.csv${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_TEMP}/GeoLite2-Country-Blocks-IPv4.csv${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_TEMP}/GeoLite2-Country-Blocks-IPv6.csv${RESET}" + echo -e + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_TEMP}/GeoLite2-Country-CSV.zip${RESET}" + echo -e " ${BLUE2} ${APP_THIS_DIR}/${APP_SOURCE_TEMP}/GeoLite2-Country-CSV.zip.md5${RESET}" + echo -e + echo -e " ${GREY3}${BOLD} Run the following command: ${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} --dry${RESET}" + echo -e " ${BLUE2} ./${APP_THIS_FILE} -d${RESET}" + echo -e + exit 1 + ;; + -p|--paths) + echo -e + echo -e " ${WHITE}List of paths important to this script:\n" + echo -e " ${GREEN1}${BOLD}${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_LOCAL}${RESET}${RESET}" + echo -e " ${GREY3}Folder used when Local Mode enabled (${GREEN2}--local${RESET})${RESET}" + echo -e " ${GREY2} Can detect GeoLite2 ${BLUE2}.ZIP${GREY2} and ${BLUE2}.ZIP.MD5${GREY2} files${RESET}" + echo -e " ${GREY2} Can detect GeoLite2 ${BLUE2}.CSV${GREY2} location and IPv4/IPv6 files${RESET}" + echo -e + echo -e + echo -e " ${GREEN1}${BOLD}${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_TEMP}${RESET}${RESET}" + echo -e " ${GREY3}Folder used when Dry Run enabled (${GREEN2}--dry${RESET})${RESET}" + echo -e " ${GREY2} Can detect GeoLite2 ${BLUE2}.ZIP${GREY2} and ${BLUE2}.ZIP.MD5${GREY2} files${RESET}" + echo -e " ${GREY2} Can detect GeoLite2 ${BLUE2}.CSV${GREY2} location and IPv4/IPv6 files${RESET}" + echo -e + echo -e + echo -e " 
${GREEN1}${BOLD}${ORANGE2}${APP_THIS_DIR}/${APP_SOURCE_CACHE}${RESET}${RESET}" + echo -e " ${GREY3}Folder used to store associative array for continents and countries${RESET}" + echo -e + echo -e + exit 1 + ;; + -l|--license) + if [[ "$1" != *=* ]]; then shift; fi + LICENSE_KEY="${1#*=}" + if [ -z "${LICENSE_KEY}" ]; then + echo -e + echo -e " ${WHITE}Specifies your MaxMind license key.${RESET}" + echo -e " ${GREY1}Required if you are not running the script in local mode.${RESET}" + echo -e " ${WHITE} Example: ${GREY2}./${APP_THIS_FILE} -l ABCDEF1234567-01234${RESET}" + echo + exit 1 + fi + ;; + -d|--dev) + APP_DEBUG=true + echo -e " ${FUCHSIA2}${BLINK}Devmode Enabled${RESET}" + ;; + -o|--local) + APP_SOURCE_LOCAL_ENABLED=true + echo -e " ${FUCHSIA2}${BLINK}Local Mode Enabled${RESET}" + ;; + -d|--dry) + APP_DRYRUN=true + echo -e " ${FUCHSIA2}${BLINK}Dry Run Enabled${RESET}" + ;; + -v|--version) + echo -e + echo -e " ${BLUE2}${BOLD}${APP_NAME}${RESET} - v$(get_version)${RESET}" + echo -e " ${GREEN1}${BOLD}https://github.com/${APP_REPO}${RESET}" + echo + exit 1 + ;; + -c|--color) + debug_ColorTest + exit 1 + ;; + -g|--graph|--chart) + debug_ColorChart + exit 1 + ;; + -\?|-h|--help) + opt_usage + ;; + *) + opt_usage + ;; + esac + shift +done # # # Define @@ -83,6 +464,22 @@ readonly CONFIGS_LIST="${APP_GEO_LOCS_CSV} ${APP_GEO_IPV4_CSV} ${APP_GEO_IPV6_CS declare -A MAP_COUNTRY declare -A MAP_CONTINENT +# # +# Arguments +# # + +ARG1=$1 + +if [ "$ARG1" == "clr" ] || [ "$ARG1" == "color" ]; then + debug_ColorTest + exit 1 +fi + +if [ "$ARG1" == "chart" ] || [ "$ARG1" == "graph" ]; then + debug_ColorChart + exit 1 +fi + # # # Country codes # # @@ -618,18 +1015,6 @@ sa["sr"]="SR" # Suriname sa["uy"]="UY" # Uruguay sa["ve"]="VE" # Venezuela -# # -# print an error and exit with failure -# $1: error message -# # - -function error() -{ - echo -e " ⭕ $0: err: $1" - echo -e - exit 1 -} - # # # Sort Results # @@ -666,35 +1051,125 @@ function CHECK_PACKAGES() # get latest MaxMind GeoLite2 IP country database and md5 checksum # CSV URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip # MD5 URL: https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=LICENSE_KEY&suffix=zip.md5 +# +# if using --dry, you must manually download the .zip and .zip.md5 files and place them in the local folder assigned to the value +# $APP_SOURCE_LOCAL # # function DB_DOWNLOAD() { - local FILE_MD5="${APP_GEO_ZIP}.md5" local URL_CSV="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country-CSV&license_key=${LICENSE_KEY}&suffix=zip" - local URL_MD5="${URL_CSV}.md5" + local URL_MD5="${URL_CSV}.md5" # take URL_CSV value and add .md5 to end for hash file # # # download files # # - echo -e " 🌎 Downloading file ${APP_GEO_ZIP}" - curl --silent --location --output $APP_GEO_ZIP "$URL_CSV" || error "Failed to curl file: ${URL_CSV}" - curl --silent --location --output $FILE_MD5 "$URL_MD5" || error "Failed to curl file: ${URL_MD5}" + if [[ "${APP_DRYRUN}" != "true" ]] && [[ $APP_SOURCE_LOCAL_ENABLED != "true" ]]; then + local URL_HIDDEN_CSV=$(echo $URL_CSV | sed -e "s/$LICENSE_KEY/HIDDEN/g") + local URL_HIDDEN_MD5=$(echo $URL_MD5 | sed -e "s/$LICENSE_KEY/HIDDEN/g") + + echo -e " 🌎 Downloading file ${GREEN2}${APP_GEO_ZIP}${RESET} from ${URL_HIDDEN_CSV}" + curl --silent --location --output $APP_GEO_ZIP "$URL_CSV" || error "Failed to curl file: ${URL_CSV}" + + echo -e " 🌎 Downloading file ${GREEN2}${APP_GEO_ZIP_MD5}${RESET} 
from ${URL_HIDDEN_MD5}" + curl --silent --location --output $APP_GEO_ZIP_MD5 "$URL_MD5" || error "Failed to curl file: ${URL_MD5}" + fi + + # # + # Both the .ZIP and the .CSV are missing, warn user to provide one or the other + # # + + if [[ ! -f ${APP_GEO_ZIP} ]] && [[ ! -f ${APP_GEO_LOCS_CSV} ]]; then + error "You must supply either the [ ZIP ${RED2}${APP_GEO_ZIP}${RESET} + MD5 hash file ${RED2}${APP_GEO_ZIP_MD5}${RESET} ] or the extracted CSV files ${RED2}${APP_GEO_LOCS_CSV}${RESET} -- Cannot locate either${RESET}" + fi # # - # validate checksum - # .md5 file is not in expected format; which means method 'md5sum --check $FILE_MD5' wont work + # Provided the .ZIP, but not the ZIP hash file # # - [[ "$(cat ${FILE_MD5})" == "$(md5sum ${APP_GEO_ZIP} | awk '{print $1}')" ]] || error "GeoLite2 md5 downloaded checksum does not match local md5 checksum" + if [[ -f ${APP_GEO_ZIP} ]] && [[ ! -f "${APP_GEO_ZIP_MD5}" ]]; then + error "You provided the ZIP ${RED2}${APP_GEO_ZIP}${RESET}, but did not provide the hash file ${RED2}${APP_GEO_ZIP_MD5}${RESET} -- Cannot continue${RESET}" + fi # # - # unzip into current working directory + # Provided the LOCATIONS csv file, but may be missing the others # # - echo -e " 📦 Unzip ${APP_GEO_ZIP}" - unzip -j -q -d . ${APP_GEO_ZIP} + if [[ -f ${APP_GEO_LOCS_CSV} ]]; then + if [[ ! -f ${APP_GEO_IPV4_CSV} ]]; then + error "You provided the LOCATION CSV ${RED2}${APP_GEO_LOCS_CSV}${RESET}, but did not provide the other needed CSV file ${RED2}$APP_GEO_IPV4_CSV${RESET} -- Cannot continue${RESET}" + fi + + if [[ ! -f ${APP_GEO_IPV6_CSV} ]]; then + error "You provided the LOCATION CSV ${RED2}${APP_GEO_LOCS_CSV}${RESET}, but did not provide the other needed CSV file ${RED2}$APP_GEO_IPV6_CSV${RESET} -- Cannot continue${RESET}" + fi + fi + + # # + # Provided the IPv4 csv file, but may be missing the others + # # + + if [[ -f ${APP_GEO_IPV4_CSV} ]]; then + if [[ ! -f ${APP_GEO_LOCS_CSV} ]]; then + error "You provided the IPV4 CSV ${RED2}${APP_GEO_IPV4_CSV}${RESET}, the locations file ${RED2}$APP_GEO_LOCS_CSV${RESET} -- Cannot continue${RESET}" + fi + + if [[ ! -f ${APP_GEO_IPV6_CSV} ]]; then + error "You provided the IPV4 CSV ${RED2}${APP_GEO_LOCS_CSV}${RESET}, but did not provide the other IPv6 CSV file ${RED2}$APP_GEO_IPV6_CSV${RESET} -- Cannot continue${RESET}" + fi + fi + + # # + # Provided the IPv6 csv file, but may be missing the others + # # + + if [[ -f ${APP_GEO_IPV6_CSV} ]]; then + if [[ ! -f ${APP_GEO_LOCS_CSV} ]]; then + error "You provided the IPV6 CSV ${RED2}${APP_GEO_IPV4_CSV}${RESET}, the locations file ${RED2}$APP_GEO_LOCS_CSV${RESET} -- Cannot continue${RESET}" + fi + + if [[ ! 
-f ${APP_GEO_IPV4_CSV} ]]; then + error "You provided the IPV6 CSV ${RED2}${APP_GEO_LOCS_CSV}${RESET}, but did not provide the other IPv4 CSV file ${RED2}$APP_GEO_IPV6_CSV${RESET} -- Cannot continue${RESET}" + fi + fi + + # # + # Zip files provided, check MD5 + # # + + if [[ -f ${APP_GEO_ZIP} ]] && [[ -f ${APP_GEO_ZIP_MD5} ]]; then + + echo -e " 📄 Found ZIP set ${BLUE2}${APP_GEO_ZIP}${RESET} and ${BLUE2}${APP_GEO_ZIP_MD5}${RESET}" + + local md5Response="$(cat ${APP_GEO_ZIP_MD5})" + if [[ $md5Response == *"download limit reached"* ]]; then + error "MaxMind: Daily download limit reached" + fi + + # # + # validate checksum + # .md5 file is not in expected format; which means method 'md5sum --check $APP_GEO_ZIP_MD5' wont work + # # + + [[ "$md5Response" == "$(md5sum ${TEMPDIR}/${APP_GEO_ZIP} | awk '{print $1}')" ]] || error "GeoLite2 md5 downloaded checksum does not match local md5 checksum" + + # # + # unzip into current working directory + # # + + if [ -f ${APP_GEO_ZIP} ]; then + echo -e " 📦 Unzip ${BLUE2}${APP_GEO_ZIP}${RESET}" + unzip -o -j -q -d . ${APP_GEO_ZIP} + else + error "Cannot find ${RED2}${APP_GEO_ZIP}${RESET}" + fi + + elif [[ -f ${APP_GEO_LOCS_CSV} ]] && [[ -f ${APP_GEO_IPV4_CSV} ]] && [[ -f ${APP_GEO_IPV6_CSV} ]]; then + echo -e " 📄 Found Uncompressed set ${BLUE2}${APP_GEO_LOCS_CSV}${RESET}, ${BLUE2}${APP_GEO_IPV4_CSV}${RESET} and ${BLUE2}${APP_GEO_IPV6_CSV}${RESET}" + else + error "Could not find either ${ORANGE1}ZIP + MD5${RESET}, or the ${ORANGE1}uncompressed CSV files${RESET}. Aborting.${RESET}" + fi } # # @@ -703,12 +1178,12 @@ function DB_DOWNLOAD() function CONFIG_LOAD() { - echo -e " 📄 Check config files" + echo -e " 📄 Loading geo database files" local configs=(${CONFIGS_LIST}) for f in ${configs[@]}; do - echo -e " 📄 Adding config ${f}" - [[ -f $f ]] || error "Missing configuration file: $f" + echo -e " 📄 Mounting geo file ${BLUE2}${TEMPDIR}/${f}${RESET}" + [[ -f $f ]] || error "Missing geo file: $f" done } @@ -809,10 +1284,16 @@ function GENERATE_IPv4 { echo -e " 📟 Generate IPv4" - echo -e " 📂 Remove $APP_DIR_IPV4" + echo -e " 📂 Remove ${RED2}${APP_DIR_IPV4}${RESET}" + + rm -rf $APP_DIR_IPV4 + echo -e " 📂 Create ${GREEN1}${APP_DIR_IPV4}${RESET}" + mkdir --parent $APP_DIR_IPV4 OIFS=$IFS IFS=',' + + echo -e " ➕ Importing IPs from database${RESET}" while read -ra LINE; do # # @@ -900,12 +1381,17 @@ function GENERATE_IPv4 function GENERATE_IPv6 { - echo -e " 📂 Remove $APP_DIR_IPV6" + echo -e " 📟 Generate IPv6" + echo -e " 📂 Remove ${RED2}${APP_DIR_IPV6}${RESET}" + rm -rf $APP_DIR_IPV6 + echo -e " 📂 Create ${GREEN1}${APP_DIR_IPV6}${RESET}" mkdir --parent $APP_DIR_IPV6 OIFS=$IFS IFS=',' + + echo -e " ➕ Importing IPs from database${RESET}" while read -ra LINE; do # # @@ -988,7 +1474,11 @@ function MERGE_IPSETS() for fullpath_ipv6 in ${APP_DIR_IPV6}/*.${APP_TARGET_EXT_TMP}; do file_ipv6=$(basename ${fullpath_ipv6}) - echo -e " 📄 Move ${fullpath_ipv6} to ${APP_DIR_IPV4}/${file_ipv6}" + if [[ $APP_DEBUG == "true" ]]; then + # /blocklists/country/geolite/ipv6/AE.tmp to ./blocklists/country/geolite/ipv4/AE.tmp + echo -e " 📄 Move ${fullpath_ipv6} to ${APP_DIR_IPV4}/${file_ipv6}" + fi + cat $fullpath_ipv6 >> ${APP_DIR_IPV4}/${file_ipv6} rm -rf $fullpath_ipv6 done @@ -1011,6 +1501,9 @@ function GARBAGE() echo -e " 🗑️ Cleanup ${APP_DIR_IPV6}" rm -rf ${APP_DIR_IPV6} fi + + # remove temp + rm -rf "${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}" } # # @@ -1066,7 +1559,7 @@ function GENERATE_CONTINENTS() # loop continents, antartica, europe, north america local TEMPL_COUNTRIES_LIST="" - local 
count=1 + local count=0 for key in "${!continents[@]}"; do CONTINENT_NAME=${continents[$key]} @@ -1075,8 +1568,7 @@ function GENERATE_CONTINENTS() FILE_CONTINENT_TEMP="$APP_DIR_IPV4/continent_$CONTINENT_ID.$APP_TARGET_EXT_TMP" # blocklists/country/geolite/ipv4/continent_europe.tmp FILE_CONTINENT_PERM="$APP_TARGET_DIR/continent_$CONTINENT_ID.$APP_TARGET_EXT_PROD" # blocklists/country/geolite/ipv4/continent_europe.ipset - echo -e - echo -e " 🌎 Generate Continent ${CONTINENT_NAME} (${CONTINENT_ID})" + echo -e " 🌎 Generate Continent ${BLUE2}${CONTINENT_NAME}${RESET} ${GREY3}(${CONTINENT_ID})${RESET}" # # # Return each country's ips to be included in continent file @@ -1092,21 +1584,22 @@ function GENERATE_CONTINENTS() # count number of items in country array for this particular continent i_array=$(eval echo \${#$COUNTRY_ABBREV${i}[@]}) + i_array=$(( $i_array - 1 )) - echo -e " 🌎 Continent ${CONTINENT_NAME} -> Adding country ${CONTINENT_COUNTRY_NAME}" + echo -e " 🌎 + Country ${DIM}${BLUE2}${CONTINENT_NAME}${RESET} › ${BLUE2}${CONTINENT_COUNTRY_NAME}${RESET} ${GREY2}(${country})${RESET}" # blocklists/country/geolite/ipv4/JE.tmp FILE_TARGET="$APP_DIR_IPV4/$country.$APP_TARGET_EXT_TMP" # check if a specific country file exists, if so, open and grab all the IPs in the list. They need to be copied to $FILE_CONTINENT_TEMP if [ -f "$FILE_TARGET" ]; then - echo -e " 📒 Importing file ${FILE_TARGET} to ${FILE_CONTINENT_TEMP}" + # ./blocklists/country/geolite/ipv4/VU.tmp to ./blocklists/country/geolite/ipv4/continent_oceania.tmp + if [[ $APP_DEBUG == "true" ]]; then + echo -e " 📒 Add country to continent file ${ORANGE2}${FILE_TARGET}${RESET} to ${BLUE2}${FILE_CONTINENT_TEMP}${RESET}" + fi APP_OUTPUT=$(cat "$FILE_TARGET" | sort_results | awk '{if (++dup[$0] == 1) print $0;}' >> ${FILE_CONTINENT_TEMP}) - - echo -e "" else - echo -e " ⭕ Could not find target file $FILE_TARGET" - echo -e + echo -e " ⭕ Could not find target file $FILE_TARGET" fi # # @@ -1115,7 +1608,11 @@ function GENERATE_CONTINENTS() # # if [ "${i_array}" == "${count}" ]; then - TEMPL_COUNTRIES_LIST+="${CONTINENT_COUNTRY_NAME} (${country})" + if [ $((ASN_I_STEP%3)) -eq 0 ]; then + TEMPL_ASN_LIST+=$'\n'"# ${CONTINENT_COUNTRY_NAME} (${country})" + else + TEMPL_ASN_LIST+="${CONTINENT_COUNTRY_NAME} (${country})" + fi else if [ $((count%3)) -eq 0 ]; then TEMPL_COUNTRIES_LIST+=$'\n'"# ${CONTINENT_COUNTRY_NAME} (${country}), " @@ -1144,13 +1641,12 @@ function GENERATE_CONTINENTS() CONTINENT_BASE_TARGET="$APP_DIR_IPV4/$key.$APP_TARGET_EXT_TMP" if [ -f "$CONTINENT_BASE_TARGET" ]; then - echo -e " 📒 Importing base continent file ${CONTINENT_BASE_TARGET} to ${FILE_CONTINENT_TEMP}" + echo -e " 📒 Merge base continent file ${ORANGE2}${CONTINENT_BASE_TARGET}${RESET} to ${BLUE2}${FILE_CONTINENT_TEMP}${RESET}" APP_OUTPUT=$(cat "$CONTINENT_BASE_TARGET" | sort_results | awk '{if (++dup[$0] == 1) print $0;}' >> ${FILE_CONTINENT_TEMP}) echo -e else - echo -e " ⭕ Continent ${CONTINENT_NAME} doesn't have a base file to import from ${CONTINENT_BASE_TARGET}" - echo -e + echo -e " ⭕ Continent ${BLUE2}${CONTINENT_NAME}${RESET} doesn't have a base file to import from ${BLUE2}${CONTINENT_BASE_TARGET}${RESET} ... skipping" fi # # @@ -1158,8 +1654,7 @@ function GENERATE_CONTINENTS() # # if [ ! -f "$FILE_TARGET" ]; then - echo -e " ⭕ Could not find temp country file ${FILE_CONTINENT_TEMP}. Something failed." - echo -e + echo -e " ⭕ Could not find temp country file ${ORANGE2}${FILE_CONTINENT_TEMP}${RESET}. Something failed." 
break fi @@ -1214,11 +1709,11 @@ function GENERATE_CONTINENTS() BLOCKS_COUNT_TOTAL_SUBNET=$(printf "%'d" "$BLOCKS_COUNT_TOTAL_SUBNET") # LOCAL add commas to thousands BLOCKS_COUNT_LINES=$(printf "%'d" "$BLOCKS_COUNT_LINES") # LOCAL add commas to thousands - echo -e " 🚛 Move ${FILE_CONTINENT_TEMP} to ${FILE_CONTINENT_PERM}" + echo -e " 🚛 Move ${ORANGE2}${FILE_CONTINENT_TEMP}${RESET} to ${BLUE2}${FILE_CONTINENT_PERM}${RESET}" mv -- "$FILE_CONTINENT_TEMP" "${FILE_CONTINENT_PERM}" # cp "$FILE_CONTINENT_TEMP" "${FILE_CONTINENT_PERM}" - echo -e " ➕ Added ${BLOCKS_COUNT_TOTAL_IP} IPs and ${BLOCKS_COUNT_TOTAL_SUBNET} Subnets to ${FILE_CONTINENT_PERM}" + echo -e " ➕ Added ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_SUBNET} Subnets${RESET} to ${BLUE2}${FILE_CONTINENT_PERM}${RESET}" echo -e TEMPL_NAME=$(basename -- ${FILE_CONTINENT_PERM}) # file name @@ -1296,8 +1791,12 @@ END_ED # # T=$SECONDS - echo -e - printf " 🎌 Finished! %02d days %02d hrs %02d mins %02d secs\n" "$((T/86400))" "$((T/3600%24))" "$((T/60%60))" "$((T%60))" + D=$((T/86400)) + H=$((T/3600%24)) + M=$((T/60%60)) + S=$((T%60)) + + echo -e " 🎌 ${GREY2}Finished! ${YELLOW2}${D} days ${H} hrs ${M} mins ${S} secs${RESET}" # # # Continents > Output @@ -1305,7 +1804,7 @@ END_ED echo -e echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" - printf "%-25s | %-30s\n" " #️⃣ ${FILE_CONTINENT_PERM}" "${COUNT_TOTAL_IP} IPs, ${COUNT_TOTAL_SUBNET} Subnets" + echo -e " #️⃣ ${BLUE2}${FILE_CONTINENT_PERM}${RESET} | Added ${FUCHSIA2}${COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${COUNT_TOTAL_SUBNET} Subnets${RESET}" echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" echo -e echo -e @@ -1331,11 +1830,8 @@ END_ED function GENERATE_COUNTRIES() { - COUNT_TOTAL_IP=0 - COUNT_TOTAL_SUBNET=0 - echo -e - echo -e " 🏷️ Generate Headers" + echo -e " 🔖 Generate Countries" # # # Loop each temp file @@ -1343,18 +1839,21 @@ function GENERATE_COUNTRIES() # US.TMP # # + COUNT_TOTAL_IP=0 + COUNT_TOTAL_SUBNET=0 + for APP_FILE_TEMP in ./${APP_DIR_IPV4}/*.${APP_TARGET_EXT_TMP}; do file_temp_base=$(basename -- ${APP_FILE_TEMP}) # get two letter country code COUNTRY_CODE="${file_temp_base%.*}" # base file without extension COUNTRY=$(get_country_name "$COUNTRY_CODE") # get full country name from abbreviation + + echo -e " 📒 + Country ${GREY2}${COUNTRY}${RESET} to ${ORANGE2}${APP_FILE_TEMP}${RESET}" COUNTRY_ID=$(echo "$COUNTRY" | sed 's/ /_/g' | tr -d "[.,/\\-\=\+\{\[\]\}\!\@\#\$\%\^\*\'\\\(\)]" | tr '[:upper:]' '[:lower:]') # country long name with spaces, special chars removed APP_FILE_TEMP=${APP_FILE_TEMP#././} # remove ./ from front which means us with just the temp path APP_FILE_PERM="${APP_TARGET_DIR}/country_${COUNTRY_ID}.${APP_TARGET_EXT_PROD}" # final location where ipset files should be - echo -e " 📒 Adding static file ${APP_FILE_TEMP} ( ${COUNTRY} )" - # # # calculate how many IPs are in a subnet # if you want to calculate the USABLE IP addresses, subtract -2 from any subnet not ending with 31 or 32. 
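For reference, a minimal sketch of the arithmetic described in the comment above (standalone bash; the variable names are illustrative and not taken from the script):

    # number of addresses in an IPv4 CIDR block, plus the "usable" count the comment mentions
    prefix=24
    total=$(( 1 << (32 - prefix) ))                 # 2^(32-24) = 256 addresses in a /24
    usable=$(( prefix >= 31 ? total : total - 2 ))  # drop network/broadcast, except for /31 and /32
    echo "/${prefix}: ${total} total, ${usable} usable"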
@@ -1367,6 +1866,7 @@ function GENERATE_COUNTRIES() BLOCKS_COUNT_TOTAL_IP=0 BLOCKS_COUNT_TOTAL_SUBNET=0 + echo -e " 📊 Fetching statistics for clean file ${ORANGE2}${APP_FILE_TEMP}${RESET}" for line in $(cat ${APP_FILE_TEMP}); do # is ipv6 @@ -1409,11 +1909,11 @@ function GENERATE_COUNTRIES() BLOCKS_COUNT_TOTAL_SUBNET=$(printf "%'d" "$BLOCKS_COUNT_TOTAL_SUBNET") # LOCAL add commas to thousands BLOCKS_COUNT_LINES=$(printf "%'d" "$BLOCKS_COUNT_LINES") # LOCAL add commas to thousands - echo -e " 🚛 Move ${APP_FILE_TEMP} to ${APP_FILE_PERM}" + echo -e " 🚛 Move ${ORANGE2}${APP_FILE_TEMP}${RESET} to ${BLUE2}${APP_FILE_PERM}${RESET}" mv -- "$APP_FILE_TEMP" "${APP_FILE_PERM}" # cp "$APP_FILE_TEMP" "${APP_FILE_PERM}" - echo -e " ➕ Added ${BLOCKS_COUNT_TOTAL_IP} IPs and ${BLOCKS_COUNT_TOTAL_SUBNET} Subnets to ${APP_FILE_PERM}" + echo -e " ➕ Added ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_SUBNET} subnets${RESET} to ${BLUE2}${APP_FILE_PERM}${RESET}" echo -e TEMPL_NAME=$(basename -- ${APP_FILE_PERM}) # file name @@ -1492,12 +1992,16 @@ END_ED GARBAGE # # - # Finished + # Countries > Finished # # T=$SECONDS - echo -e - printf " 🎌 Finished! %02d days %02d hrs %02d mins %02d secs\n" "$((T/86400))" "$((T/3600%24))" "$((T/60%60))" "$((T%60))" + D=$((T/86400)) + H=$((T/3600%24)) + M=$((T/60%60)) + S=$((T%60)) + + echo -e " 🎌 ${GREY2}Finished! ${YELLOW2}${D} days ${H} hrs ${M} mins ${S} secs${RESET}" # # # Output @@ -1505,11 +2009,9 @@ END_ED echo -e echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" - printf "%-25s | %-30s\n" " #️⃣ ${APP_FILE_PERM}" "${COUNT_TOTAL_IP} IPs, ${COUNT_TOTAL_SUBNET} Subnets" + echo -e " #️⃣ ${BLUE2}${APP_FILE_PERM}${RESET} | Added ${FUCHSIA2}${COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${COUNT_TOTAL_SUBNET} Subnets${RESET}" echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" echo -e - echo -e - echo -e } @@ -1532,50 +2034,77 @@ function main() source "${APP_THIS_DIR}/${APP_CFG_FILE}" > /dev/null 2>&1 fi + if [[ -z "${APP_SOURCE_LOCAL_ENABLED}" ]] && [[ -z "${LICENSE_KEY}" ]]; then + error "Must supply a valid MaxMind license key -- aborting" + fi + # # - # Display help text if command not complete + # Start # # - local usage="Usage: ./${APP_SCRIPT} [-p ]" - while getopts ":p:" opt; do - case ${opt} in - p ) - [[ ! -z "${OPTARG}" ]] && LICENSE_KEY=$OPTARG || error "$usage" ;; - \? ) - error "$usage" ;; - : ) - error "$usage" ;; - esac - done - shift $((OPTIND -1)) - - [[ -z "${LICENSE_KEY}" ]] && error "Must supply a valid MaxMind license key -- aborting" + echo -e + echo -e " ⭐ Starting script ${GREEN1}${APP_THIS_FILE}${RESET}" # # - # setup + # Check Packages + # + # ensure all the packages we need are installed on the system. 
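+    # (the exact package list lives in CHECK_PACKAGES(); the steps below shell out to
+    #  tools such as curl, unzip, md5sum, awk and sed, so it is better to catch a missing
+    #  dependency here than partway through a download)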
# # CHECK_PACKAGES + + # # + # Temp Path + # + # Local Mode .github/local + # Network Mode .github/.temp + # # + if [[ $APP_SOURCE_LOCAL_ENABLED == "false" ]]; then - export TEMPDIR=$(mktemp --directory) + # export TEMPDIR=$(mktemp --directory "${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}") + mkdir -p "${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}" + export TEMPDIR="${APP_GITHUB_DIR}/${APP_SOURCE_TEMP}" else - export TEMPDIR="${APP_THIS_DIR}/${APP_SOURCE_LOCAL}" + mkdir -p "${APP_GITHUB_DIR}/${APP_SOURCE_LOCAL}" + export TEMPDIR="${APP_GITHUB_DIR}/${APP_SOURCE_LOCAL}" fi # # # place geolite data in temporary directory # # - echo -e " ⚙️ Setting temp path $TEMPDIR" - pushd $TEMPDIR > /dev/null 2>&1 + echo -e " ⚙️ Setting temp folder ${YELLOW2}${TEMPDIR}${RESET}" + pushd ${TEMPDIR} > /dev/null 2>&1 - if [[ $APP_SOURCE_LOCAL_ENABLED == "false" ]]; then - DB_DOWNLOAD - fi + # # + # Download / Unzip .zip + # # + + DB_DOWNLOAD + CONFIG_LOAD MAP_BUILD + # # + # @TODO add caching for associative array + # # + + mkdir -p "${APP_GITHUB_DIR}/${APP_SOURCE_CACHE}" + + declare -p MAP_CONTINENT > ${APP_GITHUB_DIR}/${APP_SOURCE_CACHE}/MAP_CONTINENT.cache + declare -p MAP_COUNTRY > ${APP_GITHUB_DIR}/${APP_SOURCE_CACHE}/MAP_COUNTRY.cache + + if [[ $APP_DEBUG == "true" ]]; then + for KEY in "${!MAP_CONTINENT[@]}"; do + printf "%s --> %s\n" "$KEY" "${MAP_CONTINENT[$KEY]}" + done | tee "${APP_GITHUB_DIR}/.logs/MAP_CONTINENT.log" + + for KEY in "${!MAP_COUNTRY[@]}"; do + printf "%s --> %s\n" "$KEY" "${MAP_COUNTRY[$KEY]}" + done | tee "${APP_GITHUB_DIR}/.logs/MAP_COUNTRY.log" + fi + # # # place set output in current working directory # # diff --git a/.github/scripts/bl-html.sh b/.github/scripts/bl-html.sh index a2c4b7f4e..176ad3328 100644 --- a/.github/scripts/bl-html.sh +++ b/.github/scripts/bl-html.sh @@ -39,7 +39,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -76,6 +77,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. 
-k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -204,7 +239,7 @@ download_list() echo -e " 🌎 Downloading IP blacklist to ${ORANGE2}${fnFileTemp}${RESET}" - APP_OUT=$(curl -sSL -A "${APP_AGENT}" ${fnUrl} | html2text | grep -v "^#" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${fnFileTemp}) + APP_OUT=$(curl -sSL -A "${APP_AGENT}" ${fnUrl} | html2text | grep -vi "^#|^;|^$" | grep -Eo '([0-9]{1,3}\.){3}[0-9]{1,3}(/[0-9]{1,2})|(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))(/[0-9]{1,2})?' | sort_results | awk '{if (++dup[$0] == 1) print $0;}' > ${fnFileTemp}) sed -i 's/\-.*//' ${fnFileTemp} # remove hyphens for ip ranges sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${fnFileTemp} # remove # and ; comments sed -i 's/[[:blank:]]*$//' ${fnFileTemp} # remove space / tab from EOL @@ -294,7 +329,7 @@ done # - remove .sort temp file # # -sorting=$(cat ${APP_FILE_PERM} | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) +sorting=$(cat ${APP_FILE_PERM} | grep -vi "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) > ${APP_FILE_PERM} cat ${APP_FILE_PERM}.sort >> ${APP_FILE_PERM} rm ${APP_FILE_PERM}.sort diff --git a/.github/scripts/bl-htmlip.sh b/.github/scripts/bl-htmlip.sh index b697f6de5..82d69170a 100644 --- a/.github/scripts/bl-htmlip.sh +++ b/.github/scripts/bl-htmlip.sh @@ -40,7 +40,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -77,6 +78,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. 
-k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -204,7 +239,7 @@ echo -e " 🌎 Downloading IP blacklist to ${ORANGE2}${APP_FILE_TEMP}${RESET}" # Get IP list # # -APP_OUT=$(curl -sSL -A "${APP_AGENT}" ${ARG_URL} | html2text | grep -v "^#" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) +APP_OUT=$(curl -sSL -A "${APP_AGENT}" ${ARG_URL} | html2text | grep -vi "^#|^;|^$" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${APP_FILE_TEMP} # remove # and ; comments sed -i 's/\-.*//' ${APP_FILE_TEMP} # remove hyphens for ip ranges sed -i 's/[[:blank:]]*$//' ${APP_FILE_TEMP} # remove space / tab from EOL @@ -316,6 +351,7 @@ H=$((T/3600%24)) M=$((T/60%60)) S=$((T%60)) +echo -e echo -e " 🎌 ${GREY2}Finished! ${YELLOW2}${D} days ${H} hrs ${M} mins ${S} secs${RESET}" # # diff --git a/.github/scripts/bl-json.sh b/.github/scripts/bl-json.sh index db2765da3..c0ba7765c 100755 --- a/.github/scripts/bl-json.sh +++ b/.github/scripts/bl-json.sh @@ -32,7 +32,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -69,6 +70,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. 
-k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -203,7 +238,7 @@ echo -e " 🌎 Downloading IP blacklist to ${ORANGE1}${APP_FILE_TEMP}${RESET}" # Get IP list # # -jsonOutput=$(curl -sSL -A "${APP_AGENT}" ${ARG_JSON_URL} | jq -r "${ARG_JSON_QRY}" | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) +APP_OUT=$(curl -sSL -A "${APP_AGENT}" ${ARG_JSON_URL} | jq -r "${ARG_JSON_QRY}" | grep -vi "^#|^;|^$" | awk '{if (++dup[$0] == 1) print $0;}' | sort_results > ${APP_FILE_TEMP}) sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${APP_FILE_TEMP} # remove # and ; comments sed -i 's/\-.*//' ${APP_FILE_TEMP} # remove hyphens for ip ranges sed -i 's/[[:blank:]]*$//' ${APP_FILE_TEMP} # remove space / tab from EOL diff --git a/.github/scripts/bl-master.sh b/.github/scripts/bl-master.sh index b2402f7da..dc1a23b08 100755 --- a/.github/scripts/bl-master.sh +++ b/.github/scripts/bl-master.sh @@ -37,7 +37,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -74,6 +75,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. -k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -364,7 +399,7 @@ fi # - remove .sort temp file # # -sorting=$(cat ${APP_FILE_PERM} | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) +sorting=$(cat ${APP_FILE_PERM} | grep -vi "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) > ${APP_FILE_PERM} cat ${APP_FILE_PERM}.sort >> ${APP_FILE_PERM} rm ${APP_FILE_PERM}.sort diff --git a/.github/scripts/bl-plain.sh b/.github/scripts/bl-plain.sh index 7d4c6076d..bfe9bd3d0 100644 --- a/.github/scripts/bl-plain.sh +++ b/.github/scripts/bl-plain.sh @@ -35,7 +35,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -72,6 +73,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. 
-k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + # # # Arguments # @@ -285,7 +320,7 @@ done # - remove .sort temp file # # -sorting=$(cat ${APP_FILE_PERM} | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) +sorting=$(cat ${APP_FILE_PERM} | grep -vi "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_PERM}.sort) > ${APP_FILE_PERM} cat ${APP_FILE_PERM}.sort >> ${APP_FILE_PERM} rm ${APP_FILE_PERM}.sort diff --git a/.github/scripts/bl-spf.sh b/.github/scripts/bl-spf.sh index 88aa5999a..c2cf41211 100644 --- a/.github/scripts/bl-spf.sh +++ b/.github/scripts/bl-spf.sh @@ -25,7 +25,8 @@ # # APP_THIS_FILE=$(basename "$0") # current script file -APP_THIS_DIR="${PWD}" # Current script directory +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder # # # vars > colors @@ -62,6 +63,18 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + # # # Arguments # @@ -324,7 +337,7 @@ done # Get IP list # # -list_ips=$(echo "${APP_OUT}" | grep -v "^#" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) +list_ips=$(echo "${APP_OUT}" | grep -v "^#|^;|^$" | sort -n | awk '{if (++dup[$0] == 1) print $0;}' > ${APP_FILE_TEMP}) sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${APP_FILE_TEMP} # remove # and ; comments sed -i 's/\-.*//' ${APP_FILE_TEMP} # remove hyphens for ip ranges sed -i 's/[[:blank:]]*$//' ${APP_FILE_TEMP} # remove space / tab from EOL diff --git a/.github/scripts/bl-whois.sh b/.github/scripts/bl-whois.sh new file mode 100644 index 000000000..499e52824 --- /dev/null +++ b/.github/scripts/bl-whois.sh @@ -0,0 +1,480 @@ +#!/bin/bash + +# # +# @for https://github.com/Aetherinox/csf-firewall +# @workflow blocklist-generate.yml +# @type bash script +# @summary utilizes various whois services and allows you to fetch a list of IP addresses associated with an ASN. 
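+#
+#              for reference (illustrative prefixes; real output varies by ASN), a query such as
+#                  whois -h whois.radb.net -- '-i origin AS32934'
+#              returns route objects in the form
+#                  route:      203.0.113.0/24
+#                  route6:     2001:db8::/32
+#              this script keeps the `route:` / `route6:` lines, strips the labels, and is left
+#              with bare CIDR prefixes which are then de-duplicated and sorted.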
+# +# @terminal .github/scripts/bl-whois.sh \ +# blocklists/privacy/privacy_facebook.ipset +# AS32934 +# +# .github/scripts/bl-whois.sh \ +# blocklists/privacy/privacy_facebook.ipset +# AS32934 \ +# whois.radb.net +# +# .github/scripts/bl-whois.sh \ +# blocklists/privacy/privacy_facebook.ipset +# AS32934 \ +# whois.radb.net \ +# '#|^;|^$' +# +# @workflow # Privacy › Facebook +# chmod +x ".github/scripts/bl-whois.sh" +# run_facebook=".github/scripts/bl-whois.sh blocklists/privacy/privacy_facebook.ipset AS32934" +# eval "./$run_facebook" +# +# @command bl-whois.sh +# required +# required +# optional +# optional +# +# bl-whois.sh blocklists/privacy/privacy_facebook.ipset AS32934 whois.radb.net '#|^;|^$' +# +# 📁 .github +# 📁 scripts +# 📄 bl-whois.sh +# 📁 workflows +# 📄 blocklist-generate.yml +# +# # + +APP_THIS_FILE=$(basename "$0") # current script file +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder + +# # +# vars > colors +# +# Use the color table at: +# - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 +# # + +RESET="\e[0m" +WHITE="\e[97m" +BOLD="\e[1m" +DIM="\e[2m" +UNDERLINE="\e[4m" +BLINK="\e[5m" +INVERTED="\e[7m" +HIDDEN="\e[8m" +BLACK="\e[38;5;0m" +FUCHSIA1="\e[38;5;125m" +FUCHSIA2="\e[38;5;198m" +RED1="\e[38;5;160m" +RED2="\e[38;5;196m" +ORANGE1="\e[38;5;202m" +ORANGE2="\e[38;5;208m" +MAGENTA="\e[38;5;5m" +BLUE1="\e[38;5;033m" +BLUE2="\e[38;5;39m" +CYAN="\e[38;5;6m" +GREEN1="\e[38;5;2m" +GREEN2="\e[38;5;76m" +YELLOW1="\e[38;5;184m" +YELLOW2="\e[38;5;190m" +YELLOW3="\e[38;5;193m" +GREY1="\e[38;5;240m" +GREY2="\e[38;5;244m" +GREY3="\e[38;5;250m" + +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. -k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + +# # +# Arguments +# +# We are attempting to add dynamic arguments, meaning they can be in any order. this is because some of the arguments are +# optional, and we support providing multiple ASN. +# +# ARG_SAVEFILE (str) always the first arg +# ARG_WHOIS_SERVICE (str) specifies what whois service to use +# - if string arg is valid URL (checked by regex) +# - if string arg STARTS with `whois` +# ARG_GREP_FILTER (str) specifies what grep pattern to use for filtering out results +# - if string arg STARTS with ^ +# - if string arg STARTS with ( +# - if string arg ENDS with $ +# - if string arg ENDS with ) +# ARG_ASN (str) ASN to grab IP addresses from. supports multiple ASN numbers. 
+# - if string arg STARTS with `AS` +# # + +REGEX_URL='^(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]\.[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]$' +for arg in "${@:1}"; do + if [[ $arg == whois* ]] || [[ $arg =~ $REGEX_URL ]]; then + ARG_WHOIS_SERVICE=${arg} + fi + if [[ $arg == ^* ]] || [[ $arg == \(* ]] || [[ $arg == *$ ]] || [[ $arg == *\) ]]; then + ARG_GREP_FILTER=${arg} + fi +done + +ARG_SAVEFILE=$1 + +# # +# Arguments > Validate +# # + +if [[ -z "${ARG_SAVEFILE}" ]]; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: No target file specified" + echo -e + exit 0 +fi + +if test "$#" -lt 2; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: Invalid ASN list specified for ${YELLOW1}${ARG_SAVEFILE}${RESET}" + echo -e + exit 0 +fi + +# # +# No whois service specified, set to default +# +# # + +if [[ -z "${ARG_WHOIS_SERVICE}" ]]; then + ARG_WHOIS_SERVICE="whois.radb.net" +fi + +# # +# Grep search pattern not provided, ignore comments and blank lines. +# this is already done in the step before this grep exclude pattern is ran, but +# we need a default grep pattern if one is not provided. +# # + +if [[ -z "${ARG_GREP_FILTER}" ]]; then + ARG_GREP_FILTER="^#|^;|^$" +fi + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch +APP_OUT="" # each ip fetched from stdin will be stored in this var +APP_FILE_TEMP="${ARG_SAVEFILE}.tmp" # temp file when building ipset list +APP_FILE_PERM="${ARG_SAVEFILE}" # perm file when building ipset list +COUNT_LINES=0 # number of lines in doc +COUNT_TOTAL_SUBNET=0 # number of IPs in all subnets combined +COUNT_TOTAL_IP=0 # number of single IPs (counts each line) +BLOCKS_COUNT_TOTAL_IP=0 # number of ips for one particular file +BLOCKS_COUNT_TOTAL_SUBNET=0 # number of subnets for one particular file +APP_AGENT="Mozilla/5.0 (Windows NT 10.0; WOW64) "\ +"AppleWebKit/537.36 (KHTML, like Gecko) "\ +"Chrome/51.0.2704.103 Safari/537.36" # user agent used with curl +TEMPL_NOW=`date -u` # get current date in utc format +TEMPL_ID=$(basename -- ${APP_FILE_PERM}) # ipset id, get base filename +TEMPL_ID="${TEMPL_ID//[^[:alnum:]]/_}" # ipset id, only allow alphanum and underscore, /description/* and /category/* files must match this value +TEMPL_UUID=$(uuidgen -m -N "${TEMPL_ID}" -n @url) # uuid associated to each release +TEMPL_DESC=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/descriptions/${TEMPL_ID}.txt") +TEMPL_CAT=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/categories/${TEMPL_ID}.txt") +TEMPL_EXP=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/expires/${TEMPL_ID}.txt") +TEMP_URL_SRC=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/url-source/${TEMPL_ID}.txt") +REGEX_ISNUM='^[0-9]+$' + +# # +# Default Values +# # + +if [[ "$TEMPL_DESC" == *"404: Not Found"* ]]; then + TEMPL_DESC="# No description provided" +fi + +if [[ "$TEMPL_CAT" == *"404: Not Found"* ]]; then + TEMPL_CAT="Uncategorized" +fi + +if [[ "$TEMPL_EXP" == *"404: Not Found"* ]]; then + TEMPL_EXP="6 hours" +fi + +if [[ "$TEMP_URL_SRC" == *"404: Not Found"* ]]; then + TEMP_URL_SRC="None" +fi + +# # +# Output > Header 
+# # + +echo -e +echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" +echo -e " ${YELLOW1}${APP_FILE_PERM}${RESET}" +echo -e +echo -e " ${GREY2}ID: ${TEMPL_ID}${RESET}" +echo -e " ${GREY2}UUID: ${TEMPL_UUID}${RESET}" +echo -e " ${GREY2}CATEGORY: ${TEMPL_CAT}${RESET}" +echo -e " ${GREY2}ACTION: ${APP_THIS_FILE}${RESET}" +echo -e " ──────────────────────────────────────────────────────────────────────────────────────────────" + +# # +# output +# # + +echo -e +echo -e " ⭐ Starting script ${GREEN1}${APP_THIS_FILE}${RESET}" + +# # +# Create or Clean file +# # + +if [ -f $APP_FILE_PERM ]; then + echo -e " 📄 Clean ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + > ${APP_FILE_PERM} # clean file +else + echo -e " 📁 Create ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + mkdir -p $(dirname "${APP_FILE_PERM}") + touch ${APP_FILE_PERM} +fi + +# # +# Func > Download List +# # + +download_list() +{ + + local fnASN=$1 + local fnFile=$2 + local fnFileTemp="${2}.tmp" + local DL_COUNT_TOTAL_IP=0 + local DL_COUNT_TOTAL_SUBNET=0 + + echo -e " 🌎 Downloading ASN ${YELLOW1}${fnASN}${RESET} list to ${ORANGE2}${fnFileTemp}${RESET}" + + whois -h ${ARG_WHOIS_SERVICE} -- "-i origin ${fnASN}" | grep ^route | awk '{gsub("(route:|route6:)","");print}' | awk '{gsub(/ /,""); print}' | grep -vi "^#|^;|^$" | grep -vi "${ARG_GREP_FILTER}" | awk '{if (++dup[$0] == 1) print $0;}' | sort_results > ${fnFileTemp} + + # # + # calculate how many IPs are in a subnet + # if you want to calculate the USABLE IP addresses, subtract -2 from any subnet not ending with 31 or 32. + # + # for our purpose, we want to block them all in the event that the network has reconfigured their network / broadcast IPs, + # so we will count every IP in the block. + # # + + echo -e " 📊 Fetching statistics for clean file ${ORANGE2}${fnFileTemp}${RESET}" + for line in $(cat ${fnFileTemp}); do + # is ipv6 + if [ "$line" != "${line#*:[0-9a-fA-F]}" ]; then + if [[ $line =~ /[0-9]{1,3}$ ]]; then + COUNT_TOTAL_SUBNET=$(( $COUNT_TOTAL_SUBNET + 1 )) # GLOBAL count subnet + BLOCKS_COUNT_TOTAL_SUBNET=$(( $BLOCKS_COUNT_TOTAL_SUBNET + 1 )) # LOCAL count subnet + else + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + 1 )) # GLOBAL count ip + BLOCKS_COUNT_TOTAL_IP=$(( $BLOCKS_COUNT_TOTAL_IP + 1 )) # LOCAL count ip + fi + + # is subnet + elif [[ $line =~ /[0-9]{1,2}$ ]]; then + ips=$(( 1 << (32 - ${line#*/}) )) + + if [[ $ips =~ $REGEX_ISNUM ]]; then + # CIDR=$(echo $line | sed 's:.*/::') + + # uncomment if you want to count ONLY usable IP addresses + # subtract - 2 from any cidr not ending with 31 or 32 + # if [[ $CIDR != "31" ]] && [[ $CIDR != "32" ]]; then + # COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP - 2 )) + # DL_COUNT_TOTAL_IP=$(( $DL_COUNT_TOTAL_IP - 2 )) + # fi + + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + $ips )) # GLOBAL count IPs in subnet + COUNT_TOTAL_SUBNET=$(( $COUNT_TOTAL_SUBNET + 1 )) # GLOBAL count subnet + + DL_COUNT_TOTAL_IP=$(( $DL_COUNT_TOTAL_IP + $ips )) # LOCAL count IPs in subnet + DL_COUNT_TOTAL_SUBNET=$(( $DL_COUNT_TOTAL_SUBNET + 1 )) # LOCAL count subnet + fi + + # is normal IP + elif [[ $line =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + 1 )) + DL_COUNT_TOTAL_IP=$(( $DL_COUNT_TOTAL_IP + 1 )) + fi + done + + # # + # Count lines and subnets + # # + + DL_COUNT_TOTAL_IP=$(printf "%'d" "$DL_COUNT_TOTAL_IP") # LOCAL add commas to thousands + DL_COUNT_TOTAL_SUBNET=$(printf "%'d" "$DL_COUNT_TOTAL_SUBNET") # LOCAL add commas to thousands + + # # + # Move temp file to final + # # + + echo 
-e " 🚛 Move ${ORANGE2}${fnFileTemp}${RESET} to ${BLUE2}${fnFile}${RESET}" + cat ${fnFileTemp} >> ${fnFile} # copy .tmp contents to real file + rm ${fnFileTemp} # delete temp file + + echo -e " ➕ Added ${FUCHSIA2}${DL_COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${DL_COUNT_TOTAL_SUBNET} subnets${RESET} to ${BLUE2}${fnFile}${RESET}" +} + +# # +# Count ASN +# +# To make sure we add the correct amount of commas to the ASN list, as well as break up the ASN numbers per line +# we need to get the total count available. +# # + +ASN_I_TOTAL=0 # start at one, since the last step is base continent file +ASN_I_STEP=0 # count current asn in step +TEMPL_ASN_LIST="" # ASN list + +for arg in "${@:2}"; do + if [[ $arg == AS* ]]; then + ASN_I_TOTAL=$(( ASN_I_TOTAL + 1 )) + fi +done + +# Hacky, remove one from total since step starts at 0 +ASN_I_TOTAL=$(( $ASN_I_TOTAL - 1 )) + +# # +# Get ASN arguments +# +# string must start with "AS*" +# # + +for arg in "${@:2}"; do + if [[ $arg == AS* ]]; then + download_list ${arg} ${APP_FILE_PERM} + echo -e + + if [ "${ASN_I_TOTAL}" == "${ASN_I_STEP}" ]; then + if [ $((ASN_I_STEP%3)) -eq 0 ]; then + TEMPL_ASN_LIST+=$'\n'"# ${arg}" + else + TEMPL_ASN_LIST+="${arg}" + fi + else + if [ $((ASN_I_STEP%3)) -eq 0 ]; then + TEMPL_ASN_LIST+=$'\n'"# ${arg}, " + else + TEMPL_ASN_LIST+="${arg}, " + fi + fi + + ASN_I_STEP=$(( ASN_I_STEP + 1 )) + fi +done + +# # +# Sort +# - sort lines numerically and create .sort file +# - move re-sorted text from .sort over to real file +# - remove .sort temp file +# # + +sorting=$(cat ${APP_FILE_PERM} | grep -vi "^#|^;|^$" | awk '{if (++dup[$0] == 1) print $0;}' | sort_results > ${APP_FILE_PERM}.sort) +> ${APP_FILE_PERM} +cat ${APP_FILE_PERM}.sort >> ${APP_FILE_PERM} +rm ${APP_FILE_PERM}.sort + +# # +# Format Counts +# # + +COUNT_LINES=$(wc -l < ${APP_FILE_PERM}) # count ip lines +COUNT_LINES=$(printf "%'d" "$COUNT_LINES") # GLOBAL add commas to thousands + +# # +# Format count totals since we no longer need to add +# # + +COUNT_TOTAL_IP=$(printf "%'d" "$COUNT_TOTAL_IP") # GLOBAL add commas to thousands +COUNT_TOTAL_SUBNET=$(printf "%'d" "$COUNT_TOTAL_SUBNET") # GLOBAL add commas to thousands + +# # +# ed +# 0a top of file +# # + +ed -s ${APP_FILE_PERM} < colors @@ -70,6 +54,40 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch +APP_FILE_TEMP="bt_temp" # name of temp file to use throughout process +APP_FILE_PERM_DIR="blocklists/transmission" # folder where perm files will be stored +APP_FILE_PERM="${APP_FILE_PERM_DIR}/blocklist" # name of file to save at the end of the process +APP_FILE_PERM_EXT="ipset" # name of final file extension +APP_ZIP_FILE="wael.list.p2p.zip" # zip to download from waelisa/Best-blocklist +APP_ZIP_READ_FILE="wael.list.p2p" # file to target and read inside the zip +APP_ZIP_URL="https://raw.githubusercontent.com/waelisa/Best-blocklist/main/${APP_ZIP_FILE}" # location to download bt blocklist zip +APP_URL_CBUCKET="https://mirror.codebucket.de/transmission/blocklist.p2p" +APP_URL_IBL="https://www.iblocklist.com/lists.php" +APP_AGENT="Mozilla/5.0 (Windows 
NT 10.0; WOW64) "\ +"AppleWebKit/537.36 (KHTML, like Gecko) "\ +"Chrome/51.0.2704.103 Safari/537.36" # user agent used with curl + # # # Color Code Test # @@ -108,7 +126,7 @@ function debug_ColorTest() echo -e "GREY3 ${GREY1}................ ${GREY3}This is test text ███████████████${RESET}" echo -e - exit 0 + exit 1 } # # @@ -135,7 +153,7 @@ function debug_ColorChart() echo -e done - exit 0 + exit 1 } # # @@ -146,12 +164,12 @@ ARG1=$1 if [ "$ARG1" == "clr" ]; then debug_ColorTest - exit 0 + exit 1 fi if [ "$ARG1" == "chart" ]; then debug_ColorChart - exit 0 + exit 1 fi # # diff --git a/.github/scripts/tool-count-ip.sh b/.github/scripts/tool-count-ip.sh index 6a14d4b97..5c39e9da0 100644 --- a/.github/scripts/tool-count-ip.sh +++ b/.github/scripts/tool-count-ip.sh @@ -2,13 +2,90 @@ # Test script to calculate number of IPs in list # # -SECONDS=0 # set seconds count for beginning of script APP_THIS_FILE=$(basename "$0") # current script file +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder + +# # +# vars > colors +# +# Use the color table at: +# - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 +# # + +RESET="\e[0m" +WHITE="\e[97m" +BOLD="\e[1m" +DIM="\e[2m" +UNDERLINE="\e[4m" +BLINK="\e[5m" +INVERTED="\e[7m" +HIDDEN="\e[8m" +BLACK="\e[38;5;0m" +FUCHSIA1="\e[38;5;125m" +FUCHSIA2="\e[38;5;198m" +RED1="\e[38;5;160m" +RED2="\e[38;5;196m" +ORANGE1="\e[38;5;202m" +ORANGE2="\e[38;5;208m" +MAGENTA="\e[38;5;5m" +BLUE1="\e[38;5;033m" +BLUE2="\e[38;5;39m" +CYAN="\e[38;5;6m" +GREEN1="\e[38;5;2m" +GREEN2="\e[38;5;76m" +YELLOW1="\e[38;5;184m" +YELLOW2="\e[38;5;190m" +YELLOW3="\e[38;5;193m" +GREY1="\e[38;5;240m" +GREY2="\e[38;5;244m" +GREY3="\e[38;5;250m" + +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Sort Results +# +# @usage line=$(parse_spf_record "${ip}" | sort_results) +# # + +sort_results() +{ + declare -a ipv4 ipv6 + + while read -r line ; do + if [[ ${line} =~ : ]] ; then + ipv6+=("${line}") + else + ipv4+=("${line}") + fi + done + + [[ -v ipv4[@] ]] && printf '%s\n' "${ipv4[@]}" | sort -g -t. -k1,1 -k 2,2 -k 3,3 -k 4,4 | uniq + [[ -v ipv6[@] ]] && printf '%s\n' "${ipv6[@]}" | sort -g -t: -k1,1 -k 2,2 -k 3,3 -k 4,4 -k 5,5 -k 6,6 -k 7,7 -k 8,8 | uniq +} + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch APP_FILE=$1 # file specified APP_THIS_DIR="${PWD}" # Current script directory -APP_VER=("1" "1" "0" "0") # current version -APP_REPO="Aetherinox/csf-firewall" # repository -APP_REPO_BRANCH="main" # repository branch APP_DIR=${PWD} # returns the folder this script is being executed in COUNT_LINES=0 # number of lines in doc COUNT_TOTAL_SUBNET=0 # number of IPs in all subnets combined diff --git a/.github/scripts/tool-range-ipcalc.sh b/.github/scripts/tool-range-ipcalc.sh new file mode 100644 index 000000000..927d593a3 --- /dev/null +++ b/.github/scripts/tool-range-ipcalc.sh @@ -0,0 +1,149 @@ +#!/bin/bash + +# # +# script to take ip ranges, clean them up, and pass them on to ipcalc. +# Need to create our own in-house script to do the conversion, ipcalc has massive overhead times. 
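+#
+# for reference (illustrative; exact formatting depends on the installed ipcalc build),
+# deaggregating a range looks like:
+#
+#   $ ipcalc 192.168.0.0-192.168.1.255 -nr
+#   deaggregate 192.168.0.0 - 192.168.1.255
+#   192.168.0.0/23
+#
+# the loop at the bottom of this script strips the leading "deaggregate" line with awk and
+# keeps only the resulting CIDR blocks.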
+# +# this repository has created two versions for this scenario: +# - tool-range-ipcalc.sh +# - tool-range.iprange.sh +# +# it is preferred to use the `iprange.sh` script. the ipcalc version is a backup, and is slower. +# however, iprange requires a custom package to be built and installed. +# +# [ INSTALL ] +# +# to install this `tool-range-iprange.sh` version, run the following commands within the server: +# +# sudo apt-get install -y ipcalc +# # + +APP_THIS_FILE=$(basename "$0") # current script file +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder + +# # +# vars > colors +# +# Use the color table at: +# - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 +# # + +RESET="\e[0m" +WHITE="\e[97m" +BOLD="\e[1m" +DIM="\e[2m" +UNDERLINE="\e[4m" +BLINK="\e[5m" +INVERTED="\e[7m" +HIDDEN="\e[8m" +BLACK="\e[38;5;0m" +FUCHSIA1="\e[38;5;125m" +FUCHSIA2="\e[38;5;198m" +RED1="\e[38;5;160m" +RED2="\e[38;5;196m" +ORANGE1="\e[38;5;202m" +ORANGE2="\e[38;5;208m" +MAGENTA="\e[38;5;5m" +BLUE1="\e[38;5;033m" +BLUE2="\e[38;5;39m" +CYAN="\e[38;5;6m" +GREEN1="\e[38;5;2m" +GREEN2="\e[38;5;76m" +YELLOW1="\e[38;5;184m" +YELLOW2="\e[38;5;190m" +YELLOW3="\e[38;5;193m" +GREY1="\e[38;5;240m" +GREY2="\e[38;5;244m" +GREY3="\e[38;5;250m" + +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Arguments +# +# This bash script has the following arguments: +# +# ARG_SAVEFILE (str) file to save IP addresses into +# ARG_SOURCEFILE (str) file containing list of ip ranges +# ARG_GREP_FILTER (str) grep filter to exclude certain words +# # + +ARG_SAVEFILE=$1 +ARG_SOURCEFILE=$2 + +# # +# Validation checks +# # + +if [[ -z "${ARG_SAVEFILE}" ]]; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: No target file specified" + echo -e + exit 0 +fi + +if [[ -z "${ARG_SOURCEFILE}" ]]; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: No source file provided -- must specify a file containing a list of ip ranges to convert" + echo -e + exit 0 +fi + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch +APP_FILE_PERM="${ARG_SAVEFILE}" # perm file when building ipset list + +# # +# output +# # + +echo -e +echo -e " ⭐ Starting script ${GREEN1}${APP_THIS_FILE}${RESET}" + +# # +# Create or Clean file +# # + +if [ -f $APP_FILE_PERM ]; then + echo -e " 📄 Clean ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + > ${APP_FILE_PERM} # clean file +else + echo -e " 📁 Create ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + mkdir -p $(dirname "${APP_FILE_PERM}") + touch ${APP_FILE_PERM} +fi + +# # +# ip ranges converted to CIDR notation +# +# in case our source file is not clean, run the file through grep first and get only the ip ranges. +# ipcalc adds extra lines, so we will use `awk` to filter out the words "deaggregate". to add more items to the list, append | and other words. 
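# (illustrative sketch, not part of the original script) for a source line such as
#   10.10.0.1 - 10.10.0.9
# the loop below effectively runs:
#   ipcalc "10.10.0.1 - 10.10.0.9" -nr | awk '!/^(deaggregate)/'
# which, per the deaggregation example quoted later in this patch (workflow "Configure" step),
# should leave only the CIDR blocks 10.10.0.1, 10.10.0.2/31, 10.10.0.4/30 and 10.10.0.8/31,
# the awk filter having stripped ipcalc's leading "deaggregate ..." line.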
+# awk '!/^(deaggregate|word2|word3)/' +# # + +cat "$ARG_SOURCEFILE" |\ +while IFS= read ip; do + ipAddr=$(echo "$ip" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\s*-\s*[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' ) + ipcalc "$ipAddr" -nr |\ + awk '!/^(deaggregate)/' +done \ No newline at end of file diff --git a/.github/scripts/tool-range-iprange.sh b/.github/scripts/tool-range-iprange.sh new file mode 100644 index 000000000..d5e53c020 --- /dev/null +++ b/.github/scripts/tool-range-iprange.sh @@ -0,0 +1,357 @@ +#!/bin/bash + +# # +# script to take ip ranges, clean them up, and pass them on to ipcalc. +# Need to create our own in-house script to do the conversion, ipcalc has massive overhead times. +# +# this repository has created two versions for this scenario: +# - tool-range-ipcalc.sh +# - tool-range.iprange.sh +# +# it is preferred to use the `iprange.sh` script. the ipcalc version is a backup, and is slower. +# however, iprange requires a custom package to be built and installed. +# +# [ INSTALL ] +# +# to install this `tool-range-iprange.sh` version, run the following commands within the server: +# +# sudo apt-get install -y autoconf +# git clone https://github.com/firehol/iprange.git ./iprange +# cd iprange/ +# ./autogen.sh +# ./configure --disable-man +# sudo make && make install +# +# @terminal .github/scripts/tool-range-iprange.sh \ +# blocklists/02_privacy_blizzard.ipset \ +# http://list.iblocklist.com/?list=ercbntshuthyykfkmhxc \ +# "at&t" +# +# .github/scripts/tool-range-iprange.sh \ +# blocklists/02_privacy_blizzard.ipset \ +# list.txt \ +# "at&t" +# +# @workflow chmod +x ".github/scripts/tool-range-iprange.sh" +# run_blizzard=".github/scripts/tool-range-iprange.sh blocklists/02_privacy_blizzard.ipset http://list.iblocklist.com/?list=ercbntshuthyykfkmhxc 'at&t'" +# eval "./$run_blizzard" +# +# @command bl-html.sh +# +# +# +# {...} +# # + +APP_THIS_FILE=$(basename "$0") # current script file +APP_THIS_DIR="${PWD}" # current script directory +APP_GITHUB_DIR="${APP_THIS_DIR}/.github" # .github folder + +# # +# vars > colors +# +# Use the color table at: +# - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 +# # + +RESET="\e[0m" +WHITE="\e[97m" +BOLD="\e[1m" +DIM="\e[2m" +UNDERLINE="\e[4m" +BLINK="\e[5m" +INVERTED="\e[7m" +HIDDEN="\e[8m" +BLACK="\e[38;5;0m" +FUCHSIA1="\e[38;5;125m" +FUCHSIA2="\e[38;5;198m" +RED1="\e[38;5;160m" +RED2="\e[38;5;196m" +ORANGE1="\e[38;5;202m" +ORANGE2="\e[38;5;208m" +MAGENTA="\e[38;5;5m" +BLUE1="\e[38;5;033m" +BLUE2="\e[38;5;39m" +CYAN="\e[38;5;6m" +GREEN1="\e[38;5;2m" +GREEN2="\e[38;5;76m" +YELLOW1="\e[38;5;184m" +YELLOW2="\e[38;5;190m" +YELLOW3="\e[38;5;193m" +GREY1="\e[38;5;240m" +GREY2="\e[38;5;244m" +GREY3="\e[38;5;250m" + +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Arguments +# +# This bash script has the following arguments: +# +# ARG_SAVEFILE (str) file to save IP addresses into +# ARG_SOURCEFILE (str) file containing list of ip ranges +# accepts either a local file OR a URL. +# ARG_GREP_FILTER (str) grep filter to exclude certain words +# # + +ARG_SAVEFILE=$1 +ARG_SOURCEFILE=$2 +ARG_GREP_FILTER=$3 + +# # +# Grep search pattern not provided, ignore comments and blank lines. +# this is already done in the step before this grep exclude pattern is ran, but +# we need a default grep pattern if one is not provided. 
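# (illustrative note, not part of the original script) the default pattern assigned below relies on
# alternation, which only takes effect with extended regular expressions (grep -E / grep -Ev);
# a plain grep treats the `|` characters literally. A quick sanity check of the default filter:
#   printf '# comment\n; comment\n\n1.0.0.0-1.0.0.255\n' | grep -Ev '^#|^;|^$'
# prints only `1.0.0.0-1.0.0.255`, i.e. comment and blank lines are dropped.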
+# # + +if [[ -z "${ARG_GREP_FILTER}" ]]; then + ARG_GREP_FILTER="^#|^;|^$" +fi + +# # +# Validation checks +# # + +if [[ -z "${ARG_SAVEFILE}" ]]; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: No target file specified" + echo -e + exit 0 +fi + +# http://list.iblocklist.com/?list=ercbntshuthyykfkmhxc + +if [[ -z "${ARG_SOURCEFILE}" ]]; then + echo -e + echo -e " ⭕ ${YELLOW1}[${APP_THIS_FILE}]${RESET}: No source file provided -- must specify a file containing a list of ip ranges to convert" + echo -e + exit 0 +fi + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch +APP_FILE_TEMP="${ARG_SAVEFILE}.tmp" # temp file when building ipset list +APP_FILE_SRC="${ARG_SAVEFILE}.src" # temp file when building ipset list +APP_FILE_PERM="${ARG_SAVEFILE}" # perm file when building ipset list +COUNT_LINES=0 # number of lines in doc +COUNT_TOTAL_SUBNET=0 # number of IPs in all subnets combined +COUNT_TOTAL_IP=0 # number of single IPs (counts each line) +BLOCKS_COUNT_TOTAL_IP=0 # number of ips for one particular file +BLOCKS_COUNT_TOTAL_SUBNET=0 # number of subnets for one particular file +APP_AGENT="Mozilla/5.0 (Windows NT 10.0; WOW64) "\ +"AppleWebKit/537.36 (KHTML, like Gecko) "\ +"Chrome/51.0.2704.103 Safari/537.36" # user agent used with curl +TEMPL_NOW=`date -u` # get current date in utc format +TEMPL_ID=$(basename -- ${APP_FILE_PERM}) # ipset id, get base filename +TEMPL_ID="${TEMPL_ID//[^[:alnum:]]/_}" # ipset id, only allow alphanum and underscore, /description/* and /category/* files must match this value +TEMPL_UUID=$(uuidgen -m -N "${TEMPL_ID}" -n @url) # uuid associated to each release +TEMPL_DESC=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/descriptions/${TEMPL_ID}.txt") +TEMPL_CAT=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/categories/${TEMPL_ID}.txt") +TEMPL_EXP=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/expires/${TEMPL_ID}.txt") +TEMP_URL_SRC=$(curl -sSL -A "${APP_AGENT}" "https://raw.githubusercontent.com/${APP_REPO}/${APP_REPO_BRANCH}/.github/url-source/${TEMPL_ID}.txt") +REGEX_URL='^(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]\.[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]$' +REGEX_ISNUM='^[0-9]+$' + +# # +# Default Values +# # + +if [[ "$TEMPL_DESC" == *"404: Not Found"* ]]; then + TEMPL_DESC="# No description provided" +fi + +if [[ "$TEMPL_CAT" == *"404: Not Found"* ]]; then + TEMPL_CAT="Uncategorized" +fi + +if [[ "$TEMPL_EXP" == *"404: Not Found"* ]]; then + TEMPL_EXP="6 hours" +fi + +if [[ "$TEMP_URL_SRC" == *"404: Not Found"* ]]; then + TEMP_URL_SRC="None" +fi + +# # +# output +# # + +echo -e +echo -e " ⭐ Starting script ${GREEN1}${APP_THIS_FILE}${RESET}" + +# # +# Create or Clean file +# # + +if [ -f $APP_FILE_PERM ]; then + echo -e " 📄 Clean ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + > ${APP_FILE_PERM} # clean file +else + echo -e " 📁 Create ${BLUE2}${APP_FILE_PERM}${RESET}" + echo -e + mkdir -p $(dirname "${APP_FILE_PERM}") + touch ${APP_FILE_PERM} +fi + +# # +# Source is URL +# # + +if [[ $ARG_SOURCEFILE =~ $REGEX_URL ]]; then + wget -q "${ARG_SOURCEFILE}" -O "${ARG_SAVEFILE}.gz" + ARG_SOURCEFILE=$(zcat "${ARG_SAVEFILE}.gz") + 
ipAddr=$(echo "$ARG_SOURCEFILE" | grep -v "^#|^;|^$" | awk '{if (++dup[$0] == 1) print $0;}' | grep -vi "${ARG_GREP_FILTER}" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\s*-\s*[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | iprange | sort -n > ${APP_FILE_TEMP}) + + rm "${ARG_SAVEFILE}.gz" +else + ipAddr=$(cat "$ARG_SOURCEFILE" | grep -v "^#|^;|^$" | awk '{if (++dup[$0] == 1) print $0;}' | grep -vi "${ARG_GREP_FILTER}" | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\s*-\s*[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | iprange | sort -n > ${APP_FILE_TEMP}) +fi + +# # +# ip ranges converted to CIDR notation +# +# in case our source file is not clean, run the file through grep first and get only the ip ranges. +# # + +sed -i '/[#;]/{s/#.*//;s/;.*//;/^$/d}' ${APP_FILE_TEMP} # remove # and ; comments +sed -i 's/\-.*//' ${APP_FILE_TEMP} # remove hyphens for ip ranges +sed -i 's/[[:blank:]]*$//' ${APP_FILE_TEMP} # remove space / tab from EOL +sed -i '/^\s*$/d' ${APP_FILE_TEMP} # remove empty lines + +# # +# calculate how many IPs are in a subnet +# if you want to calculate the USABLE IP addresses, subtract -2 from any subnet not ending with 31 or 32. +# +# for our purpose, we want to block them all in the event that the network has reconfigured their network / broadcast IPs, +# so we will count every IP in the block. +# # + +echo -e " 📊 Fetching statistics for clean file ${ORANGE2}${APP_FILE_TEMP}${RESET}" +for line in $(cat ${APP_FILE_TEMP}); do + + # is ipv6 + if [ "$line" != "${line#*:[0-9a-fA-F]}" ]; then + if [[ $line =~ /[0-9]{1,3}$ ]]; then + COUNT_TOTAL_SUBNET=$(( $COUNT_TOTAL_SUBNET + 1 )) # GLOBAL count subnet + BLOCKS_COUNT_TOTAL_SUBNET=$(( $BLOCKS_COUNT_TOTAL_SUBNET + 1 )) # LOCAL count subnet + else + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + 1 )) # GLOBAL count ip + BLOCKS_COUNT_TOTAL_IP=$(( $BLOCKS_COUNT_TOTAL_IP + 1 )) # LOCAL count ip + fi + + # is subnet + elif [[ $line =~ /[0-9]{1,2}$ ]]; then + ips=$(( 1 << (32 - ${line#*/}) )) + + if [[ $ips =~ $REGEX_ISNUM ]]; then + # CIDR=$(echo $line | sed 's:.*/::') + + # uncomment if you want to count ONLY usable IP addresses + # subtract - 2 from any cidr not ending with 31 or 32 + # if [[ $CIDR != "31" ]] && [[ $CIDR != "32" ]]; then + # BLOCKS_COUNT_TOTAL_IP=$(( $BLOCKS_COUNT_TOTAL_IP - 2 )) + # COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP - 2 )) + # fi + + BLOCKS_COUNT_TOTAL_IP=$(( $BLOCKS_COUNT_TOTAL_IP + $ips )) # LOCAL count IPs in subnet + BLOCKS_COUNT_TOTAL_SUBNET=$(( $BLOCKS_COUNT_TOTAL_SUBNET + 1 )) # LOCAL count subnet + + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + $ips )) # GLOBAL count IPs in subnet + COUNT_TOTAL_SUBNET=$(( $COUNT_TOTAL_SUBNET + 1 )) # GLOBAL count subnet + fi + + # is normal IP + elif [[ $line =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + BLOCKS_COUNT_TOTAL_IP=$(( $BLOCKS_COUNT_TOTAL_IP + 1 )) + COUNT_TOTAL_IP=$(( $COUNT_TOTAL_IP + 1 )) + fi +done + +# # +# Count lines and subnets +# # + +COUNT_LINES=$(wc -l < ${APP_FILE_TEMP}) # GLOBAL count ip lines +COUNT_LINES=$(printf "%'d" "$COUNT_LINES") # GLOBAL add commas to thousands +COUNT_TOTAL_IP=$(printf "%'d" "$COUNT_TOTAL_IP") # GLOBAL add commas to thousands +COUNT_TOTAL_SUBNET=$(printf "%'d" "$COUNT_TOTAL_SUBNET") # GLOBAL add commas to thousands + +BLOCKS_COUNT_TOTAL_IP=$(printf "%'d" "$BLOCKS_COUNT_TOTAL_IP") # LOCAL add commas to thousands +BLOCKS_COUNT_TOTAL_SUBNET=$(printf "%'d" "$BLOCKS_COUNT_TOTAL_SUBNET") # LOCAL add commas to thousands + +echo -e " 🚛 Move ${ORANGE2}${APP_FILE_TEMP}${RESET} to 
${BLUE2}${APP_FILE_PERM}${RESET}" +cat ${APP_FILE_TEMP} >> ${APP_FILE_PERM} # copy .tmp contents to real file +rm ${APP_FILE_TEMP} # delete temp file + +echo -e " ➕ Added ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_IP} IPs${RESET} and ${FUCHSIA2}${BLOCKS_COUNT_TOTAL_SUBNET} Subnets${RESET} to ${BLUE2}${APP_FILE_PERM}${RESET}" + +# # +# ed +# 0a top of file +# # + +ed -s ${APP_FILE_PERM} < colors @@ -68,6 +63,29 @@ GREY1="\e[38;5;240m" GREY2="\e[38;5;244m" GREY3="\e[38;5;250m" +# # +# print an error and exit with failure +# $1: error message +# # + +function error() +{ + echo -e " ⭕ ${GREY2}${APP_THIS_FILE}${RESET}: \n ${BOLD}${RED}Error${NORMAL}: ${RESET}$1" + echo -e + exit 0 +} + +# # +# Define > General +# # + +SECONDS=0 # set seconds count for beginning of script +APP_VER=("1" "0" "0" "0") # current script version +APP_DEBUG=false # debug mode +APP_REPO="Aetherinox/blocklists" # repository +APP_REPO_BRANCH="main" # repository branch +TEMPL_NOW=`date -u '+%m/%d/%Y %H:%M'` # get current date in utc format + # # # Color Code Test # @@ -106,7 +124,7 @@ function debug_ColorTest() echo -e "GREY3 ${GREY1}................ ${GREY3}This is test text ███████████████${RESET}" echo -e - exit 0 + exit 1 } # # @@ -133,7 +151,7 @@ function debug_ColorChart() echo -e done - exit 0 + exit 1 } # # @@ -144,12 +162,12 @@ ARG1=$1 if [ "$ARG1" == "clr" ]; then debug_ColorTest - exit 0 + exit 1 fi if [ "$ARG1" == "chart" ]; then debug_ColorChart - exit 0 + exit 1 fi # # diff --git a/.github/url-source/01_highrisk_ipset.txt b/.github/url-source/highrisk_ipset.txt similarity index 100% rename from .github/url-source/01_highrisk_ipset.txt rename to .github/url-source/highrisk_ipset.txt diff --git a/.github/url-source/isp_aol_ipset.txt b/.github/url-source/isp_aol_ipset.txt new file mode 100644 index 000000000..584b9f55a --- /dev/null +++ b/.github/url-source/isp_aol_ipset.txt @@ -0,0 +1 @@ +https://raw.githubusercontent.com/Aetherinox/blocklists/main/blocklists/isp/isp_aol.ipset \ No newline at end of file diff --git a/.github/url-source/isp_att_ipset.txt b/.github/url-source/isp_att_ipset.txt new file mode 100644 index 000000000..b0400d5e7 --- /dev/null +++ b/.github/url-source/isp_att_ipset.txt @@ -0,0 +1 @@ +whois.radb.net \ No newline at end of file diff --git a/.github/url-source/isp_cablevision_ipset.txt b/.github/url-source/isp_cablevision_ipset.txt new file mode 100644 index 000000000..b0400d5e7 --- /dev/null +++ b/.github/url-source/isp_cablevision_ipset.txt @@ -0,0 +1 @@ +whois.radb.net \ No newline at end of file diff --git a/.github/url-source/isp_charter_spectrum_timewarnercable_ipset.txt b/.github/url-source/isp_charter_spectrum_timewarnercable_ipset.txt new file mode 100644 index 000000000..b0400d5e7 --- /dev/null +++ b/.github/url-source/isp_charter_spectrum_timewarnercable_ipset.txt @@ -0,0 +1 @@ +whois.radb.net \ No newline at end of file diff --git a/.github/url-source/01_master_ipset.txt b/.github/url-source/master_ipset.txt similarity index 100% rename from .github/url-source/01_master_ipset.txt rename to .github/url-source/master_ipset.txt diff --git a/.github/url-source/02_privacy_ahrefs_ipset.txt b/.github/url-source/privacy_ahrefs_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_ahrefs_ipset.txt rename to .github/url-source/privacy_ahrefs_ipset.txt diff --git a/.github/url-source/02_privacy_amazon_aws_ipset.txt b/.github/url-source/privacy_amazon_aws_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_amazon_aws_ipset.txt rename to 
.github/url-source/privacy_amazon_aws_ipset.txt diff --git a/.github/url-source/02_privacy_amazon_ec2_ipset.txt b/.github/url-source/privacy_amazon_ec2_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_amazon_ec2_ipset.txt rename to .github/url-source/privacy_amazon_ec2_ipset.txt diff --git a/.github/url-source/02_privacy_applebot_ipset.txt b/.github/url-source/privacy_applebot_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_applebot_ipset.txt rename to .github/url-source/privacy_applebot_ipset.txt diff --git a/.github/url-source/02_privacy_bing_ipset.txt b/.github/url-source/privacy_bing_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_bing_ipset.txt rename to .github/url-source/privacy_bing_ipset.txt diff --git a/.github/url-source/02_privacy_bunnycdn_ipset.txt b/.github/url-source/privacy_bunnycdn_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_bunnycdn_ipset.txt rename to .github/url-source/privacy_bunnycdn_ipset.txt diff --git a/.github/url-source/02_privacy_cloudflarecdn_ipset.txt b/.github/url-source/privacy_cloudflarecdn_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_cloudflarecdn_ipset.txt rename to .github/url-source/privacy_cloudflarecdn_ipset.txt diff --git a/.github/url-source/02_privacy_cloudfront_ipset.txt b/.github/url-source/privacy_cloudfront_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_cloudfront_ipset.txt rename to .github/url-source/privacy_cloudfront_ipset.txt diff --git a/.github/url-source/02_privacy_duckduckgo_ipset.txt b/.github/url-source/privacy_duckduckgo_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_duckduckgo_ipset.txt rename to .github/url-source/privacy_duckduckgo_ipset.txt diff --git a/.github/url-source/02_privacy_facebook_ipset.txt b/.github/url-source/privacy_facebook_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_facebook_ipset.txt rename to .github/url-source/privacy_facebook_ipset.txt diff --git a/.github/url-source/02_privacy_fastly_ipset.txt b/.github/url-source/privacy_fastly_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_fastly_ipset.txt rename to .github/url-source/privacy_fastly_ipset.txt diff --git a/.github/url-source/02_privacy_general_ipset.txt b/.github/url-source/privacy_general_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_general_ipset.txt rename to .github/url-source/privacy_general_ipset.txt diff --git a/.github/url-source/02_privacy_google_ipset.txt b/.github/url-source/privacy_google_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_google_ipset.txt rename to .github/url-source/privacy_google_ipset.txt diff --git a/.github/url-source/02_privacy_pingdom_ipset.txt b/.github/url-source/privacy_pingdom_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_pingdom_ipset.txt rename to .github/url-source/privacy_pingdom_ipset.txt diff --git a/.github/url-source/02_privacy_rssapi_ipset.txt b/.github/url-source/privacy_rssapi_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_rssapi_ipset.txt rename to .github/url-source/privacy_rssapi_ipset.txt diff --git a/.github/url-source/02_privacy_stripe_api_ipset.txt b/.github/url-source/privacy_stripe_api_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_stripe_api_ipset.txt rename to .github/url-source/privacy_stripe_api_ipset.txt diff --git 
a/.github/url-source/02_privacy_stripe_armada_gator_ipset.txt b/.github/url-source/privacy_stripe_armada_gator_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_stripe_armada_gator_ipset.txt rename to .github/url-source/privacy_stripe_armada_gator_ipset.txt diff --git a/.github/url-source/02_privacy_stripe_webhooks_ipset.txt b/.github/url-source/privacy_stripe_webhooks_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_stripe_webhooks_ipset.txt rename to .github/url-source/privacy_stripe_webhooks_ipset.txt diff --git a/.github/url-source/02_privacy_telegram_ipset.txt b/.github/url-source/privacy_telegram_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_telegram_ipset.txt rename to .github/url-source/privacy_telegram_ipset.txt diff --git a/.github/url-source/02_privacy_uptimerobot_ipset.txt b/.github/url-source/privacy_uptimerobot_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_uptimerobot_ipset.txt rename to .github/url-source/privacy_uptimerobot_ipset.txt diff --git a/.github/url-source/02_privacy_webpagetest_ipset.txt b/.github/url-source/privacy_webpagetest_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_webpagetest_ipset.txt rename to .github/url-source/privacy_webpagetest_ipset.txt diff --git a/.github/url-source/02_privacy_yandex_ipset.txt b/.github/url-source/privacy_yandex_ipset.txt similarity index 100% rename from .github/url-source/02_privacy_yandex_ipset.txt rename to .github/url-source/privacy_yandex_ipset.txt diff --git a/.github/url-source/03_spam_spamhaus_ipset.txt b/.github/url-source/spam_spamhaus_ipset.txt similarity index 100% rename from .github/url-source/03_spam_spamhaus_ipset.txt rename to .github/url-source/spam_spamhaus_ipset.txt diff --git a/.github/workflows/blocklist-generate.yml b/.github/workflows/blocklist-generate.yml index e28ae2112..7a361ee27 100644 --- a/.github/workflows/blocklist-generate.yml +++ b/.github/workflows/blocklist-generate.yml @@ -41,7 +41,28 @@ run-name: "🧱 Blocklist › Generate" # # on: + + # # + # Trigger > Workflow Dispatch + # # + workflow_dispatch: + inputs: + + # # + # true: runs all actions, even ones not scheduled + # false: only scheduled tasks will run + # # + + RUN_ALL_ACTIONS: + description: "📑 Run All Actions" + required: true + default: false + type: boolean + + # # + # Trigger > Cron Schedule + # # schedule: - cron: '0 0,6,12,18 * * *' @@ -93,7 +114,7 @@ jobs: - name: "🧱 Install Packages" id: task_setup_install run: | - sudo apt-get install -y ipcalc ed html2text whois uuid-runtime + sudo apt-get install -y ipcalc ed html2text whois uuid-runtime autoconf # # # Generate > Cache Packages @@ -126,6 +147,28 @@ jobs: with: fetch-depth: 0 + # # + # Generate > Configure + # + # this step installs packages we need to manage ipsets. 
+ # - iprange + # this package allows us to convert ip ranges into a CIDR formatted ip + # 10.10.0.1-10.10.0.9 => 10.10.0.1 + # 10.10.0.2/31 + # 10.10.0.4/30 + # 10.10.0.8/31 + # https://github.com/firehol/iprange + # # + + - name: "⚙️ Configure" + id: task_blocklist_generate_configure + run: | + git clone https://github.com/firehol/iprange.git ./.temp/iprange + cd .temp/iprange + ./autogen.sh + ./configure --disable-man + sudo make && make install + # # # Generate > Set Template Permissions # # @@ -143,8 +186,10 @@ jobs: chmod +x ".github/scripts/bl-json.sh" chmod +x ".github/scripts/bl-plain.sh" chmod +x ".github/scripts/bl-spf.sh" + chmod +x ".github/scripts/bl-whois.sh" chmod +x ".github/scripts/bt-transmission.sh" chmod +x ".github/scripts/update-readme.sh" + chmod +x ".github/scripts/tool-range-iprange.sh" # # # Generate > Set Env Variables @@ -153,7 +198,7 @@ jobs: - name: "📦 Set Env Variables" id: task_commit_pre run: | - useragent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36" + useragent="${{ vars.API_USERAGENT }}" echo "USERAGENT=$(echo $useragent)" >> $GITHUB_ENV # # @@ -163,10 +208,10 @@ jobs: - name: "🧱 Generate › Master" id: task_blocklist_generate_master run: | - run_master=".github/scripts/bl-master.sh blocklists/${{ vars.API_01_OUT }} ${{ secrets.API_01_FILE_01 }} ${{ secrets.API_01_FILE_02 }} ${{ secrets.API_01_FILE_03 }} ${{ secrets.API_01_FILE_04 }} ${{ secrets.API_01_FILE_05 }} ${{ secrets.API_01_FILE_06 }} ${{ secrets.API_01_FILE_07 }} ${{ secrets.API_01_FILE_08 }}" + run_master=".github/scripts/bl-master.sh blocklists/master.ipset ${{ secrets.API_01_FILE_01 }} ${{ secrets.API_01_FILE_02 }} ${{ secrets.API_01_FILE_03 }} ${{ secrets.API_01_FILE_04 }} ${{ secrets.API_01_FILE_05 }} ${{ secrets.API_01_FILE_06 }} ${{ secrets.API_01_FILE_07 }} ${{ secrets.API_01_FILE_08 }}" eval "./$run_master" - run_highrisk=".github/scripts/bl-htmlip.sh blocklists/${{ vars.API_01_HIGHRISK_OUT }} ${{ secrets.API_01_HIGHRISK_URL }} '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}'" + run_highrisk=".github/scripts/bl-htmlip.sh blocklists/highrisk.ipset ${{ secrets.API_01_HIGHRISK_URL }} '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}'" eval "./$run_highrisk" # # @@ -178,83 +223,136 @@ jobs: run: | # Privacy › General - run_general=".github/scripts/bl-block.sh blocklists/${{ vars.API_02_GENERAL_OUT }} privacy" + run_general=".github/scripts/bl-block.sh blocklists/privacy/privacy_general.ipset privacy" eval "./$run_general" # Privacy › Google - run_google=".github/scripts/bl-json.sh blocklists/02_privacy_google.ipset https://developers.google.com/search/apis/ipranges/googlebot.json '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty'" + run_google=".github/scripts/bl-json.sh blocklists/privacy/privacy_google.ipset https://developers.google.com/search/apis/ipranges/googlebot.json '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty'" eval "./$run_google" # Privacy › Cloudfront - run_cloudfront=".github/scripts/bl-json.sh blocklists/02_privacy_cloudfront.ipset https://d7uri8nf7uskq.cloudfront.net/tools/list-cloudfront-ips 'map(.[]) | sort | .[]'" + run_cloudfront=".github/scripts/bl-json.sh blocklists/privacy/privacy_cloudfront.ipset https://d7uri8nf7uskq.cloudfront.net/tools/list-cloudfront-ips 'map(.[]) | sort | .[]'" eval "./$run_cloudfront" # Privacy › Bing - run_bing=".github/scripts/bl-json.sh blocklists/02_privacy_bing.ipset https://bing.com/toolbox/bingbot.json '.prefixes | .[] 
|.ipv4Prefix//empty,.ipv6Prefix//empty'" + run_bing=".github/scripts/bl-json.sh blocklists/privacy/privacy_bing.ipset https://bing.com/toolbox/bingbot.json '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty'" eval "./$run_bing" # Privacy › Fastly - run_fastly=".github/scripts/bl-json.sh blocklists/02_privacy_fastly.ipset https://api.fastly.com/public-ip-list 'map(.[]) | .[]'" + run_fastly=".github/scripts/bl-json.sh blocklists/privacy/privacy_fastly.ipset https://api.fastly.com/public-ip-list 'map(.[]) | .[]'" eval "./$run_fastly" # Privacy › Amazon AWS - run_amz_aws=".github/scripts/bl-json.sh blocklists/02_privacy_amazon_aws.ipset https://ip-ranges.amazonaws.com/ip-ranges.json '.prefixes[] | select(.service==\"AMAZON\") | .ip_prefix'" + run_amz_aws=".github/scripts/bl-json.sh blocklists/privacy/privacy_amazon_aws.ipset https://ip-ranges.amazonaws.com/ip-ranges.json '.prefixes[] | select(.service==\"AMAZON\") | .ip_prefix'" eval "./$run_amz_aws" # Privacy › Amazon EC2 - run_amz_ec2=".github/scripts/bl-json.sh blocklists/02_privacy_amazon_ec2.ipset https://ip-ranges.amazonaws.com/ip-ranges.json '.prefixes[] | select(.service==\"EC2\") | .ip_prefix'" + run_amz_ec2=".github/scripts/bl-json.sh blocklists/privacy/privacy_amazon_ec2.ipset https://ip-ranges.amazonaws.com/ip-ranges.json '.prefixes[] | select(.service==\"EC2\") | .ip_prefix'" eval "./$run_amz_ec2" # Privacy › Facebook - whois -h whois.radb.net -- '-i origin AS32934' | grep ^route | awk '{gsub("(route:|route6:)","");print}' | awk '{gsub(/ /,""); print}' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_facebook.ipset + run_facebook=".github/scripts/bl-whois.sh blocklists/privacy/privacy_facebook.ipset AS32934" + eval "./$run_facebook" # Privacy › Ahrefs - curl -sSL -A "${{ env.USERAGENT }}" https://api.ahrefs.com/v3/public/crawler-ips | jq -r '.ips[].ip_address | select( . != null )' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_ahrefs.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://api.ahrefs.com/v3/public/crawler-ips | jq -r '.ips[].ip_address | select( . 
!= null )' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_ahrefs.ipset # Privacy › DuckDuckGo - curl -sSL -A "${{ env.USERAGENT }}" https://raw.githubusercontent.com/duckduckgo/duckduckgo-help-pages/master/_docs/results/duckduckbot.md | grep "^\- " | awk '{gsub("-",""); print}' | awk '{gsub(/ /,""); print}' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_duckduckgo.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://raw.githubusercontent.com/duckduckgo/duckduckgo-help-pages/master/_docs/results/duckduckbot.md | grep "^\- " | awk '{gsub("-",""); print}' | awk '{gsub(/ /,""); print}' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_duckduckgo.ipset # Privacy › Telegram - curl -sSL -A "${{ env.USERAGENT }}" https://core.telegram.org/resources/cidr.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_telegram.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://core.telegram.org/resources/cidr.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_telegram.ipset # Privacy › Uptime Robot - curl -sSL -A "${{ env.USERAGENT }}" https://uptimerobot.com/inc/files/ips/IPv4andIPv6.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_uptimerobot.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://uptimerobot.com/inc/files/ips/IPv4andIPv6.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_uptimerobot.ipset # Privacy › Pingdom PINGDOM_IPv4=$(curl -sSL -A "${{ env.USERAGENT }}" https://my.pingdom.com/probes/ipv4) PINGDOM_IPv6=$(curl -sSL -A "${{ env.USERAGENT }}" https://my.pingdom.com/probes/ipv6) PINGDOM_LIST="${PINGDOM_IPv4} ${PINGDOM_IPv6}" - echo "$PINGDOM_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_pingdom.ipset + echo "$PINGDOM_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_pingdom.ipset # Privacy › Stripe › API - curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_api.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_stripe_api.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_api.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_api.ipset # Privacy › Stripe › Webhooks - curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_webhooks.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_stripe_webhooks.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_webhooks.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_webhooks.ipset # Privacy › Stripe › Armada Gator - curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_armada_gator.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_stripe_armada_gator.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://stripe.com/files/ips/ips_armada_gator.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_armada_gator.ipset # Privacy › RSS API - curl -sSL -A "${{ env.USERAGENT }}" https://rssapi.net/ips.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_rssapi.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://rssapi.net/ips.txt | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_rssapi.ipset # Privacy › WebPageTest - curl -sSL -A "${{ env.USERAGENT }}" https://www.webpagetest.org/addresses.php?f=json | jq 
-r '.data[].addresses[] | select( . != null )' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_webpagetest.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://www.webpagetest.org/addresses.php?f=json | jq -r '.data[].addresses[] | select( . != null )' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_webpagetest.ipset # Privacy > Bunny CDN BUNNYCDN_IPv4=$(curl -sSL -A "${{ env.USERAGENT }}" https://api.bunny.net/system/edgeserverlist/plain) BUNNYCDN_IPv6=$(curl -sSL -A "${{ env.USERAGENT }}" https://api.bunny.net/system/edgeserverlist/ipv6 | jq -r '.[] | select( . != null )') BUNNYCDN_LIST="${BUNNYCDN_IPv4} ${BUNNYCDN_IPv6}" - echo "$BUNNYCDN_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_bunnycdn.ipset + echo "$BUNNYCDN_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_bunnycdn.ipset # Privacy › Cloudflare CDN CLOUDFLARE_IPv4=$(curl -sSL -A "${{ env.USERAGENT }}" https://www.cloudflare.com/ips-v4) CLOUDFLARE_IPv6=$(curl -sSL -A "${{ env.USERAGENT }}" https://www.cloudflare.com/ips-v6) CLOUDFLARE_LIST="${CLOUDFLARE_IPv4} ${CLOUDFLARE_IPv6}" - echo "$CLOUDFLARE_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_cloudflarecdn.ipset + echo "$CLOUDFLARE_LIST" | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_cloudflarecdn.ipset # Privacy › AppleBot - curl -sSL -A "${{ env.USERAGENT }}" https://search.developer.apple.com/applebot.json | jq -r '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/02_privacy_applebot.ipset + curl -sSL -A "${{ env.USERAGENT }}" https://search.developer.apple.com/applebot.json | jq -r '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty' | $GITHUB_WORKSPACE/.github/scripts/bl-format.sh blocklists/privacy/privacy_applebot.ipset + + # Privacy › Blizzard + run_blizzard=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_blizzard.ipset http://list.iblocklist.com/?list=ercbntshuthyykfkmhxc 'at&t'" + eval "./$run_blizzard" + + # Privacy › Activision + run_activision=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_activision.ipset http://list.iblocklist.com/?list=gfnxlhxsijzrcuxwzebb" + eval "./$run_activision" + + # Privacy › Electronic Arts & IGN + run_ea_ign=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_electronicarts_ign.ipset http://list.iblocklist.com/?list=ejqebpcdmffinaetsvxj" + eval "./$run_ea_ign" + + # Privacy › Nintendo + run_nintendo=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_nintendo.ipset http://list.iblocklist.com/?list=pevkykuhgaegqyayzbnr" + eval "./$run_nintendo" + + # Privacy › Pandora + run_pandora=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_pandora.ipset http://list.iblocklist.com/?list=aevzidimyvwybzkletsg" + eval "./$run_pandora" + + # Privacy › Sony Entertainment + run_sony=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_sony.ipset http://list.iblocklist.com/?list=tukpvrvlubsputmkmiwg" + eval "./$run_sony" + + # Privacy › Punkbuster + run_punkbuster=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_punkbuster.ipset http://list.iblocklist.com/?list=zvwwndvzulqcltsicwdg" + eval "./$run_punkbuster" + +
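# (illustrative sketch, not part of the original workflow) each block in this step follows the same
# pattern: tool-range-iprange.sh <save file> <source file or URL> [grep exclude filter], built into a
# string and executed with eval. A hypothetical extra list would be wired up the same way, e.g.:
#   run_example=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_example.ipset list.txt 'somefilter'"
#   eval "./$run_example"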
# Privacy › Riot Games + run_riot_games=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_riot_games.ipset http://list.iblocklist.com/?list=sdlvfabdjvrdttfjotcy" + eval "./$run_riot_games" + + # Privacy › Pirate Bay + run_piratebay=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_piratebay.ipset http://list.iblocklist.com/?list=nzldzlpkgrcncdomnttb" + eval "./$run_piratebay" + + # Privacy › Steam + run_steam=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_steam.ipset http://list.iblocklist.com/?list=cnxkgiklecdaihzukrud" + eval "./$run_steam" + + # Privacy › Ubisoft + run_ubisoft=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_ubisoft.ipset http://list.iblocklist.com/?list=etmcrglomupyxtaebzht" + eval "./$run_ubisoft" + + # Privacy › Xfire + run_xfire=".github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_xfire.ipset http://list.iblocklist.com/?list=ppqqnyihmcrryraaqsjo" + eval "./$run_xfire" # # # Generate > Spam @@ -263,7 +361,7 @@ jobs: - name: "🧱 Generate › Spam" id: task_blocklist_generate_spam run: | - run_spamhaus=".github/scripts/bl-plain.sh blocklists/${{ vars.API_03_SPAM_SPAMHAUS_OUT }} ${{ secrets.API_03_SPAM_SPAMHAUS_URL }}" + run_spamhaus=".github/scripts/bl-plain.sh blocklists/spam/spam_spamhaus.ipset ${{ secrets.API_03_SPAM_SPAMHAUS_URL }}" eval "./$run_spamhaus" # # @@ -274,24 +372,135 @@ jobs: - name: "🧱 Generate › Spam › Forums (1/day)" id: task_blocklist_spam_generate_forums - if: github.event_name == 'schedule' && github.event.schedule == '0 2 * * *' + if: (github.event_name == 'schedule' && github.event.schedule == '0 2 * * *') || inputs.RUN_ALL_ACTIONS run: | chmod +x ".github/scripts/bl-plain.sh" - run_forums=".github/scripts/bl-plain.sh blocklists/${{ vars.API_03_SPAM_FORUMS_OUT }} ${{ secrets.API_03_SPAM_FORUMS_URL }}" + run_forums=".github/scripts/bl-plain.sh blocklists/spam/spam_forums.ipset https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/stopforumspam_7d.ipset" eval "./$run_forums" # # - # Generate > Geographical > Geolite2 + # Generate > Internet Service Provider # - # only updated once per day (at 1am UTC) + # @resources https://ftp.arin.net/info/asn.txt + # https://networksdb.io + # https://rapidapi.com + # https://ip.guide/ + # https://2ip.io + # https://ip2location.com + # https://ipqualityscore.com + # https://ipinfo.io + # https://radb.net + # https://bgpview.io + # @info script on another server is responsible for ensuring this workflow list is kept up to date with the correct ASN. + # we use numerous resources to compare ASNs to see which ones are active and which ones have been migrated.
+ # # + + - name: "🧱 Generate › ISP" + id: task_blocklist_generate_isp + run: | + # ISP › AOL + run_isp_aol=".github/scripts/bl-block.sh blocklists/isp/isp_aol.ipset isp/aol.ipset" + eval "./$run_isp_aol" + + # ISP › ATT + run_isp_att=".github/scripts/bl-whois.sh blocklists/isp/isp_att.ipset AS7018" + eval "./$run_isp_att" + + # ISP › Cablevision | Later merged with Suddenlink + run_isp_cablevision=".github/scripts/bl-whois.sh blocklists/isp/isp_cablevision.ipset AS6128 AS13490 AS19720" + eval "./$run_isp_cablevision" + + # ISP › Suddenlink / Altice / Optimum + run_isp_suddenlink_optimum=".github/scripts/bl-json.sh blocklists/isp/isp_suddenlink_altice_optimum.ipset https://ip.guide/AS19108 '.routes | .v4//empty,.v6//empty | .[]'" + eval "./$run_isp_suddenlink_optimum" + + # ISP › Frontier Communications | https://networksdb.io/ip-addresses-of/cox-communications-inc + run_isp_frontier=".github/scripts/bl-whois.sh blocklists/isp/isp_frontier_communications.ipset AS3593 AS5650 AS7011 AS26127 AS30064 AS32587" + eval "./$run_isp_frontier" + + # ISP › Charter & Spectrum (Previously Time Warner Cable) + run_isp_charter_spectrum=".github/scripts/bl-whois.sh blocklists/isp/isp_charter_spectrum_timewarnercable.ipset AS7843 AS11351 AS12271 AS20001 AS20115 AS3456 AS63365" + eval "./$run_isp_charter_spectrum" + + # ISP › Comcast + run_isp_comcast=".github/scripts/bl-whois.sh blocklists/isp/isp_comcast.ipset AS7922 AS7015 AS36732 AS36196 AS33651 AS33650 AS33542 AS33491 AS33490 AS33489 AS33351 AS33287 AS23266 AS23253 AS22909 AS22258 AS21508 AS20214 AS16748 AS14668 AS14042 AS13385 AS13367 AS11025" + eval "./$run_isp_comcast" + + # ISP › Embarq + run_isp_embarq=".github/scripts/bl-whois.sh blocklists/isp/isp_embarq.ipset AS22186 AS32855 AS2379 AS3447 AS4212 AS5778 AS6222 AS6367 AS11398 AS11530 AS13787 AS14905 AS14910 AS14921 AS16718 AS17402 AS18494 AS22186 AS32855" + eval "./$run_isp_embarq" + + # ISP › Qwest + run_isp_qwest=".github/scripts/bl-whois.sh blocklists/isp/isp_qwest.ipset AS3908 AS3909 AS3910 AS3951 AS4015 AS4911 AS6225 AS6226 AS6227 AS394190" + eval "./$run_isp_qwest" + + # ISP › Sprint + run_isp_sprint=".github/scripts/bl-whois.sh blocklists/isp/isp_sprint.ipset AS1239 AS150389 AS1789 AS1790 AS1791 AS1792 AS1793 AS1794 AS1795 AS197226 AS2014 AS2050 AS2053 AS206963 AS21288 AS2938 AS2942 AS2959 AS2981 AS3647 AS3648 AS3649 AS3650 AS3651 AS3652" + eval "./$run_isp_sprint" + + # ISP › Verizon | https://networksdb.io/search/org/verizon + run_isp_verizon=".github/scripts/bl-whois.sh blocklists/isp/isp_verizon.ipset AS701 AS702 AS1321 AS2125 AS7021 AS8385 AS6066 AS6167 AS9055 AS12367 AS22521" + eval "./$run_isp_verizon" + + # ISP › Cox Communications | https://networksdb.io/ip-addresses-of/cox-communications-inc + run_isp_cox=".github/scripts/bl-whois.sh blocklists/isp/isp_cox_communications.ipset AS31771 AS22773 AS13432 AS6298 AS12064 AS13493 AS15218 AS22318 AS25904 AS26204" + eval "./$run_isp_cox" + + # ISP › SpaceX Starlink + run_isp_starlink=".github/scripts/bl-whois.sh blocklists/isp/isp_spacex_starlink.ipset AS14593 AS36492 AS397763 AS27277 AS142475" + eval "./$run_isp_starlink" + + # # + # Generate > Geographical > Geolite2 > Setup + # + # this step should only be run once per day (at 1am UTC). + # The vars defined below are used for caching. The current day of the year + year are calculated, which allows + # the same cached files to be used in a 24 hour period. When the day of the year changes, a new set of geo files will + # be updated.
+ # + # CACHE VARS: year_week outputs the current week of the year, and year + # 51_2024 + # + # year_day outputs the current day of the year, and year + # 308_2024 + # # + + - name: "🧱 Geographical › GeoLite2 (Setup)" + id: task_blocklist_geographical_generate_setup + if: (github.event_name == 'schedule' && github.event.schedule == '0 2 * * *') || ( github.event_name == 'workflow_dispatch' && inputs.RUN_ALL_ACTIONS == 'true' ) + run: | + echo "year_week=$(date +'%U_%Y')" >> $GITHUB_ENV + echo "year_day=$(date +'%j_%Y')" >> $GITHUB_ENV + + # # + # Generate > Geographical > Geolite2 > Cache + # + # uses the same cache in a 24 hour period. + # + # @output cache-hit + # only run step if cache hit found + # if: steps.task_blocklist_geographical_generate_cache.outputs.cache-hit == 'true' + # # + + - name: "🧱 Geographical › GeoLite2 (Cache)" + id: task_blocklist_geographical_generate_cache + uses: actions/cache@v4 + if: steps.task_blocklist_geographical_generate_setup.outcome == 'success' + with: + path: .github/.temp + key: cache-${{ runner.os }}-geolite2-${{ env.year_week }} + + # # + # Generate > Geographical > Geolite2 > Build # # - name: "🧱 Geographical › GeoLite2 (1/day)" id: task_blocklist_geographical_generate_geolite2 + if: steps.task_blocklist_geographical_generate_setup.outcome == 'success' run: | chmod +x ".github/scripts/bl-geolite2.sh" - run_geolite=".github/scripts/bl-geolite2.sh -p ${{ secrets.API_GEOLITE2_KEY }}" - eval "./$run_geolite" + run_geolite2=".github/scripts/bl-geolite2.sh -l ${{ secrets.API_GEOLITE2_KEY }}" + eval "./$run_geolite2" # # # Generate > Transmission