UMTS at Teleco 1 nedēļu atpakaļ
vecāks
revīzija
bdede69430
15 mainītis faili ar 2655 papildinājumiem un 1398 dzēšanām
  1. 2 1
      .gitignore
  2. 5 2
      Cargo.toml
  3. 453 251
      Lists.toml
  4. 18 11
      README.md
  5. 36 17
      doc/TUI.md
  6. 1245 0
      scripts/auto-violator.sh
  7. 36 0
      scripts/check_bogus.py
  8. 0 816
      scripts/fetch-tlds.sh
  9. 160 0
      scripts/violator.conf
  10. 99 81
      src/app.rs
  11. 4 6
      src/cli.rs
  12. 1 1
      src/lookup.rs
  13. 11 11
      src/output.rs
  14. 579 201
      src/tui.rs
  15. 6 0
      src/types.rs

+ 2 - 1
.gitignore

@@ -10,4 +10,5 @@ build/
 
 Cargo.lock
 .hoardom
-me.md
+me.md
+scripts/violator-workdir

+ 5 - 2
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "hoardom"
-version = "2.0.1"
+version = "2.1.2"
 edition = "2021"
 description = "Domain hoarding made less painful"
 default-run = "hoardom"
@@ -35,7 +35,7 @@ clap = { version = "4", features = ["derive"] }
 tokio = { version = "1", features = ["full"] }
 reqwest = { version = "0.12", features = ["json"] }
 serde = { version = "1", features = ["derive"] }
-serde_json = "1"
+serde_json = "1" # Json DERULO !
 toml = "0.8"
 dirs = "6"
 colored = "3"
@@ -44,6 +44,9 @@ crossterm = "0.28"
 indicatif = "0.17"
 chrono = "0.4"
 futures = "0.3"
+rand = "0.9"
+idna = "1.0"
+arboard = "3"
 
 # gui wrapper deps (only built with --features gui)
 eframe = { version = "0.30", optional = true }

+ 453 - 251
Lists.toml

@@ -1,293 +1,495 @@
 # Lists.toml — Built-in TLD lists for hoardom
-# Auto-generated on 2026-03-08 from Porkbun + OVH + INWX + RDAP bootstrap + WHOIS server list
+# Auto-generated on 2026-03-09 from Porkbun + OVH + INWX + DomainOffer + RDAP bootstrap + WHOIS server list
 #
 # Format:
 #   "tld"              — TLD has RDAP support, lookup works directly
 #   "tld:whois.server" — No RDAP: use this WHOIS server for fallback
 #
-# 637 purchasable TLDs (handshake/sub-TLDs excluded)
-# 573 have RDAP, 58 need WHOIS override, 6 will auto-probe
+# 780 purchasable TLDs (handshake/sub-TLDs excluded)
+# 638 have RDAP, 142 need WHOIS override, 0 will auto-probe
 #
-# Lists:
-#   standard  — common desirable TLDs (com, net, org, io, dev, ...)
-#   decent    — very best short punchy TLDs for domain hacking
-#   swiss     — standard-like but with Swiss/Central European TLDs prioritized
-#   country   — all 2-letter country-code TLDs
-#   two       — all 2-letter TLDs
-#   three     — all TLDs with 3 or fewer letters
-#   four      — all TLDs with exactly 4 letters
-#   long      — all TLDs with 5+ letters
-#   all       — everything
 
 standard = [
-	"com", "net", "org", "io:whois.nic.io", "co:whois.registry.co", "dev", "app",
-	"me:whois.identitydigital.services", "info", "biz", "one", "xyz", "online",
-	"site", "tech", "pro", "tv", "cc", "to", "sh:whois.nic.sh",
-	"li:whois.nic.li", "fm", "am:whois.amnic.net", "gg:whois.gg",
-	"ws:whois.website.ws", "la:whois.nic.la", "ms", "nu:whois.iis.nu", "cx",
-	"mn:whois.nic.mn", "st", "tel", "ai", "id", "in", "it:whois.nic.it", "is",
-	"at:whois.nic.at", "be:whois.dns.be", "de:whois.denic.de", "eu:whois.eu",
-	"fr", "nl", "se:whois.iis.se", "uk", "us:whois.nic.us", "ca", "au",
-	"nz:whois.irs.net.nz", "club", "blog", "art", "fun", "lol", "wtf", "page",
-	"link", "space", "store", "shop",
+	"com", "net", "org", "ch:whois.nic.ch", "eu:whois.eu",
+	"co:whois.registry.co", "cc", "sh:whois.nic.sh", "mx:whois.mx", "ms", "ss:whois.nic.ss", "ax:whois.ax", "ac:whois.nic.ac",
+	"tc:whois.nic.tc", "tel", "talk",
+	"aero", "biz", "top", "ai", "ag:whois.nic.ag",
+	"am:whois.amnic.net", "at:whois.nic.at", "be:whois.dns.be", "ca", "cz",
+	"de:whois.denic.de", "dk:whois.punktum.dk", "es:whois.nic.es", "fi", "fm",
+	"fr", "gg:whois.gg", "hk:whois.hkirc.hk", "id", "ie:whois.weare.ie",
+	"im:whois.nic.im", "in", "is", "it:whois.nic.it", "jp:whois.jprs.jp",
+	"kr:whois.kr", "la:whois.nic.la", "li:whois.nic.li", "nl", "no",
+	"nu:whois.iis.nu", "nz:whois.irs.net.nz", "pl", "pt:whois.dns.pt",
+	"ro:whois.rotld.ro", "se:whois.iis.se", "sg", "si", "sk:whois.sk-nic.sk",
+	"to", "tv", "tw", "uk", "us:whois.nic.us", "ws:whois.website.ws", "app",
+	"beer", "bio", "black", "blog", "blue", "cafe", "cam", "car", "cash", "cat",
+	"chat", "click", "cloud", "club", "code", "cool", "day", "deal", "dev",
+	"dog", "esq", "exchange", "express", "fail", "farm", "fast", "fish", "foo",
+	"free", "fun", "gay", "gold", "green", "guru", "hair", "help", "host", "hot",
+	"how", "immo", "inc", "ing", "ink", "land", "link", "live", "lol", "love",
+	"me:whois.identitydigital.services", "meme", "moe", "mov", "name", "network",
+	"new", "news", "nexus", "ngo", "now", "ooo", "one", "online", "open", "page",
+	"pics", "pink", "plus", "pro", "quest", "red", "rest", "rip", "rocks", "run",
+	"sale", "sex", "sexy", "shop", "show", "site", "social", "solutions",
+	"space", "spot", "store", "stream", "surf", "systems", "team", "tech",
+	"tips", "tools", "tube", "uno", "vision", "vodka", "wiki", "win", "work",
+	"world", "wtf", "xyz", "zone",
 ]
 
-decent = [
-	"com", "net", "org", "io:whois.nic.io", "dev", "app", "co:whois.registry.co",
-	"me:whois.identitydigital.services", "ai", "sh:whois.nic.sh", "to", "fm",
-	"tv", "gg:whois.gg", "cc", "li:whois.nic.li", "am:whois.amnic.net",
-	"la:whois.nic.la", "nu:whois.iis.nu", "id", "in", "it:whois.nic.it", "is",
-	"at:whois.nic.at", "ws:whois.website.ws", "one", "pro", "bio", "art", "ink",
-	"run", "win", "new", "lol", "pub", "fun", "vet", "fit", "rip", "wtf", "zip",
+compressed = [
+	"com", "net", "org", "ch:whois.nic.ch", "eu:whois.eu",
+	"co:whois.registry.co", "cc", "sh:whois.nic.sh", "mx:whois.mx", "ms",
+	"sx", "ss:whois.nic.ss", "ax:whois.ax", "ac:whois.nic.ac",
+	"tc:whois.nic.tc", "ie:whois.weare.ie", "im:whois.nic.im",
+	"tel", "talk", "plus", "surf", "aero", "wiki", "biz", "xyz", "top",
+	"ag:whois.nic.ag", "ai", "am:whois.amnic.net", "at:whois.nic.at", "au", "ca",
+	"de:whois.denic.de", "dk:whois.punktum.dk", "es:whois.nic.es", "fi", "fm",
+	"fr", "gg:whois.gg", "hk:whois.hkirc.hk", "id", "in", "is",
+	"it:whois.nic.it", "jp:whois.jprs.jp", "kr:whois.kr", "la:whois.nic.la",
+	"li:whois.nic.li", "nl", "no", "nu:whois.iis.nu", "nz:whois.irs.net.nz",
+	"se:whois.iis.se", "to", "tv", "uk", "us:whois.nic.us",
+	"ws:whois.website.ws", "app", "beer", "blue", "cloud", "club", "cool", "day",
+	"dealer", "deals", "dev", "green", "hot", "info", "ink", "lol", "new", "ngo",
+	"one", "ooo", "page", "pro", "red", "rip", "run", "sex", "sexy", "site",
+	"space", "tech", "vodka", "wtf",
 ]
 
-swiss = [
-	"com", "net", "org", "ch:whois.nic.ch", "li:whois.nic.li", "swiss",
-	"zuerich", "io:whois.nic.io", "co:whois.registry.co", "dev", "app",
-	"me:whois.identitydigital.services", "info", "one", "pro",
-	"de:whois.denic.de", "at:whois.nic.at", "fr", "it:whois.nic.it",
-	"eu:whois.eu", "tech", "online", "site", "shop", "store", "biz", "xyz", "tv",
-	"cc", "to", "sh:whois.nic.sh", "fm", "am:whois.amnic.net", "gg:whois.gg",
+small = [
+	"com", "net", "org", "co:whois.registry.co", "cc", "ch:whois.nic.ch", "eu:whois.eu", "now", "win", "zone",
+	"sh:whois.nic.sh", "mx:whois.mx", "li:whois.nic.li", "tel", "talk", "biz", "info", "open", "one", "run", "space",
+
+]
+
+mini = [
+	"com", "net", "org", "co:whois.registry.co", "cc", "ch:whois.nic.ch", "now", "tel", "talk", "info", "open", "one",
+
+]
+
+comnetorg = [
+	"com", "net", "org",
+]
+
+biased = [
+	"com", "net", "org", "ch:whois.nic.ch", "eu:whois.eu",
+	"co:whois.registry.co", "cc", "sh:whois.nic.sh", "mx:whois.mx", "ms", "tel",
+	"aero", "ax:whois.ax", "eus", "exchange", "express", "one", "ooo", "open",
+	"plus", "pro", "red", "run", "ss:whois.nic.ss", "surf", "sx",
+	"talk", "win", "hot", "lol", "now", "rip", "wiki", "wtf",
+]
+
+tech = [
+	"com", "net", "co:whois.registry.co", "cc", "dev", "app", "io:whois.nic.io",
+	"me:whois.identitydigital.services", "tech", "code", "systems", "software",
+	"digital", "cloud", "ai", "bar", "biz", "boo", "bot", "build", "cam",
+	"click", "codes", "computer", "email", "engineering",
+	"fly", "foo", "gg:whois.gg", "gmbh", "host", "hosting", "inc", "info",
+	"link", "llc", "ltd", "network", "nexus", "ninja", "online", "page", "pro",
+	"security", "sh:whois.nic.sh", "site", "solutions", "space", "surf", "to",
+	"tools", "vision", "wiki", "zip", "zone",
+]
+
+store = [
+	"com", "net", "co:whois.registry.co", "cc", "io:whois.nic.io",
+	"me:whois.identitydigital.services", "shop", "store", "market", "business",
+	"sale", "deals", "deal", "apartments", "auto", "autos", "auction",
+	"bargains", "beauty", "beer", "bid", "bike", "boats", "boutique", "cafe",
+	"cars", "casa", "cheap", "clothing", "coffee", "compare", "condos",
+	"cooking", "coupons", "delivery", "diamonds", "discount", "exchange",
+	"express", "farm", "fashion", "flowers", "food", "forsale", "furniture",
+	"garden", "gifts", "gold", "hair", "homes", "house", "immo", "jewelry",
+	"kitchen", "luxury", "makeup", "motorcycles", "organic", "pizza", "promo",
+	"property", "realestate", "rent", "rentals", "restaurant", "reviews",
+	"shoes", "style", "supplies", "supply", "toys", "watches", "wine",
+]
+
+goofy = [
+	"lol", "wtf", "rip", "vodka", "beer", "pizza", "rocks", "ninja", "gay",
+	"lgbt", "ceo", "adult", "best", "bible", "bingo", "boo", "buzz", "casino",
+	"cat", "cheap", "church", "cool", "dad", "dance", "day", "deal", "deals",
+	"dog", "eus", "exposed", "express", "fail", "fish", "free", "fun", "gratis",
+	"gripe", "hiv", "hot", "ie:whois.weare.ie", "kz:whois.nic.kz", "legal",
+	"love", "ly", "me:whois.identitydigital.services", "meme", "moi", "mom",
+	"monster", "no", "now", "ooo", "party", "porn", "rich", "run", "sex", "sexy",
+	"singles", "ss:whois.nic.ss", "sucks", "surgery", "rehab", "space",
+	"ye", "you", "zone", "xxx",
+]
+
+sprichdeutsch = [
+	"de:whois.denic.de", "at:whois.nic.at", "ch:whois.nic.ch", "li:whois.nic.li",
+	"berlin", "hamburg", "koeln", "cologne", "bayern", "nrw", "ruhr", "saarland",
+	"wien", "tirol", "zuerich", "swiss", "gmbh", "kaufen", "jetzt", "schule",
+	"haus", "immobilien", "immo", "versicherung", "reise", "reisen",
 ]
 
 country = [
-	"ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
-	"am:whois.amnic.net", "as", "at:whois.nic.at", "au", "be:whois.dns.be",
-	"bh:whois.nic.bh", "bo:whois.nic.bo", "bz:whois.identitydigital.services",
-	"ca", "cc", "ch:whois.nic.ch", "cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn",
-	"co:whois.registry.co", "cr", "cu", "cv", "cx", "cz", "de:whois.denic.de",
-	"dk:whois.punktum.dk", "do:whois.nic.do", "ec", "es", "eu:whois.eu", "fi",
-	"fm", "fo", "fr", "gd", "gg:whois.gg", "gs", "gt", "gy", "hk:whois.hkirc.hk",
-	"hn", "hr:whois.dns.hr", "ht", "id", "ie:whois.weare.ie", "im:whois.nic.im",
-	"in", "io:whois.nic.io", "it:whois.nic.it", "je:whois.je", "ki:whois.nic.ki",
-	"la:whois.nic.la", "lc:whois.identitydigital.services", "li:whois.nic.li",
-	"lt:whois.domreg.lt", "lu:whois.dns.lu", "lv:whois.nic.lv",
-	"me:whois.identitydigital.services", "mg", "mk:whois.marnet.mk",
-	"mn:whois.nic.mn", "ms", "mu", "mx:whois.mx", "my:whois.mynic.my", "nf",
-	"ni", "nl", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "pe:kero.yachay.pe",
-	"ph", "pl", "pm", "pr:whois.identitydigital.services", "pt:whois.dns.pt",
-	"pw", "re", "ro:whois.rotld.ro", "sb:whois.nic.net.sb", "sc:whois.nic.sc",
-	"se:whois.iis.se", "sh:whois.nic.sh", "si", "sn:whois.nic.sn",
-	"so:whois.nic.so", "sx:whois.sx", "tf", "tl:whois.nic.tl", "tm:whois.nic.tm",
-	"tn:whois.ati.tn", "to", "tv", "tw", "uk", "us:whois.nic.us", "uy",
-	"vc:whois.identitydigital.services", "vg", "wf", "ws:whois.website.ws", "yt",
+	"ac:whois.nic.ac", "ad", "ae:whois.aeda.net.ae", "af:whois.nic.af",
+	"ag:whois.nic.ag", "ai", "al:www.akep.al", "am:whois.amnic.net",
+	"ao:www.dns.ao", "aq:2day.com", "ar", "as", "at:whois.nic.at", "au",
+	"aw:whois.nic.aw", "ax:whois.ax", "be:whois.dns.be", "bf:whois.registre.bf",
+	"bg:whois.register.bg", "bh:whois.nic.bh", "bi:whois1.nic.bi",
+	"bj:whois.nic.bj", "bm", "bn:whois.bnnic.bn", "bo:whois.nic.bo", "br",
+	"bw:whois.nic.net.bw", "by:whois.cctld.by",
+	"bz:whois.identitydigital.services", "ca", "cc", "cd:whois.nic.cd",
+	"cf:whois.dot.cf", "cg:www.nic.cg", "ch:whois.nic.ch", "ci:whois.nic.ci",
+	"cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn", "co:whois.registry.co", "cr",
+	"cv", "cx", "cz", "de:whois.denic.de", "dk:whois.punktum.dk",
+	"dm:whois.dmdomains.dm", "do:whois.nic.do", "dz:whois.nic.dz", "ec",
+	"ee:whois.tld.ee", "es:whois.nic.es", "eu:whois.eu", "fi", "fj", "fm", "fo",
+	"fr", "ga:whois.nic.ga", "gd", "ge:whois.nic.ge", "gf:whois.mediaserv.net",
+	"gg:whois.gg", "gh:whois.nic.gh", "gi:whois.identitydigital.services",
+	"gl:whois.nic.gl", "gn:whois.ande.gov.gn", "gp:whois.nic.gp",
+	"gq:whois.dominio.gq", "gs", "gy", "hk:whois.hkirc.hk",
+	"hm:whois.registry.hm", "hn", "hr:whois.dns.hr", "ht", "hu:whois.nic.hu",
+	"id", "ie:whois.weare.ie", "il:whois.isoc.org.il", "im:whois.nic.im", "in",
+	"io:whois.nic.io", "iq:whois.iq", "ir:whois.nic.ir", "is", "it:whois.nic.it",
+	"je:whois.je", "jp:whois.jprs.jp", "ke", "kg", "ki:whois.nic.ki",
+	"km:www.domaine.km", "kn:whois.nic.kn", "kr:whois.kr", "kw:whois.nic.kw",
+	"ky", "kz:whois.nic.kz", "la:whois.nic.la", "lb",
+	"lc:whois.identitydigital.services", "li:whois.nic.li", "lk:whois.nic.lk",
+	"ls:whois.nic.ls", "lt:whois.domreg.lt", "lu:whois.dns.lu",
+	"lv:whois.nic.lv", "ly", "ma:whois.registre.ma", "mc:whois.nic.mc",
+	"md:whois.nic.md", "me:whois.identitydigital.services", "mg",
+	"mk:whois.marnet.mk", "ml", "mm:whois.registry.gov.mm", "mn:whois.nic.mn",
+	"mo:whois.monic.mo", "mp:get.mp", "mq:whois.mediaserv.net",
+	"mr:whois.nic.mr", "ms", "mt:whois.nic.org.mt", "mu",
+	"mv:www.dhiraagu.com.mv", "mw:whois.nic.mw", "mx:whois.mx",
+	"my:whois.mynic.my", "mz:whois.nic.mz", "na", "nc:whois.nc", "nf", "ng",
+	"nl", "no", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "om:whois.registry.om",
+	"pe:kero.yachay.pe", "pf:whois.registry.pf", "pg", "pk:whois.pknic.net.pk",
+	"pl", "pm", "pn", "pr:whois.identitydigital.services",
+	"ps:whois.registry.ps", "pt:whois.dns.pt", "pw", "qa:whois.registry.qa",
+	"re", "ro:whois.rotld.ro", "rs:whois.rnids.rs", "ru:whois.tcinet.ru", "rw",
+	"sa:whois.nic.net.sa", "sb:whois.nic.net.sb", "sc:whois.nic.sc", "sd",
+	"se:whois.iis.se", "sg", "sh:whois.nic.sh", "si", "sk:whois.sk-nic.sk",
+	"sl:whois.nic.sl", "sm:whois.nic.sm", "sn:whois.nic.sn", "so:whois.nic.so",
+	"sr", "ss:whois.nic.ss", "st:whois.nic.st", "su:whois.tcinet.ru",
+	"sx", "sy:whois.tld.sy", "tc:whois.nic.tc", "td:whois.nic.td", "tf",
+	"tg:whois.nic.tg", "th", "tk:whois.dot.tk", "tl:whois.nic.tl",
+	"tm:whois.nic.tm", "tn:whois.ati.tn", "to", "tr:whois.trabis.gov.tr", "tv",
+	"tw", "tz", "ua", "ug:whois.co.ug", "uk", "us:whois.nic.us",
+	"uy:whois.nic.org.uy", "uz", "vc:whois.identitydigital.services",
+	"ve:whois.nic.ve", "vg", "vi", "vu:whois.dnrs.vu", "wf",
+	"ws:whois.website.ws", "ye", "yt", "za:whois.nic.za", "zm",
 ]
 
 two = [
-	"ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
-	"am:whois.amnic.net", "as", "at:whois.nic.at", "au", "be:whois.dns.be",
-	"bh:whois.nic.bh", "bo:whois.nic.bo", "bz:whois.identitydigital.services",
-	"ca", "cc", "ch:whois.nic.ch", "cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn",
-	"co:whois.registry.co", "cr", "cu", "cv", "cx", "cz", "de:whois.denic.de",
-	"dk:whois.punktum.dk", "do:whois.nic.do", "ec", "es", "eu:whois.eu", "fi",
-	"fm", "fo", "fr", "gd", "gg:whois.gg", "gs", "gt", "gy", "hk:whois.hkirc.hk",
-	"hn", "hr:whois.dns.hr", "ht", "id", "ie:whois.weare.ie", "im:whois.nic.im",
-	"in", "io:whois.nic.io", "it:whois.nic.it", "je:whois.je", "ki:whois.nic.ki",
-	"la:whois.nic.la", "lc:whois.identitydigital.services", "li:whois.nic.li",
-	"lt:whois.domreg.lt", "lu:whois.dns.lu", "lv:whois.nic.lv",
-	"me:whois.identitydigital.services", "mg", "mk:whois.marnet.mk",
-	"mn:whois.nic.mn", "ms", "mu", "mx:whois.mx", "my:whois.mynic.my", "nf",
-	"ni", "nl", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "pe:kero.yachay.pe",
-	"ph", "pl", "pm", "pr:whois.identitydigital.services", "pt:whois.dns.pt",
-	"pw", "re", "ro:whois.rotld.ro", "sb:whois.nic.net.sb", "sc:whois.nic.sc",
-	"se:whois.iis.se", "sh:whois.nic.sh", "si", "sn:whois.nic.sn",
-	"so:whois.nic.so", "sx:whois.sx", "tf", "tl:whois.nic.tl", "tm:whois.nic.tm",
-	"tn:whois.ati.tn", "to", "tv", "tw", "uk", "us:whois.nic.us", "uy",
-	"vc:whois.identitydigital.services", "vg", "wf", "ws:whois.website.ws", "yt",
+	"ac:whois.nic.ac", "ad", "ae:whois.aeda.net.ae", "af:whois.nic.af",
+	"ag:whois.nic.ag", "ai", "al:www.akep.al", "am:whois.amnic.net",
+	"ao:www.dns.ao", "aq:2day.com", "ar", "as", "at:whois.nic.at", "au",
+	"aw:whois.nic.aw", "ax:whois.ax", "be:whois.dns.be", "bf:whois.registre.bf",
+	"bg:whois.register.bg", "bh:whois.nic.bh", "bi:whois1.nic.bi",
+	"bj:whois.nic.bj", "bm", "bn:whois.bnnic.bn", "bo:whois.nic.bo", "br",
+	"bw:whois.nic.net.bw", "by:whois.cctld.by",
+	"bz:whois.identitydigital.services", "ca", "cc", "cd:whois.nic.cd",
+	"cf:whois.dot.cf", "cg:www.nic.cg", "ch:whois.nic.ch", "ci:whois.nic.ci",
+	"cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn", "co:whois.registry.co", "cr",
+	"cv", "cx", "cz", "de:whois.denic.de", "dk:whois.punktum.dk",
+	"dm:whois.dmdomains.dm", "do:whois.nic.do", "dz:whois.nic.dz", "ec",
+	"ee:whois.tld.ee", "es:whois.nic.es", "eu:whois.eu", "fi", "fj", "fm", "fo",
+	"fr", "ga:whois.nic.ga", "gd", "ge:whois.nic.ge", "gf:whois.mediaserv.net",
+	"gg:whois.gg", "gh:whois.nic.gh", "gi:whois.identitydigital.services",
+	"gl:whois.nic.gl", "gn:whois.ande.gov.gn", "gp:whois.nic.gp",
+	"gq:whois.dominio.gq", "gs", "gy", "hk:whois.hkirc.hk",
+	"hm:whois.registry.hm", "hn", "hr:whois.dns.hr", "ht", "hu:whois.nic.hu",
+	"id", "ie:whois.weare.ie", "il:whois.isoc.org.il", "im:whois.nic.im", "in",
+	"io:whois.nic.io", "iq:whois.iq", "ir:whois.nic.ir", "is", "it:whois.nic.it",
+	"je:whois.je", "jp:whois.jprs.jp", "ke", "kg", "ki:whois.nic.ki",
+	"km:www.domaine.km", "kn:whois.nic.kn", "kr:whois.kr", "kw:whois.nic.kw",
+	"ky", "kz:whois.nic.kz", "la:whois.nic.la", "lb",
+	"lc:whois.identitydigital.services", "li:whois.nic.li", "lk:whois.nic.lk",
+	"ls:whois.nic.ls", "lt:whois.domreg.lt", "lu:whois.dns.lu",
+	"lv:whois.nic.lv", "ly", "ma:whois.registre.ma", "mc:whois.nic.mc",
+	"md:whois.nic.md", "me:whois.identitydigital.services", "mg",
+	"mk:whois.marnet.mk", "ml", "mm:whois.registry.gov.mm", "mn:whois.nic.mn",
+	"mo:whois.monic.mo", "mp:get.mp", "mq:whois.mediaserv.net",
+	"mr:whois.nic.mr", "ms", "mt:whois.nic.org.mt", "mu",
+	"mv:www.dhiraagu.com.mv", "mw:whois.nic.mw", "mx:whois.mx",
+	"my:whois.mynic.my", "mz:whois.nic.mz", "na", "nc:whois.nc", "nf", "ng",
+	"nl", "no", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "om:whois.registry.om",
+	"pe:kero.yachay.pe", "pf:whois.registry.pf", "pg", "pk:whois.pknic.net.pk",
+	"pl", "pm", "pn", "pr:whois.identitydigital.services",
+	"ps:whois.registry.ps", "pt:whois.dns.pt", "pw", "qa:whois.registry.qa",
+	"re", "ro:whois.rotld.ro", "rs:whois.rnids.rs", "ru:whois.tcinet.ru", "rw",
+	"sa:whois.nic.net.sa", "sb:whois.nic.net.sb", "sc:whois.nic.sc", "sd",
+	"se:whois.iis.se", "sg", "sh:whois.nic.sh", "si", "sk:whois.sk-nic.sk",
+	"sl:whois.nic.sl", "sm:whois.nic.sm", "sn:whois.nic.sn", "so:whois.nic.so",
+	"sr", "ss:whois.nic.ss", "st:whois.nic.st", "su:whois.tcinet.ru",
+	"sx", "sy:whois.tld.sy", "tc:whois.nic.tc", "td:whois.nic.td", "tf",
+	"tg:whois.nic.tg", "th", "tk:whois.dot.tk", "tl:whois.nic.tl",
+	"tm:whois.nic.tm", "tn:whois.ati.tn", "to", "tr:whois.trabis.gov.tr", "tv",
+	"tw", "tz", "ua", "ug:whois.co.ug", "uk", "us:whois.nic.us",
+	"uy:whois.nic.org.uy", "uz", "vc:whois.identitydigital.services",
+	"ve:whois.nic.ve", "vg", "vi", "vu:whois.dnrs.vu", "wf",
+	"ws:whois.website.ws", "ye", "yt", "za:whois.nic.za", "zm",
 ]
 
 three = [
-	"ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
-	"am:whois.amnic.net", "app", "art", "as", "at:whois.nic.at", "au", "bar",
-	"be:whois.dns.be", "bet", "bh:whois.nic.bh", "bid", "bio", "biz",
-	"bo:whois.nic.bo", "boo", "bot", "bz:whois.identitydigital.services", "bzh",
-	"ca", "cab", "cam", "car", "cat", "cc", "ceo", "cfd", "ch:whois.nic.ch",
-	"cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn", "co:whois.registry.co", "com",
-	"cr", "cu", "cv", "cx", "cz", "dad", "day", "de:whois.denic.de", "dev",
-	"diy", "dk:whois.punktum.dk", "do:whois.nic.do", "dog", "ec", "eco", "es",
-	"esq", "eu:whois.eu", "eus", "fan", "fi", "fit", "fly", "fm", "fo", "foo",
-	"fr", "fun", "fyi", "gal", "gay", "gd", "gdn", "gg:whois.gg", "gs", "gt",
-	"gy", "hiv", "hk:whois.hkirc.hk", "hn", "hot", "how", "hr:whois.dns.hr",
-	"ht", "icu", "id", "ie:whois.weare.ie", "im:whois.nic.im", "in", "inc",
-	"ing", "ink", "io:whois.nic.io", "ist", "it:whois.nic.it", "je:whois.je",
-	"ki:whois.nic.ki", "kim", "la:whois.nic.la", "lat", "law",
-	"lc:whois.identitydigital.services", "li:whois.nic.li", "llc", "lol",
-	"lt:whois.domreg.lt", "ltd", "lu:whois.dns.lu", "lv:whois.nic.lv", "mba",
-	"me:whois.identitydigital.services", "med", "men", "mg",
-	"mk:whois.marnet.mk", "mn:whois.nic.mn", "moe", "moi", "mom", "mov", "ms",
-	"mu", "mx:whois.mx", "my:whois.mynic.my", "net", "new", "nf", "ngo", "ni",
-	"nl", "now", "nrw", "nu:whois.iis.nu", "nyc", "nz:whois.irs.net.nz", "one",
-	"ong", "onl", "ooo", "org", "ovh", "pe:kero.yachay.pe", "pet", "ph", "phd",
-	"pl", "pm", "pr:whois.identitydigital.services", "pro", "pt:whois.dns.pt",
-	"pub", "pw", "re", "red", "rip", "ro:whois.rotld.ro", "run",
-	"sb:whois.nic.net.sb", "sbs", "sc:whois.nic.sc", "se:whois.iis.se", "sex",
-	"sh:whois.nic.sh", "si", "ski", "sn:whois.nic.sn", "so:whois.nic.so", "soy",
-	"spa", "srl", "sx:whois.sx", "tax", "tel", "tf", "tl:whois.nic.tl",
-	"tm:whois.nic.tm", "tn:whois.ati.tn", "to", "top", "tv", "tw", "uk", "uno",
-	"us:whois.nic.us", "uy", "vc:whois.identitydigital.services", "vet", "vg",
-	"vin", "vip", "wf", "win", "ws:whois.website.ws", "wtf", "xin", "xxx", "xyz",
-	"you", "yt", "zip",
+	"app", "art", "bar", "bet", "bid", "bio", "biz", "boo", "bot", "bzh", "cab",
+	"cal", "cam", "car", "cat", "ceo", "cfd", "com", "cpa", "dad", "day", "dev",
+	"diy", "dog", "eco", "edu:whois.educause.edu", "esq", "eus", "fan", "fit",
+	"fly", "foo", "frl", "fun", "fyi", "gal", "gay", "gdn", "gop", "hiv", "hot",
+	"how", "icu", "inc", "ing", "ink", "int", "ist", "kim", "krd", "lat", "law",
+	"llc", "lol", "ltd", "mba", "med", "men", "moe", "moi", "mom", "mov", "net",
+	"new", "ngo", "now", "nrw", "nyc", "one", "ong", "onl", "ooo", "org", "ovh",
+	"pet", "phd", "pro", "pub", "red", "ren", "rio", "rip", "run", "sbs", "sex",
+	"ski", "soy", "spa", "srl", "tax", "tel", "top", "uno", "vet", "vin", "vip",
+	"win", "wtf", "xin", "xxx", "xyz", "you", "zip",
 ]
 
 four = [
-	"army", "asia", "auto", "baby", "band", "beer", "best", "bike", "blog",
-	"blue", "bond", "buzz", "cafe", "camp", "care", "cars", "casa", "cash",
-	"chat", "city", "club", "cool", "cyou", "date", "deal", "desi", "diet",
-	"fail", "fans", "farm", "fast", "film", "fish", "food", "free", "fund",
-	"game", "gift", "gmbh", "gold", "golf", "guru", "hair", "haus", "help",
-	"host", "immo", "info", "jobs", "kids", "kiwi", "land", "lgbt", "life",
-	"limo", "link", "live", "loan", "love", "ltda", "luxe", "meme", "menu",
-	"mobi", "moda", "name", "navy", "news", "page", "pics", "pink", "plus",
-	"porn", "prof", "qpon", "rent", "rest", "rich", "rsvp", "ruhr", "sale",
-	"sarl", "scot", "sexy", "shop", "show", "site", "skin", "spot", "surf",
-	"talk", "taxi", "team", "tech", "tips", "town", "toys", "tube", "vana",
-	"vote", "voto", "wang", "wiki", "wine", "work", "yoga", "zone",
+	"ac:whois.nic.ac", "ad", "ae:whois.aeda.net.ae", "aero", "af:whois.nic.af",
+	"ag:whois.nic.ag", "ai", "al:www.akep.al", "am:whois.amnic.net",
+	"ao:www.dns.ao", "app", "aq:2day.com", "ar", "army", "art", "as", "asia",
+	"at:whois.nic.at", "au", "auto", "aw:whois.nic.aw", "ax:whois.ax", "baby",
+	"band", "bank", "bar", "be:whois.dns.be", "beer", "best", "bet",
+	"bf:whois.registre.bf", "bg:whois.register.bg", "bh:whois.nic.bh",
+	"bi:whois1.nic.bi", "bid", "bike", "bio", "biz", "bj:whois.nic.bj", "blog",
+	"blue", "bm", "bn:whois.bnnic.bn", "bo:whois.nic.bo", "bond", "boo", "book",
+	"bot", "br", "buzz", "bw:whois.nic.net.bw", "by:whois.cctld.by",
+	"bz:whois.identitydigital.services", "bzh", "ca", "cab", "cafe", "cal",
+	"cam", "camp", "car", "care", "cars", "casa", "case", "cash", "cat", "cc",
+	"cd:whois.nic.cd", "ceo", "cf:whois.dot.cf", "cfd", "cg:www.nic.cg",
+	"ch:whois.nic.ch", "chat", "ci:whois.nic.ci", "city", "cl:whois.nic.cl",
+	"club", "cm", "cn:whois.cnnic.cn", "co:whois.registry.co", "com", "cool",
+	"coop", "cpa", "cr", "cv", "cx", "cyou", "cz", "dad", "date", "day",
+	"de:whois.denic.de", "deal", "desi", "dev", "diet", "diy",
+	"dk:whois.punktum.dk", "dm:whois.dmdomains.dm", "do:whois.nic.do", "dog",
+	"dz:whois.nic.dz", "ec", "eco", "edu:whois.educause.edu", "ee:whois.tld.ee",
+	"es:whois.nic.es", "esq", "eu:whois.eu", "eus", "fail", "fan", "fans",
+	"farm", "fast", "fi", "film", "fish", "fit", "fj", "fly", "fm", "fo", "foo",
+	"food", "fr", "free", "frl", "fun", "fund", "fyi", "ga:whois.nic.ga", "gal",
+	"game", "gay", "gd", "gdn", "ge:whois.nic.ge", "gent",
+	"gf:whois.mediaserv.net", "gg:whois.gg", "gh:whois.nic.gh",
+	"gi:whois.identitydigital.services", "gift", "gl:whois.nic.gl", "gmbh",
+	"gn:whois.ande.gov.gn", "gold", "golf", "gop", "gp:whois.nic.gp",
+	"gq:whois.dominio.gq", "gs", "guru", "gy", "hair", "haus", "help", "hiv",
+	"hk:whois.hkirc.hk", "hm:whois.registry.hm", "hn", "host", "hot", "how",
+	"hr:whois.dns.hr", "ht", "hu:whois.nic.hu", "icu", "id", "ie:whois.weare.ie",
+	"il:whois.isoc.org.il", "im:whois.nic.im", "immo", "in", "inc", "info",
+	"ing", "ink", "int", "io:whois.nic.io", "iq:whois.iq", "ir:whois.nic.ir",
+	"is", "ist", "it:whois.nic.it", "je:whois.je", "jobs", "jp:whois.jprs.jp",
+	"ke", "kg", "ki:whois.nic.ki", "kids", "kim", "kiwi", "km:www.domaine.km",
+	"kn:whois.nic.kn", "kr:whois.kr", "krd", "kw:whois.nic.kw", "ky",
+	"kz:whois.nic.kz", "la:whois.nic.la", "land", "lat", "law", "lb",
+	"lc:whois.identitydigital.services", "lgbt", "li:whois.nic.li", "life",
+	"limo", "link", "live", "lk:whois.nic.lk", "llc", "loan", "lol", "love",
+	"ls:whois.nic.ls", "lt:whois.domreg.lt", "ltd", "ltda", "lu:whois.dns.lu",
+	"luxe", "lv:whois.nic.lv", "ly", "ma:whois.registre.ma", "mba",
+	"mc:whois.nic.mc", "md:whois.nic.md", "me:whois.identitydigital.services",
+	"med", "meme", "men", "menu", "mg", "mk:whois.marnet.mk", "ml",
+	"mm:whois.registry.gov.mm", "mn:whois.nic.mn", "mo:whois.monic.mo", "mobi",
+	"moda", "moe", "moi", "mom", "mov", "mp:get.mp", "mq:whois.mediaserv.net",
+	"mr:whois.nic.mr", "ms", "mt:whois.nic.org.mt", "mu",
+	"mv:www.dhiraagu.com.mv", "mw:whois.nic.mw", "mx:whois.mx",
+	"my:whois.mynic.my", "mz:whois.nic.mz", "na", "name", "navy", "nc:whois.nc",
+	"net", "new", "news", "nf", "ng", "ngo", "nl", "no", "now", "nrw",
+	"nu:whois.iis.nu", "nyc", "nz:whois.irs.net.nz", "om:whois.registry.om",
+	"one", "ong", "onl", "ooo", "open", "org", "ovh", "page",
+	"pe:kero.yachay.pe", "pet", "pf:whois.registry.pf", "pg", "phd", "pics",
+	"pink", "pk:whois.pknic.net.pk", "pl", "plus", "pm", "pn", "porn", "post",
+	"pr:whois.identitydigital.services", "pro", "prof", "ps:whois.registry.ps",
+	"pt:whois.dns.pt", "pub", "pw", "qa:whois.registry.qa", "qpon", "re", "red",
+	"reit", "ren", "rent", "rest", "rich", "rio", "rip", "ro:whois.rotld.ro",
+	"rs:whois.rnids.rs", "rsvp", "ru:whois.tcinet.ru", "ruhr", "run", "rw",
+	"sa:whois.nic.net.sa", "sale", "sarl", "sb:whois.nic.net.sb", "sbs",
+	"sc:whois.nic.sc", "scot", "sd", "se:whois.iis.se", "sex", "sexy", "sg",
+	"sh:whois.nic.sh", "shop", "show", "si", "site", "sk:whois.sk-nic.sk", "ski",
+	"skin", "sl:whois.nic.sl", "sm:whois.nic.sm", "sn:whois.nic.sn",
+	"so:whois.nic.so", "soy", "spa", "spot", "sr", "srl", "ss:whois.nic.ss",
+	"st:whois.nic.st", "su:whois.tcinet.ru", "surf", "sx",
+	"sy:whois.tld.sy", "talk", "tax", "taxi", "tc:whois.nic.tc",
+	"td:whois.nic.td", "team", "tech", "tel", "tf", "tg:whois.nic.tg", "th",
+	"tips", "tk:whois.dot.tk", "tl:whois.nic.tl", "tm:whois.nic.tm",
+	"tn:whois.ati.tn", "to", "top", "town", "toys", "tr:whois.trabis.gov.tr",
+	"tube", "tv", "tw", "tz", "ua", "ug:whois.co.ug", "uk", "uno",
+	"us:whois.nic.us", "uy:whois.nic.org.uy", "uz", "vana",
+	"vc:whois.identitydigital.services", "ve:whois.nic.ve", "vet", "vg", "vi",
+	"vin", "vip", "vote", "voto", "vu:whois.dnrs.vu", "wang", "wf", "wien",
+	"wiki", "win", "wine", "work", "ws:whois.website.ws", "wtf", "xin", "xxx",
+	"xyz", "ye", "yoga", "you", "yt", "za:whois.nic.za", "zip", "zm", "zone",
 ]
 
 long = [
-	"abogado", "academy", "accountant", "accountants", "actor", "adult",
-	"agency", "airforce", "alsace", "amsterdam", "apartments", "archi",
-	"associates", "attorney", "auction", "audio", "autos", "barcelona",
-	"bargains", "basketball", "bayern", "beauty", "berlin", "bible", "bingo",
-	"black", "blackfriday", "boats", "boston", "boutique", "broker", "brussels",
-	"build", "builders", "business", "camera", "capital", "cards", "career",
-	"careers", "casino", "catering", "center", "channel", "charity", "cheap",
-	"christmas", "church", "claims", "cleaning", "click", "clinic", "clothing",
-	"cloud", "coach", "codes", "coffee", "college", "cologne", "community",
-	"company", "compare", "computer", "condos", "construction", "consulting",
-	"contact", "contractors", "cooking", "corsica", "country", "coupons",
-	"courses", "credit", "creditcard", "cricket", "cruises", "cymru", "dance",
-	"dating", "dealer", "deals", "degree", "delivery", "democrat", "dental",
-	"dentist", "design", "diamonds", "digital", "direct", "directory",
-	"discount", "doctor", "domains", "download", "earth", "education", "email",
-	"energy", "engineer", "engineering", "enterprises", "equipment", "estate",
-	"events", "exchange", "expert", "exposed", "express", "faith", "family",
-	"fashion", "feedback", "finance", "financial", "fishing", "fitness",
-	"flights", "florist", "flowers", "football", "forex", "forsale", "forum",
-	"foundation", "furniture", "futbol", "gallery", "games", "garden", "gifts",
-	"gives", "giving", "glass", "global", "graphics", "gratis", "green", "gripe",
-	"group", "guide", "guitars", "hamburg", "health", "healthcare", "hiphop",
-	"hockey", "holdings", "holiday", "homes", "horse", "hospital", "hosting",
-	"house", "immobilien", "industries", "institute", "insure", "international",
-	"investments", "irish", "istanbul", "jetzt", "jewelry", "juegos", "kaufen",
-	"kitchen", "koeln", "kyoto", "lawyer", "lease", "legal", "lifestyle",
-	"lighting", "limited", "living", "loans", "locker", "london", "lotto",
-	"luxury", "madrid", "maison", "makeup", "management", "market", "marketing",
-	"markets", "media", "melbourne", "memorial", "miami", "mobile", "money",
-	"monster", "mortgage", "motorcycles", "movie", "museum", "music", "nagoya",
-	"network", "nexus", "ninja", "observer", "okinawa", "online", "organic",
-	"osaka", "paris", "partners", "parts", "party", "photo", "photography",
-	"photos", "pictures", "pizza", "place", "plumbing", "poker", "press",
-	"productions", "promo", "properties", "property", "protection", "quebec",
-	"quest", "racing", "radio", "realty", "recipes", "rehab", "reise", "reisen",
-	"rentals", "repair", "report", "republican", "restaurant", "review",
-	"reviews", "rocks", "rodeo", "rugby", "ryukyu", "saarland", "salon",
-	"school", "schule", "science", "security", "select", "services", "shiksha",
-	"shoes", "shopping", "singles", "soccer", "social", "software", "solar",
-	"solutions", "space", "storage", "store", "stream", "studio", "study",
-	"style", "sucks", "supplies", "supply", "support", "surgery", "swiss",
-	"sydney", "systems", "taipei", "tattoo", "technology", "tennis", "theater",
-	"theatre", "tickets", "tienda", "tires", "tirol", "today", "tokyo", "tools",
-	"tours", "trade", "trading", "training", "travel", "university", "vacations",
-	"vegas", "ventures", "viajes", "video", "villas", "vision", "vlaanderen",
-	"vodka", "voyage", "wales", "watch", "watches", "webcam", "website",
-	"wedding", "works", "world", "xn--45q11c", "xn--5tzm5g", "xn--6frz82g",
-	"xn--c1avg", "xn--czrs0t", "xn--e1a4c:whois.eu", "xn--fjq720a",
-	"xn--hxt814e", "xn--i1b6b1a6a2e", "xn--ngbc5azd", "xn--nqv7f",
+	"abogado", "abudhabi", "academy", "accountant", "accountants", "actor",
+	"adult", "africa", "agency", "airforce", "alsace", "amsterdam", "apartments",
+	"archi", "associates", "attorney", "auction", "audio", "autos", "barcelona",
+	"bargains", "basketball", "bayern", "beauty", "berlin",
+	"bharat:whois.nixiregistry.in", "bible", "bingo", "black", "blackfriday",
+	"boats", "boston", "boutique", "broker", "brussels", "build", "builders",
+	"business", "camera", "capetown", "capital", "cards", "career", "careers",
+	"casino", "catering", "center", "channel", "charity", "cheap", "christmas",
+	"church", "claims", "cleaning", "click", "clinic", "clothing", "cloud",
+	"coach", "codes", "coffee", "college", "cologne", "community", "company",
+	"compare", "computer", "condos", "construction", "consulting", "contact",
+	"contractors", "cooking", "corsica", "country", "coupons", "courses",
+	"credit", "creditcard", "creditunion", "cricket", "cruises", "cymru",
+	"dance", "dating", "dealer", "deals", "degree", "delivery", "democrat",
+	"dental", "dentist", "design", "diamonds", "digital", "direct", "directory",
+	"discount", "doctor", "domains", "download", "durban", "earth", "education",
+	"email", "energy", "engineer", "engineering", "enterprises", "equipment",
+	"estate", "events", "exchange", "expert", "exposed", "express", "faith",
+	"family", "fashion", "feedback", "finance", "financial", "fishing",
+	"fitness", "flights", "florist", "flowers", "football", "forex", "forsale",
+	"forum", "foundation", "furniture", "futbol", "gallery", "games", "garden",
+	"gifts", "gives", "giving", "glass", "global", "graphics", "gratis", "green",
+	"gripe", "group", "guide", "guitars", "hamburg", "health", "healthcare",
+	"hiphop", "hockey", "holdings", "holiday", "homes", "horse", "hospital",
+	"hosting", "house", "immobilien", "industries", "institute", "insurance",
+	"insure", "international", "investments", "irish", "istanbul", "jetzt",
+	"jewelry", "joburg", "juegos", "kaufen", "kitchen", "koeln", "kyoto",
+	"lawyer", "lease", "legal", "lifestyle", "lighting", "limited", "living",
+	"loans", "locker", "london", "lotto", "luxury", "madrid", "maison", "makeup",
+	"management", "market", "marketing", "markets", "media", "melbourne",
+	"memorial", "miami", "mobile", "money", "monster", "mortgage", "moscow",
+	"motorcycles", "movie", "museum", "music", "nagoya", "network", "nexus",
+	"ninja", "nowruz", "observer", "okinawa", "online", "organic", "osaka",
+	"paris", "partners", "parts", "party", "pharmacy", "photo", "photography",
+	"photos", "physio", "pictures", "pizza", "place", "plumbing", "poker",
+	"press", "productions", "promo", "properties", "property", "protection",
+	"quebec", "quest", "racing", "radio", "realestate", "realty", "recipes",
+	"rehab", "reise", "reisen", "rentals", "repair", "report", "republican",
+	"restaurant", "review", "reviews", "rocks", "rodeo", "rugby", "ryukyu",
+	"saarland", "salon", "school", "schule", "science", "security", "select",
+	"services", "shabaka:whois.nic.xn–ngbc5azd", "shiksha", "shoes", "shopping",
+	"singles", "soccer", "social", "software", "solar", "solutions", "space",
+	"sport", "storage", "store", "stream", "studio", "study", "style", "sucks",
+	"supplies", "supply", "support", "surgery", "swiss", "sydney", "systems",
+	"taipei", "tatar", "tattoo", "technology", "tennis", "theater", "theatre",
+	"tickets", "tienda", "tires", "tirol", "today", "tokyo", "tools", "tours",
+	"trade", "trading", "training", "travel", "trust", "university", "vacations",
+	"vegas", "ventures", "versicherung", "viajes", "video", "villas", "vision",
+	"vlaanderen", "vodka", "voting", "voyage", "wales", "watch", "watches",
+	"webcam", "website", "wedding", "whoswho", "works", "world", "xn--45q11c",
+	"xn--5tzm5g", "xn--6frz82g", "xn--c1avg", "xn--czrs0t", "xn--e1a4c:whois.eu",
+	"xn--fjq720a", "xn--hxt814e", "xn--i1b6b1a6a2e", "xn--ngbc5azd", "xn--nqv7f",
 	"xn--pgbs0dh:whois.ati.tn", "xn--q9jyb4c", "xn--unup4y", "xn--vhquv",
-	"yachts", "yokohama",
+	"yachts", "yokohama", "zuerich",
 ]
 
 all = [
-	"abogado", "ac:whois.nic.ac", "academy", "accountant", "accountants",
-	"actor", "adult", "af:whois.nic.af", "ag:whois.nic.ag", "agency", "ai",
-	"airforce", "alsace", "am:whois.amnic.net", "amsterdam", "apartments", "app",
-	"archi", "army", "art", "as", "asia", "associates", "at:whois.nic.at",
-	"attorney", "au", "auction", "audio", "auto", "autos", "baby", "band", "bar",
-	"barcelona", "bargains", "basketball", "bayern", "be:whois.dns.be", "beauty",
-	"beer", "berlin", "best", "bet", "bh:whois.nic.bh", "bible", "bid", "bike",
-	"bingo", "bio", "biz", "black", "blackfriday", "blog", "blue",
-	"bo:whois.nic.bo", "boats", "bond", "boo", "boston", "bot", "boutique",
-	"broker", "brussels", "build", "builders", "business", "buzz",
-	"bz:whois.identitydigital.services", "bzh", "ca", "cab", "cafe", "cam",
-	"camera", "camp", "capital", "car", "cards", "care", "career", "careers",
-	"cars", "casa", "cash", "casino", "cat", "catering", "cc", "center", "ceo",
-	"cfd", "ch:whois.nic.ch", "channel", "charity", "chat", "cheap", "christmas",
-	"church", "city", "cl:whois.nic.cl", "claims", "cleaning", "click", "clinic",
-	"clothing", "cloud", "club", "cm", "cn:whois.cnnic.cn",
-	"co:whois.registry.co", "coach", "codes", "coffee", "college", "cologne",
-	"com", "community", "company", "compare", "computer", "condos",
-	"construction", "consulting", "contact", "contractors", "cooking", "cool",
-	"corsica", "country", "coupons", "courses", "cr", "credit", "creditcard",
-	"cricket", "cruises", "cu", "cv", "cx", "cymru", "cyou", "cz", "dad",
-	"dance", "date", "dating", "day", "de:whois.denic.de", "deal", "dealer",
-	"deals", "degree", "delivery", "democrat", "dental", "dentist", "desi",
-	"design", "dev", "diamonds", "diet", "digital", "direct", "directory",
-	"discount", "diy", "dk:whois.punktum.dk", "do:whois.nic.do", "doctor", "dog",
-	"domains", "download", "earth", "ec", "eco", "education", "email", "energy",
-	"engineer", "engineering", "enterprises", "equipment", "es", "esq", "estate",
-	"eu:whois.eu", "eus", "events", "exchange", "expert", "exposed", "express",
-	"fail", "faith", "family", "fan", "fans", "farm", "fashion", "fast",
-	"feedback", "fi", "film", "finance", "financial", "fish", "fishing", "fit",
-	"fitness", "flights", "florist", "flowers", "fly", "fm", "fo", "foo", "food",
-	"football", "forex", "forsale", "forum", "foundation", "fr", "free", "fun",
-	"fund", "furniture", "futbol", "fyi", "gal", "gallery", "game", "games",
-	"garden", "gay", "gd", "gdn", "gg:whois.gg", "gift", "gifts", "gives",
-	"giving", "glass", "global", "gmbh", "gold", "golf", "graphics", "gratis",
-	"green", "gripe", "group", "gs", "gt", "guide", "guitars", "guru", "gy",
-	"hair", "hamburg", "haus", "health", "healthcare", "help", "hiphop", "hiv",
-	"hk:whois.hkirc.hk", "hn", "hockey", "holdings", "holiday", "homes", "horse",
-	"hospital", "host", "hosting", "hot", "house", "how", "hr:whois.dns.hr",
-	"ht", "icu", "id", "ie:whois.weare.ie", "im:whois.nic.im", "immo",
-	"immobilien", "in", "inc", "industries", "info", "ing", "ink", "institute",
-	"insure", "international", "investments", "io:whois.nic.io", "irish", "ist",
-	"istanbul", "it:whois.nic.it", "je:whois.je", "jetzt", "jewelry", "jobs",
-	"juegos", "kaufen", "ki:whois.nic.ki", "kids", "kim", "kitchen", "kiwi",
-	"koeln", "kyoto", "la:whois.nic.la", "land", "lat", "law", "lawyer",
+	"abogado", "abudhabi", "ac:whois.nic.ac", "academy", "accountant",
+	"accountants", "actor", "ad", "adult", "ae:whois.aeda.net.ae", "aero",
+	"af:whois.nic.af", "africa", "ag:whois.nic.ag", "agency", "ai", "airforce",
+	"al:www.akep.al", "alsace", "am:whois.amnic.net", "amsterdam",
+	"ao:www.dns.ao", "apartments", "app", "aq:2day.com", "ar", "archi", "army",
+	"art", "as", "asia", "associates", "at:whois.nic.at", "attorney", "au",
+	"auction", "audio", "auto", "autos", "aw:whois.nic.aw", "ax:whois.ax",
+	"baby", "band", "bank", "bar", "barcelona", "bargains", "basketball",
+	"bayern", "be:whois.dns.be", "beauty", "beer", "berlin", "best", "bet",
+	"bf:whois.registre.bf", "bg:whois.register.bg", "bh:whois.nic.bh",
+	"bharat:whois.nixiregistry.in", "bi:whois1.nic.bi", "bible", "bid", "bike",
+	"bingo", "bio", "biz", "bj:whois.nic.bj", "black", "blackfriday", "blog",
+	"blue", "bm", "bn:whois.bnnic.bn", "bo:whois.nic.bo", "boats", "bond", "boo",
+	"book", "boston", "bot", "boutique", "br", "broker", "brussels", "build",
+	"builders", "business", "buzz", "bw:whois.nic.net.bw", "by:whois.cctld.by",
+	"bz:whois.identitydigital.services", "bzh", "ca", "cab", "cafe", "cal",
+	"cam", "camera", "camp", "capetown", "capital", "car", "cards", "care",
+	"career", "careers", "cars", "casa", "case", "cash", "casino", "cat",
+	"catering", "cc", "cd:whois.nic.cd", "center", "ceo", "cf:whois.dot.cf",
+	"cfd", "cg:www.nic.cg", "ch:whois.nic.ch", "channel", "charity", "chat",
+	"cheap", "christmas", "church", "ci:whois.nic.ci", "city", "cl:whois.nic.cl",
+	"claims", "cleaning", "click", "clinic", "clothing", "cloud", "club", "cm",
+	"cn:whois.cnnic.cn", "co:whois.registry.co", "coach", "codes", "coffee",
+	"college", "cologne", "com", "community", "company", "compare", "computer",
+	"condos", "construction", "consulting", "contact", "contractors", "cooking",
+	"cool", "coop", "corsica", "country", "coupons", "courses", "cpa", "cr",
+	"credit", "creditcard", "creditunion", "cricket", "cruises", "cv", "cx",
+	"cymru", "cyou", "cz", "dad", "dance", "date", "dating", "day",
+	"de:whois.denic.de", "deal", "dealer", "deals", "degree", "delivery",
+	"democrat", "dental", "dentist", "desi", "design", "dev", "diamonds", "diet",
+	"digital", "direct", "directory", "discount", "diy", "dk:whois.punktum.dk",
+	"dm:whois.dmdomains.dm", "do:whois.nic.do", "doctor", "dog", "domains",
+	"download", "durban", "dz:whois.nic.dz", "earth", "ec", "eco",
+	"edu:whois.educause.edu", "education", "ee:whois.tld.ee", "email", "energy",
+	"engineer", "engineering", "enterprises", "equipment", "es:whois.nic.es",
+	"esq", "estate", "eu:whois.eu", "eus", "events", "exchange", "expert",
+	"exposed", "express", "fail", "faith", "family", "fan", "fans", "farm",
+	"fashion", "fast", "feedback", "fi", "film", "finance", "financial", "fish",
+	"fishing", "fit", "fitness", "fj", "flights", "florist", "flowers", "fly",
+	"fm", "fo", "foo", "food", "football", "forex", "forsale", "forum",
+	"foundation", "fr", "free", "frl", "fun", "fund", "furniture", "futbol",
+	"fyi", "ga:whois.nic.ga", "gal", "gallery", "game", "games", "garden", "gay",
+	"gd", "gdn", "ge:whois.nic.ge", "gent", "gf:whois.mediaserv.net",
+	"gg:whois.gg", "gh:whois.nic.gh", "gi:whois.identitydigital.services",
+	"gift", "gifts", "gives", "giving", "gl:whois.nic.gl", "glass", "global",
+	"gmbh", "gn:whois.ande.gov.gn", "gold", "golf", "gop", "gp:whois.nic.gp",
+	"gq:whois.dominio.gq", "graphics", "gratis", "green", "gripe", "group", "gs",
+	"guide", "guitars", "guru", "gy", "hair", "hamburg", "haus", "health",
+	"healthcare", "help", "hiphop", "hiv", "hk:whois.hkirc.hk",
+	"hm:whois.registry.hm", "hn", "hockey", "holdings", "holiday", "homes",
+	"horse", "hospital", "host", "hosting", "hot", "house", "how",
+	"hr:whois.dns.hr", "ht", "hu:whois.nic.hu", "icu", "id", "ie:whois.weare.ie",
+	"il:whois.isoc.org.il", "im:whois.nic.im", "immo", "immobilien", "in", "inc",
+	"industries", "info", "ing", "ink", "institute", "insurance", "insure",
+	"int", "international", "investments", "io:whois.nic.io", "iq:whois.iq",
+	"ir:whois.nic.ir", "irish", "is", "ist", "istanbul", "it:whois.nic.it",
+	"je:whois.je", "jetzt", "jewelry", "jobs", "joburg", "jp:whois.jprs.jp",
+	"juegos", "kaufen", "ke", "kg", "ki:whois.nic.ki", "kids", "kim", "kitchen",
+	"kiwi", "km:www.domaine.km", "kn:whois.nic.kn", "koeln", "kr:whois.kr",
+	"krd", "kw:whois.nic.kw", "ky", "kyoto", "kz:whois.nic.kz",
+	"la:whois.nic.la", "land", "lat", "law", "lawyer", "lb",
 	"lc:whois.identitydigital.services", "lease", "legal", "lgbt",
 	"li:whois.nic.li", "life", "lifestyle", "lighting", "limited", "limo",
-	"link", "live", "living", "llc", "loan", "loans", "locker", "lol", "london",
-	"lotto", "love", "lt:whois.domreg.lt", "ltd", "ltda", "lu:whois.dns.lu",
-	"luxe", "luxury", "lv:whois.nic.lv", "madrid", "maison", "makeup",
-	"management", "market", "marketing", "markets", "mba",
-	"me:whois.identitydigital.services", "med", "media", "melbourne", "meme",
-	"memorial", "men", "menu", "mg", "miami", "mk:whois.marnet.mk",
-	"mn:whois.nic.mn", "mobi", "mobile", "moda", "moe", "moi", "mom", "money",
-	"monster", "mortgage", "motorcycles", "mov", "movie", "ms", "mu", "museum",
-	"music", "mx:whois.mx", "my:whois.mynic.my", "nagoya", "name", "navy", "net",
-	"network", "new", "news", "nexus", "nf", "ngo", "ni", "ninja", "nl", "now",
+	"link", "live", "living", "lk:whois.nic.lk", "llc", "loan", "loans",
+	"locker", "lol", "london", "lotto", "love", "ls:whois.nic.ls",
+	"lt:whois.domreg.lt", "ltd", "ltda", "lu:whois.dns.lu", "luxe", "luxury",
+	"lv:whois.nic.lv", "ly", "ma:whois.registre.ma", "madrid", "maison",
+	"makeup", "management", "market", "marketing", "markets", "mba",
+	"mc:whois.nic.mc", "md:whois.nic.md", "me:whois.identitydigital.services",
+	"med", "media", "melbourne", "meme", "memorial", "men", "menu", "mg",
+	"miami", "mk:whois.marnet.mk", "ml", "mm:whois.registry.gov.mm",
+	"mn:whois.nic.mn", "mo:whois.monic.mo", "mobi", "mobile", "moda", "moe",
+	"moi", "mom", "money", "monster", "mortgage", "moscow", "motorcycles", "mov",
+	"movie", "mp:get.mp", "mq:whois.mediaserv.net", "mr:whois.nic.mr", "ms",
+	"mt:whois.nic.org.mt", "mu", "museum", "music", "mv:www.dhiraagu.com.mv",
+	"mw:whois.nic.mw", "mx:whois.mx", "my:whois.mynic.my", "mz:whois.nic.mz",
+	"na", "nagoya", "name", "navy", "nc:whois.nc", "net", "network", "new",
+	"news", "nexus", "nf", "ng", "ngo", "ninja", "nl", "no", "now", "nowruz",
 	"nrw", "nu:whois.iis.nu", "nyc", "nz:whois.irs.net.nz", "observer",
-	"okinawa", "one", "ong", "onl", "online", "ooo", "org", "organic", "osaka",
-	"ovh", "page", "paris", "partners", "parts", "party", "pe:kero.yachay.pe",
-	"pet", "ph", "phd", "photo", "photography", "photos", "pics", "pictures",
-	"pink", "pizza", "pl", "place", "plumbing", "plus", "pm", "poker", "porn",
+	"okinawa", "om:whois.registry.om", "one", "ong", "onl", "online", "ooo",
+	"open", "org", "organic", "osaka", "ovh", "page", "paris", "partners",
+	"parts", "party", "pe:kero.yachay.pe", "pet", "pf:whois.registry.pf", "pg",
+	"pharmacy", "phd", "photo", "photography", "photos", "physio", "pics",
+	"pictures", "pink", "pizza", "pk:whois.pknic.net.pk", "pl", "place",
+	"plumbing", "plus", "pm", "pn", "poker", "porn", "post",
 	"pr:whois.identitydigital.services", "press", "pro", "productions", "prof",
-	"promo", "properties", "property", "protection", "pt:whois.dns.pt", "pub",
-	"pw", "qpon", "quebec", "quest", "racing", "radio", "re", "realty",
-	"recipes", "red", "rehab", "reise", "reisen", "rent", "rentals", "repair",
+	"promo", "properties", "property", "protection", "ps:whois.registry.ps",
+	"pt:whois.dns.pt", "pub", "pw", "qa:whois.registry.qa", "qpon", "quebec",
+	"quest", "racing", "radio", "re", "realestate", "realty", "recipes", "red",
+	"rehab", "reise", "reisen", "reit", "ren", "rent", "rentals", "repair",
 	"report", "republican", "rest", "restaurant", "review", "reviews", "rich",
-	"rip", "ro:whois.rotld.ro", "rocks", "rodeo", "rsvp", "rugby", "ruhr", "run",
-	"ryukyu", "saarland", "sale", "salon", "sarl", "sb:whois.nic.net.sb", "sbs",
-	"sc:whois.nic.sc", "school", "schule", "science", "scot", "se:whois.iis.se",
-	"security", "select", "services", "sex", "sexy", "sh:whois.nic.sh",
+	"rio", "rip", "ro:whois.rotld.ro", "rocks", "rodeo", "rs:whois.rnids.rs",
+	"rsvp", "ru:whois.tcinet.ru", "rugby", "ruhr", "run", "rw", "ryukyu",
+	"sa:whois.nic.net.sa", "saarland", "sale", "salon", "sarl",
+	"sb:whois.nic.net.sb", "sbs", "sc:whois.nic.sc", "school", "schule",
+	"science", "scot", "sd", "se:whois.iis.se", "security", "select", "services",
+	"sex", "sexy", "sg", "sh:whois.nic.sh", "shabaka:whois.nic.xn–ngbc5azd",
 	"shiksha", "shoes", "shop", "shopping", "show", "si", "singles", "site",
-	"ski", "skin", "sn:whois.nic.sn", "so:whois.nic.so", "soccer", "social",
-	"software", "solar", "solutions", "soy", "spa", "space", "spot", "srl",
-	"storage", "store", "stream", "studio", "study", "style", "sucks",
-	"supplies", "supply", "support", "surf", "surgery", "swiss", "sx:whois.sx",
-	"sydney", "systems", "taipei", "talk", "tattoo", "tax", "taxi", "team",
-	"tech", "technology", "tel", "tennis", "tf", "theater", "theatre", "tickets",
-	"tienda", "tips", "tires", "tirol", "tl:whois.nic.tl", "tm:whois.nic.tm",
-	"tn:whois.ati.tn", "to", "today", "tokyo", "tools", "top", "tours", "town",
-	"toys", "trade", "trading", "training", "travel", "tube", "tv", "tw", "uk",
-	"university", "uno", "us:whois.nic.us", "uy", "vacations", "vana",
-	"vc:whois.identitydigital.services", "vegas", "ventures", "vet", "vg",
-	"viajes", "video", "villas", "vin", "vip", "vision", "vlaanderen", "vodka",
-	"vote", "voto", "voyage", "wales", "wang", "watch", "watches", "webcam",
-	"website", "wedding", "wf", "wiki", "win", "wine", "work", "works", "world",
-	"ws:whois.website.ws", "wtf", "xin", "xn--45q11c", "xn--5tzm5g",
+	"sk:whois.sk-nic.sk", "ski", "skin", "sl:whois.nic.sl", "sm:whois.nic.sm",
+	"sn:whois.nic.sn", "so:whois.nic.so", "soccer", "social", "software",
+	"solar", "solutions", "soy", "spa", "space", "sport", "spot", "sr", "srl",
+	"ss:whois.nic.ss", "st:whois.nic.st", "storage", "store", "stream", "studio",
+	"study", "style", "su:whois.tcinet.ru", "sucks", "supplies", "supply",
+	"support", "surf", "surgery", "swiss", "sx", "sy:whois.tld.sy",
+	"sydney", "systems", "taipei", "talk", "tatar", "tattoo", "tax", "taxi",
+	"tc:whois.nic.tc", "td:whois.nic.td", "team", "tech", "technology", "tel",
+	"tennis", "tf", "tg:whois.nic.tg", "th", "theater", "theatre", "tickets",
+	"tienda", "tips", "tires", "tirol", "tk:whois.dot.tk", "tl:whois.nic.tl",
+	"tm:whois.nic.tm", "tn:whois.ati.tn", "to", "today", "tokyo", "tools", "top",
+	"tours", "town", "toys", "tr:whois.trabis.gov.tr", "trade", "trading",
+	"training", "travel", "trust", "tube", "tv", "tw", "tz", "ua",
+	"ug:whois.co.ug", "uk", "university", "uno", "us:whois.nic.us",
+	"uy:whois.nic.org.uy", "uz", "vacations", "vana",
+	"vc:whois.identitydigital.services", "ve:whois.nic.ve", "vegas", "ventures",
+	"versicherung", "vet", "vg", "vi", "viajes", "video", "villas", "vin", "vip",
+	"vision", "vlaanderen", "vodka", "vote", "voting", "voto", "voyage",
+	"vu:whois.dnrs.vu", "wales", "wang", "watch", "watches", "webcam", "website",
+	"wedding", "wf", "whoswho", "wien", "wiki", "win", "wine", "work", "works",
+	"world", "ws:whois.website.ws", "wtf", "xin", "xn--45q11c", "xn--5tzm5g",
 	"xn--6frz82g", "xn--c1avg", "xn--czrs0t", "xn--e1a4c:whois.eu",
 	"xn--fjq720a", "xn--hxt814e", "xn--i1b6b1a6a2e", "xn--ngbc5azd", "xn--nqv7f",
 	"xn--pgbs0dh:whois.ati.tn", "xn--q9jyb4c", "xn--unup4y", "xn--vhquv", "xxx",
-	"xyz", "yachts", "yoga", "yokohama", "you", "yt", "zip", "zone",
+	"xyz", "yachts", "ye", "yoga", "yokohama", "you", "yt", "za:whois.nic.za",
+	"zip", "zm", "zone", "zuerich",
 ]
+

+ 18 - 11
README.md

@@ -1,16 +1,26 @@
 # HoarDom
-Allows you to HOARd DOMains but with alot less pain associated with it.
+Allows you to "HOARd DOMains" more efficiently and make your wallet file for divorce more quickly!
 
 > "How to get my IP unbanned by Whois servers" <br>
 > -- Probably you after using this tool.
 
-## Latest Update : First Release !
-v2.0.1 marks the first actual release where I feel like this project can be considered release worthy :)
-(accidentally set the versioning wrong in cargo toml in v2.0.0 whoops so yea dont use that)
+> Note : My homes IP address is currently banned from the following domains due to this tool :
+> - whois.sx
+
 
 ![ScreenshotTUI1](https://git.teleco.ch/crt/hoardom.git/plain/doc/pics/image.png?h=master)
 
 
+## Latest Update : It now doesnt suck anymore !
+v2.1.2 added a bunch of improvements as I dont push often heres everything thats fixed or new :
+- fixed : catchup game for when you scroll too fast, non-ascii notes text crashing the app, favorites false change indication, notes not scrollable, results sizing consistency issues, outdated help in prev "release", garbage comments, autolist slop script to actually pull inwx properly, more lists (biased and hand sorted for most part).
+- added : non-ascii support for searching domains, scrollbars, more keyboard shortcuts, copy paste support, funny changing title bar headers
+
+tldr : 
+- 143 additional supported TLDs
+- app now cooler than before
+
+
 ## What is `hoardom`? (a tldr)
 `hoardom` is a rust based tool with cli, tui and to some degree gui interface. It is intended for checking and tracking the availability and status of domains and domain name ideas.
 
@@ -149,15 +159,12 @@ No guarantee whether or not I will ever fix em this "little" side project wasnt
 
 - **_BIG ISSUES_**
     - *Problem* : Not really ready for microdomains, If searched for SLD is only 1-3 chars some false positives from TLDs that dont allow such short SLDs are possible if that domain isnt registered.
-        - Temp Fix : Make custom list that excludes the missbehaving 
-    - *Problem* : No support for special characters like ö ä ü
-        - Temp Fix : Type prefix `xn--` and the punycode for ur special character by hand or use a converter.
+        - Temp Fix : Make custom list that excludes the missbehaving for microdomainhunting
 - **_Meh Issues_**
     - Problem : No real like settings pop up panel in TUI to control rest of configuration options in TUI
         - Edit your settings manually, by default in ~/.hoardam/config.toml
 - **_Tiny Issues_**
-    - scrolling then selecting something by mouse might select wrong thing as hitboxes for selection cant updated fast enough due to spasming touchpad scroll events.
-        - Temp Fix : Be Patient and give it like 1-5 seconds to catch up and dont scroll as fast
+    - All good :)
 
 ## Things for some day
 
@@ -165,5 +172,5 @@ No guarantee whether or not I will ever fix em this "little" side project wasnt
 - Automatic minimum lenght for TLD required + definition for it in Lists.toml
 - Better sorted and organized autogenerated AutomaticList.toml and deslopification of fetch-tld script.
 
-## How much Slop can I expect in this repo?
-Usually comments were left for things not done by me. Otherwise if code has like ... very minimal or barely any comments its probably my own spaghetti code : Conclusion mostly just spaghetti code very little ai slop (mostly bug fixes and coment sanetezation)
+## Credits
+I have made most of this program myself; generative work not done by me has mostly been marked in code, apart from maybe some bugfixes. Code from other people on the internet I kinda forgot to mark properly perhaps, but eh, happens.

+ 36 - 17
doc/TUI.md

@@ -25,8 +25,8 @@ the TUI has a few panels:
 - **search bar** at the bottom: type your domain names here then obliterate enter to hacksor them
 - **results** in the middle: shows whats available and whats taken
 - **favorites** on the right side: your watchlist on pulling the trigger and shelling out moneu for em domainers
-- **settings** below favorites: toggle stuff duh
-- **scratchpad** on the left (if enabled in toggler settings): just a little text area for gathering inspiration and other stuff like amongus memes or the bee movie script (saves to config.toml btw so u dont loose your mommy asmr converted to base64 that you saved in ur notes)
+- **settings** (aka preferences) below favorites: toggle stuff duh
+- **notes** on the left (if enabled in toggler settings): just a little text area for gathering inspiration and other stuff like amongus memes or the bee movie script (saves to config.toml btw so u dont loose your mommy asmr converted to base64 that you saved in ur notes)
 
 and since version 1.0.1 theres also a top bar with an export button and help button.
 
@@ -46,28 +46,38 @@ coolproject.ch shitproject
 
 results stream in as they come back, theres a progress bar at the top of the results panel during a search.
 
-press `s` during a search to cancel it if youre impatient.
+you can keep typing in the search bar while a search is running to prepare your next query. starting a new search is blocked until the current one finishes.
 
-if `Clear on Search` is off in settings, results accumulate across searches. press `C` to clear manually.
+press `Ctrl+C` during a search to cancel it if youre impatient.
+
+if `Clear on Search` is off in settings, results accumulate across searches. press `Ctrl+X` to clear manually.
+
+oh and unicode domains work too! type `müller` and itll get converted to punycode for the lookup, but results display the nice unicode version back to you.
 
 ## keyboard shortcuts
 
 ### global
 | key                 | what                                                     |
 |---------------------|----------------------------------------------------------|
-| `F1`                | toggle help overlay                                      |
+| `F1`                | toggle help overlay (scrollable btw)                     |
 | `F2`                | open/close export popup                                  |
-| `Ctrl+C`            | quit                                                     |
-| `s`                 | cancel running search                                    |
+| `Esc`               | quit (closes help/dropdown/export first if open)         |
+| `Ctrl+C`            | cancel running search / copy to clipboard                |
+| `Ctrl+V`            | paste from clipboard (into search bar or notes)          |
+| `Ctrl+X`            | clear results                                            |
+| `Ctrl+S`            | focus search bar                                         |
+| `Ctrl+N`            | focus notes panel                                        |
+| `Ctrl+R`            | focus results                                            |
+| `Ctrl+F`            | focus favorites                                          |
+| `Ctrl+P`            | focus preferences (settings)                             |
 | `Tab` / `Shift+Tab` | cycle between panels                                     |
-| `Esc`               | close help/dropdown, or clear selection in current panel |
 
 ### search bar
-| key            | what                                     |
-|----------------|------------------------------------------|
-| `Enter`        | start the search                         |
-| typing         | works normally when no search is running |
-| `Home` / `End` | jump to start/end of input               |
+| key            | what                                            |
+|----------------|-------------------------------------------------|
+| `Enter`        | start the search                                |
+| typing         | works even while a search is running            |
+| `Home` / `End` | jump to start/end of input                      |
 
 ### results
 | key           | what                                |
@@ -87,9 +97,18 @@ if `Clear on Search` is off in settings, results accumulate across searches. pre
 |-------------------|-------------------------------------------------|
 | `Up` / `Down`     | move between settings rows                      |
 | `Enter` / `Space` | toggle checkboxes or open the TLD list dropdown |
+| `+` / `-`         | adjust concurrent jobs count                    |
+
+the jobs setting also has clickable `[-]` and `[+]` buttons below it.
+
+the TLD list dropdown is scrollable with mouse wheel and arrow keys.
 
-### scratchpad
-as was already said... put whatever u want in here
+### notes
+| key       | what                    |
+|-----------|-------------------------|
+| typing    | just type, its a textbox |
+| `Ctrl+K`  | clear current line      |
+| mouse     | click to place cursor   |
 
 ## mouse and scrolling support
 mouse is on by default. you can click and interact with stuff
@@ -106,9 +125,9 @@ theres 4 things in there:
 
 - **TLD List** dropdown: pick Standard, Decent, Country, All, or any custom imported lists
 - **Show Unavailable** checkbox: toggles whether taken domains show with premium details in results
-- **Show Notes Panel** checkbox: toggles the scratchpad panel on the left
+- **Show Notes Panel** checkbox: toggles the notes panel on the left
 - **Clear on Search** checkbox: if on, results get cleared before each new search. if off they pile up for the true hoarding feeling.
-// todo add the job amount selector here too
+- **Jobs** counter with `[-]` and `[+]` buttons: controls how many concurrent lookups run at once
 
 oh and settings auto save to config 
 

+ 1245 - 0
scripts/auto-violator.sh

@@ -0,0 +1,1245 @@
+#!/usr/bin/env bash
+# auto-violator.sh — scrappy TLD-list fetcher/generator that mostly works <3
+#
+# Usage:
+#   ./scripts/auto-violator.sh              # fetch all, show summary
+#   ./scripts/auto-violator.sh porkbun      # porkbun only
+#   ./scripts/auto-violator.sh inwx         # inwx only
+#   ./scripts/auto-violator.sh --raw        # output raw TLD lists (one per line)
+#   ./scripts/auto-violator.sh --toml       # output TOML-ready arrays
+#   ./scripts/auto-violator.sh --diff       # compare against current Lists.toml
+#   ./scripts/auto-violator.sh --template   # generate Lists.toml into violator-workdir/
+#   ./scripts/auto-violator.sh --probe      # probe unknown TLDs for WHOIS servers
+#
+# Outputs:
+#   violator-workdir/cache/       — cached API responses
+#   violator-workdir/pdom.txt     — purchasable TLDs with known WHOIS/RDAP servers
+#   violator-workdir/sdom.txt     — TLDs where no WHOIS server could be found
+#   violator-workdir/Lists.toml   — generated Lists.toml (never overwrites project root)
+#
+# Config: scripts/violator.conf
+#
+# Notes : parts of this script are AI-generated; I didn't write all of it
+#         myself. Most of the Rust is mine — I just didn't feel like doing
+#         this part at 4am, and it works well enough.
+# Correction : the initial Porkbun fetching was mostly mine, but Porkbun
+#              lacked many domains, hence the extra sources.
+
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+LISTS_TOML="$PROJECT_DIR/Lists.toml"
+WORK_DIR="$SCRIPT_DIR/violator-workdir"
+CACHE_DIR="$WORK_DIR/cache"
+CONF_FILE="$SCRIPT_DIR/violator.conf"
+PDOM_FILE="$WORK_DIR/pdom.txt"
+SDOM_FILE="$WORK_DIR/sdom.txt"
+OUTPUT_TOML="$WORK_DIR/Lists.toml"
+
+mkdir -p "$CACHE_DIR"
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+CYAN='\033[0;36m'
+BOLD='\033[1m'
+NC='\033[0m'
+
+# ═══════════════════════════════════════════════════════════════════════════
+# Config parser
+# ═══════════════════════════════════════════════════════════════════════════
+
+# Read a value from violator.conf
+# Usage: conf_get section key [default]
+conf_get() {
+    local section="$1" key="$2" default="${3:-}"
+    if [[ ! -f "$CONF_FILE" ]]; then
+        echo "$default"
+        return
+    fi
+    awk -v section="$section" -v key="$key" -v default="$default" '
+        /^\[/ { in_section = ($0 == "[" section "]") ; next }
+        in_section && /^[[:space:]]*#/ { next }
+        in_section && /^[[:space:]]*$/ { next }
+        in_section {
+            # key = value
+            split($0, kv, "=")
+            gsub(/^[[:space:]]+|[[:space:]]+$/, "", kv[1])
+            gsub(/^[[:space:]]+|[[:space:]]+$/, "", kv[2])
+            if (kv[1] == key) { print kv[2]; found=1; exit }
+        }
+        END { if (!found) print default }
+    ' "$CONF_FILE"
+}
+
+# Read all keys from a section (returns "key value" lines)
+conf_section_keys() {
+    local section="$1"
+    if [[ ! -f "$CONF_FILE" ]]; then
+        return
+    fi
+    awk -v section="$section" '
+        /^\[/ { in_section = ($0 == "[" section "]") ; next }
+        in_section && /^[[:space:]]*#/ { next }
+        in_section && /^[[:space:]]*$/ { next }
+        in_section {
+            split($0, kv, "=")
+            gsub(/^[[:space:]]+|[[:space:]]+$/, "", kv[1])
+            gsub(/^[[:space:]]+|[[:space:]]+$/, "", kv[2])
+            if (kv[1] != "" && kv[2] != "") print kv[1], kv[2]
+        }
+    ' "$CONF_FILE"
+}
+
+# Read a bare-value section (values are just words, possibly multiline)
+conf_section_values() {
+    local section="$1"
+    if [[ ! -f "$CONF_FILE" ]]; then
+        return
+    fi
+    awk -v section="$section" '
+        /^\[/ { in_section = ($0 == "[" section "]") ; next }
+        in_section && /^[[:space:]]*#/ { next }
+        in_section && /^[[:space:]]*$/ { next }
+        in_section { print }
+    ' "$CONF_FILE"
+}
+
+# Read the tlds field from a [list.NAME] section (may be multiline, indented continuation)
+conf_list_tlds() {
+    local list_name="$1"
+    local section="list.${list_name}"
+    if [[ ! -f "$CONF_FILE" ]]; then
+        return
+    fi
+    awk -v section="$section" '
+        /^\[/ { in_section = ($0 == "[" section "]"); next }
+        in_section && /^[[:space:]]*#/ { next }
+        in_section && /^[[:space:]]*$/ { next }
+        in_section {
+            split($0, kv, "=")
+            gsub(/^[[:space:]]+|[[:space:]]+$/, "", kv[1])
+            if (kv[1] == "tlds") { in_tlds=1; gsub(/^[^=]*=/, ""); print; next }
+            if (in_tlds && /^[[:space:]]/) { print; next }
+            in_tlds=0
+        }
+    ' "$CONF_FILE" | tr '\n' ' ' | sed 's/[[:space:]]\+/ /g; s/^ //; s/ $//'
+}
+
+# Get all [list.*] section names
+conf_list_names() {
+    if [[ ! -f "$CONF_FILE" ]]; then
+        echo "standard decent swiss country two three four long all"
+        return
+    fi
+    grep -oE '^\[list\.[a-z0-9_]+\]' "$CONF_FILE" | sed 's/\[list\.//;s/\]//'
+}
+
+# Load skip TLDs from config
+load_skip_tlds() {
+    local skip
+    skip=$(conf_section_values "skip_tlds" | tr '\n' ' ')
+    if [[ -z "$skip" ]]; then
+        skip="bl bq eh mf gb bv sj kp hm"
+    fi
+    echo "$skip"
+}
+
+# Load whois overrides from config
+load_whois_overrides() {
+    conf_section_keys "whois_overrides"
+}
+
+# ═══════════════════════════════════════════════════════════════════════════
+# Fetchers
+# ═══════════════════════════════════════════════════════════════════════════
+
+# ─── Porkbun ────────────────────────────────────────────────────────────────
+fetch_porkbun() {
+    local cache="$CACHE_DIR/porkbun.json"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching Porkbun pricing API...${NC}" >&2
+    if curl -sf -X POST "https://api.porkbun.com/api/json/v3/pricing/get" \
+        -H "Content-Type: application/json" \
+        -d '{}' \
+        -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${RED}Failed to fetch Porkbun data${NC}" >&2
+        return 1
+    fi
+}
+
+parse_porkbun() {
+    local json_file="$1"
+    if command -v jq &>/dev/null; then
+        jq -r '.pricing // {} | keys[]' "$json_file" 2>/dev/null | sort -u
+    else
+        grep -o '"[a-z][a-z0-9.-]*":{' "$json_file" | sed 's/"//g; s/:{//' | sort -u
+    fi
+}
+
+# ─── INWX ───────────────────────────────────────────────────────────────────
+fetch_inwx() {
+    local cache="$CACHE_DIR/inwx.csv"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching INWX pricelist CSV...${NC}" >&2
+    if curl -sf "https://www.inwx.ch/en/domain/pricelist/vat/1/file/csv" \
+        -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${YELLOW}Could not fetch INWX${NC}" >&2
+        return 1
+    fi
+}
+
+parse_inwx() {
+    local csv_file="$1"
+    sed 's/;.*//' "$csv_file" | tr -d '"' | grep -E '^[a-z][a-z0-9]*$' | sort -u
+}
+
+# ─── OVH ────────────────────────────────────────────────────────────────────
+fetch_ovh() {
+    local cache="$CACHE_DIR/ovh.json"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching OVH domain extensions...${NC}" >&2
+    if curl -sf "https://www.ovh.com/engine/apiv6/domain/extensions" \
+        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+        -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${YELLOW}Could not fetch OVH extensions${NC}" >&2
+        return 1
+    fi
+}
+
+parse_ovh() {
+    local json_file="$1"
+    if command -v jq &>/dev/null; then
+        jq -r '.[]' "$json_file" 2>/dev/null | grep -vE '\.' | sort -u
+    else
+        grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | grep -vE '\.' | sort -u
+    fi
+}
+
+# ─── DomainOffer.net ────────────────────────────────────────────────────────
+fetch_domainoffer() {
+    local cache="$CACHE_DIR/domainoffer.html"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching DomainOffer.net price compare...${NC}" >&2
+    if curl -sf "https://domainoffer.net/price-compare" \
+        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+        -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${YELLOW}Could not fetch DomainOffer.net${NC}" >&2
+        return 1
+    fi
+}
+
+parse_domainoffer() {
+    local html_file="$1"
+    local parsed_cache="$CACHE_DIR/domainoffer-tlds.txt"
+
+    # Return cached parsed list if newer than HTML
+    if [[ -f "$parsed_cache" && "$parsed_cache" -nt "$html_file" ]]; then
+        cat "$parsed_cache"
+        return
+    fi
+
+    local result=""
+    if command -v python3 &>/dev/null; then
+        result=$(python3 -c "
+import re, json, sys
+with open('$html_file') as f:
+    html = f.read()
+m = re.search(r'var domainData = (\[.*?\]);', html, re.DOTALL)
+if not m:
+    sys.exit(0)
+for entry in json.loads(m.group(1)):
+    tld = entry[0].lstrip('.').lower() if isinstance(entry, list) and entry else ''
+    if '.' not in tld and re.match(r'^[a-z][a-z0-9]*$', tld):
+        print(tld)
+" | sort -u)
+    else
+        result=$(grep -oE '\["[a-z][a-z0-9]*",' "$html_file" | sed 's/\["//;s/",//' | sort -u)
+    fi
+
+    # Cache the parsed result
+    echo "$result" > "$parsed_cache"
+    echo "$result"
+}
+
+# ─── tld-list.com ───────────────────────────────────────────────────────────
+fetch_tldlist() {
+    local cache="$CACHE_DIR/tldlist-basic.txt"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching tld-list.com basic list...${NC}" >&2
+    if curl -sf "https://tld-list.com/df/tld-list-basic.csv" \
+        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+        -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${YELLOW}Could not fetch tld-list.com${NC}" >&2
+        return 1
+    fi
+}
+
+parse_tldlist() {
+    local file="$1"
+    tr -d '\r' < "$file" | grep -E '^[a-z][a-z0-9]*$' | sort -u
+}
+
+# ─── IANA root zone ─────────────────────────────────────────────────────────
+fetch_iana() {
+    local cache="$CACHE_DIR/iana-tlds.txt"
+    local max_age=604800
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching IANA TLD list...${NC}" >&2
+    if curl -sf "https://data.iana.org/TLD/tlds-alpha-by-domain.txt" -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${RED}Failed to fetch IANA list${NC}" >&2
+        return 1
+    fi
+}
+
+parse_iana() {
+    local file="$1"
+    tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | sort -u
+}
+
+parse_iana_cctlds() {
+    local file="$1"
+    tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | grep -E '^[a-z]{2}$' | sort -u
+}
+
+# ─── RDAP bootstrap ─────────────────────────────────────────────────────────
+fetch_rdap() {
+    local cache="$CACHE_DIR/rdap-dns.json"
+    local max_age=86400
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching RDAP bootstrap...${NC}" >&2
+    if curl -sf "https://data.iana.org/rdap/dns.json" -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${RED}Failed to fetch RDAP bootstrap${NC}" >&2
+        return 1
+    fi
+}
+
+parse_rdap_tlds() {
+    local json_file="$1"
+    if command -v jq &>/dev/null; then
+        jq -r '.services[][] | .[]' "$json_file" 2>/dev/null | grep -v '^http' | tr '[:upper:]' '[:lower:]' | sort -u
+    else
+        grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | sort -u
+    fi
+}
+
+# ─── WHOIS server list ──────────────────────────────────────────────────────
+fetch_whois_servers() {
+    local cache="$CACHE_DIR/tld_serv_list.txt"
+    local max_age=604800
+
+    if [[ -f "$cache" ]]; then
+        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+        if (( age < max_age )); then
+            echo "$cache"
+            return 0
+        fi
+    fi
+
+    echo -e "${CYAN}Fetching WHOIS server list...${NC}" >&2
+    if curl -sf "https://raw.githubusercontent.com/rfc1036/whois/next/tld_serv_list" -o "$cache" 2>/dev/null; then
+        echo "$cache"
+    else
+        echo -e "${YELLOW}Could not fetch WHOIS server list${NC}" >&2
+        return 1
+    fi
+}
+
+get_whois_server() {
+    local tld="$1"
+    local serv_file="$2"
+    local line
+    line=$(grep -E "^\\.${tld}[[:space:]]" "$serv_file" 2>/dev/null | head -1)
+    if [[ -z "$line" ]]; then
+        echo ""
+        return
+    fi
+    local server
+    server=$(echo "$line" | awk '{
+        for (i=NF; i>=2; i--) {
+            if ($i ~ /^[a-z0-9].*\.[a-z]/) { print $i; exit }
+        }
+    }')
+    if [[ "$server" == "NONE" || "$server" == "ARPA" || -z "$server" || "$server" == http* ]]; then
+        echo ""
+    else
+        echo "$server"
+    fi
+}
+
+get_iana_whois_server() {
+    local tld="$1"
+    curl -s "https://www.iana.org/domains/root/db/${tld}.html" 2>/dev/null \
+        | sed -n 's/.*WHOIS Server:<\/b> *\([^ <]*\).*/\1/p' \
+        | head -1
+}
+
+# ═══════════════════════════════════════════════════════════════════════════
+# WHOIS Probe — try common server patterns for unknown TLDs
+# ═══════════════════════════════════════════════════════════════════════════
+
+probe_whois_server() {
+    local tld="$1"
+    local timeout_s="$2"
+    local patterns_str="$3"
+
+    # Split patterns on comma
+    IFS=',' read -ra patterns <<< "$patterns_str"
+    for pattern in "${patterns[@]}"; do
+        pattern=$(echo "$pattern" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
+        local server="${pattern//\{\}/$tld}"
+        # Perl socket connect — Timeout covers both DNS resolution and TCP connect
+        # unlike nc -w which only covers TCP and lets DNS hang forever on macOS
+        if perl -e 'use IO::Socket::INET; exit(0) if IO::Socket::INET->new(PeerAddr=>$ARGV[0],PeerPort=>43,Timeout=>$ARGV[1]); exit(1)' "$server" "$timeout_s" 2>/dev/null; then
+            echo "$server"
+            return 0
+        fi
+    done
+    return 1
+}
+
+# Worker function for parallel probing — writes result to a file and prints live
+_probe_worker() {
+    local tld="$1" timeout_s="$2" patterns="$3" result_dir="$4" idx="$5" total="$6"
+    local server
+    if server=$(probe_whois_server "$tld" "$timeout_s" "$patterns"); then
+        echo "${tld}:${server}" > "${result_dir}/${tld}.found"
+        echo -e "  [${idx}/${total}] ${GREEN}✓${NC} ${tld} → ${server}" >&2
+    else
+        touch "${result_dir}/${tld}.miss"
+        echo -e "  [${idx}/${total}] ${RED}✗${NC} ${tld}" >&2
+    fi
+}
+
+run_whois_probes() {
+    local sdom_file="$1"
+    local pdom_file="$2"
+
+    local probe_enabled
+    probe_enabled=$(conf_get "whois_probe" "enabled" "true")
+    if [[ "$probe_enabled" != "true" ]]; then
+        echo -e "  ${YELLOW}WHOIS probing disabled in config${NC}" >&2
+        return
+    fi
+
+    local timeout_s
+    timeout_s=$(conf_get "whois_probe" "timeout" "2")
+    local patterns
+    patterns=$(conf_get "whois_probe" "patterns" "whois.nic.{}, whois.{}, whois.registry.{}")
+    local max_jobs
+    max_jobs=$(conf_get "whois_probe" "parallel" "10")
+
+    if [[ ! -f "$sdom_file" ]] || [[ ! -s "$sdom_file" ]]; then
+        return
+    fi
+
+    # Count patterns for info
+    local pattern_count=0
+    IFS=',' read -ra _pats <<< "$patterns"
+    pattern_count=${#_pats[@]}
+    unset _pats
+
+    local total
+    total=$(wc -l < "$sdom_file" | tr -d ' ')
+    echo -e "  ${CYAN}Probing ${total} TLDs (${pattern_count} patterns, ${timeout_s}s timeout, ${max_jobs} parallel)${NC}" >&2
+
+    # Use workdir for temp results so they're visible
+    local result_dir="$WORK_DIR/probe-tmp"
+    rm -rf "$result_dir"
+    mkdir -p "$result_dir"
+
+    # Read all TLDs into array
+    local -a tld_list=()
+    while IFS= read -r tld; do
+        [[ -n "$tld" ]] && tld_list+=("$tld")
+    done < "$sdom_file"
+
+    # Launch all jobs with max_jobs concurrency
+    # Each worker prints its own result immediately to stderr
+    local running=0 idx=0
+    for tld in "${tld_list[@]}"; do
+        ((idx++)) || true
+        _probe_worker "$tld" "$timeout_s" "$patterns" "$result_dir" "$idx" "$total" &
+        ((running++)) || true
+
+        # Throttle: when we hit max_jobs, wait for any one to finish
+        if (( running >= max_jobs )); then
+            wait -n 2>/dev/null || wait
+            ((running--)) || true
+        fi
+    done
+
+    # Wait for remaining
+    wait
+
+    # Collect results: append found to pdom, rebuild sdom
+    local found=0
+    local new_sdom=""
+    for tld in "${tld_list[@]}"; do
+        if [[ -f "${result_dir}/${tld}.found" ]]; then
+            cat "${result_dir}/${tld}.found" >> "$pdom_file"
+            ((found++)) || true
+        else
+            new_sdom+="${tld}"$'\n'
+        fi
+    done
+
+    # Cleanup
+    rm -rf "$result_dir"
+
+    # Rewrite sdom with remaining unknowns
+    echo -n "$new_sdom" | sort -u > "$sdom_file"
+
+    if (( found > 0 )); then
+        sort -u -o "$pdom_file" "$pdom_file"
+        echo -e "  ${GREEN}Probing complete: found ${found}/${total} WHOIS servers${NC}" >&2
+    else
+        echo -e "  ${YELLOW}Probing complete: no new servers (0/${total})${NC}" >&2
+    fi
+}
+
+# ═══════════════════════════════════════════════════════════════════════════
+# Helpers
+# ═══════════════════════════════════════════════════════════════════════════
+
+parse_current_lists() {
+    local list_name="${1:-all}"
+    if [[ ! -f "$LISTS_TOML" ]]; then
+        echo -e "${RED}No Lists.toml found at $LISTS_TOML${NC}" >&2
+        return 1
+    fi
+    awk -v list="$list_name" '
+        $0 ~ "^"list" *= *\\[" { found=1; next }
+        found && /^\]/ { exit }
+        found && /^[[:space:]]*\[/ { exit }
+        found {
+            gsub(/["\t,]/, " ")
+            n = split($0, parts, " ")
+            for (i=1; i<=n; i++) {
+                if (parts[i] != "") {
+                    sub(/:.*/, "", parts[i])
+                    print parts[i]
+                }
+            }
+        }
+    ' "$LISTS_TOML" | sort -u
+}
+
+to_toml_array() {
+    local tlds=()
+    while IFS= read -r tld; do
+        [[ -z "$tld" ]] && continue
+        tlds+=("$tld")
+    done
+    local line='\t'
+    local first=true
+    for tld in "${tlds[@]}"; do
+        local entry="\"$tld\""
+        if $first; then
+            line+="$entry"
+            first=false
+        else
+            local test_line="$line, $entry"
+            if (( ${#test_line} > 78 )); then
+                echo -e "$line,"
+                line="\t$entry"
+            else
+                line+=", $entry"
+            fi
+        fi
+    done
+    [[ -n "$line" ]] && echo -e "$line,"
+}
+
+filter_cctlds() {
+    grep -E '^[a-z]{2}$'
+}
+
+filter_short_tlds() {
+    grep -E '^[a-z]{2,6}$'
+}
+
+SKIP_TLDS=""
+
+filter_skip() {
+    if [[ -z "$SKIP_TLDS" ]]; then
+        cat
+        return
+    fi
+    local skip_pattern
+    # trim whitespace and convert spaces to regex alternation
+    skip_pattern=$(echo "$SKIP_TLDS" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//' | tr -s ' ' '|')
+    if [[ -z "$skip_pattern" ]]; then
+        cat
+        return
+    fi
+    grep -vE "^($skip_pattern)$"
+}
+
+# ═══════════════════════════════════════════════════════════════════════════
+# Template generation (Lists.toml)
+# ═══════════════════════════════════════════════════════════════════════════
+
+generate_template() {
+    local all_registrar_tlds="$1"
+    local rdap_tlds="$2"
+    local source_summary="$3"
+
+    # Fetch WHOIS server list
+    local whois_serv_file=""
+    if whois_serv_file=$(fetch_whois_servers 2>/dev/null); then
+        true
+    fi
+
+    # Load manual whois overrides from config
+    local -A manual_whois=()
+    while IFS=' ' read -r tld server; do
+        [[ -z "$tld" ]] && continue
+        manual_whois["$tld"]="$server"
+    done < <(load_whois_overrides)
+
+    local buyable_tlds
+    buyable_tlds=$(echo "$all_registrar_tlds" | filter_skip | sort -u)
+
+    local buyable_count
+    buyable_count=$(echo "$buyable_tlds" | grep -c . || echo 0)
+
+    # Build annotated TLD list: "tld" or "tld:whois_server"
+    local annotated_all=()
+    local annotated_cc=()
+    local rdap_hit=0 whois_hit=0 bare_hit=0
+    local pdom_entries=()
+    local sdom_entries=()
+
+    while IFS= read -r tld; do
+        [[ -z "$tld" ]] && continue
+        local entry=""
+
+        # Check manual override first
+        if [[ -n "${manual_whois[$tld]:-}" ]]; then
+            entry="${tld}:${manual_whois[$tld]}"
+            ((whois_hit++)) || true
+            pdom_entries+=("$entry")
+        elif echo "$rdap_tlds" | grep -qx "$tld" 2>/dev/null; then
+            entry="$tld"
+            ((rdap_hit++)) || true
+            pdom_entries+=("$tld")
+        else
+            local server=""
+            if [[ -n "$whois_serv_file" ]]; then
+                server=$(get_whois_server "$tld" "$whois_serv_file")
+            fi
+            if [[ -n "$server" ]]; then
+                entry="${tld}:${server}"
+                ((whois_hit++)) || true
+                pdom_entries+=("$entry")
+            else
+                entry="$tld"
+                ((bare_hit++)) || true
+                sdom_entries+=("$tld")
+            fi
+        fi
+        annotated_all+=("$entry")
+        local base_tld="${tld%%:*}"
+        if [[ "$base_tld" =~ ^[a-z]{2}$ ]]; then
+            annotated_cc+=("$entry")
+        fi
+    done <<< "$buyable_tlds"
+
+    # Write pdom.txt and sdom.txt
+    printf '%s\n' "${pdom_entries[@]}" | sort -u > "$PDOM_FILE"
+    printf '%s\n' "${sdom_entries[@]}" | sort -u > "$SDOM_FILE"
+
+    echo -e "${CYAN}Building template...${NC}" >&2
+    echo -e "  ${GREEN}${rdap_hit}${NC} TLDs with RDAP (direct lookup)" >&2
+    echo -e "  ${YELLOW}${whois_hit}${NC} TLDs with WHOIS override" >&2
+    echo -e "  ${RED}${bare_hit}${NC} TLDs with no known server (will probe)" >&2
+    echo -e "  ${CYAN}pdom.txt:${NC} $(wc -l < "$PDOM_FILE" | tr -d ' ') entries" >&2
+    echo -e "  ${CYAN}sdom.txt:${NC} $(wc -l < "$SDOM_FILE" | tr -d ' ') entries" >&2
+    echo "" >&2
+
+    # Run WHOIS probe on sdom entries if enabled
+    if [[ "$DO_PROBE" == true ]]; then
+        run_whois_probes "$SDOM_FILE" "$PDOM_FILE"
+        # Reload pdom into annotated_all (probed ones now have servers)
+        if [[ -s "$PDOM_FILE" ]]; then
+            local probed_tlds
+            probed_tlds=$(cat "$PDOM_FILE")
+            # Rebuild annotated arrays with probed data
+            annotated_all=()
+            annotated_cc=()
+            while IFS= read -r entry; do
+                [[ -z "$entry" ]] && continue
+                annotated_all+=("$entry")
+                local base="${entry%%:*}"
+                if [[ "$base" =~ ^[a-z]{2}$ ]]; then
+                    annotated_cc+=("$entry")
+                fi
+            done <<< "$probed_tlds"
+            # Add remaining sdom entries (no server found) unless --strict
+            if [[ -s "$SDOM_FILE" && "$STRICT" != true ]]; then
+                while IFS= read -r tld; do
+                    [[ -z "$tld" ]] && continue
+                    annotated_all+=("$tld")
+                    if [[ "$tld" =~ ^[a-z]{2}$ ]]; then
+                        annotated_cc+=("$tld")
+                    fi
+                done < "$SDOM_FILE"
+            fi
+        fi
+    fi
+
+    # --strict without --probe: filter out sdom entries from annotated arrays
+    if [[ "$STRICT" == true && "$DO_PROBE" != true && -s "$SDOM_FILE" ]]; then
+        local -A sdom_set=()
+        while IFS= read -r tld; do
+            [[ -n "$tld" ]] && sdom_set[$tld]=1
+        done < "$SDOM_FILE"
+        local filtered_all=() filtered_cc=()
+        for ann in "${annotated_all[@]}"; do
+            local base="${ann%%:*}"
+            if [[ -z "${sdom_set[$base]:-}" ]]; then
+                filtered_all+=("$ann")
+                if [[ "$base" =~ ^[a-z]{2}$ ]]; then
+                    filtered_cc+=("$ann")
+                fi
+            fi
+        done
+        local stripped=$(( ${#annotated_all[@]} - ${#filtered_all[@]} ))
+        annotated_all=("${filtered_all[@]}")
+        annotated_cc=("${filtered_cc[@]}")
+        echo -e "  ${YELLOW}--strict:${NC} removed $stripped TLDs with no working server" >&2
+    fi
+
+    # ── Build list output from config ──────────────────────────────────
+
+    # Annotate a curated tld list with whois overrides
+    annotate_tld() {
+        local bare_tld="$1"
+        for ann in "${annotated_all[@]}"; do
+            local ann_base="${ann%%:*}"
+            if [[ "$ann_base" == "$bare_tld" ]]; then
+                echo "$ann"
+                return
+            fi
+        done
+        echo "$bare_tld"
+    }
+
+    filter_annotated_by_length() {
+        local min="$1" max="$2"
+        for ann in "${annotated_all[@]}"; do
+            local base="${ann%%:*}"
+            local len=${#base}
+            if (( len >= min && len <= max )); then
+                echo "$ann"
+            fi
+        done
+    }
+
+    filter_annotated_cctlds() {
+        for ann in "${annotated_all[@]}"; do
+            local base="${ann%%:*}"
+            if [[ "$base" =~ ^[a-z]{2}$ ]]; then
+                echo "$ann"
+            fi
+        done
+    }
+
+    # ── Output Lists.toml ──────────────────────────────────────────────
+    local date_str
+    date_str=$(date +%Y-%m-%d)
+
+    # Build description comments from config
+    local list_descriptions=""
+    while IFS= read -r name; do
+        local desc
+        desc=$(conf_get "list.${name}" "description" "$name")
+        list_descriptions+="#   ${name}$(printf '%*s' $((10 - ${#name})) '')— ${desc}"$'\n'
+    done < <(conf_list_names)
+
+    cat <<HEADER
+# Lists.toml — Built-in TLD lists for hoardom
+# Auto-generated on ${date_str} from ${source_summary}
+#
+# Format:
+#   "tld"              — TLD has RDAP support, lookup works directly
+#   "tld:whois.server" — No RDAP: use this WHOIS server for fallback
+#
+# ${buyable_count} purchasable TLDs (handshake/sub-TLDs excluded)
+# ${rdap_hit} have RDAP, ${whois_hit} need WHOIS override, ${bare_hit} will auto-probe
+#
+# Lists:
+${list_descriptions}
+HEADER
+
+    # Generate each list from config
+    while IFS= read -r name; do
+        local type
+        type=$(conf_get "list.${name}" "type" "curated")
+
+        case "$type" in
+            curated)
+                local tlds_str
+                tlds_str=$(conf_list_tlds "$name")
+                echo "${name} = ["
+                for bare_tld in $tlds_str; do
+                    annotate_tld "$bare_tld"
+                done | to_toml_array
+                echo "]"
+                echo ""
+                ;;
+            filter)
+                local min_len max_len country_only
+                min_len=$(conf_get "list.${name}" "min_length" "2")
+                max_len=$(conf_get "list.${name}" "max_length" "99")
+                country_only=$(conf_get "list.${name}" "country_only" "false")
+
+                echo "${name} = ["
+                if [[ "$country_only" == "true" ]]; then
+                    filter_annotated_cctlds | to_toml_array
+                else
+                    filter_annotated_by_length "$min_len" "$max_len" | to_toml_array
+                fi
+                echo "]"
+                echo ""
+                ;;
+            all)
+                echo "${name} = ["
+                printf '%s\n' "${annotated_all[@]}" | to_toml_array
+                echo "]"
+                echo ""
+                ;;
+        esac
+    done < <(conf_list_names)
+}
+
+# ═══════════════════════════════════════════════════════════════════════════
+# Main
+# ═══════════════════════════════════════════════════════════════════════════
+
+main() {
+    local mode="summary"
+    local source="all"
+    local all_sources=false
+    DO_PROBE=false
+    STRICT=false
+
+    # Load skip TLDs from config
+    SKIP_TLDS=$(load_skip_tlds)
+
+    for arg in "$@"; do
+        case "$arg" in
+            --raw)       mode="raw" ;;
+            --toml)      mode="toml" ;;
+            --diff)      mode="diff" ;;
+            --template)  mode="template" ;;
+            --probe)     DO_PROBE=true ;;
+            --strict)    STRICT=true ;;
+            --all-sources) all_sources=true ;;
+            porkbun)     source="porkbun" ;;
+            inwx)        source="inwx" ;;
+            ovh)         source="ovh" ;;
+            domainoffer) source="domainoffer" ;;
+            iana)        source="iana" ;;
+            rdap)        source="rdap" ;;
+            tldlist)     source="tldlist" ;;
+            --help|-h)
+                echo "Usage: $0 [source] [--raw|--toml|--diff|--template] [--probe] [--strict] [--all-sources]"
+                echo ""
+                echo "Sources: porkbun, ovh, inwx, domainoffer, iana, rdap, tldlist"
+                echo ""
+                echo "Flags:"
+                echo "  --raw          Output raw TLD list (one per line)"
+                echo "  --toml         Output TOML-ready arrays"
+                echo "  --diff         Compare against current Lists.toml"
+                echo "  --template     Generate Lists.toml into violator-workdir/"
+                echo "  --probe        Probe unknown TLDs for WHOIS servers"
+                echo "  --strict       Remove TLDs with no working server from output"
+                echo "  --all-sources  Include tld-list.com for extra coverage"
+                echo ""
+                echo "Config:  $CONF_FILE"
+                echo "Workdir: $WORK_DIR"
+                exit 0 ;;
+        esac
+    done
+
+    local porkbun_tlds="" inwx_tlds="" ovh_tlds="" domainoffer_tlds="" iana_tlds="" rdap_tlds="" tldlist_tlds=""
+    local porkbun_count=0 inwx_count=0 ovh_count=0 domainoffer_count=0 iana_count=0 rdap_count=0 tldlist_count=0
+
+    # Template mode needs all sources
+    if [[ "$mode" == "template" ]]; then
+        source="all"
+    fi
+
+    # ── Fetch from selected sources ──
+
+    if [[ "$source" == "all" || "$source" == "porkbun" ]]; then
+        if porkbun_file=$(fetch_porkbun); then
+            porkbun_tlds=$(parse_porkbun "$porkbun_file")
+            porkbun_count=$(echo "$porkbun_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$source" == "all" || "$source" == "ovh" ]]; then
+        if ovh_file=$(fetch_ovh); then
+            ovh_tlds=$(parse_ovh "$ovh_file")
+            ovh_count=$(echo "$ovh_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$source" == "all" || "$source" == "inwx" ]]; then
+        if inwx_file=$(fetch_inwx 2>/dev/null); then
+            inwx_tlds=$(parse_inwx "$inwx_file")
+            inwx_count=$(echo "$inwx_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$source" == "all" || "$source" == "domainoffer" ]]; then
+        if domainoffer_file=$(fetch_domainoffer 2>/dev/null); then
+            domainoffer_tlds=$(parse_domainoffer "$domainoffer_file")
+            domainoffer_count=$(echo "$domainoffer_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$source" == "all" || "$source" == "iana" ]]; then
+        if iana_file=$(fetch_iana); then
+            iana_tlds=$(parse_iana "$iana_file")
+            iana_count=$(echo "$iana_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$source" == "all" || "$source" == "rdap" ]]; then
+        if rdap_file=$(fetch_rdap); then
+            rdap_tlds=$(parse_rdap_tlds "$rdap_file")
+            rdap_count=$(echo "$rdap_tlds" | grep -c . || true)
+        fi
+    fi
+
+    if [[ "$all_sources" == true || "$source" == "tldlist" ]]; then
+        if tldlist_file=$(fetch_tldlist); then
+            tldlist_tlds=$(parse_tldlist "$tldlist_file")
+            tldlist_count=$(echo "$tldlist_tlds" | grep -c . || true)
+        fi
+    fi
+
+    # ── Filter porkbun: no handshake, no sub-TLDs ──
+    local porkbun_filtered=""
+    if [[ -n "$porkbun_tlds" ]]; then
+        local porkbun_file="$CACHE_DIR/porkbun.json"
+        if command -v jq &>/dev/null && [[ -f "$porkbun_file" ]]; then
+            porkbun_filtered=$(jq -r '
+                .pricing // {} | to_entries[] |
+                select(.key | contains(".") | not) |
+                select(.value.specialType // "" | test("handshake") | not) |
+                .key
+            ' "$porkbun_file" 2>/dev/null | sort -u)
+        else
+            porkbun_filtered=$(echo "$porkbun_tlds" | grep -v '\.' | sort -u)
+        fi
+    fi
+
+    # ── Merge all registrar TLDs ──
+    local registrar_tlds
+    registrar_tlds=$(echo -e "${porkbun_filtered}\n${ovh_tlds}\n${inwx_tlds}\n${domainoffer_tlds}" | grep -E '^[a-z]' | sort -u | filter_skip)
+
+    if [[ "$all_sources" == true && -n "$tldlist_tlds" ]]; then
+        local tldlist_extra
+        tldlist_extra=$(comm -23 <(echo "$tldlist_tlds") <(echo "$registrar_tlds") 2>/dev/null || true)
+        local extra_count
+        extra_count=$(echo "$tldlist_extra" | grep -c . || echo 0)
+        echo -e "  ${YELLOW}tld-list.com:${NC} $extra_count TLDs with no registrar pricing (excluded)" >&2
+    fi
+
+    local all_tlds="$registrar_tlds"
+
+    # Also include IANA ccTLDs with RDAP/WHOIS
+    if [[ -n "$iana_tlds" ]]; then
+        local iana_cctlds
+        iana_cctlds=$(echo "$iana_tlds" | filter_cctlds | filter_skip)
+        local whois_serv_file_extra=""
+        if [[ -f "$CACHE_DIR/tld_serv_list.txt" ]]; then
+            whois_serv_file_extra="$CACHE_DIR/tld_serv_list.txt"
+        elif whois_serv_file_extra=$(fetch_whois_servers 2>/dev/null); then
+            true
+        fi
+        local iana_extra=0
+        while IFS= read -r cctld; do
+            [[ -z "$cctld" ]] && continue
+            if echo "$registrar_tlds" | grep -qx "$cctld" 2>/dev/null; then
+                continue
+            fi
+            if echo "$rdap_tlds" | grep -qx "$cctld" 2>/dev/null; then
+                all_tlds=$(echo -e "${all_tlds}\n${cctld}")
+                ((iana_extra++)) || true
+                continue
+            fi
+            if [[ -n "$whois_serv_file_extra" ]]; then
+                local srv
+                srv=$(get_whois_server "$cctld" "$whois_serv_file_extra")
+                if [[ -n "$srv" ]]; then
+                    all_tlds=$(echo -e "${all_tlds}\n${cctld}")
+                    ((iana_extra++)) || true
+                fi
+            fi
+        done <<< "$iana_cctlds"
+        all_tlds=$(echo "$all_tlds" | sort -u)
+        if (( iana_extra > 0 )); then
+            echo -e "  ${CYAN}IANA adds${NC}    $iana_extra ccTLDs with RDAP/WHOIS not at any registrar" >&2
+        fi
+    fi
+    local all_cctlds
+    all_cctlds=$(echo "$all_tlds" | filter_cctlds)
+
+    # Always generate pdom.txt / sdom.txt (even outside template mode)
+    generate_pdom_sdom() {
+        # Classify every merged TLD into one of two output files:
+        #   pdom.txt — TLDs with a known lookup server ("tld" = RDAP, "tld:server" = WHOIS)
+        #   sdom.txt — TLDs with no known server (candidates for --probe)
+        # Reads $all_tlds and $rdap_tlds from the enclosing main() scope.
+        local whois_serv_file=""
+        if whois_serv_file=$(fetch_whois_servers 2>/dev/null); then true; fi
+
+        # Manual overrides (from load_whois_overrides) take precedence over RDAP
+        # and the scraped WHOIS server list.
+        local -A manual_whois=()
+        while IFS=' ' read -r tld server; do
+            [[ -z "$tld" ]] && continue
+            manual_whois["$tld"]="$server"
+        done < <(load_whois_overrides)
+
+        local pdom_list=() sdom_list=()
+        while IFS= read -r tld; do
+            [[ -z "$tld" ]] && continue
+            if [[ -n "${manual_whois[$tld]:-}" ]]; then
+                pdom_list+=("${tld}:${manual_whois[$tld]}")
+            elif echo "$rdap_tlds" | grep -qx "$tld" 2>/dev/null; then
+                # TLD is in the RDAP bootstrap — bare entry, direct lookup works.
+                pdom_list+=("$tld")
+            else
+                local server=""
+                if [[ -n "$whois_serv_file" ]]; then
+                    server=$(get_whois_server "$tld" "$whois_serv_file")
+                fi
+                if [[ -n "$server" ]]; then
+                    pdom_list+=("${tld}:${server}")
+                else
+                    sdom_list+=("$tld")
+                fi
+            fi
+        done <<< "$all_tlds"
+
+        # NOTE(review): if pdom_list is empty, printf '%s\n' with no arguments
+        # still prints one blank line, so pdom.txt would contain a single empty
+        # line — confirm whether callers tolerate that (sdom below guards for it).
+        printf '%s\n' "${pdom_list[@]}" | sort -u > "$PDOM_FILE"
+        if (( ${#sdom_list[@]} > 0 )); then
+            printf '%s\n' "${sdom_list[@]}" | sort -u > "$SDOM_FILE"
+        else
+            > "$SDOM_FILE"  # truncate to an empty file
+        fi
+
+        # Run probes on sdom if enabled
+        if [[ "$DO_PROBE" == true ]]; then
+            run_whois_probes "$SDOM_FILE" "$PDOM_FILE"
+        fi
+    }
+
+    # Build source summary
+    local sources_used=()
+    [[ $porkbun_count -gt 0 ]] && sources_used+=("Porkbun")
+    [[ $ovh_count -gt 0 ]] && sources_used+=("OVH")
+    [[ $inwx_count -gt 0 ]] && sources_used+=("INWX")
+    [[ $domainoffer_count -gt 0 ]] && sources_used+=("DomainOffer")
+    local source_summary joined
+    joined=$(printf " + %s" "${sources_used[@]}")
+    joined="${joined:3}"
+    source_summary="${joined} + RDAP bootstrap + WHOIS server list"
+
+    case "$mode" in
+        raw)
+            generate_pdom_sdom
+            echo "$all_tlds"
+            ;;
+        toml)
+            generate_pdom_sdom
+            echo -e "${BOLD}# Purchasable TLDs from all registrars ($(echo "$all_tlds" | wc -l | tr -d ' ') total)${NC}"
+            echo "all_registrars = ["
+            echo "$all_tlds" | to_toml_array
+            echo "]"
+            echo ""
+            echo "# Country-code TLDs (purchasable)"
+            echo "cctlds = ["
+            echo "$all_cctlds" | to_toml_array
+            echo "]"
+            ;;
+        diff)
+            generate_pdom_sdom
+            echo -e "${BOLD}Comparing registrar data vs current Lists.toml${NC}"
+            echo ""
+            local current_all current_country
+            current_all=$(parse_current_lists "all")
+            current_country=$(parse_current_lists "country")
+
+            if [[ -n "$all_tlds" ]]; then
+                local missing_from_all
+                missing_from_all=$(comm -23 <(echo "$all_tlds" | filter_short_tlds | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
+                if [[ -n "$missing_from_all" ]]; then
+                    local mc
+                    mc=$(echo "$missing_from_all" | wc -l | tr -d ' ')
+                    echo -e "${YELLOW}TLDs at registrars but NOT in our 'all' list ($mc):${NC}"
+                    echo "$missing_from_all" | tr '\n' ' '
+                    echo "" && echo ""
+                fi
+
+                local missing_cc
+                missing_cc=$(comm -23 <(echo "$all_cctlds" | sort) <(echo "$current_country" | sort) 2>/dev/null || true)
+                if [[ -n "$missing_cc" ]]; then
+                    local mcc
+                    mcc=$(echo "$missing_cc" | wc -l | tr -d ' ')
+                    echo -e "${YELLOW}ccTLDs at registrars but NOT in 'country' list ($mcc):${NC}"
+                    echo "$missing_cc" | tr '\n' ' '
+                    echo "" && echo ""
+                fi
+
+                local extra
+                extra=$(comm -13 <(echo "$all_tlds" | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
+                if [[ -n "$extra" ]]; then
+                    local ec
+                    ec=$(echo "$extra" | wc -l | tr -d ' ')
+                    echo -e "${CYAN}TLDs in our 'all' list but NOT at any registrar ($ec):${NC}"
+                    echo "$extra" | tr '\n' ' '
+                    echo "" && echo ""
+                fi
+            fi
+
+            if [[ -n "$rdap_tlds" && -n "$current_all" ]]; then
+                local no_rdap
+                no_rdap=$(comm -23 <(echo "$current_all" | sort) <(echo "$rdap_tlds" | sort) 2>/dev/null || true)
+                if [[ -n "$no_rdap" ]]; then
+                    local nrc
+                    nrc=$(echo "$no_rdap" | wc -l | tr -d ' ')
+                    echo -e "${RED}TLDs in our lists with NO RDAP server ($nrc) — need WHOIS fallback:${NC}"
+                    echo "$no_rdap" | tr '\n' ' '
+                    echo ""
+                fi
+            fi
+            ;;
+        template)
+            generate_template "$registrar_tlds" "$rdap_tlds" "$source_summary" > "$OUTPUT_TOML"
+            echo -e "  ${GREEN}Lists.toml written to:${NC} ${OUTPUT_TOML}" >&2
+            echo -e "  ${GREEN}pdom.txt:${NC} $(wc -l < "$PDOM_FILE" | tr -d ' ') purchasable TLDs with servers" >&2
+            echo -e "  ${GREEN}sdom.txt:${NC} $(wc -l < "$SDOM_FILE" | tr -d ' ') TLDs with no known server" >&2
+            ;;
+        summary)
+            generate_pdom_sdom
+            echo -e "${BOLD}═══ TLD Source Summary ═══${NC}"
+            echo ""
+            [[ $porkbun_count -gt 0 ]]      && echo -e "  ${GREEN}Porkbun${NC}      $(echo "$porkbun_filtered" | grep -c . || echo 0) TLDs ($(echo "$porkbun_filtered" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+            [[ $ovh_count -gt 0 ]]          && echo -e "  ${GREEN}OVH${NC}          $ovh_count TLDs ($(echo "$ovh_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+            [[ $inwx_count -gt 0 ]]         && echo -e "  ${GREEN}INWX${NC}         $inwx_count TLDs ($(echo "$inwx_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+            [[ $domainoffer_count -gt 0 ]]  && echo -e "  ${GREEN}DomainOffer${NC}  $domainoffer_count TLDs ($(echo "$domainoffer_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+            [[ $tldlist_count -gt 0 ]]      && echo -e "  ${GREEN}tld-list.com${NC} $tldlist_count TLDs (community registry)"
+            [[ $iana_count -gt 0 ]]         && echo -e "  ${GREEN}IANA${NC}         $iana_count TLDs"
+            [[ $rdap_count -gt 0 ]]         && echo -e "  ${GREEN}RDAP${NC}         $rdap_count TLDs with lookup servers"
+            echo ""
+
+            if [[ $porkbun_count -gt 0 && $ovh_count -gt 0 ]]; then
+                local ovh_unique inwx_unique domainoffer_unique
+                ovh_unique=$(comm -23 <(echo "$ovh_tlds" | sort) <(echo "$porkbun_filtered" | sort) | wc -l | tr -d ' ')
+                echo -e "  ${CYAN}OVH adds${NC}     $ovh_unique TLDs not on Porkbun"
+                if [[ $inwx_count -gt 0 ]]; then
+                    inwx_unique=$(comm -23 <(echo "$inwx_tlds" | sort) <(echo -e "${porkbun_filtered}\n${ovh_tlds}" | sort -u) | wc -l | tr -d ' ')
+                    echo -e "  ${CYAN}INWX adds${NC}    $inwx_unique TLDs not on Porkbun/OVH"
+                fi
+                if [[ $domainoffer_count -gt 0 ]]; then
+                    domainoffer_unique=$(comm -23 <(echo "$domainoffer_tlds" | sort) <(echo -e "${porkbun_filtered}\n${ovh_tlds}\n${inwx_tlds}" | sort -u) | wc -l | tr -d ' ')
+                    echo -e "  ${CYAN}DomainOffer adds${NC} $domainoffer_unique TLDs not on Porkbun/OVH/INWX"
+                fi
+                echo ""
+            fi
+
+            echo -e "  ${BOLD}Merged purchasable:${NC} $(echo "$all_tlds" | wc -l | tr -d ' ') TLDs"
+            echo -e "  ${BOLD}Merged ccTLDs:${NC}      $(echo "$all_cctlds" | wc -l | tr -d ' ')"
+            echo -e "  ${BOLD}pdom.txt:${NC}           $(wc -l < "$PDOM_FILE" | tr -d ' ') with known servers"
+            echo -e "  ${BOLD}sdom.txt:${NC}           $(wc -l < "$SDOM_FILE" | tr -d ' ') with no known server"
+            echo ""
+            echo -e "  Workdir: ${CYAN}$WORK_DIR${NC}"
+            echo -e "  Config:  ${CYAN}$CONF_FILE${NC}"
+            echo -e "  Use ${BOLD}--diff${NC} to compare against Lists.toml"
+            echo -e "  Use ${BOLD}--toml${NC} to output TOML-ready arrays"
+            echo -e "  Use ${BOLD}--template${NC} to generate Lists.toml into workdir"
+            echo -e "  Use ${BOLD}--probe${NC} to probe unknown TLDs for WHOIS servers"
+            echo -e "  Use ${BOLD}--all-sources${NC} to also fetch tld-list.com"
+            echo -e "  Use ${BOLD}--raw${NC}  for raw TLD list (one per line)"
+            ;;
+    esac
+}
+
+main "$@"

+ 36 - 0
scripts/check_bogus.py

@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+"""Check all curated TLDs in violator.conf against Lists.toml all section.
+
+For each curated [list.NAME] section in scripts/violator.conf, report any
+TLD that does not appear in the generated Lists.toml `all` array, so bogus
+entries can be removed from the curated lists.
+"""
+import re
+
+# Parse all TLDs from Lists.toml
+with open('scripts/violator-workdir/Lists.toml') as f:
+    toml = f.read()
+
+# NOTE(review): m is None when no `all = [...]` array exists (e.g. template
+# was never generated) and the following line raises AttributeError — the
+# script assumes the workdir Lists.toml has been produced first.
+m = re.search(r'^all = \[(.*?)\]', toml, re.DOTALL | re.MULTILINE)
+# The deliberately unterminated quote matches the TLD prefix of both bare
+# "tld" entries and "tld:whois.server" override entries.
+all_tlds = set(re.findall(r'"([a-z][a-z0-9]*)', m.group(1)))
+
+# Parse violator.conf
+with open('scripts/violator.conf') as f:
+    conf = f.read()
+
+# Find all curated list sections and their tlds
+sections = re.findall(r'\[list\.(\w+)\].*?type\s*=\s*curated.*?tlds\s*=\s*(.*?)(?=\n\[|\Z)', conf, re.DOTALL)
+
+bogus = {}
+for name, tlds_block in sections:
+    words = re.findall(r'[a-z][a-z0-9]*', tlds_block)
+    # skip config keywords that the loose word-regex also matches
+    keywords = {'type', 'curated', 'description', 'tlds', 'min', 'max', 'filter', 'all'}
+    for w in words:
+        if w not in all_tlds and w not in keywords:
+            bogus.setdefault(name, []).append(w)
+
+if not bogus:
+    print("All curated TLDs exist in the all list!")
+else:
+    total = 0
+    for name, bads in bogus.items():
+        print(f"\n[list.{name}] — {len(bads)} fake TLDs:")
+        print("  " + " ".join(bads))
+        total += len(bads)
+    print(f"\nTotal: {total} TLDs to remove")

+ 0 - 816
scripts/fetch-tlds.sh

@@ -1,816 +0,0 @@
-#!/usr/bin/env bash
-# fetch-tlds.sh — Scrape purchasable TLD lists from registrar APIs
-# Outputs clean sorted lists for use in Lists.toml
-#
-# Usage:
-#   ./scripts/fetch-tlds.sh              # fetch all, show summary
-#   ./scripts/fetch-tlds.sh porkbun      # porkbun only
-#   ./scripts/fetch-tlds.sh inwx         # inwx only
-#   ./scripts/fetch-tlds.sh --raw        # output raw TLD lists (one per line)
-#   ./scripts/fetch-tlds.sh --toml       # output TOML-ready arrays
-#   ./scripts/fetch-tlds.sh --diff       # compare against current Lists.toml
-#   ./scripts/fetch-tlds.sh --template   # generate full Lists.toml with whois overrides if necessary
-# 
-# Notes: this helper script is largely AI-generated (written at 4 a.m. rather than by hand — most of the Rust, by contrast, is hand-written) and it works well enough.
-# Correction: the initial Porkbun fetching was mostly hand-written, but Porkbun lacked many TLDs, so the rest was generated to fill the gaps.
-
-set -euo pipefail
-
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
-LISTS_TOML="$PROJECT_DIR/Lists.toml"
-CACHE_DIR="$PROJECT_DIR/.tld-cache"
-mkdir -p "$CACHE_DIR"
-
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-YELLOW='\033[1;33m'
-CYAN='\033[0;36m'
-BOLD='\033[1m'
-NC='\033[0m'
-
-# ─── Porkbun ────────────────────────────────────────────────────────────────
-fetch_porkbun() {
-    local cache="$CACHE_DIR/porkbun.json"
-    local max_age=86400 # 24h cache
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching Porkbun pricing API...${NC}" >&2
-    # Uses Porkbun's public pricing endpoint — free to call, no API key needed.
-    if curl -sf -X POST "https://api.porkbun.com/api/json/v3/pricing/get" \
-        -H "Content-Type: application/json" \
-        -d '{}' \
-        -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${RED}Failed to fetch Porkbun data${NC}" >&2
-        return 1
-    fi
-}
-
-parse_porkbun() {
-    local json_file="$1"
-    # Extract TLD keys from the pricing response
-    # Response format: {"status":"SUCCESS","pricing":{"com":{...},"net":{...},...}}
-    if command -v jq &>/dev/null; then
-        jq -r '.pricing // {} | keys[]' "$json_file" 2>/dev/null | sort -u
-    else
-        # Fallback: grep for TLD keys (less reliable but works)
-        grep -o '"[a-z][a-z0-9.-]*":{' "$json_file" | sed 's/"//g; s/:{//' | sort -u
-    fi
-}
-
-# ─── INWX ───────────────────────────────────────────────────────────────────
-fetch_inwx() {
-    local cache="$CACHE_DIR/inwx.html"
-    local max_age=86400
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching INWX domain list...${NC}" >&2
-    # INWX domain check page has TLD list embedded as JSON
-    if curl -sfL "https://www.inwx.de/en/domain/check" \
-        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
-        -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${YELLOW}Could not fetch INWX${NC}" >&2
-        return 1
-    fi
-}
-
-parse_inwx() {
-    local html_file="$1"
-    # TLDs are embedded as JSON objects with "tld":"xxx" in the page
-    grep -oE '"tld":"[a-z]{2,20}"' "$html_file" | sed 's/"tld":"//;s/"//' | sort -u
-}
-
-# ─── OVH ────────────────────────────────────────────────────────────────────
-fetch_ovh() {
-    local cache="$CACHE_DIR/ovh.json"
-    local max_age=86400
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching OVH domain extensions...${NC}" >&2
-    if curl -sf "https://www.ovh.com/engine/apiv6/domain/extensions" \
-        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
-        -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${YELLOW}Could not fetch OVH extensions${NC}" >&2
-        return 1
-    fi
-}
-
-parse_ovh() {
-    local json_file="$1"
-    if command -v jq &>/dev/null; then
-        # Only top-level TLDs (no dots = not sub-TLDs like com.au)
-        jq -r '.[]' "$json_file" 2>/dev/null | grep -vE '\.' | sort -u
-    else
-        grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | grep -vE '\.' | sort -u
-    fi
-}
-
-# ─── tld-list.com (comprehensive registry, free basic list) ────────────────
-fetch_tldlist() {
-    local cache="$CACHE_DIR/tldlist-basic.txt"
-    local max_age=86400
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching tld-list.com basic list...${NC}" >&2
-    if curl -sf "https://tld-list.com/df/tld-list-basic.csv" \
-        -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
-        -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${YELLOW}Could not fetch tld-list.com${NC}" >&2
-        return 1
-    fi
-}
-
-parse_tldlist() {
-    local file="$1"
-    # One TLD per line, CR/LF endings, includes IDN entries — filter to ASCII only
-    tr -d '\r' < "$file" | grep -E '^[a-z][a-z0-9]*$' | sort -u
-}
-
-# ─── IANA root zone (fallback) ──────────────────────────────────────────────
-fetch_iana() {
-    local cache="$CACHE_DIR/iana-tlds.txt"
-    local max_age=604800 # 7 days
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching IANA TLD list...${NC}" >&2
-    if curl -sf "https://data.iana.org/TLD/tlds-alpha-by-domain.txt" -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${RED}Failed to fetch IANA list${NC}" >&2
-        return 1
-    fi
-}
-
-parse_iana() {
-    local file="$1"
-    # Skip header line, lowercase everything (2-letter ccTLD filtering is done separately in parse_iana_cctlds)
-    tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | sort -u
-}
-
-parse_iana_cctlds() {
-    local file="$1"
-    tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | grep -E '^[a-z]{2}$' | sort -u
-}
-
-# ─── RDAP bootstrap (what actually has lookup servers) ──────────────────────
-fetch_rdap() {
-    local cache="$CACHE_DIR/rdap-dns.json"
-    local max_age=86400
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching RDAP bootstrap...${NC}" >&2
-    if curl -sf "https://data.iana.org/rdap/dns.json" -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${RED}Failed to fetch RDAP bootstrap${NC}" >&2
-        return 1
-    fi
-}
-
-parse_rdap_tlds() {
-    local json_file="$1"
-    if command -v jq &>/dev/null; then
-        jq -r '.services[][] | .[]' "$json_file" 2>/dev/null | grep -v '^http' | tr '[:upper:]' '[:lower:]' | sort -u
-    else
-        grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | sort -u
-    fi
-}
-
-# ─── WHOIS server list (rfc1036/whois project) ─────────────────────────────
-fetch_whois_servers() {
-    local cache="$CACHE_DIR/tld_serv_list.txt"
-    local max_age=604800 # 7 days
-
-    if [[ -f "$cache" ]]; then
-        local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
-        if (( age < max_age )); then
-            echo "$cache"
-            return 0
-        fi
-    fi
-
-    echo -e "${CYAN}Fetching WHOIS server list...${NC}" >&2
-    if curl -sf "https://raw.githubusercontent.com/rfc1036/whois/next/tld_serv_list" -o "$cache" 2>/dev/null; then
-        echo "$cache"
-    else
-        echo -e "${YELLOW}Could not fetch WHOIS server list${NC}" >&2
-        return 1
-    fi
-}
-
-# Get the WHOIS server for a given TLD from the cached server list
-# Returns empty string if no server found or server is NONE/ARPA/etc
-get_whois_server() {
-    local tld="$1"
-    local serv_file="$2"
-    # Format: .tld  [optional-tag] server
-    # Some entries have VERISIGN or similar tag before the server
-    local line
-    line=$(grep -E "^\\.${tld}[[:space:]]" "$serv_file" 2>/dev/null | head -1)
-    if [[ -z "$line" ]]; then
-        echo ""
-        return
-    fi
-    # Extract server: last word on the line that looks like a hostname
-    local server
-    server=$(echo "$line" | awk '{
-        for (i=NF; i>=2; i--) {
-            if ($i ~ /^[a-z0-9].*\.[a-z]/) { print $i; exit }
-        }
-    }')
-    # Filter out unusable entries
-    if [[ "$server" == "NONE" || "$server" == "ARPA" || -z "$server" || "$server" == http* ]]; then
-        echo ""
-    else
-        echo "$server"
-    fi
-}
-
-# Get WHOIS server from IANA directly (slower, single TLD at a time)
-get_iana_whois_server() {
-    local tld="$1"
-    curl -s "https://www.iana.org/domains/root/db/${tld}.html" 2>/dev/null \
-        | sed -n 's/.*WHOIS Server:<\/b> *\([^ <]*\).*/\1/p' \
-        | head -1
-}
-
-# ─── Extract current Lists.toml entries ─────────────────────────────────────
-parse_current_lists() {
-    local list_name="${1:-all}"
-    if [[ ! -f "$LISTS_TOML" ]]; then
-        echo -e "${RED}No Lists.toml found at $LISTS_TOML${NC}" >&2
-        return 1
-    fi
-    # Extract TLDs from a named list, stripping quotes, colons (whois overrides), commas
-    awk -v list="$list_name" '
-        $0 ~ "^"list" *= *\\[" { found=1; next }
-        found && /^\]/ { exit }
-        found && /^[[:space:]]*\[/ { exit }
-        found {
-            gsub(/["\t,]/, " ")
-            n = split($0, parts, " ")
-            for (i=1; i<=n; i++) {
-                if (parts[i] != "") {
-                    # Strip whois override suffix
-                    sub(/:.*/, "", parts[i])
-                    print parts[i]
-                }
-            }
-        }
-    ' "$LISTS_TOML" | sort -u
-}
-
-# ─── Helpers ────────────────────────────────────────────────────────────────
-to_toml_array() {
-    # Reads TLDs from stdin, outputs TOML array format (wrapped at ~80 chars)
-    local tlds=()
-    while IFS= read -r tld; do
-        [[ -z "$tld" ]] && continue
-        tlds+=("$tld")
-    done
-    local line='\t'
-    local first=true
-    for tld in "${tlds[@]}"; do
-        local entry="\"$tld\""
-        if $first; then
-            line+="$entry"
-            first=false
-        else
-            local test_line="$line, $entry"
-            if (( ${#test_line} > 78 )); then
-                echo -e "$line,"
-                line="\t$entry"
-            else
-                line+=", $entry"
-            fi
-        fi
-    done
-    [[ -n "$line" ]] && echo -e "$line,"
-}
-
-filter_cctlds() {
-    grep -E '^[a-z]{2}$'
-}
-
-filter_short_tlds() {
-    # 2-6 char TLDs that are useful for domain hacking
-    grep -E '^[a-z]{2,6}$'
-}
-
-# ─── Known broken/unregistrable TLDs ────────────────────────────────────────
-SKIP_TLDS="bl bq eh mf gb bv sj kp hm"
-
-filter_skip() {
-    local skip_pattern
-    skip_pattern=$(echo "$SKIP_TLDS" | tr ' ' '|')
-    grep -vE "^($skip_pattern)$"
-}
-
-# ─── Template generation ────────────────────────────────────────────────────
-# Generates a full Lists.toml with:
-#   - "tld"              for TLDs with RDAP support (direct lookup works)
-#   - "tld:whois.server" for TLDs needing WHOIS fallback
-#   - skip TLDs omitted entirely (no Patch.toml needed)
-#
-# Uses: Porkbun + OVH + INWX (purchasable), RDAP bootstrap (has server?), WHOIS server list
-# With --all-sources: also cross-references tld-list.com
-generate_template() {
-    local all_registrar_tlds="$1"
-    local rdap_tlds="$2"
-    local source_summary="$3"
-
-    # Fetch WHOIS server list for fallback
-    local whois_serv_file=""
-    if whois_serv_file=$(fetch_whois_servers 2>/dev/null); then
-        true  # got it
-    fi
-
-    # The input is already merged + filtered from all registrar sources
-    local buyable_tlds
-    buyable_tlds=$(echo "$all_registrar_tlds" | filter_skip | sort -u)
-
-    local buyable_count
-    buyable_count=$(echo "$buyable_tlds" | grep -c . || echo 0)
-
-    # Build annotated TLD list: "tld" or "tld:whois_server"
-    # A TLD needs a whois override if it's NOT in the RDAP bootstrap
-    local annotated_all=()
-    local annotated_cc=()
-    local rdap_hit=0 whois_hit=0 bare_hit=0
-
-    while IFS= read -r tld; do
-        [[ -z "$tld" ]] && continue
-        local entry=""
-        if echo "$rdap_tlds" | grep -qx "$tld" 2>/dev/null; then
-            # Has RDAP server — no override needed
-            entry="$tld"
-            ((rdap_hit++)) || true
-        else
-            # No RDAP — try to find WHOIS server
-            local server=""
-            if [[ -n "$whois_serv_file" ]]; then
-                server=$(get_whois_server "$tld" "$whois_serv_file")
-            fi
-            if [[ -n "$server" ]]; then
-                entry="${tld}:${server}"
-                ((whois_hit++)) || true
-            else
-                # No known server — include bare, hoardom will try common patterns
-                entry="$tld"
-                ((bare_hit++)) || true
-            fi
-        fi
-        annotated_all+=("$entry")
-        # Also track ccTLDs (2-letter entries)
-        local base_tld="${tld%%:*}"
-        if [[ "$base_tld" =~ ^[a-z]{2}$ ]]; then
-            annotated_cc+=("$entry")
-        fi
-    done <<< "$buyable_tlds"
-
-    echo -e "${CYAN}Building template...${NC}" >&2
-    echo -e "  ${GREEN}${rdap_hit}${NC} TLDs with RDAP (direct lookup)" >&2
-    echo -e "  ${YELLOW}${whois_hit}${NC} TLDs with WHOIS override" >&2
-    echo -e "  ${RED}${bare_hit}${NC} TLDs with no known server (will probe)" >&2
-    echo "" >&2
-
-    # ── Curated lists (bare TLD names, annotated automatically) ─────────
-
-    # Standard: com, net, org + generally desirable / well-known TLDs
-    local standard_tlds=(
-        "com" "net" "org" "io" "co" "dev" "app" "me" "info"
-        "biz" "one" "xyz" "online" "site" "tech" "pro" "tv"
-        "cc" "to" "sh" "li" "fm" "am" "gg" "ws" "la"
-        "ms" "nu" "cx" "mn" "st" "tel" "ai" "id" "in"
-        "it" "is" "at" "be" "de" "eu" "fr" "nl" "se"
-        "uk" "us" "ca" "au" "nz" "club" "blog" "art" "fun"
-        "lol" "wtf" "page" "link" "space" "store" "shop"
-    )
-
-    # Decent: the best of the best — com, net, org, io + short desirable ones
-    # that work great for domain hacking and are punchy
-    local decent_tlds=(
-        "com" "net" "org" "io" "dev" "app" "co" "me"
-        "ai" "sh" "to" "fm" "tv" "gg" "cc" "li" "am"
-        "la" "nu" "id" "in" "it" "is" "at" "ws"
-        "one" "pro" "bio" "art" "ink" "run" "win" "new"
-        "lol" "pub" "fun" "vet" "fit" "rip" "wtf" "zip"
-    )
-
-    # Swiss: standard-like but with Swiss / Central European related TLDs up front
-    local swiss_tlds=(
-        "com" "net" "org" "ch" "li" "swiss" "zuerich"
-        "io" "co" "dev" "app" "me" "info" "one" "pro"
-        "de" "at" "fr" "it" "eu"
-        "tech" "online" "site" "shop" "store"
-        "biz" "xyz" "tv" "cc" "to" "sh" "fm" "am" "gg"
-    )
-
-    # Annotate curated lists with whois overrides where needed
-    annotate_list() {
-        local -n input_list=$1
-        local result=()
-        for bare_tld in "${input_list[@]}"; do
-            local found=false
-            for ann in "${annotated_all[@]}"; do
-                local ann_base="${ann%%:*}"
-                if [[ "$ann_base" == "$bare_tld" ]]; then
-                    result+=("$ann")
-                    found=true
-                    break
-                fi
-            done
-            if ! $found; then
-                result+=("$bare_tld")
-            fi
-        done
-        printf '%s\n' "${result[@]}"
-    }
-
-    # Length-based filtered lists from annotated_all
-    filter_annotated_by_length() {
-        local min="$1"
-        local max="$2"
-        for ann in "${annotated_all[@]}"; do
-            local base="${ann%%:*}"
-            local len=${#base}
-            if (( len >= min && len <= max )); then
-                echo "$ann"
-            fi
-        done
-    }
-
-    # ─── Output ─────────────────────────────────────────────────────────
-    local date_str
-    date_str=$(date +%Y-%m-%d)
-
-    cat <<HEADER
-# Lists.toml — Built-in TLD lists for hoardom
-# Auto-generated on ${date_str} from ${source_summary}
-#
-# Format:
-#   "tld"              — TLD has RDAP support, lookup works directly
-#   "tld:whois.server" — No RDAP: use this WHOIS server for fallback
-#
-# ${buyable_count} purchasable TLDs (handshake/sub-TLDs excluded)
-# ${rdap_hit} have RDAP, ${whois_hit} need WHOIS override, ${bare_hit} will auto-probe
-#
-# Lists:
-#   standard  — common desirable TLDs (com, net, org, io, dev, ...)
-#   decent    — very best short punchy TLDs for domain hacking
-#   swiss     — standard-like but with Swiss/Central European TLDs prioritized
-#   country   — all 2-letter country-code TLDs
-#   two       — all 2-letter TLDs
-#   three     — all TLDs with 3 or fewer letters
-#   four      — all TLDs with exactly 4 letters
-#   long      — all TLDs with 5+ letters
-#   all       — everything
-
-HEADER
-
-    echo "standard = ["
-    annotate_list standard_tlds | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "decent = ["
-    annotate_list decent_tlds | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "swiss = ["
-    annotate_list swiss_tlds | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "country = ["
-    printf '%s\n' "${annotated_cc[@]}" | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "two = ["
-    filter_annotated_by_length 2 2 | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "three = ["
-    filter_annotated_by_length 2 3 | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "four = ["
-    filter_annotated_by_length 4 4 | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "long = ["
-    filter_annotated_by_length 5 99 | to_toml_array
-    echo "]"
-    echo ""
-
-    echo "all = ["
-    printf '%s\n' "${annotated_all[@]}" | to_toml_array
-    echo "]"
-}
-
-# ─── Main ───────────────────────────────────────────────────────────────────
-main() {
-    local mode="summary"
-    local source="all"
-    local all_sources=false
-
-    for arg in "$@"; do
-        case "$arg" in
-            --raw)  mode="raw" ;;
-            --toml) mode="toml" ;;
-            --diff) mode="diff" ;;
-            --template) mode="template" ;;
-            --all-sources) all_sources=true ;;
-            porkbun) source="porkbun" ;;
-            inwx) source="inwx" ;;
-            ovh) source="ovh" ;;
-            iana) source="iana" ;;
-            rdap) source="rdap" ;;
-            tldlist) source="tldlist" ;;
-            --help|-h)
-                echo "Usage: $0 [source] [--raw|--toml|--diff|--template] [--all-sources]"
-                echo ""
-                echo "Sources: porkbun, ovh, inwx, iana, rdap, tldlist"
-                echo ""
-                echo "Flags:"
-                echo "  --raw          Output raw TLD list (one per line)"
-                echo "  --toml         Output TOML-ready arrays"
-                echo "  --diff         Compare against current Lists.toml"
-                echo "  --template     Generate full Lists.toml with whois overrides"
-                echo "  --all-sources  Include tld-list.com for extra coverage (used as"
-                echo "                 a filter: only TLDs also in a registrar are kept)"
-                exit 0 ;;
-        esac
-    done
-
-    local porkbun_tlds="" inwx_tlds="" ovh_tlds="" iana_tlds="" rdap_tlds="" tldlist_tlds=""
-    local porkbun_count=0 inwx_count=0 ovh_count=0 iana_count=0 rdap_count=0 tldlist_count=0
-
-    # Template mode needs all registrar sources + rdap regardless of source filter
-    if [[ "$mode" == "template" ]]; then
-        source="all"
-    fi
-
-    # ── Fetch from selected sources ──
-
-    if [[ "$source" == "all" || "$source" == "porkbun" ]]; then
-        if porkbun_file=$(fetch_porkbun); then
-            porkbun_tlds=$(parse_porkbun "$porkbun_file")
-            porkbun_count=$(echo "$porkbun_tlds" | grep -c . || true)
-        fi
-    fi
-
-    if [[ "$source" == "all" || "$source" == "ovh" ]]; then
-        if ovh_file=$(fetch_ovh); then
-            ovh_tlds=$(parse_ovh "$ovh_file")
-            ovh_count=$(echo "$ovh_tlds" | grep -c . || true)
-        fi
-    fi
-
-    if [[ "$source" == "all" || "$source" == "inwx" ]]; then
-        if inwx_file=$(fetch_inwx 2>/dev/null); then
-            inwx_tlds=$(parse_inwx "$inwx_file")
-            inwx_count=$(echo "$inwx_tlds" | grep -c . || true)
-        fi
-    fi
-
-    if [[ "$source" == "all" || "$source" == "iana" ]]; then
-        if iana_file=$(fetch_iana); then
-            iana_tlds=$(parse_iana "$iana_file")
-            iana_count=$(echo "$iana_tlds" | grep -c . || true)
-        fi
-    fi
-
-    if [[ "$source" == "all" || "$source" == "rdap" ]]; then
-        if rdap_file=$(fetch_rdap); then
-            rdap_tlds=$(parse_rdap_tlds "$rdap_file")
-            rdap_count=$(echo "$rdap_tlds" | grep -c . || true)
-        fi
-    fi
-
-    if [[ "$all_sources" == true || "$source" == "tldlist" ]]; then
-        if tldlist_file=$(fetch_tldlist); then
-            tldlist_tlds=$(parse_tldlist "$tldlist_file")
-            tldlist_count=$(echo "$tldlist_tlds" | grep -c . || true)
-        fi
-    fi
-
-    # ── Filter porkbun: no handshake, no sub-TLDs ──
-    local porkbun_filtered=""
-    if [[ -n "$porkbun_tlds" ]]; then
-        local porkbun_file="$CACHE_DIR/porkbun.json"
-        if command -v jq &>/dev/null && [[ -f "$porkbun_file" ]]; then
-            porkbun_filtered=$(jq -r '
-                .pricing // {} | to_entries[] |
-                select(.key | contains(".") | not) |
-                select(.value.specialType // "" | test("handshake") | not) |
-                .key
-            ' "$porkbun_file" 2>/dev/null | sort -u)
-        else
-            porkbun_filtered=$(echo "$porkbun_tlds" | grep -v '\.' | sort -u)
-        fi
-    fi
-
-    # ── Merge all registrar-confirmed purchasable TLDs ──
-    # Only TLDs that have pricing at a real registrar are included
-    local registrar_tlds
-    registrar_tlds=$(echo -e "${porkbun_filtered}\n${ovh_tlds}\n${inwx_tlds}" | grep -E '^[a-z]' | sort -u | filter_skip)
-
-    # If --all-sources, also include tld-list.com TLDs that appear in at least
-    # one registrar (cross-reference = purchasable + known to community list)
-    if [[ "$all_sources" == true && -n "$tldlist_tlds" ]]; then
-        # tld-list.com entries that are ALSO in a registrar = confirmed purchasable
-        local tldlist_confirmed
-        tldlist_confirmed=$(comm -12 <(echo "$tldlist_tlds") <(echo "$registrar_tlds") 2>/dev/null || true)
-        # They're already in registrar_tlds, so this just validates.
-        # More useful: tld-list entries NOT in any registrar = brand/reserved (skip them)
-        local tldlist_extra
-        tldlist_extra=$(comm -23 <(echo "$tldlist_tlds") <(echo "$registrar_tlds") 2>/dev/null || true)
-        local extra_count
-        extra_count=$(echo "$tldlist_extra" | grep -c . || echo 0)
-        echo -e "  ${YELLOW}tld-list.com:${NC} $extra_count TLDs with no registrar pricing (brand/reserved, excluded)" >&2
-    fi
-
-    local all_tlds="$registrar_tlds"
-    local all_cctlds
-    all_cctlds=$(echo "$all_tlds" | filter_cctlds)
-
-    # Build source summary string for template header
-    local sources_used=()
-    [[ $porkbun_count -gt 0 ]] && sources_used+=("Porkbun")
-    [[ $ovh_count -gt 0 ]] && sources_used+=("OVH")
-    [[ $inwx_count -gt 0 ]] && sources_used+=("INWX")
-    local source_summary
-    local joined
-    joined=$(printf " + %s" "${sources_used[@]}")
-    joined="${joined:3}" # strip leading " + "
-    source_summary="${joined} + RDAP bootstrap + WHOIS server list"
-
-    case "$mode" in
-        raw)
-            echo "$all_tlds"
-            ;;
-        toml)
-            echo -e "${BOLD}# Purchasable TLDs from all registrars ($(echo "$all_tlds" | wc -l | tr -d ' ') total)${NC}"
-            echo "all_registrars = ["
-            echo "$all_tlds" | to_toml_array
-            echo "]"
-            echo ""
-            echo "# Country-code TLDs (purchasable)"
-            echo "cctlds = ["
-            echo "$all_cctlds" | to_toml_array
-            echo "]"
-            ;;
-        diff)
-            echo -e "${BOLD}Comparing registrar data vs current Lists.toml${NC}"
-            echo ""
-            local current_all current_country
-            current_all=$(parse_current_lists "all")
-            current_country=$(parse_current_lists "country")
-
-            # TLDs in registrars but NOT in our 'all' list
-            if [[ -n "$all_tlds" ]]; then
-                local missing_from_all
-                missing_from_all=$(comm -23 <(echo "$all_tlds" | filter_short_tlds | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
-                if [[ -n "$missing_from_all" ]]; then
-                    local mc
-                    mc=$(echo "$missing_from_all" | wc -l | tr -d ' ')
-                    echo -e "${YELLOW}TLDs at registrars but NOT in our 'all' list ($mc):${NC}"
-                    echo "$missing_from_all" | tr '\n' ' '
-                    echo ""
-                    echo ""
-                fi
-
-                # ccTLDs at registrars but NOT in our 'country' list
-                local missing_cc
-                missing_cc=$(comm -23 <(echo "$all_cctlds" | sort) <(echo "$current_country" | sort) 2>/dev/null || true)
-                if [[ -n "$missing_cc" ]]; then
-                    local mcc
-                    mcc=$(echo "$missing_cc" | wc -l | tr -d ' ')
-                    echo -e "${YELLOW}ccTLDs at registrars but NOT in 'country' list ($mcc):${NC}"
-                    echo "$missing_cc" | tr '\n' ' '
-                    echo ""
-                    echo ""
-                fi
-
-                # TLDs in our 'all' list but NOT at any registrar
-                local extra
-                extra=$(comm -13 <(echo "$all_tlds" | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
-                if [[ -n "$extra" ]]; then
-                    local ec
-                    ec=$(echo "$extra" | wc -l | tr -d ' ')
-                    echo -e "${CYAN}TLDs in our 'all' list but NOT at any registrar ($ec):${NC}"
-                    echo "$extra" | tr '\n' ' '
-                    echo ""
-                    echo ""
-                fi
-            fi
-
-            # Check which of our TLDs have RDAP servers
-            if [[ -n "$rdap_tlds" && -n "$current_all" ]]; then
-                local no_rdap
-                no_rdap=$(comm -23 <(echo "$current_all" | sort) <(echo "$rdap_tlds" | sort) 2>/dev/null || true)
-                if [[ -n "$no_rdap" ]]; then
-                    local nrc
-                    nrc=$(echo "$no_rdap" | wc -l | tr -d ' ')
-                    echo -e "${RED}TLDs in our lists with NO RDAP server ($nrc) — need WHOIS fallback:${NC}"
-                    echo "$no_rdap" | tr '\n' ' '
-                    echo ""
-                fi
-            fi
-            ;;
-        template)
-            generate_template "$registrar_tlds" "$rdap_tlds" "$source_summary"
-            ;;
-        summary)
-            echo -e "${BOLD}═══ TLD Source Summary ═══${NC}"
-            echo ""
-            [[ $porkbun_count -gt 0 ]] && echo -e "  ${GREEN}Porkbun${NC}      $(echo "$porkbun_filtered" | grep -c . || echo 0) TLDs ($(echo "$porkbun_filtered" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
-            [[ $ovh_count -gt 0 ]]     && echo -e "  ${GREEN}OVH${NC}          $ovh_count TLDs ($(echo "$ovh_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
-            [[ $inwx_count -gt 0 ]]    && echo -e "  ${GREEN}INWX${NC}         $inwx_count TLDs ($(echo "$inwx_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
-            [[ $tldlist_count -gt 0 ]] && echo -e "  ${GREEN}tld-list.com${NC} $tldlist_count TLDs (community registry, no pricing)"
-            [[ $iana_count -gt 0 ]]    && echo -e "  ${GREEN}IANA${NC}         $iana_count TLDs"
-            [[ $rdap_count -gt 0 ]]    && echo -e "  ${GREEN}RDAP${NC}         $rdap_count TLDs with lookup servers"
-            echo ""
-
-            # Show what each registrar uniquely contributes
-            if [[ $porkbun_count -gt 0 && $ovh_count -gt 0 ]]; then
-                local ovh_unique inwx_unique
-                ovh_unique=$(comm -23 <(echo "$ovh_tlds" | sort) <(echo "$porkbun_filtered" | sort) | wc -l | tr -d ' ')
-                echo -e "  ${CYAN}OVH adds${NC}     $ovh_unique TLDs not on Porkbun"
-                if [[ $inwx_count -gt 0 ]]; then
-                    inwx_unique=$(comm -23 <(echo "$inwx_tlds" | sort) <(echo -e "${porkbun_filtered}\n${ovh_tlds}" | sort -u) | wc -l | tr -d ' ')
-                    echo -e "  ${CYAN}INWX adds${NC}    $inwx_unique TLDs not on Porkbun/OVH"
-                fi
-                echo ""
-            fi
-
-            echo -e "  ${BOLD}Merged purchasable:${NC} $(echo "$all_tlds" | wc -l | tr -d ' ') TLDs"
-            echo -e "  ${BOLD}Merged ccTLDs:${NC}      $(echo "$all_cctlds" | wc -l | tr -d ' ')"
-            echo ""
-            echo -e "  Cached data in: ${CYAN}$CACHE_DIR${NC}"
-            echo -e "  Use ${BOLD}--diff${NC} to compare against Lists.toml"
-            echo -e "  Use ${BOLD}--toml${NC} to output TOML-ready arrays"
-            echo -e "  Use ${BOLD}--template${NC} to generate template Lists.toml"
-            echo -e "  Use ${BOLD}--all-sources${NC} to also fetch tld-list.com"
-            echo -e "  Use ${BOLD}--raw${NC}  for raw TLD list (one per line)"
-            ;;
-    esac
-}
-
-main "$@"

+ 160 - 0
scripts/violator.conf

@@ -0,0 +1,160 @@
+# violator.conf — Configuration for auto-violator.sh
+# ────────────────────────────────────────────────────────────────────────────
+#
+# This file controls how auto-violator.sh builds TLD lists.
+# Lines starting with # are comments. Blank lines are ignored.
+# Sections are marked with [section_name].
+#
+# ────────────────────────────────────────────────────────────────────────────
+
+# This file, albeit aided by AI, was a huge pain to write — i hope yall appreciate it; open to suggestions for new lists and so on.
+
+# ── [whois_overrides] ───────────────────────────────────────────────────────
+# Manual WHOIS server assignments for TLDs where auto-detection fails.
+# Format: tld = whois.server.hostname
+# These take precedence over auto-detected servers.
+[whois_overrides]
+ax = whois.ax
+sx = whois.sx
+ss = whois.nic.ss
+iq = whois.iq
+es = whois.nic.es
+uy = whois.nic.org.uy
+bf = whois.registre.bf
+cf = whois.dot.cf
+gq = whois.dominio.gq
+ps = whois.registry.ps
+bharat = whois.nixiregistry.in
+shabaka = whois.nic.xn--ngbc5azd
+# ck = whois.nic.ck
+# kh = whois.nic.kh
+# np = whois.nic.np
+
+# ── [skip_tlds] ────────────────────────────────────────────────────────────
+# TLDs to always exclude from all lists (non-registrable, broken, no whois, not real etc.)
+# One per line or space-separated.
+[skip_tlds]
+bd bv adultblock bs bb adultblockplus an corp bt cnidn cnregional code cu cw cyrillic dpml domgate epp eth example az eg ba gm et cy gt ilangai hotel gw islam innet istambul itregional kid lanka jo lk1 jm nameemail gr gu kh nlm fk photograpy dj lr sagathan pa spreadbetting substack ne test tt py nr np twidn ck sv ni sz vuelos xk ph vn tj zw
+
+# ── [whois_probe] ──────────────────────────────────────────────────────────
+# Settings for probing unknown TLDs for WHOIS servers.
+# When enabled, TLDs in sdom.txt (no known server) will be tested
+# against common WHOIS server patterns to try to find a working server.
+# If found, they get moved to pdom.txt.
+#
+# enabled = true/false
+# timeout = connection timeout in seconds per probe
+# patterns = comma-separated patterns where {} is replaced with the TLD
+#   e.g. "whois.nic.{}" tries "whois.nic.com" for .com
+[whois_probe]
+enabled = true
+timeout = 1
+parallel = 16
+patterns = whois.nic.{}, whois.{}, whois.registry.{}, whois.nic.net.{}, whois.domains.{}, whois.isoc.org.{}
+
+# ── [lists] ────────────────────────────────────────────────────────────────
+# Define which lists appear in the generated Lists.toml.
+# Each list has:
+#   type = curated | filter | all
+#     curated  — manually specified TLDs (order preserved)
+#     filter   — auto-generated from all purchasable TLDs by length/pattern
+#     all      — everything
+#   tlds = space-separated TLD names (for curated lists)
+#   min_length / max_length = length filter (for filter lists)
+#   country_only = true  — only include 2-letter ccTLDs (for filter lists)
+
+[list.standard]
+type = curated
+description = should cover the basics
+tlds = com net org ch li eu co cc sh mx ms sx ss ax ac tc ie im tel talk plus surf aero wiki biz xyz top
+    ai ag am at be ca cz de dk es fi fm fr gg hk id ie im in is it jp kr la li nl no nu nz pl pt ro se sg si sk to tv tw uk us ws
+    app beer bio black blog blue cafe cam car cash cat chat click cloud club code cool day deal dev dog esq exchange express
+    fail farm fast fish foo free fun gay gold green guru hair help host hot how immo inc ing ink land link live lol love
+    me meme moe mov name network new news nexus ngo now ooo one online open page pics pink plus pro quest
+    red rest rip rocks run sale sex sexy shop show site social solutions space spot store stream surf systems
+    team tech tips tools tube uno vision vodka wiki win work world wtf xyz zone
+
+[list.compressed]
+type = curated
+description = should cover the basics
+tlds = com net org ch eu co cc sh mx ms sx ss ax ac tc ie im li tel talk plus surf aero wiki biz xyz top
+    ag ai am at au ca de dk es fi fm fr gg hk id in is it jp kr la li nl no nu nz se to tv uk us ws
+    app beer blue cloud club cool day dealer deals dev green hot info ink lol new ngo one ooo page pro red rip run sex sexy
+    site space tech vodka wtf
+
+[list.biased]
+type = curated
+description = personal favorites, short punchy memorable TLDs
+tlds = com net org ch eu co cc sh mx ms tel
+    aero ax eus exchange express one ooo open plus pro red run ss surf sx talk win
+    hot lol now rip wiki wtf
+
+[list.tech]
+type = curated
+description = tech / developer / IT oriented TLDs
+tlds = com net co cc dev app io me tech code systems software digital cloud
+    ai bar biz boo bot build cam cc click co codes computer email engineering fly foo gg gmbh
+    host hosting inc info link llc ltd network nexus ninja online page pro security sh
+    site solutions space surf to tools vision wiki zip zone
+
+[list.store]
+type = curated
+description = TLDs useful for shops, stores, brands and commerce
+tlds = com net co cc io me shop store market business sale deals deal
+    apartments auto autos auction bargains beauty beer bid bike boats boutique cafe cars casa cheap clothing
+    coffee compare condos cooking coupons delivery diamonds discount exchange express farm fashion flowers
+    food forsale furniture garden gifts gold hair homes house immo jewelry kitchen luxury makeup
+    motorcycles organic pizza promo property realestate rent rentals restaurant reviews shoes style
+    supplies supply toys watches wine
+
+[list.goofy]
+type = curated
+description = goofy ah TLDs, the silly the weird the wonderful
+tlds = lol wtf rip vodka beer pizza rocks ninja gay lgbt ceo
+    adult best bible bingo boo buzz casino cat cheap church cool dad dance day deal deals dog eus
+    exposed express fail fish free fun gratis gripe hiv hot ie kz legal love ly
+    me meme moi mom monster no now ooo party porn rich run sex sexy singles ss sucks surgery
+    rehab space vodka ye you zone xxx
+
+[list.sprichdeutsch]
+type = curated
+description = German-language and DACH-region TLDs
+tlds = de at ch li
+    berlin hamburg koeln cologne bayern nrw ruhr saarland
+    wien tirol zuerich swiss
+    gmbh kaufen jetzt schule haus
+    immobilien immo versicherung
+    reise reisen
+
+[list.country]
+type = filter
+description = all 2-letter country-code TLDs
+country_only = true
+
+[list.two]
+type = filter
+description = all 2-letter TLDs (only 2 letter)
+min_length = 2
+max_length = 2
+
+[list.three]
+type = filter
+description = all 3-letter TLDs (only 3 letter)
+min_length = 3
+max_length = 3
+
+[list.four]
+type = filter
+description = all TLDs with 4 or fewer letters
+min_length = 2
+max_length = 4
+
+[list.long]
+type = filter
+description = all TLDs with 5+ letters
+min_length = 5
+max_length = 99
+
+[list.all]
+type = all
+description = everything

+ 99 - 81
src/app.rs

@@ -1,8 +1,11 @@
-// e gui emo wrapper for the hoardom tui
-// spawns hoardom --tui in a pty and renders it in its own window
-// so it shows up with its own icon in the dock (mac) or taskbar (linux) for people that want that
+// e gui emo minus wrapper, an undercooked taco wrap for em tui apps with mouse, some symbol and scroll support
+//  adapted slightly for the hoardom tui
 //
-// built with: cargo build --features gui
+// only built when u enable taco wrapper with : "cargo build --features gui"
+// 
+// like basically most of the code in this file is either recycled from my eguiemo-minus project i have yet to release, stolen from stackoverflow and in some cases repaired by generative ai (where ive added comments)
+// i didnt feel like writing an entire terminal emulator from scratch yall guys please chill
+
 
 use eframe::egui::{self, Color32, FontId, Rect, Sense};
 use portable_pty::{native_pty_system, CommandBuilder, PtySize};
@@ -15,10 +18,8 @@ use std::sync::{Arc, Mutex};
 use std::thread;
 use std::time::Duration;
 
-// thx gemma for the formated section comments i didnt want... thats not what I wanted when i said sanetize my comments from swearing and german language.
-// i mean it is prettier than my usual way, will check otherfiles to see if can atleast make it somewhat consistant.
 
-// ---- constants ----
+// constants
 
 const FONT_SIZE: f32 = 14.0;
 const DEFAULT_COLS: u16 = 120;
@@ -27,7 +28,7 @@ const DEFAULT_ROWS: u16 = 35;
 const DEFAULT_FG: Color32 = Color32::from_rgb(204, 204, 204);
 const DEFAULT_BG: Color32 = Color32::from_rgb(24, 24, 24);
 
-// ----- terminal colors -----
+// terminally ill colors 
 
 #[derive(Clone, Copy, PartialEq)]
 enum TermColor {
@@ -37,7 +38,7 @@ enum TermColor {
 }
 
 
-// ai made this fn because i gave up on trying.
+// this fn wasnt made by me i think i let ai handle this or i stole it from somewhere idk just so yk
 fn ansi_color(idx: u8) -> Color32 {
     match idx {
         0 => Color32::from_rgb(0, 0, 0),
@@ -93,7 +94,7 @@ fn resolve_color(c: TermColor, is_fg: bool) -> Color32 {
     }
 }
 
-// ----- terminal cell -----
+// terminal incell stuff
 
 #[derive(Clone, Copy)]
 struct Cell {
@@ -146,9 +147,7 @@ impl Cell {
     }
 }
 
-// ----- terminal grid -----
-
-
+// terminal grid 
 // contains quiet a few ai solved bugfixes they seem ... fineish... to me and work
 struct TermGrid {
     cells: Vec<Vec<Cell>>,
@@ -172,14 +171,14 @@ struct TermGrid {
     // alternate screen buffer
     alt_saved: Option<(Vec<Vec<Cell>>, usize, usize)>,
 
-    // mouse tracking modes
-    mouse_normal: bool, // ?1000 - normal tracking (clicks)
-    mouse_button: bool, // ?1002 - button-event tracking (drag)
-    mouse_any: bool,    // ?1003 - any-event tracking (all motion)
-    mouse_sgr: bool,    // ?1006 - SGR extended coordinates
+    // mouse tracking modes not sure if i need all but whatever, been partially corrected by ai at somepoint i think
+    mouse_normal: bool, // ?1000 = clicks
+    mouse_button: bool, // ?1002 = press and release tracking
+    mouse_any: bool,    // ?1003 = hover and stuff
+    mouse_sgr: bool,    // ?1006 = when sending mouse events to the child process (the TUI app) we be using the SGR encoding format instead of the legacy X10 format cuz we cool guys
 }
 
-// i partially stole this from somewhere i forgot from where though.
+// wohoo more basic tty handling that I definitely did not steal and totally know what it does
 impl TermGrid {
     fn new(rows: usize, cols: usize) -> Self {
         TermGrid {
@@ -289,7 +288,6 @@ impl TermGrid {
                 }
             }
             1 => {
-                // start to cursor
                 for r in 0..self.cursor_row {
                     for c in 0..self.cols {
                         self.cells[r][c] = Cell::default();
@@ -300,7 +298,6 @@ impl TermGrid {
                 }
             }
             2 | 3 => {
-                // whole screen
                 for r in 0..self.rows {
                     for c in 0..self.cols {
                         self.cells[r][c] = Cell::default();
@@ -413,7 +410,7 @@ impl TermGrid {
         }
     }
 
-    // slopfix : SGR , set graphics rendition (colors and attributes)
+    // sgr = set graphics rendition (colors and attributes) not the mouse tracking stuff like above!
     fn sgr(&mut self, params: &[u16]) {
         if params.is_empty() {
             self.reset_attrs();
@@ -429,7 +426,7 @@ impl TermGrid {
                 27 => self.attr_reverse = false,
                 30..=37 => self.attr_fg = TermColor::Indexed(params[i] as u8 - 30),
                 38 => {
-                    // extended fg color
+                    // this is extended foreground color
                     if i + 2 < params.len() && params[i + 1] == 5 {
                         self.attr_fg = TermColor::Indexed(params[i + 2] as u8);
                         i += 2;
@@ -445,7 +442,7 @@ impl TermGrid {
                 39 => self.attr_fg = TermColor::Default,
                 40..=47 => self.attr_bg = TermColor::Indexed(params[i] as u8 - 40),
                 48 => {
-                    // extended bg color
+                    // and dis da extended bg color
                     if i + 2 < params.len() && params[i + 1] == 5 {
                         self.attr_bg = TermColor::Indexed(params[i + 2] as u8);
                         i += 2;
@@ -577,8 +574,7 @@ impl TermGrid {
     }
 }
 
-// ----- vte perform implementation -----
-
+// vte perform implementation, basically the heart of the term emu
 impl Perform for TermGrid {
     fn print(&mut self, c: char) {
         self.put_char(c);
@@ -591,7 +587,7 @@ impl Perform for TermGrid {
                 self.cursor_col = self.cursor_col.saturating_sub(1);
             }
             0x09 => {
-                // tab - next tab stop (every 8 cols)
+                // tab — advance cursor to the next tab stop (next multiple of 8 cols), not a fixed 8 forward
                 self.cursor_col = ((self.cursor_col / 8) + 1) * 8;
                 if self.cursor_col >= self.cols {
                     self.cursor_col = self.cols.saturating_sub(1);
@@ -602,7 +598,7 @@ impl Perform for TermGrid {
                 self.line_feed();
             }
             0x0D => {
-                // carriage return
+                // carriage return (CR) — move cursor to column 0
                 self.cursor_col = 0;
             }
             _ => {}
@@ -633,7 +629,7 @@ impl Perform for TermGrid {
             }
             b'D' => self.line_feed(),
             b'M' => {
-                // reverse index
+                // reverse index
                 if self.cursor_row == self.scroll_top {
                     self.scroll_down();
                 } else {
@@ -650,10 +646,10 @@ impl Perform for TermGrid {
     fn unhook(&mut self) {}
 }
 
-// ----- keyboard input mapping -----
+// keyboard input mapping 
 
 // map egui keys to terminal escape sequences
-// yk because maybe i want to recycle this file for some other project in the future hence why i implemented it
+// ik i wont need this for eguiemo-minus but who knows maybe ill recycle this wrapper
 fn special_key_bytes(key: &egui::Key, modifiers: &egui::Modifiers) -> Option<Vec<u8>> {
     use egui::Key;
     match key {
@@ -727,8 +723,10 @@ fn ctrl_key_byte(key: &egui::Key) -> Option<u8> {
     }
 }
 
-// ----- the egui app -----
+// the actual egui app lol
+
 
+// scrolling bug fixed by llm was too lazy took too long debugging myself, added comments for my own clarity
 struct HoardomApp {
     grid: Arc<Mutex<TermGrid>>,
     pty_writer: Mutex<Box<dyn Write + Send>>,
@@ -739,6 +737,7 @@ struct HoardomApp {
     current_cols: u16,
     current_rows: u16,
     last_mouse_button: Option<u8>, // track held mouse button for drag/release
+    scroll_accumulator: f32,        // accumulate pixel scroll delta for discrete line events
 }
 
 impl eframe::App for HoardomApp {
@@ -749,7 +748,7 @@ impl eframe::App for HoardomApp {
             return;
         }
 
-        // measure cell dimensions on first frame (cant do it in creation callback)
+        // measure cell dimensions on first frame (aka basic egui init stuff)
         if self.cell_width == 0.0 {
             let (cw, ch) = ctx.fonts(|f| {
                 let fid = FontId::monospace(FONT_SIZE);
@@ -761,12 +760,12 @@ impl eframe::App for HoardomApp {
             self.cell_height = ch;
         }
 
-        // handle keyboard input
+        // handle actual keyboard input
         ctx.input(|input| {
             for event in &input.events {
                 match event {
                     egui::Event::Text(text) => {
-                        // only pass printable chars (specials handled via Key events)
+                        // only pass printable chars (also partial workaround for crashing on non-ascii input like this: ü ö ä)
                         let filtered: String = text.chars().filter(|c| !c.is_control()).collect();
                         if !filtered.is_empty() {
                             if let Ok(mut w) = self.pty_writer.lock() {
@@ -800,7 +799,7 @@ impl eframe::App for HoardomApp {
         // handle mouse input
         self.handle_mouse(ctx);
 
-        // check if window was resized, update pty dimensions
+        // check if window was resized and update em tty dimensions
         let avail = ctx.available_rect();
         if self.cell_width > 0.0 && self.cell_height > 0.0 {
             let new_cols = (avail.width() / self.cell_width).floor() as u16;
@@ -823,7 +822,6 @@ impl eframe::App for HoardomApp {
             }
         }
 
-        // render the terminal grid
         egui::CentralPanel::default()
             .frame(egui::Frame::default().fill(DEFAULT_BG))
             .show(ctx, |ui| {
@@ -835,7 +833,7 @@ impl eframe::App for HoardomApp {
 }
 
 impl HoardomApp {
-    // translate egui pointer events to terminal mouse sequences
+    // translate egui to terminal mouse events
     fn handle_mouse(&mut self, ctx: &egui::Context) {
         let (mouse_enabled, use_sgr) = {
             match self.grid.lock() {
@@ -862,24 +860,45 @@ impl HoardomApp {
                 let col = col.max(0) as u16;
                 let row = row.max(0) as u16;
 
-                // scroll events
+                // Workaround: fast smooth scrolling (mainly on macOS; Linux was fine) caused list
+                // selections/highlight boxes to be lost. Accumulate pixel deltas instead of firing per event.
+
+                // scroll events — accumulate pixel delta, fire at most a few events per frame
                 let scroll_y = input.raw_scroll_delta.y;
                 if scroll_y != 0.0 {
-                    let button: u8 = if scroll_y > 0.0 { 64 } else { 65 };
-                    let seq = if use_sgr {
-                        format!("\x1b[<{};{};{}M", button, col + 1, row + 1)
-                    } else {
-                        let cb = (button + 32) as char;
-                        let cx = (col + 33).min(255) as u8 as char;
-                        let cy = (row + 33).min(255) as u8 as char;
-                        format!("\x1b[M{}{}{}", cb, cx, cy)
-                    };
-                    if let Ok(mut w) = self.pty_writer.lock() {
-                        let _ = w.write_all(seq.as_bytes());
+                    self.scroll_accumulator += scroll_y;
+                    // use 3x cell height as threshold so scrolling feels natural
+                    // and cap at 3 events per frame to prevent list flying to the top
+                    let step = (ch * 3.0).max(30.0);
+                    let max_events = 3u8;
+                    let mut fired = 0u8;
+                    while self.scroll_accumulator.abs() >= step && fired < max_events {
+                        let button: u8 = if self.scroll_accumulator > 0.0 { 64 } else { 65 };
+                        if self.scroll_accumulator > 0.0 {
+                            self.scroll_accumulator -= step;
+                        } else {
+                            self.scroll_accumulator += step;
+                        }
+                        fired += 1;
+                        let seq = if use_sgr {
+                            format!("\x1b[<{};{};{}M", button, col + 1, row + 1)
+                        } else {
+                            let cb = (button + 32) as char;
+                            let cx = (col + 33).min(255) as u8 as char;
+                            let cy = (row + 33).min(255) as u8 as char;
+                            format!("\x1b[M{}{}{}", cb, cx, cy)
+                        };
+                        if let Ok(mut w) = self.pty_writer.lock() {
+                            let _ = w.write_all(seq.as_bytes());
+                        }
+                    }
+                    // drain leftover momentum so scrolling does not continue indefinitely
+                    if fired == max_events {
+                        self.scroll_accumulator = 0.0;
                     }
                 }
 
-                // button press
+                // mouse button press
                 if input.pointer.any_pressed() {
                     let button: u8 = if input.pointer.button_pressed(egui::PointerButton::Primary) {
                         0
@@ -904,7 +923,7 @@ impl HoardomApp {
                     }
                 }
 
-                // button release
+                // mouse button release
                 if input.pointer.any_released() {
                     let button = self.last_mouse_button.unwrap_or(0);
                     self.last_mouse_button = None;
@@ -921,7 +940,7 @@ impl HoardomApp {
                     }
                 }
 
-                // drag / motion
+                // drag / motion while a button is held
                 if input.pointer.is_moving() && self.last_mouse_button.is_some() {
                     let button = self.last_mouse_button.unwrap_or(0) + 32; // motion flag
                     let seq = if use_sgr {
@@ -951,12 +970,11 @@ impl HoardomApp {
         let cw = self.cell_width;
         let ch = self.cell_height;
 
-        // draw each row - render character by character at exact cell positions
-        // to keep backgrounds and text perfectly aligned
+        // keep text and backgrounds precisely aligned by rendering per cell
         for row in 0..grid.rows {
             let y = rect.min.y + row as f32 * ch;
 
-            // draw background spans (batch consecutive same-bg cells)
+            // draw background spans, batching consecutive cells with the same bg
             let mut bg_start = 0usize;
             let mut current_bg = grid.cells[row][0].resolved_bg();
 
@@ -964,11 +982,11 @@ impl HoardomApp {
                 let cell_bg = if col < grid.cols {
                     grid.cells[row][col].resolved_bg()
                 } else {
-                    Color32::TRANSPARENT // sentinel to flush last span
+                    Color32::TRANSPARENT // sentinel to flush the last span
                 };
 
                 if cell_bg != current_bg || col == grid.cols {
-                    // draw the background span
+                    // draw the background span
                     if current_bg != DEFAULT_BG {
                         let x0 = rect.min.x + bg_start as f32 * cw;
                         let x1 = rect.min.x + col as f32 * cw;
@@ -983,8 +1001,7 @@ impl HoardomApp {
                 }
             }
 
-            // draw text - render each cell at its exact x position
-            // this prevents sub-pixel drift that causes bg/text misalignment
+            // render each cell at its exact x position to prevent sub-pixel drift
             for col in 0..grid.cols {
                 let cell = &grid.cells[row][col];
                 if cell.ch == ' ' || cell.ch == '\0' {
@@ -1004,7 +1021,7 @@ impl HoardomApp {
             }
         }
 
-        // draw cursor
+        // draw the cursor
         if grid.cursor_visible && grid.cursor_row < grid.rows && grid.cursor_col < grid.cols {
             let cx = rect.min.x + grid.cursor_col as f32 * cw;
             let cy = rect.min.y + grid.cursor_row as f32 * ch;
@@ -1015,7 +1032,7 @@ impl HoardomApp {
             );
         }
 
-        // reserve the space so egui knows we used it
+        // reserve enough space so egui knows we used it
         ui.allocate_exact_size(
             egui::vec2(grid.cols as f32 * cw, grid.rows as f32 * ch),
             Sense::hover(),
@@ -1023,29 +1040,30 @@ impl HoardomApp {
     }
 }
 
-// ----- find the hoardom binary -----
+// Host-side glue for running hoardom inside this egui terminal wrapper.
+// To reuse this wrapper for another app, update the binary lookup and command arguments below.
 
 fn find_hoardom() -> PathBuf {
     // check same directory as ourselves
     if let Ok(exe) = std::env::current_exe() {
         if let Some(dir) = exe.parent() {
-            // check for hoardom next to us
+            // check for a hoardom binary next to our own executable
             let candidate = dir.join("hoardom");
             if candidate.exists() && candidate != exe {
                 return candidate;
             }
-            // in a mac .app bundle the binary might be named differently
+            // in a macOS .app bundle the binary may be named differently
             let candidate = dir.join("hoardom-bin");
             if candidate.exists() {
                 return candidate;
             }
         }
     }
-    // fall back to PATH
+    // fall back to path
     PathBuf::from("hoardom")
 }
 
-// ----- load app icon -----
+// app icon loading — adjust the path if the icon lives elsewhere
 
 fn load_icon() -> egui::IconData {
     let png_bytes = include_bytes!("../dist/AppIcon.png");
@@ -1060,7 +1078,7 @@ fn load_icon() -> egui::IconData {
     }
 }
 
-// ----- main -----
+// main entry point, adapted for hoardom
 
 fn main() -> eframe::Result<()> {
     let hoardom_bin = find_hoardom();
@@ -1074,9 +1092,9 @@ fn main() -> eframe::Result<()> {
             pixel_width: 0,
             pixel_height: 0,
         })
-        .expect("failed to open pty");
+        .expect("failed to open pty");
 
-    // spawn hoardom --tui in the pty
+    // spawn hoardom --tui inside the pty
     let mut cmd = CommandBuilder::new(&hoardom_bin);
     cmd.arg("--tui");
     cmd.env("TERM", "xterm-256color");
@@ -1086,7 +1104,7 @@ fn main() -> eframe::Result<()> {
         .spawn_command(cmd)
         .unwrap_or_else(|e| panic!("failed to spawn {:?}: {}", hoardom_bin, e));
 
-    // close the slave end in the parent so pty gets proper eof
+    // close the slave end in the parent so the pty gets a proper EOF
     drop(pair.slave);
 
     let reader = pair
@@ -1104,10 +1122,10 @@ fn main() -> eframe::Result<()> {
     )));
     let child_exited = Arc::new(AtomicBool::new(false));
 
-    // egui context holder so the reader thread can request repaints
+    // egui context so it can repaint
     let ctx_holder: Arc<Mutex<Option<egui::Context>>> = Arc::new(Mutex::new(None));
 
-    // reader thread: reads pty output and feeds it through the vt parser
+    // reader thread: feeds pty output through the vt parser
     let grid_clone = grid.clone();
     let exited_clone = child_exited.clone();
     let ctx_clone = ctx_holder.clone();
@@ -1140,15 +1158,14 @@ fn main() -> eframe::Result<()> {
         }
     });
 
-    // child reaper thread
+    // child reaper thread: waits for the child and flags exit
     let exited_clone2 = child_exited.clone();
     thread::spawn(move || {
         let _ = child.wait();
         exited_clone2.store(true, Ordering::Relaxed);
     });
 
-    // calculate initial window size from cell dimensions
-    // (rough estimate, refined on first frame)
+    // rough initial window size estimate from cell dimensions; refined on the first frame
     let est_width = DEFAULT_COLS as f32 * 8.5 + 20.0;
     let est_height = DEFAULT_ROWS as f32 * 18.0 + 20.0;
 
@@ -1165,17 +1182,17 @@ fn main() -> eframe::Result<()> {
         "hoardom",
         options,
         Box::new(move |cc| {
-            // store the egui context for the reader thread
+            // store the egui context so the reader thread can request repaints
             if let Ok(mut holder) = ctx_holder.lock() {
                 *holder = Some(cc.egui_ctx.clone());
             }
 
             cc.egui_ctx.set_visuals(egui::Visuals::dark());
 
-            // font fallback chain for monospace: Hack (default) -> NotoSansMono -> NotoSansSymbols2
-            // Hack is missing box drawing, block elements, ellipsis
-            // NotoSansMono covers those but is missing dingbats (symbols)
-            // NotoSansSymbols2-subset has just those two glyphs from which only the necessary ones are extracted afaik
+            // font fallback chain for monospace: Hack (default) -> NotoSansMono -> NotoSansSymbols2-subset.
+            // The default chain did not render correctly in this app, so fonts are registered manually:
+            // NotoSansMono covers box drawing and block elements but lacks the dingbat symbols we need;
+            // NotoSansSymbols2 has them but is large, so the "-subset" build keeps only the required glyphs.
             let mut fonts = egui::FontDefinitions::default();
             fonts.font_data.insert(
                 "NotoSansMono".to_owned(),
@@ -1207,6 +1224,7 @@ fn main() -> eframe::Result<()> {
                 current_cols: DEFAULT_COLS,
                 current_rows: DEFAULT_ROWS,
                 last_mouse_button: None,
+                scroll_accumulator: 0.0,
             }))
         }),
     )

+ 4 - 6
src/cli.rs

@@ -6,11 +6,9 @@ use std::path::PathBuf;
     name = "hoardom",
     version = "0.0.1",
     about = "Domain hoarding made less painful"
-)] // static version infos ???? whoops
+)] // FIXME: version string is hard-coded ("0.0.1") while Cargo.toml is at 2.1.2 — derive it from env!("CARGO_PKG_VERSION")
 #[command(disable_help_flag = true, disable_version_flag = true)]
 pub struct Args {
-    /// ffs why were a million comments added at some point !?!? basically all of these are self explanatory. removed dumb ass redundant ones.
-
     #[arg(value_name = "DOMAIN")]
     pub domains: Vec<String>,
     #[arg(long = "cli", default_value_t = false)]
@@ -92,6 +90,7 @@ impl Args {
 pub fn print_help() {
     println!(
         "hoardom {} - Domain hoarding made less painful
+
 Mode :
 --cli                            Default none interactive mode
 --tui                            Easy to use Terminal based Graphical user interface
@@ -114,8 +113,6 @@ hoardom -a idea1 idea2           See Table of available domains starting with th
 pub fn print_fullhelp() {
     println!(
         "hoardom {} - they are inside your walls! 
-        
-## if you see this send a fax to +41 43 543 04 47 that mentions hoardom to know your fate ##
 
 Mode :
 --cli                            Default none interactive mode
@@ -124,7 +121,8 @@ Mode :
 Basics :
 -a --all                         Show all in list even when unavailable
 -c --csv=PATH                    Out in CSV, Path is optional
--l --list=LIST                   Built in TLD Lists are : {}
+-l --list=LIST                   Built in TLD Lists are :
+                                 {}
 
 Advanced :
 -e --environement=PATH           Define where .hoardom folder should be

+ 1 - 1
src/lookup.rs

@@ -469,7 +469,7 @@ async fn whois_lookup(
                     } else {
                         ErrorKind::Unknown
                     },
-                    message: format!("whois {}: {}", server, e),
+                    message: format!("{} ({})", e, server),
                 },
             ),
         };

+ 11 - 11
src/output.rs

@@ -11,7 +11,7 @@ pub fn print_available_table(results: &[DomainResult], no_color: bool, no_unicod
         return;
     }
 
-    let max_len = available.iter().map(|r| r.full.len()).max().unwrap_or(20);
+    let max_len = available.iter().map(|r| r.display_full().len()).max().unwrap_or(20);
     let title = "Available Domains";
     let width = max_len.max(title.len()) + 4; // padding, ensure title fits
     let title_padded = format!("{:^width$}", title, width = width);
@@ -27,7 +27,7 @@ pub fn print_available_table(results: &[DomainResult], no_color: bool, no_unicod
         }
         println!("+{}+", "-".repeat(width));
         for r in &available {
-            println!("| {:<pad$} |", r.full, pad = width - 2);
+            println!("| {:<pad$} |", r.display_full(), pad = width - 2);
         }
         println!("{}", border);
     } else {
@@ -43,7 +43,7 @@ pub fn print_available_table(results: &[DomainResult], no_color: bool, no_unicod
         }
         println!("{}", sep);
         for r in &available {
-            println!("│ {:<pad$} │", r.full, pad = width - 2);
+            println!("│ {:<pad$} │", r.display_full(), pad = width - 2);
         }
         println!("{}", bot);
     }
@@ -58,7 +58,7 @@ pub fn print_full_table(results: &[DomainResult], no_color: bool, no_unicode: bo
     // calc column widths
     let domain_w = results
         .iter()
-        .map(|r| r.full.len())
+        .map(|r| r.display_full().len())
         .max()
         .unwrap_or(10)
         .max(7);
@@ -105,7 +105,7 @@ fn print_full_table_unicode(
     println!("{}", sep);
 
     for r in results {
-        let domain_str = format!(" {:<width$} ", r.full, width = dc - 2);
+        let domain_str = format!(" {:<width$} ", r.display_full(), width = dc - 2);
         let status_str = format!(" {:<width$} ", r.status_str(), width = sc - 2);
         let note_str = format!(" {:<width$} ", r.note_str(), width = nc - 2);
 
@@ -142,7 +142,7 @@ fn print_full_table_ascii(
     println!("{}", border);
 
     for r in results {
-        let domain_str = format!(" {:<width$} ", r.full, width = dc - 2);
+        let domain_str = format!(" {:<width$} ", r.display_full(), width = dc - 2);
         let status_str = format!(" {:<width$} ", r.status_str(), width = sc - 2);
         let note_str = format!(" {:<width$} ", r.note_str(), width = nc - 2);
 
@@ -171,7 +171,7 @@ fn color_domain(domain: &str, status: &DomainStatus) -> ColoredString {
 pub fn print_csv(results: &[DomainResult]) {
     println!("Domains, Status, Note");
     for r in results {
-        println!("{}, {}, {}", r.full, r.status_str(), r.note_str());
+        println!("{}, {}, {}", r.display_full(), r.status_str(), r.note_str());
     }
 }
 
@@ -180,7 +180,7 @@ pub fn write_csv_file(results: &[DomainResult], path: &PathBuf) -> Result<(), St
         std::fs::File::create(path).map_err(|e| format!("Could not create CSV file: {}", e))?;
     writeln!(file, "Domains, Status, Note").map_err(|e| format!("Write error: {}", e))?;
     for r in results {
-        writeln!(file, "{}, {}, {}", r.full, r.status_str(), r.note_str())
+        writeln!(file, "{}, {}, {}", r.display_full(), r.status_str(), r.note_str())
             .map_err(|e| format!("Write error: {}", e))?;
     }
     Ok(())
@@ -191,15 +191,15 @@ pub fn print_errors(results: &[DomainResult], verbose: bool) {
         if let DomainStatus::Error { kind, message } = &r.status {
             match kind {
                 ErrorKind::InvalidTld => {
-                    eprintln!("Error for {} : does not seem to exist", r.full);
+                    eprintln!("Error for {} : does not seem to exist", r.display_full());
                 }
                 _ => {
                     if verbose {
-                        eprintln!("Error for {} : {} (raw: {})", r.full, message, message);
+                        eprintln!("Error for {} : {} (raw: {})", r.display_full(), message, message);
                     } else {
                         eprintln!(
                             "Error for {} : unknown error",
-                            r.full
+                            r.display_full()
                         );
                     }
                 }

Failā izmaiņas netiks attēlotas, jo tās ir par lielu
+ 579 - 201
src/tui.rs


+ 6 - 0
src/types.rs

@@ -75,6 +75,12 @@ impl DomainResult {
         }
     }
 
+    /// Returns the domain with punycode labels decoded to unicode for display
+    pub fn display_full(&self) -> String {
+        let (unicode, _) = idna::domain_to_unicode(&self.full);
+        unicode
+    }
+
     pub fn note_str(&self) -> String {
         match &self.status {
             DomainStatus::Available => "-".to_string(),

Daži faili netika attēloti, jo izmaiņu fails ir pārāk liels