// Package trafficmode provides helpers for tokenizing and normalizing
// user-supplied lists of IPv4 subnets, UID ranges, and cgroup paths.
package trafficmode
import (
	"fmt"
	"net/netip"
	"sort"
	"strconv"
	"strings"
	"unicode"
)
// TokenizeList splits raw input lines into individual tokens. Commas,
// semicolons, and any Unicode whitespace (newlines and tabs included)
// all act as separators; empty tokens are dropped.
//
// The original implementation rewrote separators to spaces with a
// Replacer and then re-trimmed each token, even though strings.Fields
// already yields whitespace-free, non-empty tokens. A single
// FieldsFunc pass produces the same result without the intermediate
// string copy per line.
func TokenizeList(raw []string) []string {
	// Separator set: the explicit delimiters plus everything
	// strings.Fields would split on (unicode.IsSpace).
	isSep := func(r rune) bool {
		return r == ',' || r == ';' || unicode.IsSpace(r)
	}
	out := make([]string, 0, len(raw))
	for _, line := range raw {
		out = append(out, strings.FieldsFunc(line, isSep)...)
	}
	return out
}
// NormalizeSubnetList canonicalizes a list of IPv4 addresses and CIDR
// blocks. Bare addresses become /32 prefixes, prefixes are masked to
// their network address, anything unparseable or non-IPv4 is silently
// dropped, and the result is deduplicated and sorted lexically.
func NormalizeSubnetList(raw []string) []string {
	// toCIDR converts one token to its canonical CIDR string,
	// reporting false for tokens that must be skipped.
	toCIDR := func(token string) (string, bool) {
		if strings.Contains(token, "/") {
			pfx, err := netip.ParsePrefix(token)
			if err != nil || !pfx.Addr().Is4() {
				return "", false
			}
			// Masked() zeroes host bits so "10.0.0.1/8" and
			// "10.0.0.0/8" collapse to the same entry.
			return pfx.Masked().String(), true
		}
		addr, err := netip.ParseAddr(token)
		if err != nil || !addr.Is4() {
			return "", false
		}
		return netip.PrefixFrom(addr, 32).String(), true
	}

	dedup := make(map[string]struct{})
	result := make([]string, 0, len(raw))
	for _, token := range TokenizeList(raw) {
		if cidr, ok := toCIDR(token); ok {
			if _, dup := dedup[cidr]; !dup {
				dedup[cidr] = struct{}{}
				result = append(result, cidr)
			}
		}
	}
	sort.Strings(result)
	return result
}
// NormalizeUIDToken canonicalizes a single UID token into "start-end"
// range form. A lone UID n becomes "n-n"; an explicit "a-b" range is
// re-emitted with both ends trimmed and parsed. It returns false for
// empty input, values that do not fit in a uint32, or inverted ranges
// (end < start).
func NormalizeUIDToken(tok string) (string, bool) {
	trimmed := strings.TrimSpace(tok)
	if trimmed == "" {
		return "", false
	}

	// Each endpoint must be an unsigned decimal that fits in 32 bits.
	parse := func(s string) (uint64, bool) {
		v, err := strconv.ParseUint(strings.TrimSpace(s), 10, 32)
		return v, err == nil
	}

	lo, hi, isRange := strings.Cut(trimmed, "-")
	if !isRange {
		v, ok := parse(trimmed)
		if !ok {
			return "", false
		}
		return fmt.Sprintf("%d-%d", v, v), true
	}

	start, okStart := parse(lo)
	end, okEnd := parse(hi)
	if !okStart || !okEnd || end < start {
		return "", false
	}
	return fmt.Sprintf("%d-%d", start, end), true
}
// NormalizeUIDList canonicalizes a list of UID tokens via
// NormalizeUIDToken, dropping invalid entries, deduplicating the
// normalized forms, and returning them sorted lexically.
func NormalizeUIDList(raw []string) []string {
	dedup := make(map[string]struct{})
	result := make([]string, 0, len(raw))
	for _, token := range TokenizeList(raw) {
		if norm, valid := NormalizeUIDToken(token); valid {
			if _, dup := dedup[norm]; !dup {
				dedup[norm] = struct{}{}
				result = append(result, norm)
			}
		}
	}
	sort.Strings(result)
	return result
}
// NormalizeCgroupList canonicalizes a list of cgroup path tokens:
// trailing slashes are stripped (the bare root "/" is preserved),
// duplicates are removed, and the result is sorted lexically.
func NormalizeCgroupList(raw []string) []string {
	seen := map[string]struct{}{}
	out := make([]string, 0, len(raw))
	for _, tok := range TokenizeList(raw) {
		// Strip ALL trailing slashes so "/a", "/a/", and "/a//"
		// collapse to one canonical form. The previous TrimSuffix
		// removed only a single slash, leaving "/a//" as "/a/" and
		// defeating deduplication.
		v := strings.TrimRight(tok, "/")
		if v == "" {
			// The token was entirely slashes: keep the cgroup root.
			v = "/"
		}
		// Note: TokenizeList already yields non-empty,
		// whitespace-free tokens, so no extra TrimSpace is needed.
		if _, exists := seen[v]; exists {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	sort.Strings(out)
	return out
}