From 13478d72d51bb70148bbf0a6a7e113cf5d017d51 Mon Sep 17 00:00:00 2001
From: unidcml
Date: Sun, 2 Feb 2025 19:31:58 +0300
Subject: [PATCH] Add xray-geosite to src

---
 src/xray-geosite/go.mod  |  16 ++
 src/xray-geosite/go.sum  |  20 ++
 src/xray-geosite/main.go | 391 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 427 insertions(+)
 create mode 100644 src/xray-geosite/go.mod
 create mode 100644 src/xray-geosite/go.sum
 create mode 100644 src/xray-geosite/main.go

diff --git a/src/xray-geosite/go.mod b/src/xray-geosite/go.mod
new file mode 100644
index 0000000..72fceab
--- /dev/null
+++ b/src/xray-geosite/go.mod
@@ -0,0 +1,16 @@
+module github.com/v2fly/domain-list-community
+
+go 1.22
+
+toolchain go1.23.1
+
+require (
+	github.com/v2fly/v2ray-core/v5 v5.19.0
+	google.golang.org/protobuf v1.34.2
+)
+
+require (
+	github.com/adrg/xdg v0.5.0 // indirect
+	github.com/golang/protobuf v1.5.4 // indirect
+	golang.org/x/sys v0.25.0 // indirect
+)
diff --git a/src/xray-geosite/go.sum b/src/xray-geosite/go.sum
new file mode 100644
index 0000000..8326946
--- /dev/null
+++ b/src/xray-geosite/go.sum
@@ -0,0 +1,20 @@
+github.com/adrg/xdg v0.5.0 h1:dDaZvhMXatArP1NPHhnfaQUqWBLBsmx1h1HXQdMoFCY=
+github.com/adrg/xdg v0.5.0/go.mod h1:dDdY4M4DF9Rjy4kHPeNL+ilVF+p2lK8IdM9/rTSGcI4=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/v2fly/v2ray-core/v5 v5.19.0 h1:TF2noX1c1npgSg98TASHLdYDWDRhi97gBLpid1cwMUY=
+github.com/v2fly/v2ray-core/v5 v5.19.0/go.mod h1:iRydCoQWwE8mhaf/VOWe5jKB8r7LkZfHsbOvwRfJWUo=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
+google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/src/xray-geosite/main.go b/src/xray-geosite/main.go
new file mode 100644
index 0000000..d874270
--- /dev/null
+++ b/src/xray-geosite/main.go
@@ -0,0 +1,391 @@
+package main
+
+import (
+	"bufio"
+	"errors"
+	"flag"
+	"fmt"
+	"os"
+	"path/filepath"
+	"sort"
+	"strconv"
+	"strings"
+
+	router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
+	"google.golang.org/protobuf/proto"
+)
+
+var (
+	dataPath    = flag.String("datapath", "./data", "Path to your custom 'data' directory")
+	outputName  = flag.String("outputname", "dlc.dat", "Name of the generated dat file")
+	outputDir   = flag.String("outputdir", "./", "Directory to place all generated files")
+	exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
+)
+
+type Entry struct {
+	Type  string
+	Value string
+	Attrs []*router.Domain_Attribute
+}
+
+type List struct {
+	Name  string
+	Entry []Entry
+}
+
+type ParsedList struct {
+	Name      string
+	Inclusion map[string]bool
+	Entry     []Entry
+}
+
+func (l *ParsedList) toPlainText(listName string) error {
+	var entryBytes []byte
+	for _, entry := range l.Entry {
+		var attrString string
+		if entry.Attrs != nil {
+			for _, attr := range entry.Attrs {
+				attrString += "@" + attr.GetKey() + ","
+			}
+			attrString = strings.TrimRight(":"+attrString, ",")
+		}
+		// Entry output format is: type:domain.tld:@attr1,@attr2
+		entryBytes = append(entryBytes, []byte(entry.Type+":"+entry.Value+attrString+"\n")...)
+	}
+	if err := os.WriteFile(filepath.Join(*outputDir, listName+".txt"), entryBytes, 0644); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (l *ParsedList) toProto() (*router.GeoSite, error) {
+	site := &router.GeoSite{
+		CountryCode: l.Name,
+	}
+	for _, entry := range l.Entry {
+		switch entry.Type {
+		case "domain":
+			site.Domain = append(site.Domain, &router.Domain{
+				Type:      router.Domain_RootDomain,
+				Value:     entry.Value,
+				Attribute: entry.Attrs,
+			})
+		case "regexp":
+			site.Domain = append(site.Domain, &router.Domain{
+				Type:      router.Domain_Regex,
+				Value:     entry.Value,
+				Attribute: entry.Attrs,
+			})
+		case "keyword":
+			site.Domain = append(site.Domain, &router.Domain{
+				Type:      router.Domain_Plain,
+				Value:     entry.Value,
+				Attribute: entry.Attrs,
+			})
+		case "full":
+			site.Domain = append(site.Domain, &router.Domain{
+				Type:      router.Domain_Full,
+				Value:     entry.Value,
+				Attribute: entry.Attrs,
+			})
+		default:
+			return nil, errors.New("unknown domain type: " + entry.Type)
+		}
+	}
+	return site, nil
+}
+
+func exportPlainTextList(list []string, refName string, pl *ParsedList) {
+	for _, listName := range list {
+		if strings.EqualFold(refName, listName) {
+			if err := pl.toPlainText(strings.ToLower(refName)); err != nil {
+				fmt.Println("Failed: ", err)
+				continue
+			}
+			fmt.Printf("'%s' has been generated successfully.\n", listName)
+		}
+	}
+}
+
+func removeComment(line string) string {
+	idx := strings.Index(line, "#")
+	if idx == -1 {
+		return line
+	}
+	return strings.TrimSpace(line[:idx])
+}
+
+func parseDomain(domain string, entry *Entry) error {
+	kv := strings.Split(domain, ":")
+	if len(kv) == 1 {
+		entry.Type = "domain"
+		entry.Value = strings.ToLower(kv[0])
+		return nil
+	}
+
+	if len(kv) == 2 {
+		entry.Type = strings.ToLower(kv[0])
+		entry.Value = strings.ToLower(kv[1])
+		return nil
+	}
+
+	return errors.New("Invalid format: " + domain)
+}
+
+func parseAttribute(attr string) (*router.Domain_Attribute, error) {
+	var attribute router.Domain_Attribute
+	if len(attr) == 0 || attr[0] != '@' {
+		return &attribute, errors.New("invalid attribute: " + attr)
+	}
+
+	// Trim attribute prefix `@` character
+	attr = attr[1:]
+	parts := strings.Split(attr, "=")
+	if len(parts) == 1 {
+		attribute.Key = strings.ToLower(parts[0])
+		attribute.TypedValue = &router.Domain_Attribute_BoolValue{BoolValue: true}
+	} else {
+		attribute.Key = strings.ToLower(parts[0])
+		intv, err := strconv.Atoi(parts[1])
+		if err != nil {
+			return &attribute, errors.New("invalid attribute: " + attr + ": " + err.Error())
+		}
+		attribute.TypedValue = &router.Domain_Attribute_IntValue{IntValue: int64(intv)}
+	}
+	return &attribute, nil
+}
+
+func parseEntry(line string) (Entry, error) {
+	line = strings.TrimSpace(line)
+	parts := strings.Split(line, " ")
+
+	var entry Entry
+	if len(parts) == 0 {
+		return entry, errors.New("empty entry")
+	}
+
+	if err := parseDomain(parts[0], &entry); err != nil {
+		return entry, err
+	}
+
+	for i := 1; i < len(parts); i++ {
+		attr, err := parseAttribute(parts[i])
+		if err != nil {
+			return entry, err
+		}
+		entry.Attrs = append(entry.Attrs, attr)
+	}
+
+	return entry, nil
+}
+
+func Load(path string) (*List, error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer file.Close()
+
+	list := &List{
+		Name: strings.ToUpper(filepath.Base(path)),
+	}
+	scanner := bufio.NewScanner(file)
+	for scanner.Scan() {
+		line := strings.TrimSpace(scanner.Text())
+		line = removeComment(line)
+		if len(line) == 0 {
+			continue
+		}
+		entry, err := parseEntry(line)
+		if err != nil {
+			return nil, err
+		}
+		list.Entry = append(list.Entry, entry)
+	}
+
+	return list, nil
+}
+
+func isMatchAttr(Attrs []*router.Domain_Attribute, includeKey string) bool {
+	isMatch := false
+	mustMatch := true
+	matchName := includeKey
+	if strings.HasPrefix(includeKey, "!") {
+		isMatch = true
+		mustMatch = false
+		matchName = strings.TrimLeft(includeKey, "!")
+	}
+
+	for _, Attr := range Attrs {
+		attrName := Attr.Key
+		if mustMatch {
+			if matchName == attrName {
+				isMatch = true
+				break
+			}
+		} else {
+			if matchName == attrName {
+				isMatch = false
+				break
+			}
+		}
+	}
+	return isMatch
+}
+
+func createIncludeAttrEntrys(list *List, matchAttr *router.Domain_Attribute) []Entry {
+	newEntryList := make([]Entry, 0, len(list.Entry))
+	matchName := matchAttr.Key
+	for _, entry := range list.Entry {
+		matched := isMatchAttr(entry.Attrs, matchName)
+		if matched {
+			newEntryList = append(newEntryList, entry)
+		}
+	}
+	return newEntryList
+}
+
+func ParseList(list *List, ref map[string]*List) (*ParsedList, error) {
+	pl := &ParsedList{
+		Name:      list.Name,
+		Inclusion: make(map[string]bool),
+	}
+	entryList := list.Entry
+	for {
+		newEntryList := make([]Entry, 0, len(entryList))
+		hasInclude := false
+		for _, entry := range entryList {
+			if entry.Type == "include" {
+				refName := strings.ToUpper(entry.Value)
+				if entry.Attrs != nil {
+					for _, attr := range entry.Attrs {
+						InclusionName := strings.ToUpper(refName + "@" + attr.Key)
+						if pl.Inclusion[InclusionName] {
+							continue
+						}
+						pl.Inclusion[InclusionName] = true
+
+						refList := ref[refName]
+						if refList == nil {
+							return nil, errors.New(entry.Value + " not found.")
+						}
+						attrEntrys := createIncludeAttrEntrys(refList, attr)
+						if len(attrEntrys) != 0 {
+							newEntryList = append(newEntryList, attrEntrys...)
+						}
+					}
+				} else {
+					InclusionName := refName
+					if pl.Inclusion[InclusionName] {
+						continue
+					}
+					pl.Inclusion[InclusionName] = true
+					refList := ref[refName]
+					if refList == nil {
+						return nil, errors.New(entry.Value + " not found.")
+					}
+					newEntryList = append(newEntryList, refList.Entry...)
+				}
+				hasInclude = true
+			} else {
+				newEntryList = append(newEntryList, entry)
+			}
+		}
+		entryList = newEntryList
+		if !hasInclude {
+			break
+		}
+	}
+	pl.Entry = entryList
+
+	return pl, nil
+}
+
+func main() {
+	flag.Parse()
+
+	dir := *dataPath
+	fmt.Println("Use domain lists in", dir)
+
+	ref := make(map[string]*List)
+	err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+		if info.IsDir() {
+			return nil
+		}
+		list, err := Load(path)
+		if err != nil {
+			return err
+		}
+		ref[list.Name] = list
+		return nil
+	})
+	if err != nil {
+		fmt.Println("Failed: ", err)
+		os.Exit(1)
+	}
+
+	// Create output directory if not exist
+	if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
+		if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
+			fmt.Println("Failed: ", mkErr)
+			os.Exit(1)
+		}
+	}
+
+	protoList := new(router.GeoSiteList)
+	var existList []string
+	for refName, list := range ref {
+		pl, err := ParseList(list, ref)
+		if err != nil {
+			fmt.Println("Failed: ", err)
+			os.Exit(1)
+		}
+		site, err := pl.toProto()
+		if err != nil {
+			fmt.Println("Failed: ", err)
+			os.Exit(1)
+		}
+		protoList.Entry = append(protoList.Entry, site)
+
+		// Flatten and export plaintext list
+		if *exportLists != "" {
+			if existList != nil {
+				exportPlainTextList(existList, refName, pl)
+			} else {
+				exportedListSlice := strings.Split(*exportLists, ",")
+				for _, exportedListName := range exportedListSlice {
+					fileName := filepath.Join(dir, exportedListName)
+					_, err := os.Stat(fileName)
+					if err == nil || os.IsExist(err) {
+						existList = append(existList, exportedListName)
+					} else {
+						fmt.Printf("'%s' list does not exist in '%s' directory.\n", exportedListName, dir)
+					}
+				}
+				if existList != nil {
+					exportPlainTextList(existList, refName, pl)
+				}
+			}
+		}
+	}
+
+	// Sort protoList so the marshaled list is reproducible
+	sort.SliceStable(protoList.Entry, func(i, j int) bool {
+		return protoList.Entry[i].CountryCode < protoList.Entry[j].CountryCode
+	})
+
+	protoBytes, err := proto.Marshal(protoList)
+	if err != nil {
+		fmt.Println("Failed:", err)
+		os.Exit(1)
+	}
+	if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
+		fmt.Println("Failed: ", err)
+		os.Exit(1)
+	} else {
+		fmt.Println(*outputName, "has been generated successfully.")
+	}
+}