From 9c54318e97f7d68a197b1d781553b62cc8138165 Mon Sep 17 00:00:00 2001
From: shadowabi <50265741+shadowabi@users.noreply.github.com>
Date: Wed, 15 May 2024 22:26:37 +0800
Subject: [PATCH] Rework the reqString series of requests
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 cmd/daydaymap/daydaymap.go | 22 +++++++++++++++++-----
 cmd/fofa/fofa.go           | 18 +++++++++++-------
 cmd/hunter/hunter.go       | 18 +++++++++++-------
 cmd/quake/quake.go         | 18 +++++++++++-------
 define/var.go              |  7 ++++---
 main/main.go               |  2 +-
 pkg/data.go                | 10 +++++-----
 pkg/daydaymap/req.go       | 25 +++++++++++++------------
 pkg/fofa/req.go            | 10 +++++-----
 pkg/hunter/req.go          | 10 +++++-----
 pkg/quake/req.go           |  8 ++++----
 11 files changed, 87 insertions(+), 61 deletions(-)

diff --git a/cmd/daydaymap/daydaymap.go b/cmd/daydaymap/daydaymap.go
index e342004..4d1ed2d 100644
--- a/cmd/daydaymap/daydaymap.go
+++ b/cmd/daydaymap/daydaymap.go
@@ -29,11 +29,24 @@ var DaydayMapCmd = &cobra.Command{
 		fmt.Printf("[+] daydaymap is working...\n")
 
 		client := pkg.GenerateHTTPClient(define.TimeOut)
-		reqString := daydaymap.MergeReqListToReqString(define.ReqIpList, define.ReqDomainList)
-		reqBody := daydaymap.DayDayMapRequest(client, reqString, 1) // find result total
+		reqStringList := pkg.MergeReqListToReqStringList("daydaymap", define.ReqIpList, define.ReqDomainList)
+		reqBody := daydaymap.DayDayMapRequest(client, 1, 1, reqStringList...)
 		reqResult := daydaymap.ParseDaydaymapResult(reqBody...)
-		reqBody = daydaymap.DayDayMapRequest(client, reqString, reqResult[0].Data.Total) // real query
-		reqResult = daydaymap.ParseDaydaymapResult(reqBody...)
+
+		for i, _ := range reqResult {
+			if int(reqResult[i].Data.Total) == 0 {
+				continue
+			}
+			if int(reqResult[i].Data.Total) > 10000 {
+				for j := 1; j <= reqResult[i].Data.Total/10000; j++ {
+					reqBody = daydaymap.DayDayMapRequest(client, j, 10000, reqStringList[i])
+					reqResult = append(reqResult, daydaymap.ParseDaydaymapResult(reqBody...)...)
+				}
+			} else {
+				reqBody = daydaymap.DayDayMapRequest(client, 1, reqResult[i].Data.Total, reqStringList[i])
+				reqResult = append(reqResult, daydaymap.ParseDaydaymapResult(reqBody...)...)
+			}
+		}
 
 		chanNum := cap(reqResult)
 		if chanNum != 0 {
@@ -43,7 +56,6 @@ var DaydayMapCmd = &cobra.Command{
 
 			pkg.FetchResultFromChanel(resultChannel)
 		}
-
 		fmt.Printf("[+] daydaymap search complete\n")
 	},
 }
diff --git a/cmd/fofa/fofa.go b/cmd/fofa/fofa.go
index c4a7d8f..e0c7722 100644
--- a/cmd/fofa/fofa.go
+++ b/cmd/fofa/fofa.go
@@ -30,15 +30,19 @@ var FofaCmd = &cobra.Command{
 		fmt.Printf("[+] fofa is working...\n")
 
 		client := pkg.GenerateHTTPClient(define.TimeOut)
-		reqString := pkg.MergeReqListToReqString("fofa", define.ReqIpList, define.ReqDomainList)
-		reqBody := fofa.FofaRequest(client, reqString, 1)
+		reqStringList := pkg.MergeReqListToReqStringList("fofa", define.ReqIpList, define.ReqDomainList)
+		reqBody := fofa.FofaRequest(client, 1, reqStringList...)
 		reqResult := fofa.ParseFofaResult(reqBody...)
 
-		if int(reqResult[0].Size) > 1000 {
-			pageList := net2.GeneratePageList(reqResult[0].Size)
-			reqBody2 := fofa.FofaRequest(client, reqString, pageList...)
-			reqResult2 := fofa.ParseFofaResult(reqBody2...)
-			reqResult = append(reqResult, reqResult2...)
+		for i, _ := range reqResult {
+			if int(reqResult[i].Size) > 1000 {
+				pageList := net2.GeneratePageList(reqResult[i].Size)
+				for _, v := range pageList {
+					reqBody2 := fofa.FofaRequest(client, v, reqStringList[i])
+					reqResult2 := fofa.ParseFofaResult(reqBody2...)
+					reqResult = append(reqResult, reqResult2...)
+				}
+			}
 		}
 
 		chanNum := cap(reqResult)
diff --git a/cmd/hunter/hunter.go b/cmd/hunter/hunter.go
index 0c1048f..4ff1b9e 100644
--- a/cmd/hunter/hunter.go
+++ b/cmd/hunter/hunter.go
@@ -31,15 +31,19 @@ var HunterCmd = &cobra.Command{
 
 		client := pkg.GenerateHTTPClient(define.TimeOut)
 
-		reqString := pkg.MergeReqListToReqString("hunter", define.ReqIpList, define.ReqDomainList)
-		reqBody := hunter.HunterRequest(client, reqString, 1)
+		reqStringList := pkg.MergeReqListToReqStringList("hunter", define.ReqIpList, define.ReqDomainList)
+		reqBody := hunter.HunterRequest(client, 1, reqStringList...)
 		reqResult := hunter.ParseHunterResult(reqBody...)
 
-		if int(reqResult[0].Data.Total) > 100 {
-			pageList := net2.GeneratePageList(reqResult[0].Data.Total)
-			reqBody2 := hunter.HunterRequest(client, reqString, pageList...)
-			reqResult2 := hunter.ParseHunterResult(reqBody2...)
-			reqResult = append(reqResult, reqResult2...)
+		for i, _ := range reqResult {
+			if int(reqResult[i].Data.Total) > 1000 {
+				pageList := net2.GeneratePageList(reqResult[i].Data.Total)
+				for _, v := range pageList {
+					reqBody2 := hunter.HunterRequest(client, v, reqStringList[i])
+					reqResult2 := hunter.ParseHunterResult(reqBody2...)
+					reqResult = append(reqResult, reqResult2...)
+				}
+			}
 		}
 
 		chanNum := cap(reqResult)
diff --git a/cmd/quake/quake.go b/cmd/quake/quake.go
index 7302675..d0da134 100644
--- a/cmd/quake/quake.go
+++ b/cmd/quake/quake.go
@@ -31,15 +31,19 @@ var QuakeCmd = &cobra.Command{
 
 		client := pkg.GenerateHTTPClient(define.TimeOut)
 
-		reqString := pkg.MergeReqListToReqString("quake", define.ReqIpList, define.ReqDomainList)
-		reqBody := quake.QuakeRequest(client, reqString, 1)
+		reqStringList := pkg.MergeReqListToReqStringList("quake", define.ReqIpList, define.ReqDomainList)
+		reqBody := quake.QuakeRequest(client, 1, reqStringList...)
 		reqResult := quake.ParseQuakeResult(reqBody...)
 
-		if int(reqResult[0].Meta.Pagination.Total) > 100 {
-			pageList := net2.GeneratePageList(reqResult[0].Meta.Pagination.Total)
-			reqBody2 := quake.QuakeRequest(client, reqString, pageList...)
-			reqResult2 := quake.ParseQuakeResult(reqBody2...)
-			reqResult = append(reqResult, reqResult2...)
+		for i, _ := range reqResult {
+			if int(reqResult[i].Meta.Pagination.Total) > 100 {
+				pageList := net2.GeneratePageList(reqResult[i].Meta.Pagination.Total)
+				for _, v := range pageList {
+					reqBody2 := quake.QuakeRequest(client, v, reqStringList[i])
+					reqResult2 := quake.ParseQuakeResult(reqBody2...)
+					reqResult = append(reqResult, reqResult2...)
+				}
+			}
 		}
 
 		chanNum := cap(reqResult)
diff --git a/define/var.go b/define/var.go
index 78cbd4a..84041c0 100644
--- a/define/var.go
+++ b/define/var.go
@@ -32,9 +32,10 @@ type Configure struct {
 }
 
 var ModeToGrammar = map[string]string{
-	"fofa":   "=",
-	"hunter": "=",
-	"quake":  ":",
+	"fofa":      "=",
+	"hunter":    "=",
+	"quake":     ":",
+	"daydaymap": ":",
 }
 
 var Once sync.Once
diff --git a/main/main.go b/main/main.go
index c7f2db3..f5e7a1d 100644
--- a/main/main.go
+++ b/main/main.go
@@ -27,7 +27,7 @@ import (
 )
 
 func init() {
-	log.Init("trace")
+	log.Init("info")
 	configFile := pkg.GetPwd()
 	configFile = strings.Join([]string{configFile, "/config.json"}, "")
 	err := File.FileNonExistCreate(configFile)
diff --git a/pkg/data.go b/pkg/data.go
index 24a622e..342f9c8 100644
--- a/pkg/data.go
+++ b/pkg/data.go
@@ -2,6 +2,7 @@ package pkg
 
 import (
 	"bufio"
+	"fmt"
 	"github.com/shadowabi/AutoDomain_rebuild/define"
 	"github.com/shadowabi/AutoDomain_rebuild/utils/Compare"
 	"github.com/shadowabi/AutoDomain_rebuild/utils/Error"
@@ -66,18 +67,17 @@ func ConvertToReqDomainList(param ...string) (reqDomainList []string) {
 	return reqDomainList
 }
 
-func MergeReqListToReqString(mode string, reqIpList []string, reqDomainList []string) (reqString string) {
+func MergeReqListToReqStringList(mode string, reqIpList []string, reqDomainList []string) (reqStringList []string) {
 	grammar := define.ModeToGrammar[mode]
 	if grammar != "" {
 		for _, host := range reqIpList {
-			reqString += "ip" + grammar + host + " || "
+			reqStringList = append(reqStringList, fmt.Sprintf("ip%v\"%v\"", grammar, host))
 		}
 		for _, host := range reqDomainList {
-			reqString += "domain" + grammar + host + " || "
+			reqStringList = append(reqStringList, fmt.Sprintf("domain%v\"%v\"", grammar, host))
 		}
 	}
-	reqString = strings.TrimSuffix(reqString, " || ")
-	return reqString
+	return reqStringList
 }
 
 func FetchResultFromChanel(resultChannel chan []string) {
diff --git a/pkg/daydaymap/req.go b/pkg/daydaymap/req.go
index e40555f..524239d 100644
--- a/pkg/daydaymap/req.go
+++ b/pkg/daydaymap/req.go
@@ -2,6 +2,7 @@ package daydaymap
 
 import (
 	"bytes"
+	"encoding/base64"
 	"encoding/json"
 	"github.com/shadowabi/AutoDomain_rebuild/config"
 	"github.com/shadowabi/AutoDomain_rebuild/define"
@@ -11,24 +12,24 @@ import (
 	"time"
 )
 
-func DayDayMapRequest(client *http.Client, reqString string, totalList ...int) (respBody []string) {
-	if len(totalList) != 0 {
-		for _, total := range totalList {
-			data := struct {
-				Page    int    `json:"page"`
-				Size    int    `json:"page_size"`
-				Keyword string `json:"keyword"`
-			}{
-				Page:    1,
-				Size:    total,
-				Keyword: reqString,
-			}
+type DaydaymapData struct {
+	Page    int    `json:"page"`
+	Size    int    `json:"page_size"`
+	Keyword string `json:"keyword"`
+}
+
+func DayDayMapRequest(client *http.Client, page int, total int, reqStringList ...string) (respBody []string) {
+	if len(reqStringList) != 0 {
+		for _, reqString := range reqStringList {
+			reqString = base64.URLEncoding.EncodeToString([]byte(reqString))
+			data := DaydaymapData{Page: page, Size: total, Keyword: reqString}
 			dataJson, _ := json.Marshal(data)
 			dataReq := bytes.NewBuffer(dataJson)
 			req, _ := http.NewRequest("POST", "https://www.daydaymap.com/api/v1/raymap/search/all", dataReq)
 			req.Header.Set("User-Agent", define.UserAgent)
 			req.Header.Set("Content-Type", "application/json")
 			req.Header.Set("api-key", config.C.DaydaymapKey)
+
 			resp, err := client.Do(req)
 			time.Sleep(500 * time.Millisecond)
 			Error.HandleError(err)
diff --git a/pkg/fofa/req.go b/pkg/fofa/req.go
index 86c56bc..1fdfd14 100644
--- a/pkg/fofa/req.go
+++ b/pkg/fofa/req.go
@@ -11,12 +11,12 @@ import (
 	"time"
 )
 
-func FofaRequest(client *http.Client, reqString string, page ...int) (respBody []string) {
-	if len(page) != 0 {
-		reqString = base64.URLEncoding.EncodeToString([]byte(reqString))
-		for _, num := range page {
+func FofaRequest(client *http.Client, page int, reqStringList ...string) (respBody []string) {
+	if len(reqStringList) != 0 {
+		for _, reqString := range reqStringList {
+			reqString = base64.URLEncoding.EncodeToString([]byte(reqString))
 			url := fmt.Sprintf("https://fofa.info/api/v1/search/all?email=%s&key=%s&qbase64=%s&full=false&fields=protocol,host&size=1000&page=%v",
-				config.C.FofaMail, config.C.FofaKey, reqString, num)
+				config.C.FofaMail, config.C.FofaKey, reqString, page)
 			req, _ := http.NewRequest("GET", url, nil)
 			req.Header.Set("User-Agent", define.UserAgent)
diff --git a/pkg/hunter/req.go b/pkg/hunter/req.go
index 9fbf60d..0d0c8d9 100644
--- a/pkg/hunter/req.go
+++ b/pkg/hunter/req.go
@@ -11,12 +11,12 @@ import (
 	"time"
 )
 
-func HunterRequest(client *http.Client, reqString string, page ...int) (respBody []string) {
-	if len(page) != 0 {
-		reqString = base64.URLEncoding.EncodeToString([]byte(reqString))
-		for _, num := range page {
+func HunterRequest(client *http.Client, page int, reqStringList ...string) (respBody []string) {
+	if len(reqStringList) != 0 {
+		for _, reqString := range reqStringList {
+			reqString = base64.URLEncoding.EncodeToString([]byte(reqString))
 			url := fmt.Sprintf("https://hunter.qianxin.com/openApi/search?api-key=%s&search=%s&page=%v&page_size=100&is_web=3",
-				config.C.HunterKey, reqString, num)
+				config.C.HunterKey, reqString, page)
 			req, _ := http.NewRequest("GET", url, nil)
 			req.Header.Set("User-Agent", define.UserAgent)
diff --git a/pkg/quake/req.go b/pkg/quake/req.go
index 152c461..5b8e3c8 100644
--- a/pkg/quake/req.go
+++ b/pkg/quake/req.go
@@ -11,10 +11,10 @@ import (
 	"time"
 )
 
-func QuakeRequest(client *http.Client, reqString string, page ...int) (respBody []string) {
-	if len(page) != 0 {
-		for _, num := range page {
-			data := strings.NewReader(fmt.Sprintf("query=%s&start=%v&size=100&include=service.name&include=port&include=service.http.host", reqString, num))
+func QuakeRequest(client *http.Client, page int, reqStringList ...string) (respBody []string) {
+	if len(reqStringList) != 0 {
+		for _, reqString := range reqStringList {
+			data := strings.NewReader(fmt.Sprintf("query=%s&start=%v&size=100&include=service.name&include=port&include=service.http.host", reqString, page))
 			req, _ := http.NewRequest("POST", "https://quake.360.net/api/v3/search/quake_service", data)
 			req.Header.Set("User-Agent", define.UserAgent)
 			req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
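For readers skimming the patch, here is a minimal standalone sketch of the per-host query strings that the reworked pkg.MergeReqListToReqStringList produces. The lowercase helper names and the example hosts below are illustrative only and are not part of the patch; the map literal and Sprintf formats are copied from the hunks above.

package main

import "fmt"

// modeToGrammar mirrors define.ModeToGrammar after this patch:
// daydaymap now uses ":" like quake, while fofa and hunter keep "=".
var modeToGrammar = map[string]string{
	"fofa":      "=",
	"hunter":    "=",
	"quake":     ":",
	"daydaymap": ":",
}

// mergeReqListToReqStringList sketches pkg.MergeReqListToReqStringList:
// instead of one large "a || b || c" query string, each IP and domain
// becomes its own query, so every target is requested and paginated
// separately by the cobra commands above.
func mergeReqListToReqStringList(mode string, reqIpList, reqDomainList []string) []string {
	var reqStringList []string
	grammar := modeToGrammar[mode]
	if grammar == "" {
		return reqStringList
	}
	for _, host := range reqIpList {
		reqStringList = append(reqStringList, fmt.Sprintf("ip%v\"%v\"", grammar, host))
	}
	for _, host := range reqDomainList {
		reqStringList = append(reqStringList, fmt.Sprintf("domain%v\"%v\"", grammar, host))
	}
	return reqStringList
}

func main() {
	// Example: fofa uses "=", so this prints ip="1.2.3.4" and domain="example.com".
	for _, q := range mergeReqListToReqStringList("fofa", []string{"1.2.3.4"}, []string{"example.com"}) {
		fmt.Println(q)
	}
}

Splitting each host into its own query is what allows the commands to check the result total per target and page through it with net2.GeneratePageList, rather than issuing a single oversized combined query.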