
Commit 67fddea

LSD00 authored and committed Jun 4, 2024
* Cleaned code
1 parent 91d1e44 commit 67fddea

3 files changed (+47 -52)

main.go (+13 -15)
@@ -9,28 +9,26 @@ import (
 )
 
 var (
-    domain, reqfile, wordlist, invalid, flags string
-    th                                        int
-    opt                                       workers.Options
+    invalid, encoders string
+    options           workers.Options
 )
 
 func main() {
-    kingpin.Flag("domain", "target domain").Required().Short('d').StringVar(&domain)
-    kingpin.Flag("request", "file with http request").Required().Short('r').StringVar(&reqfile)
-    kingpin.Flag("wordlist", "file with wordlist").Required().Short('w').StringVar(&wordlist)
-    kingpin.Flag("regex", "regex for search").Default(".+").StringVar(&opt.Regex)
-    kingpin.Flag("threads", "setting threads").Default("25").IntVar(&th)
-    kingpin.Flag("tls", "is target use tls").Default("false").BoolVar(&opt.TlsEnabled)
+    kingpin.Flag("domain", "target domain").Required().Short('d').StringVar(&options.Domain)
+    kingpin.Flag("request", "file with http request").Required().Short('r').StringVar(&options.RequestFile)
+    kingpin.Flag("wordlist", "file with wordlist").Required().Short('w').StringVar(&options.WordlistFile)
+    kingpin.Flag("regex", "regex for search").Default(".+").StringVar(&options.Regex)
+    kingpin.Flag("threads", "setting threads").Default("25").IntVar(&options.Concurrents)
+    kingpin.Flag("tls", "is target use tls").Default("false").BoolVar(&options.TlsEnabled)
     kingpin.Flag("bad-codes", "invalid codes for fuzzing ex. 404, 503, 400").Default("404").StringVar(&invalid)
-    kingpin.Flag("encoders", "encode payloads: urlencode, base64, hex").Default("none").Short('e').StringVar(&flags)
+    kingpin.Flag("encoders", "encode payloads: urlencode, base64, hex").Default("none").Short('e').StringVar(&encoders)
     kingpin.Parse()
-    opt.InvalidCode = strings.Split(strings.ReplaceAll(invalid, " ", ""), ",")
-    worker, err := workers.NewPool(wordlist, reqfile, th)
+    options.InvalidCode = strings.Split(strings.ReplaceAll(invalid, " ", ""), ",")
+    options.Encoders = strings.Split(strings.ReplaceAll(encoders, " ", ""), ",")
+    worker, err := workers.NewPool(options)
     if err != nil {
         fmt.Println(err)
     }
-    worker.AddFlags(strings.Split(strings.ReplaceAll(flags, " ", ""), ","))
-    worker.Options = opt
     fmt.Println(`
  _ _ _ ______
 | | | | | | | ___|
@@ -41,5 +39,5 @@ func main() {
 
 
 `)
-    worker.Fuzz(domain)
+    worker.Fuzz()
 }
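
The net effect of this file's change is that the CLI only fills in a workers.Options value and hands it to the pool. A minimal sketch of driving the same API without kingpin, using only the field and function names visible in this diff (the import path and the file/domain values are assumptions, not taken from the repository):

package main

import (
    "fmt"

    "example.local/reqfuzz/pkg/workers" // hypothetical module path
)

func main() {
    // Field names mirror the Options struct added in pkg/workers/workers.go.
    options := workers.Options{
        Domain:       "target.example.com", // hypothetical target
        RequestFile:  "request.txt",        // hypothetical request template
        WordlistFile: "wordlist.txt",       // hypothetical wordlist
        Regex:        ".+",
        Concurrents:  25,
        TlsEnabled:   true,
        InvalidCode:  []string{"404"},
        Encoders:     []string{"none"},
    }

    worker, err := workers.NewPool(options)
    if err != nil {
        fmt.Println(err)
        return
    }
    worker.Fuzz()
}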

pkg/formatter/formatter.go (+6 -8)
@@ -14,11 +14,12 @@ type Payload struct {
     encoders     map[string]EncoderFunc
     encoderFlags []string
     Payload      string
-    defaultContentLength, ContentLength int
+    DefaultContentLength, ContentLength int
 }
 
 func NewPayload(payload string, flags []string) *Payload {
     encoders := map[string]EncoderFunc{
+        // "none" is needed in case the encoder is not passed, because then we get an error
         "none": func(data *string) {
             return
         },
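
The "none" entry added above is a no-op encoder, so the default flag value still resolves to a valid map entry instead of a nil function. A self-contained sketch of that pattern, reusing the EncoderFunc signature from this file (the base64 encoder body is an assumption for illustration, not copied from the repository):

package main

import (
    "encoding/base64"
    "fmt"
)

// EncoderFunc matches the signature used by the encoders map in formatter.go:
// it mutates the payload string in place.
type EncoderFunc func(data *string)

func main() {
    encoders := map[string]EncoderFunc{
        // No-op entry: the default "none" flag still finds a callable function.
        "none": func(data *string) {},
        // Hypothetical base64 encoder, shown only to exercise the map.
        "base64": func(data *string) {
            *data = base64.StdEncoding.EncodeToString([]byte(*data))
        },
    }

    payload := "admin"
    for _, flag := range []string{"none", "base64"} {
        encoders[flag](&payload)
    }
    fmt.Println(payload) // YWRtaW4=
}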
@@ -35,12 +36,13 @@ func NewPayload(payload string, flags []string) *Payload {
     return &Payload{Payload: payload, encoders: encoders, encoderFlags: flags}
 }
 
-func CountDefaultContentLenght(requestfile string) int {
+// This function is needed because fasthttp does not calculate the Content-length header.
+func CountDefaultContentLength(requestfile string) int {
     request, _ := os.ReadFile(requestfile)
     var content string
     splited_request := strings.Split(strings.ReplaceAll(string(request), "\r\n", "\n"), "\n")
     for i := 0; i < len(splited_request); i++ {
-        if len(splited_request[i]) == 0 {
+        if len(splited_request[i]) == 0 { // 0 - no data, just empty string
             dataslice := splited_request[i:]
             content = strings.Join(strings.Fields(strings.Join(dataslice[:], "")), "")
             break
@@ -49,13 +51,9 @@ func CountDefaultContentLenght(requestfile string) int {
     return len(content) - len("{{.Payload}}")
 }
 
-func (p *Payload) AddDefaultContentLenght(lenght int) {
-    p.defaultContentLength = lenght
-}
-
 func (p *Payload) CreatePayload() {
     for _, value := range p.encoderFlags {
         p.encoders[value](&p.Payload)
     }
-    p.ContentLength = p.defaultContentLength + len(p.Payload)
+    p.ContentLength = p.DefaultContentLength + len(p.Payload)
 }
pkg/workers/workers.go (+28 -29)
@@ -15,23 +15,23 @@ import (
 )
 
 type Options struct {
-    TlsEnabled  bool
-    Regex       string
-    InvalidCode []string
+    Concurrents                              int
+    TlsEnabled                               bool
+    Regex, Domain, RequestFile, WordlistFile string
+    InvalidCode, Encoders                    []string
 }
 
 type Pool struct {
-    concurrents      int
-    wordlists, flags []string
-    deflenght        int
-    req              *template.Template
-    Options          Options
+    wordlists      []string
+    default_length int
+    req            *template.Template
+    options        Options
 }
 
-func NewPool(wordlist, reqfile string, concurrents int) (*Pool, error) {
+func NewPool(options Options) (*Pool, error) {
     var pool Pool
-    pool.concurrents = concurrents
-    file, err := os.Open(wordlist)
+    pool.options = options
+    file, err := os.Open(options.WordlistFile)
     if err != nil {
         return nil, err
     }
@@ -40,34 +40,33 @@ func NewPool(wordlist, reqfile string, concurrents int) (*Pool, error) {
     for reader.Scan() {
         pool.wordlists = append(pool.wordlists, reader.Text())
     }
-    pool.req, err = template.ParseFiles(reqfile)
-    pool.deflenght = formatter.CountDefaultContentLenght(reqfile)
+    pool.req, err = template.ParseFiles(options.RequestFile)
+    pool.default_length = formatter.CountDefaultContentLength(options.RequestFile)
     if err != nil {
         return nil, err
     }
     return &pool, nil
 }
 
-func (p *Pool) AddFlags(flags []string) {
-    p.flags = flags
-}
-
-func (p *Pool) worker(wg *sync.WaitGroup, wordlist []string, domain string) {
+func (p *Pool) worker(wg *sync.WaitGroup, chunked_wordlist []string) {
     defer wg.Done()
-    re := regexp.MustCompile(p.Options.Regex)
-    for _, value := range wordlist {
+    re := regexp.MustCompile(p.options.Regex)
+    for _, value := range chunked_wordlist {
         var buf bytes.Buffer
-        payload := formatter.NewPayload(value, p.flags)
-        payload.AddDefaultContentLenght(p.deflenght)
+        payload := formatter.NewPayload(value, p.options.Encoders)
+        payload.DefaultContentLength = p.default_length
         payload.CreatePayload()
         p.req.Execute(&buf, payload)
-        worker := reqgen.NewWorker(p.Options.TlsEnabled, bufio.NewReader(&buf), domain)
+        worker := reqgen.NewWorker(p.options.TlsEnabled, bufio.NewReader(&buf), p.options.Domain)
         resp, err := worker.MakeRequest()
+
+        // Io.WriteString is necessary because it immediately writes to stdout
+        // with fmt.Println there are problems and it may not output all
         io.WriteString(os.Stdout, buf.String())
         if err != nil {
             io.WriteString(os.Stdout, err.Error())
             return
-        } else if !slices.Contains(p.Options.InvalidCode, fmt.Sprint(resp.Status)) && re.MatchString(resp.BodyData) {
+        } else if !slices.Contains(p.options.InvalidCode, fmt.Sprint(resp.Status)) && re.MatchString(resp.BodyData) {
             var formatted_code string
             if resp.Status >= 200 && resp.Status < 300 {
                 formatted_code = fmt.Sprintf("\033[92m%d\033[0m", resp.Status)
@@ -81,18 +80,18 @@ func (p *Pool) worker(wg *sync.WaitGroup, wordlist []string, domain string) {
         }
     }
 
-func (p *Pool) Fuzz(domain string) {
+func (p *Pool) Fuzz() {
     var wg sync.WaitGroup
-    chunks := len(p.wordlists) / p.concurrents
-    for i := 0; i < p.concurrents; i++ {
+    chunks := len(p.wordlists) / p.options.Concurrents
+    for i := 0; i < p.options.Concurrents; i++ {
         var chunked_wordlist []string
-        if i != p.concurrents-1 {
+        if i != p.options.Concurrents-1 {
             chunked_wordlist = p.wordlists[i*chunks : (i+1)*chunks]
         } else {
             chunked_wordlist = p.wordlists[i*chunks:]
         }
         wg.Add(1)
-        go p.worker(&wg, chunked_wordlist, domain)
+        go p.worker(&wg, chunked_wordlist)
     }
     wg.Wait()
 }
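
Fuzz splits the wordlist into Concurrents contiguous chunks and hands each goroutine one chunk, with the last goroutine absorbing any remainder left by the integer division. A standalone sketch of that slicing logic (splitChunks is a made-up helper name for illustration):

package main

import "fmt"

// splitChunks mirrors the slicing in Fuzz: n-1 equal-sized chunks,
// plus a final chunk that takes whatever integer division left over.
func splitChunks(words []string, n int) [][]string {
    size := len(words) / n
    chunks := make([][]string, 0, n)
    for i := 0; i < n; i++ {
        if i != n-1 {
            chunks = append(chunks, words[i*size:(i+1)*size])
        } else {
            chunks = append(chunks, words[i*size:])
        }
    }
    return chunks
}

func main() {
    words := []string{"a", "b", "c", "d", "e", "f", "g"}
    for i, chunk := range splitChunks(words, 3) {
        fmt.Println(i, chunk)
    }
    // 0 [a b]
    // 1 [c d]
    // 2 [e f g]
}

As in the original loop, a thread count larger than the wordlist makes the chunk size zero, so all work lands in the final chunk.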
