package main

import (
	"bufio"
	"context"
	"flag"
	"fmt"
	"io/ioutil"
	"log"
	"net/textproto"
	"net/url"
	"os"
	"strconv"
	"strings"

	"github.com/ffuf/ffuf/pkg/ffuf"
	"github.com/ffuf/ffuf/pkg/filter"
	"github.com/ffuf/ffuf/pkg/input"
	"github.com/ffuf/ffuf/pkg/output"
	"github.com/ffuf/ffuf/pkg/runner"
)

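// cliOptions holds the raw command line flag values before they are
// validated and copied into the ffuf.Config.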
type cliOptions struct {
	extensions             string
	delay                  string
	filterStatus           string
	filterSize             string
	filterRegexp           string
	filterWords            string
	filterLines            string
	matcherStatus          string
	matcherSize            string
	matcherRegexp          string
	matcherWords           string
	matcherLines           string
	proxyURL               string
	replayProxyURL         string
	request                string
	requestProto           string
	URL                    string
	outputFormat           string
	ignoreBody             bool
	wordlists              multiStringFlag
	inputcommands          multiStringFlag
	headers                multiStringFlag
	cookies                multiStringFlag
	AutoCalibrationStrings multiStringFlag
	showVersion            bool
	debugLog               string
}

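// multiStringFlag implements flag.Value so that the same flag can be supplied
// multiple times, collecting every occurrence into a string slice.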
type multiStringFlag []string

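// String returns the flag's string representation; there is no meaningful
// default value, so it is always empty.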
func (m *multiStringFlag) String() string {
	return ""
}

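// Set appends a new value, so repeated uses of the flag accumulate instead of
// overwriting each other.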
func (m *multiStringFlag) Set(value string) error {
	*m = append(*m, value)
	return nil
}

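// main parses the command line flags, prepares the configuration, job and
// filters, runs the optional auto-calibration and finally starts the job.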
func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	conf := ffuf.NewConfig(ctx)
	opts := cliOptions{}
	var ignored bool
	flag.BoolVar(&conf.IgnoreWordlistComments, "ic", false, "Ignore wordlist comments")
	flag.StringVar(&opts.extensions, "e", "", "Comma separated list of extensions. Extends FUZZ keyword.")
	flag.BoolVar(&conf.DirSearchCompat, "D", false, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.")
	flag.Var(&opts.headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.")
	flag.StringVar(&opts.URL, "u", "", "Target URL")
	flag.Var(&opts.wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'")
	flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
	flag.StringVar(&opts.delay, "p", "", "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
	flag.StringVar(&opts.filterStatus, "fc", "", "Filter HTTP status codes from response. Comma separated list of codes and ranges")
	flag.StringVar(&opts.filterSize, "fs", "", "Filter HTTP response size. Comma separated list of sizes and ranges")
	flag.StringVar(&opts.filterRegexp, "fr", "", "Filter regexp")
	flag.StringVar(&opts.filterWords, "fw", "", "Filter by amount of words in response. Comma separated list of word counts and ranges")
	flag.StringVar(&opts.filterLines, "fl", "", "Filter by amount of lines in response. Comma separated list of line counts and ranges")
	flag.StringVar(&conf.Data, "d", "", "POST data")
	flag.StringVar(&conf.Data, "data", "", "POST data (alias of -d)")
	flag.StringVar(&conf.Data, "data-ascii", "", "POST data (alias of -d)")
	flag.StringVar(&conf.Data, "data-binary", "", "POST data (alias of -d)")
	flag.BoolVar(&conf.Colors, "c", false, "Colorize output.")
	flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)")
	flag.Var(&opts.inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.")
	flag.IntVar(&conf.InputNum, "input-num", 100, "Number of inputs to test. Used in conjunction with --input-cmd.")
	flag.StringVar(&conf.InputMode, "mode", "clusterbomb", "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
	flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)")
	flag.Var(&opts.cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.")
	flag.Var(&opts.cookies, "cookie", "Cookie data (alias of -b)")
	flag.StringVar(&opts.matcherStatus, "mc", "200,204,301,302,307,401,403", "Match HTTP status codes, or \"all\" for everything.")
	flag.StringVar(&opts.matcherSize, "ms", "", "Match HTTP response size")
	flag.StringVar(&opts.matcherRegexp, "mr", "", "Match regexp")
	flag.StringVar(&opts.matcherWords, "mw", "", "Match amount of words in response")
	flag.StringVar(&opts.matcherLines, "ml", "", "Match amount of lines in response")
	flag.StringVar(&opts.proxyURL, "x", "", "HTTP Proxy URL")
	flag.StringVar(&opts.request, "request", "", "File containing the raw http request")
	flag.StringVar(&opts.requestProto, "request-proto", "https", "Protocol to use along with raw request")
	flag.StringVar(&conf.Method, "X", "GET", "HTTP method to use")
	flag.StringVar(&conf.OutputFile, "o", "", "Write output to file")
	flag.StringVar(&opts.outputFormat, "of", "json", "Output file format. Available formats: json, ejson, html, md, csv, ecsv")
	flag.StringVar(&conf.OutputDirectory, "od", "", "Directory path to store matched results to.")
	flag.BoolVar(&conf.IgnoreBody, "ignore-body", false, "Do not fetch the response content.")
	flag.BoolVar(&conf.Quiet, "s", false, "Do not print additional information (silent mode)")
	flag.BoolVar(&conf.StopOn403, "sf", false, "Stop when > 95% of responses return 403 Forbidden")
	flag.BoolVar(&conf.StopOnErrors, "se", false, "Stop on spurious errors")
	flag.BoolVar(&conf.StopOnAll, "sa", false, "Stop on all error cases. Implies -sf and -se.")
	flag.BoolVar(&conf.FollowRedirects, "r", false, "Follow redirects")
	flag.BoolVar(&conf.Recursion, "recursion", false, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.")
	flag.IntVar(&conf.RecursionDepth, "recursion-depth", 0, "Maximum recursion depth.")
	flag.StringVar(&opts.replayProxyURL, "replay-proxy", "", "Replay matched requests using this proxy.")
	flag.BoolVar(&conf.AutoCalibration, "ac", false, "Automatically calibrate filtering options")
	flag.Var(&opts.AutoCalibrationStrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac")
	flag.IntVar(&conf.Threads, "t", 40, "Number of concurrent threads.")
	flag.IntVar(&conf.Timeout, "timeout", 10, "HTTP request timeout in seconds.")
	flag.IntVar(&conf.MaxTime, "maxtime", 0, "Maximum running time in seconds for entire process.")
	flag.IntVar(&conf.MaxTimeJob, "maxtime-job", 0, "Maximum running time in seconds per job.")
	flag.BoolVar(&conf.Verbose, "v", false, "Verbose output, printing full URL and redirect location (if any) with the results.")
	flag.BoolVar(&opts.showVersion, "V", false, "Show version information.")
	flag.StringVar(&opts.debugLog, "debug-log", "", "Write all of the internal logging to the specified file.")
	flag.Usage = Usage
	flag.Parse()
	if opts.showVersion {
		fmt.Printf("ffuf version: %s\n", ffuf.VERSION)
		os.Exit(0)
	}
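	// Route internal logging to the file given with -debug-log, or discard it entirely.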
	if len(opts.debugLog) != 0 {
		f, err := os.OpenFile(opts.debugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
			log.SetOutput(ioutil.Discard)
		} else {
			log.SetOutput(f)
			defer f.Close()
		}
	} else {
		log.SetOutput(ioutil.Discard)
	}
	if err := prepareConfig(&opts, &conf); err != nil {
		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
		Usage()
		os.Exit(1)
	}
	job, err := prepareJob(&conf)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
		Usage()
		os.Exit(1)
	}
	if err := prepareFilters(&opts, &conf); err != nil {
		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
		Usage()
		os.Exit(1)
	}

	if err := filter.CalibrateIfNeeded(job); err != nil {
		fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err)
		os.Exit(1)
	}

	// Job handles waiting for goroutines to complete itself
	job.Start()
}

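// prepareJob builds a ffuf.Job from the configuration, attaching the input
// provider(s), the HTTP runner, the optional replay runner and the output
// provider.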
func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
	job := &ffuf.Job{
		Config: conf,
	}
	errs := ffuf.NewMultierror()
	var err error
	inputprovider, err := input.NewInputProvider(conf)
	if err != nil {
		errs.Add(err)
	}
	// TODO: implement error handling for runnerprovider and outputprovider
	// We only have http runner right now
	job.Runner = runner.NewRunnerByName("http", conf, false)
	if len(conf.ReplayProxyURL) > 0 {
		job.ReplayRunner = runner.NewRunnerByName("http", conf, true)
	}
	// Initialize the correct inputprovider
	for _, v := range conf.InputProviders {
		err = inputprovider.AddProvider(v)
		if err != nil {
			errs.Add(err)
		}
	}
	job.Input = inputprovider
	// We only have stdout outputprovider right now
	job.Output = output.NewOutputProviderByName("stdout", conf)
	return job, errs.ErrorOrNil()
}

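// prepareFilters registers the matchers and filters requested on the command
// line, and warns when response body based options are combined with
// -ignore-body.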
func prepareFilters(parseOpts *cliOptions, conf *ffuf.Config) error {
	errs := ffuf.NewMultierror()
	// If any other matcher is set, ignore -mc default value
	matcherSet := false
	statusSet := false
	warningIgnoreBody := false
	flag.Visit(func(f *flag.Flag) {
		if f.Name == "mc" {
			statusSet = true
		}
		if f.Name == "ms" {
			matcherSet = true
			warningIgnoreBody = true
		}
		if f.Name == "ml" {
			matcherSet = true
			warningIgnoreBody = true
		}
		if f.Name == "mr" {
			matcherSet = true
		}
		if f.Name == "mw" {
			matcherSet = true
			warningIgnoreBody = true
		}
	})
	if statusSet || !matcherSet {
		if err := filter.AddMatcher(conf, "status", parseOpts.matcherStatus); err != nil {
			errs.Add(err)
		}
	}

	if parseOpts.filterStatus != "" {
		if err := filter.AddFilter(conf, "status", parseOpts.filterStatus); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.filterSize != "" {
		warningIgnoreBody = true
		if err := filter.AddFilter(conf, "size", parseOpts.filterSize); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.filterRegexp != "" {
		if err := filter.AddFilter(conf, "regexp", parseOpts.filterRegexp); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.filterWords != "" {
		warningIgnoreBody = true
		if err := filter.AddFilter(conf, "word", parseOpts.filterWords); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.filterLines != "" {
		warningIgnoreBody = true
		if err := filter.AddFilter(conf, "line", parseOpts.filterLines); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.matcherSize != "" {
		if err := filter.AddMatcher(conf, "size", parseOpts.matcherSize); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.matcherRegexp != "" {
		if err := filter.AddMatcher(conf, "regexp", parseOpts.matcherRegexp); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.matcherWords != "" {
		if err := filter.AddMatcher(conf, "word", parseOpts.matcherWords); err != nil {
			errs.Add(err)
		}
	}
	if parseOpts.matcherLines != "" {
		if err := filter.AddMatcher(conf, "line", parseOpts.matcherLines); err != nil {
			errs.Add(err)
		}
	}
	if conf.IgnoreBody && warningIgnoreBody {
		fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
	}
	return errs.ErrorOrNil()
}

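// prepareConfig validates the parsed command line options and copies them
// into the ffuf.Config: extensions, input providers, headers, delay, proxies,
// output options and recursion settings.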
func prepareConfig(parseOpts *cliOptions, conf *ffuf.Config) error {
	//TODO: refactor in a proper flag library that can handle things like required flags
	errs := ffuf.NewMultierror()

	var err error
	var err2 error
	if len(parseOpts.URL) == 0 && parseOpts.request == "" {
		errs.Add(fmt.Errorf("-u flag or -request flag is required"))
	}

	// prepare extensions
	if parseOpts.extensions != "" {
		extensions := strings.Split(parseOpts.extensions, ",")
		conf.Extensions = extensions
	}

	// Convert cookies to a header
	if len(parseOpts.cookies) > 0 {
		parseOpts.headers.Set("Cookie: " + strings.Join(parseOpts.cookies, "; "))
	}

	//Prepare inputproviders
	for _, v := range parseOpts.wordlists {
		wl := strings.SplitN(v, ":", 2)
		if len(wl) == 2 {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: wl[1],
			})
		} else {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: "FUZZ",
			})
		}
	}
	for _, v := range parseOpts.inputcommands {
		ic := strings.SplitN(v, ":", 2)
		if len(ic) == 2 {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: ic[1],
			})
			conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
		} else {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: "FUZZ",
			})
			conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
		}
	}

	if len(conf.InputProviders) == 0 {
		errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
	}

	// Prepare the request using body
	if parseOpts.request != "" {
		err := parseRawRequest(parseOpts, conf)
		if err != nil {
			errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	//Prepare URL
	if parseOpts.URL != "" {
		conf.Url = parseOpts.URL
	}

	//Prepare headers and make canonical
	for _, v := range parseOpts.headers {
		hs := strings.SplitN(v, ":", 2)
		if len(hs) == 2 {
			// trim and make canonical
			// except if used in custom defined header
			var CanonicalNeeded bool = true
			for _, a := range conf.CommandKeywords {
				if a == hs[0] {
					CanonicalNeeded = false
				}
			}
			// check if part of InputProviders
			if CanonicalNeeded {
				for _, b := range conf.InputProviders {
					if b.Keyword == hs[0] {
						CanonicalNeeded = false
					}
				}
			}
			if CanonicalNeeded {
				var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
				conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
			} else {
				conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
			}
		} else {
			errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
		}
	}

	//Prepare delay
	d := strings.Split(parseOpts.delay, "-")
	if len(d) > 2 {
		errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
	} else if len(d) == 2 {
		conf.Delay.IsRange = true
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
		conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
		if err != nil || err2 != nil {
			errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
		}
	} else if len(parseOpts.delay) > 0 {
		conf.Delay.IsRange = false
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(parseOpts.delay, 64)
		if err != nil {
			errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
		}
	}

	// Verify proxy url format
	if len(parseOpts.proxyURL) > 0 {
		_, err := url.Parse(parseOpts.proxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
		} else {
			conf.ProxyURL = parseOpts.proxyURL
		}
	}

	// Verify replayproxy url format
	if len(parseOpts.replayProxyURL) > 0 {
		_, err := url.Parse(parseOpts.replayProxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
		} else {
			conf.ReplayProxyURL = parseOpts.replayProxyURL
		}
	}

	//Check the output file format option
	if conf.OutputFile != "" {
		//No need to check / error out if output file isn't defined
		outputFormats := []string{"json", "ejson", "html", "md", "csv", "ecsv"}
		found := false
		for _, f := range outputFormats {
			if f == parseOpts.outputFormat {
				conf.OutputFormat = f
				found = true
			}
		}
		if !found {
			errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.outputFormat))
		}
	}

	// Auto-calibration strings
	if len(parseOpts.AutoCalibrationStrings) > 0 {
		conf.AutoCalibrationStrings = parseOpts.AutoCalibrationStrings
	}
	// Using -acc implies -ac
	if len(conf.AutoCalibrationStrings) > 0 {
		conf.AutoCalibration = true
	}

	// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
	if conf.Method == "GET" {
		if len(conf.Data) > 0 {
			conf.Method = "POST"
		}
	}

	conf.CommandLine = strings.Join(os.Args, " ")

	for _, provider := range conf.InputProviders {
		if !keywordPresent(provider.Keyword, conf) {
			errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	// Do checks for recursion mode
	if conf.Recursion {
		if !strings.HasSuffix(conf.Url, "FUZZ") {
			errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	return errs.ErrorOrNil()
}

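// parseRawRequest reads a raw HTTP request from the file given with -request
// and fills in the method, URL, headers and body of the configuration.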
func parseRawRequest(parseOpts *cliOptions, conf *ffuf.Config) error {
	file, err := os.Open(parseOpts.request)
	if err != nil {
		return fmt.Errorf("could not open request file: %s", err)
	}
	defer file.Close()

	r := bufio.NewReader(file)

	s, err := r.ReadString('\n')
	if err != nil {
		return fmt.Errorf("could not read request: %s", err)
	}
	parts := strings.Split(s, " ")
	if len(parts) < 3 {
		return fmt.Errorf("malformed request supplied")
	}
	// Set the request Method
	conf.Method = parts[0]

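	// Read header lines until the empty line that separates headers from the body.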
	for {
		line, err := r.ReadString('\n')
		line = strings.TrimSpace(line)

		if err != nil || line == "" {
			break
		}

		p := strings.SplitN(line, ":", 2)
		if len(p) != 2 {
			continue
		}

		if strings.EqualFold(p[0], "content-length") {
			continue
		}

		conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
	}

	// Handle case with the full http url in path. In that case,
	// ignore any host header that we encounter and use the path as request URL
	if strings.HasPrefix(parts[1], "http") {
		parsed, err := url.Parse(parts[1])
		if err != nil {
			return fmt.Errorf("could not parse request URL: %s", err)
		}
		conf.Url = parts[1]
		conf.Headers["Host"] = parsed.Host
	} else {
		// Build the request URL from the request
		conf.Url = parseOpts.requestProto + "://" + conf.Headers["Host"] + parts[1]
	}

	// Set the request body
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return fmt.Errorf("could not read request body: %s", err)
	}
	conf.Data = string(b)

	return nil
}

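// keywordPresent reports whether the given keyword appears anywhere in the
// HTTP method, URL, POST data or headers of the configuration.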
func keywordPresent(keyword string, conf *ffuf.Config) bool {
	//Search for keyword from HTTP method, URL and POST data too
	if strings.Index(conf.Method, keyword) != -1 {
		return true
	}
	if strings.Index(conf.Url, keyword) != -1 {
		return true
	}
	if strings.Index(conf.Data, keyword) != -1 {
		return true
	}
	for k, v := range conf.Headers {
		if strings.Index(k, keyword) != -1 {
			return true
		}
		if strings.Index(v, keyword) != -1 {
			return true
		}
	}
	return false
}