Interactive mode and recursion-strategy (#426)

* Add new feature: recursion-strategy

* Implementation of interactive mode (#8)

* Add interactive mode documentation (#9)

* Prepare for release 1.3.0 (#11)
Joona Hoikkala 2021-04-18 12:54:17 +03:00 committed by GitHub
parent ac63d5357e
commit f97c2f7600
23 changed files with 646 additions and 194 deletions


@ -1,7 +1,14 @@
## Changelog
- master
- New
- Changed
- v1.3.0
- New
- All output file formats now include the `Content-Type`.
- New CLI flag `-recursion-strategy` that allows queueing new recursion jobs for non-redirect responses as well.
- Ability to enter interactive mode by pressing `ENTER` during ffuf execution. The interactive mode allows the
user to change filters, manage the recursion queue, save a snapshot of matches to a file, and more.
- Changed
- Fixed a bad character in the progress output


@ -20,6 +20,7 @@ A fast web fuzzer written in Go.
- [Using external mutator](https://github.com/ffuf/ffuf#using-external-mutator-to-produce-test-cases)
- [Configuration files](https://github.com/ffuf/ffuf#configuration-files)
- [Help](https://github.com/ffuf/ffuf#usage)
- [Interactive mode](https://github.com/ffuf/ffuf#interactive-mode)
- [Sponsorware?](https://github.com/ffuf/ffuf#sponsorware)
## Sponsors
@ -160,18 +161,19 @@ To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-
Fuzz Faster U Fool - v1.2.0-git
HTTP OPTIONS:
-H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted.
-X HTTP method to use (default: GET)
-b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality.
-d POST data
-ignore-body Do not fetch the response content. (default: false)
-r Follow redirects (default: false)
-recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false)
-recursion-depth Maximum recursion depth. (default: 0)
-replay-proxy Replay matched requests using this proxy.
-timeout HTTP request timeout in seconds. (default: 10)
-u Target URL
-x HTTP Proxy URL
-H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted.
-X HTTP method to use
-b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality.
-d POST data
-ignore-body Do not fetch the response content. (default: false)
-r Follow redirects (default: false)
-recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false)
-recursion-depth Maximum recursion depth. (default: 0)
-recursion-strategy Recursion strategy: "default" for a redirect-based strategy, and "greedy" to recurse on all matches (default: default)
-replay-proxy Replay matched requests using this proxy.
-timeout HTTP request timeout in seconds. (default: 10)
-u Target URL
-x Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080
GENERAL OPTIONS:
-V Show version information. (default: false)
@ -241,6 +243,43 @@ EXAMPLE USAGE:
```
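
For example, greedy recursion can be combined with a depth limit as follows (the wordlist path and target URL below are placeholders):

```
ffuf -w /path/to/wordlist -u https://example.org/FUZZ -recursion -recursion-depth 2 -recursion-strategy greedy
```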
### Interactive mode
Pressing `ENTER` while ffuf is running pauses the process and drops the user into a shell-like interactive mode:
```
entering interactive mode
type "help" for a list of commands, or ENTER to resume.
> help
available commands:
fc [value] - (re)configure status code filter
fl [value] - (re)configure line count filter
fw [value] - (re)configure word count filter
fs [value] - (re)configure size filter
queueshow - show recursive job queue
queuedel [number] - delete a recursion job in the queue
queueskip - advance to the next queued recursion job
restart - restart and resume the current ffuf job
resume - resume current ffuf job (or: ENTER)
show - show results
savejson [filename] - save current matches to a file
help - you are looking at it
>
```
In this mode, filters can be reconfigured, the recursion queue managed, and the current state saved to disk.
When filters are (re)configured, they are applied retroactively: any matches already in memory that the new
filters would have excluded are removed.
The updated list of matches can be printed with the `show` command, which displays them just as `ffuf` would
originally have reported them.
As "negative" matches are not stored in memory, relaxing a filter cannot bring back matches that were already
filtered out. For that scenario, the `restart` command resets the state and starts the current job from the
beginning.
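
Internally, (re)applying a filter simply re-evaluates the stored matches against the new filter and drops the ones it would have excluded. A minimal sketch of that idea in Go, loosely following the `updateFilter` logic in `pkg/interactive` (the standalone package and function names are illustrative, not part of ffuf):

```go
package sketch // illustrative only, not part of ffuf

import "github.com/ffuf/ffuf/pkg/ffuf"

// applyRetroactively keeps only the stored results that the given filter would
// not have filtered out. ffuf performs this inline in the interactive handler.
func applyRetroactively(results []ffuf.Result, f ffuf.FilterProvider) []ffuf.Result {
	kept := make([]ffuf.Result, 0, len(results))
	for _, res := range results {
		// Rebuild a minimal Response so the filter can evaluate a stored result.
		fakeResp := &ffuf.Response{
			StatusCode:    res.StatusCode,
			ContentLines:  res.ContentLines,
			ContentWords:  res.ContentWords,
			ContentLength: res.ContentLength,
		}
		filterOut, _ := f.Filter(fakeResp)
		if !filterOut {
			kept = append(kept, res)
		}
	}
	return kept
}
```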
## Sponsorware
`ffuf` employs a sponsorware model. This means that all new features developed by its author are initially exclusively


@ -15,7 +15,8 @@
method = "GET"
proxyurl = "http://127.0.0.1:8080"
recursion = false
recursiondepth = 0
recursion_depth = 0
recursion_strategy = "default"
replayproxyurl = "http://127.0.0.1:8080"
timeout = 10
url = "https://example.org/FUZZ"


@ -54,7 +54,7 @@ func Usage() {
Description: "Options controlling the HTTP request and its parts.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "replay-proxy", "timeout", "ignore-body", "x"},
ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x"},
}
u_general := UsageSection{
Name: "GENERAL OPTIONS",

main.go

@ -4,16 +4,16 @@ import (
"context"
"flag"
"fmt"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/ffuf/ffuf/pkg/filter"
"github.com/ffuf/ffuf/pkg/input"
"github.com/ffuf/ffuf/pkg/interactive"
"github.com/ffuf/ffuf/pkg/output"
"github.com/ffuf/ffuf/pkg/runner"
"io/ioutil"
"log"
"os"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/ffuf/ffuf/pkg/filter"
"github.com/ffuf/ffuf/pkg/input"
"github.com/ffuf/ffuf/pkg/output"
"github.com/ffuf/ffuf/pkg/runner"
)
type multiStringFlag []string
@ -91,8 +91,9 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)")
flag.StringVar(&opts.HTTP.Data, "data-binary", opts.HTTP.Data, "POST data (alias of -d)")
flag.StringVar(&opts.HTTP.Method, "X", opts.HTTP.Method, "HTTP method to use")
flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "HTTP Proxy URL")
flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080")
flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.")
flag.StringVar(&opts.HTTP.RecursionStrategy, "recursion-strategy", opts.HTTP.RecursionStrategy, "Recursion strategy: \"default\" for a redirect-based strategy, and \"greedy\" to recurse on all matches")
flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL")
flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.")
flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
@ -197,6 +198,12 @@ func main() {
fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err)
os.Exit(1)
}
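// Run the interactive mode handler in a background goroutine: it reads
// commands from the terminal and can pause/resume the running job.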
go func() {
err := interactive.Handle(job)
if err != nil {
log.Printf("Error while trying to initialize interactive session: %s", err)
}
}()
// Job handles waiting for goroutines to complete itself
job.Start()


@ -33,13 +33,14 @@ type Config struct {
OutputDirectory string `json:"outputdirectory"`
OutputFile string `json:"outputfile"`
OutputFormat string `json:"outputformat"`
OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"`
OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"`
ProgressFrequency int `json:"-"`
ProxyURL string `json:"proxyurl"`
Quiet bool `json:"quiet"`
Rate int64 `json:"rate"`
Recursion bool `json:"recursion"`
RecursionDepth int `json:"recursion_depth"`
RecursionStrategy string `json:"recursion_strategy"`
ReplayProxyURL string `json:"replayproxyurl"`
StopOn403 bool `json:"stop_403"`
StopOnAll bool `json:"stop_all"`
@ -84,6 +85,7 @@ func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
conf.Rate = 0
conf.Recursion = false
conf.RecursionDepth = 0
conf.RecursionStrategy = "default"
conf.StopOn403 = false
conf.StopOnAll = false
conf.StopOnErrors = false


@ -4,6 +4,7 @@ package ffuf
type FilterProvider interface {
Filter(response *Response) (bool, error)
Repr() string
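// ReprVerbose returns a human-readable description of the filter, whereas Repr returns only the raw configured value(s)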
ReprVerbose() string
}
//RunnerProvider is an interface for request executors
@ -40,6 +41,27 @@ type OutputProvider interface {
Progress(status Progress)
Info(infostring string)
Error(errstring string)
Raw(output string)
Warning(warnstring string)
Result(resp Response)
PrintResult(res Result)
SaveFile(filename, format string) error
GetResults() []Result
SetResults(results []Result)
Reset()
}
type Result struct {
Input map[string][]byte `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
Url string `json:"url"`
ResultFile string `json:"resultfile"`
Host string `json:"host"`
HTMLColor string `json:"-"`
}


@ -25,6 +25,7 @@ type Job struct {
Total int
Running bool
RunningJob bool
Paused bool
Count403 int
Count429 int
Error string
@ -33,7 +34,9 @@ type Job struct {
startTimeJob time.Time
queuejobs []QueueJob
queuepos int
skipQueue bool
currentDepth int
pauseWg sync.WaitGroup
}
type QueueJob struct {
@ -49,10 +52,12 @@ func NewJob(conf *Config) *Job {
j.SpuriousErrorCounter = 0
j.Running = false
j.RunningJob = false
j.Paused = false
j.queuepos = 0
j.queuejobs = make([]QueueJob, 0)
j.currentDepth = 0
j.Rate = NewRateThrottle(conf)
j.skipQueue = false
return &j
}
@ -85,6 +90,17 @@ func (j *Job) resetSpuriousErrors() {
j.SpuriousErrorCounter = 0
}
//DeleteQueueItem deletes a recursion job from the queue by its index in the slice
func (j *Job) DeleteQueueItem(index int) {
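// Translate the user-visible index (0 = currently active job, as listed by QueuedJobs) into an absolute position in the queuejobs slice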
index = j.queuepos + index - 1
j.queuejobs = append(j.queuejobs[:index], j.queuejobs[index+1:]...)
}
//QueuedJobs returns the slice of queued recursive jobs
func (j *Job) QueuedJobs() []QueueJob {
return j.queuejobs[j.queuepos-1:]
}
//Start the execution of the Job
func (j *Job) Start() {
if j.startTime.IsZero() {
@ -107,15 +123,8 @@ func (j *Job) Start() {
j.interruptMonitor()
for j.jobsInQueue() {
j.prepareQueueJob()
if j.queuepos > 1 && !j.RunningJob {
// Print info for queued recursive jobs
j.Output.Info(fmt.Sprintf("Scanning: %s", j.Config.Url))
}
j.Input.Reset()
j.startTimeJob = time.Now()
j.Reset()
j.RunningJob = true
j.Counter = 0
j.startExecution()
}
@ -125,6 +134,15 @@ func (j *Job) Start() {
}
}
// Reset resets the counters and wordlist position for a job
func (j *Job) Reset() {
j.Input.Reset()
j.Counter = 0
j.skipQueue = false
j.startTimeJob = time.Now()
j.Output.Reset()
}
func (j *Job) jobsInQueue() bool {
return j.queuepos < len(j.queuejobs)
}
@ -135,6 +153,11 @@ func (j *Job) prepareQueueJob() {
j.queuepos += 1
}
//SkipQueue skips the current job and advances to the next queued recursion job
func (j *Job) SkipQueue() {
j.skipQueue = true
}
func (j *Job) sleepIfNeeded() {
var sleepDuration time.Duration
if j.Config.Delay.HasDelay {
@ -153,14 +176,38 @@ func (j *Job) sleepIfNeeded() {
}
}
// Pause pauses the job process
func (j *Job) Pause() {
if !j.Paused {
j.Paused = true
j.pauseWg.Add(1)
j.Output.Info("------ PAUSING ------")
}
}
// Resume resumes the job process
func (j *Job) Resume() {
if j.Paused {
j.Paused = false
j.Output.Info("------ RESUMING -----")
j.pauseWg.Done()
}
}
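// Note: pausing is implemented as a gate. Pause() adds one unit to pauseWg,
// the worker loop and the background progress task block on pauseWg.Wait(),
// and Resume() (or the interrupt handler) releases them with Done().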
func (j *Job) startExecution() {
var wg sync.WaitGroup
wg.Add(1)
go j.runBackgroundTasks(&wg)
// Print the base URL when starting a new recursion queue job
if j.queuepos > 1 {
j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url))
}
//Limiter blocks after reaching the buffer, ensuring limited concurrency
limiter := make(chan bool, j.Config.Threads)
for j.Input.Next() {
for j.Input.Next() && !j.skipQueue {
// Check if we should stop the process
j.CheckStop()
@ -168,6 +215,7 @@ func (j *Job) startExecution() {
defer j.Output.Warning(j.Error)
break
}
j.pauseWg.Wait()
limiter <- true
nextInput := j.Input.Value()
nextPosition := j.Input.Position()
@ -200,6 +248,11 @@ func (j *Job) interruptMonitor() {
go func() {
for range sigChan {
j.Error = "Caught keyboard interrupt (Ctrl-C)\n"
// resume if paused
if j.Paused {
j.pauseWg.Done()
}
// Stop the job
j.Stop()
}
}()
@ -208,8 +261,8 @@ func (j *Job) interruptMonitor() {
func (j *Job) runBackgroundTasks(wg *sync.WaitGroup) {
defer wg.Done()
totalProgress := j.Input.Total()
for j.Counter <= totalProgress {
for j.Counter <= totalProgress && !j.skipQueue {
j.pauseWg.Wait()
if !j.Running {
break
}
@ -315,22 +368,39 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
j.Output.Result(resp)
// Refresh the progress indicator as we printed something out
j.updateProgress()
if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" {
j.handleGreedyRecursionJob(resp)
}
}
if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 {
j.handleRecursionJob(resp)
if j.Config.Recursion && j.Config.RecursionStrategy == "default" && len(resp.GetRedirectLocation(false)) > 0 {
j.handleDefaultRecursionJob(resp)
}
}
//handleRecursionJob adds a new recursion job to the job queue if a new directory is found
func (j *Job) handleRecursionJob(resp Response) {
//handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached
func (j *Job) handleGreedyRecursionJob(resp Response) {
// Handle the greedy recursion strategy. The match has already been determined before this function is called
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
recUrl := resp.Request.Url + "/" + "FUZZ"
newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1}
j.queuejobs = append(j.queuejobs, newJob)
j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl))
} else {
j.Output.Warning(fmt.Sprintf("Maximum recursion depth reached. Ignoring: %s", resp.Request.Url))
}
}
//handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has
//not been reached
func (j *Job) handleDefaultRecursionJob(resp Response) {
recUrl := resp.Request.Url + "/" + "FUZZ"
if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) {
// Not a directory, return early
return
}
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
// We have yet to reach the maximum recursion depth
recUrl := resp.Request.Url + "/" + "FUZZ"
newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1}
j.queuejobs = append(j.queuejobs, newJob)
j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl))


@ -26,18 +26,19 @@ type ConfigOptions struct {
}
type HTTPOptions struct {
Cookies []string
Data string
FollowRedirects bool
Headers []string
IgnoreBody bool
Method string
ProxyURL string
Recursion bool
RecursionDepth int
ReplayProxyURL string
Timeout int
URL string
Cookies []string
Data string
FollowRedirects bool
Headers []string
IgnoreBody bool
Method string
ProxyURL string
Recursion bool
RecursionDepth int
RecursionStrategy string
ReplayProxyURL string
Timeout int
URL string
}
type GeneralOptions struct {
@ -72,11 +73,11 @@ type InputOptions struct {
}
type OutputOptions struct {
DebugLog string
OutputDirectory string
OutputFile string
OutputFormat string
OutputCreateEmptyFile bool
DebugLog string
OutputDirectory string
OutputFile string
OutputFormat string
OutputCreateEmptyFile bool
}
type FilterOptions struct {
@ -123,6 +124,7 @@ func NewConfigOptions() *ConfigOptions {
c.HTTP.ProxyURL = ""
c.HTTP.Recursion = false
c.HTTP.RecursionDepth = 0
c.HTTP.RecursionStrategy = "default"
c.HTTP.ReplayProxyURL = ""
c.HTTP.Timeout = 10
c.HTTP.URL = ""
@ -387,6 +389,7 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
conf.FollowRedirects = parseOpts.HTTP.FollowRedirects
conf.Recursion = parseOpts.HTTP.Recursion
conf.RecursionDepth = parseOpts.HTTP.RecursionDepth
conf.RecursionStrategy = parseOpts.HTTP.RecursionStrategy
conf.AutoCalibration = parseOpts.General.AutoCalibration
conf.Threads = parseOpts.General.Threads
conf.Timeout = parseOpts.HTTP.Timeout


@ -30,21 +30,25 @@ func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) {
//AddFilter adds a new filter to Config
func AddFilter(conf *ffuf.Config, name string, option string) error {
newf, err := NewFilterByName(name, option)
if err == nil {
// valid filter create or append
if conf.Filters[name] == nil {
conf.Filters[name] = newf
} else {
currentfilter := conf.Filters[name].Repr()
newoption := strings.TrimSpace(strings.Split(currentfilter, ":")[1]) + "," + option
newerf, err := NewFilterByName(name, newoption)
if err == nil {
conf.Filters[name] = newerf
}
}
}
return err
newf, err := NewFilterByName(name, option)
if err == nil {
// valid filter create or append
if conf.Filters[name] == nil {
conf.Filters[name] = newf
} else {
newoption := conf.Filters[name].Repr() + "," + option
newerf, err := NewFilterByName(name, newoption)
if err == nil {
conf.Filters[name] = newerf
}
}
}
return err
}
//RemoveFilter removes a filter of a given type
func RemoveFilter(conf *ffuf.Config, name string) {
delete(conf.Filters, name)
}
//AddMatcher adds a new matcher to Config


@ -60,5 +60,9 @@ func (f *LineFilter) Repr() string {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response lines: %s", strings.Join(strval, ","))
return strings.Join(strval, ",")
}
func (f *LineFilter) ReprVerbose() string {
return fmt.Sprintf("Response lines: %s", f.Repr())
}


@ -51,5 +51,9 @@ func (f *RegexpFilter) Filter(response *ffuf.Response) (bool, error) {
}
func (f *RegexpFilter) Repr() string {
return f.valueRaw
}
func (f *RegexpFilter) ReprVerbose() string {
return fmt.Sprintf("Regexp: %s", f.valueRaw)
}


@ -60,5 +60,9 @@ func (f *SizeFilter) Repr() string {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response size: %s", strings.Join(strval, ","))
return strings.Join(strval, ",")
}
func (f *SizeFilter) ReprVerbose() string {
return fmt.Sprintf("Response size: %s", f.Repr())
}


@ -75,5 +75,9 @@ func (f *StatusFilter) Repr() string {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response status: %s", strings.Join(strval, ","))
return strings.Join(strval, ",")
}
func (f *StatusFilter) ReprVerbose() string {
return fmt.Sprintf("Response status: %s", f.Repr())
}


@ -60,5 +60,9 @@ func (f *WordFilter) Repr() string {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response words: %s", strings.Join(strval, ","))
return strings.Join(strval, ",")
}
func (f *WordFilter) ReprVerbose() string {
return fmt.Sprintf("Response words: %s", f.Repr())
}

pkg/interactive/posix.go (new file)

@ -0,0 +1,9 @@
// +build !windows
package interactive
import "os"
func termHandle() (*os.File, error) {
return os.Open("/dev/tty")
}


@ -0,0 +1,237 @@
package interactive
import (
"bufio"
"fmt"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/ffuf/ffuf/pkg/filter"
"strconv"
"strings"
"time"
)
type interactive struct {
Job *ffuf.Job
paused bool
}
func Handle(job *ffuf.Job) error {
i := interactive{job, false}
tty, err := termHandle()
if err != nil {
return err
}
defer tty.Close()
inreader := bufio.NewScanner(tty)
inreader.Split(bufio.ScanLines)
for inreader.Scan() {
i.handleInput(inreader.Bytes())
}
return nil
}
func (i *interactive) handleInput(in []byte) {
instr := string(in)
args := strings.Split(strings.TrimSpace(instr), " ")
if len(args) == 1 && args[0] == "" {
// Enter pressed - toggle interactive state
i.paused = !i.paused
if i.paused {
i.Job.Pause()
time.Sleep(500 * time.Millisecond)
i.printBanner()
} else {
i.Job.Resume()
}
} else {
switch args[0] {
case "?":
i.printHelp()
case "help":
i.printHelp()
case "resume":
i.paused = false
i.Job.Resume()
case "restart":
i.Job.Reset()
i.paused = false
i.Job.Output.Info("Restarting the current ffuf job!")
i.Job.Resume()
case "show":
for _, r := range i.Job.Output.GetResults() {
i.Job.Output.PrintResult(r)
}
case "savejson":
if len(args) < 2 {
i.Job.Output.Error("Please define the filename")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"savejson\"")
} else {
err := i.Job.Output.SaveFile(args[1], "json")
if err != nil {
i.Job.Output.Error(fmt.Sprintf("%s", err))
} else {
i.Job.Output.Info("Output file successfully saved!")
}
}
case "fc":
if len(args) < 2 {
i.Job.Output.Error("Please define a value for status code filter, or \"none\" for removing it")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"fc\"")
} else {
i.updateFilter("status", args[1])
i.Job.Output.Info("New status code filter value set")
}
case "fl":
if len(args) < 2 {
i.Job.Output.Error("Please define a value for line count filter, or \"none\" for removing it")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"fl\"")
} else {
i.updateFilter("line", args[1])
i.Job.Output.Info("New line count filter value set")
}
case "fw":
if len(args) < 2 {
i.Job.Output.Error("Please define a value for word count filter, or \"none\" for removing it")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"fw\"")
} else {
i.updateFilter("word", args[1])
i.Job.Output.Info("New word count filter value set")
}
case "fs":
if len(args) < 2 {
i.Job.Output.Error("Please define a value for response size filter, or \"none\" for removing it")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"fs\"")
} else {
i.updateFilter("size", args[1])
i.Job.Output.Info("New response size filter value set")
}
case "queueshow":
i.printQueue()
case "queuedel":
if len(args) < 2 {
i.Job.Output.Error("Please define the index of a queued job to remove. Use \"queueshow\" for listing of jobs.")
} else if len(args) > 2 {
i.Job.Output.Error("Too many arguments for \"queuedel\"")
} else {
i.deleteQueue(args[1])
}
case "queueskip":
i.Job.SkipQueue()
i.Job.Output.Info("Skipping to the next queued job")
default:
if i.paused {
i.Job.Output.Warning(fmt.Sprintf("Unknown command: \"%s\". Enter \"help\" for a list of available commands", args[0]))
} else {
i.Job.Output.Error("NOPE")
}
}
}
if i.paused {
i.printPrompt()
}
}
func (i *interactive) updateFilter(name, value string) {
if value == "none" {
filter.RemoveFilter(i.Job.Config, name)
} else {
newFc, err := filter.NewFilterByName(name, value)
if err != nil {
i.Job.Output.Error(fmt.Sprintf("Error while setting new filter value: %s", err))
return
} else {
i.Job.Config.Filters[name] = newFc
}
results := make([]ffuf.Result, 0)
for _, res := range i.Job.Output.GetResults() {
fakeResp := &ffuf.Response{
StatusCode: res.StatusCode,
ContentLines: res.ContentLines,
ContentWords: res.ContentWords,
ContentLength: res.ContentLength,
}
filterOut, _ := newFc.Filter(fakeResp)
if !filterOut {
results = append(results, res)
}
}
i.Job.Output.SetResults(results)
}
}
func (i *interactive) printQueue() {
if len(i.Job.QueuedJobs()) > 0 {
i.Job.Output.Raw("Queued recursion jobs:\n")
for index, job := range i.Job.QueuedJobs() {
postfix := ""
if index == 0 {
postfix = " (active job)"
}
i.Job.Output.Raw(fmt.Sprintf(" [%d] : %s%s\n", index, job.Url, postfix))
}
} else {
i.Job.Output.Info("Recursion job queue is empty")
}
}
func (i *interactive) deleteQueue(in string) {
index, err := strconv.Atoi(in)
if err != nil {
i.Job.Output.Warning(fmt.Sprintf("Not a number: %s", in))
} else {
if index < 0 || index > len(i.Job.QueuedJobs())-1 {
i.Job.Output.Warning("No such queued job. Use \"queueshow\" to list the jobs in queue")
} else if index == 0 {
i.Job.Output.Warning("Cannot delete the currently running job. Use \"queueskip\" to advance to the next one")
} else {
i.Job.DeleteQueueItem(index)
i.Job.Output.Info("Recursion job successfully deleted!")
}
}
}
func (i *interactive) printBanner() {
i.Job.Output.Raw("entering interactive mode\ntype \"help\" for a list of commands, or ENTER to resume.\n")
}
func (i *interactive) printPrompt() {
i.Job.Output.Raw("> ")
}
func (i *interactive) printHelp() {
var fc, fl, fs, fw string
for name, filter := range i.Job.Config.Filters {
switch name {
case "status":
fc = "(active: " + filter.Repr() + ")"
case "line":
fl = "(active: " + filter.Repr() + ")"
case "word":
fw = "(active: " + filter.Repr() + ")"
case "size":
fs = "(active: " + filter.Repr() + ")"
}
}
help := `
available commands:
fc [value] - (re)configure status code filter %s
fl [value] - (re)configure line count filter %s
fw [value] - (re)configure word count filter %s
fs [value] - (re)configure size filter %s
queueshow - show recursive job queue
queuedel [number] - delete a recursion job in the queue
queueskip - advance to the next queued recursion job
restart - restart and resume the current ffuf job
resume - resume current ffuf job (or: ENTER)
show - show results
savejson [filename] - save current matches to a file
help - you are looking at it
`
i.Job.Output.Raw(fmt.Sprintf(help, fc, fl, fw, fs))
}


@ -0,0 +1,21 @@
// +build windows
package interactive
import (
"os"
"syscall"
)
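// termHandle opens the Windows console input buffer (CONIN$) for reading interactive input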
func termHandle() (*os.File, error) {
var tty *os.File
_, err := syscall.Open("CONIN$", syscall.O_RDWR, 0)
if err != nil {
return tty, err
}
tty, err = os.Open("CONIN$")
if err != nil {
return tty, err
}
return tty, nil
}


@ -11,14 +11,14 @@ import (
var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "resultfile"}
func writeCSV(config *ffuf.Config, res []Result, encode bool) error {
if(config.OutputCreateEmptyFile && (len(res) == 0)){
func writeCSV(filename string, config *ffuf.Config, res []ffuf.Result, encode bool) error {
if config.OutputCreateEmptyFile && (len(res) == 0) {
return nil
}
header := make([]string, 0)
f, err := os.Create(config.OutputFile)
f, err := os.Create(filename)
if err != nil {
return err
}
@ -56,7 +56,7 @@ func base64encode(in []byte) string {
return base64.StdEncoding.EncodeToString(in)
}
func toCSV(r Result) []string {
func toCSV(r ffuf.Result) []string {
res := make([]string, 0)
for _, v := range r.Input {
res = append(res, string(v))


@ -12,7 +12,7 @@ type htmlFileOutput struct {
CommandLine string
Time string
Keys []string
Results []Result
Results []ffuf.Result
}
const (
@ -145,8 +145,8 @@ const (
)
// colorizeResults returns a new slice with HTMLColor attribute
func colorizeResults(results []Result) []Result {
newResults := make([]Result, 0)
func colorizeResults(results []ffuf.Result) []ffuf.Result {
newResults := make([]ffuf.Result, 0)
for _, r := range results {
result := r
@ -176,12 +176,12 @@ func colorizeResults(results []Result) []Result {
return newResults
}
func writeHTML(config *ffuf.Config, results []Result) error {
func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) error {
if(config.OutputCreateEmptyFile && (len(results) == 0)){
if config.OutputCreateEmptyFile && (len(results) == 0) {
return nil
}
}
results = colorizeResults(results)
ti := time.Now()
@ -198,7 +198,7 @@ func writeHTML(config *ffuf.Config, results []Result) error {
Keys: keywords,
}
f, err := os.Create(config.OutputFile)
f, err := os.Create(filename)
if err != nil {
return err
}


@ -9,10 +9,10 @@ import (
)
type ejsonFileOutput struct {
CommandLine string `json:"commandline"`
Time string `json:"time"`
Results []Result `json:"results"`
Config *ffuf.Config `json:"config"`
CommandLine string `json:"commandline"`
Time string `json:"time"`
Results []ffuf.Result `json:"results"`
Config *ffuf.Config `json:"config"`
}
type JsonResult struct {
@ -36,12 +36,12 @@ type jsonFileOutput struct {
Config *ffuf.Config `json:"config"`
}
func writeEJSON(config *ffuf.Config, res []Result) error {
if(config.OutputCreateEmptyFile && (len(res) == 0)){
func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
if config.OutputCreateEmptyFile && (len(res) == 0) {
return nil
}
t := time.Now()
outJSON := ejsonFileOutput{
CommandLine: config.CommandLine,
@ -53,14 +53,14 @@ func writeEJSON(config *ffuf.Config, res []Result) error {
if err != nil {
return err
}
err = ioutil.WriteFile(config.OutputFile, outBytes, 0644)
err = ioutil.WriteFile(filename, outBytes, 0644)
if err != nil {
return err
}
return nil
}
func writeJSON(config *ffuf.Config, res []Result) error {
func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
t := time.Now()
jsonRes := make([]JsonResult, 0)
for _, r := range res {
@ -92,7 +92,7 @@ func writeJSON(config *ffuf.Config, res []Result) error {
if err != nil {
return err
}
err = ioutil.WriteFile(config.OutputFile, outBytes, 0644)
err = ioutil.WriteFile(filename, outBytes, 0644)
if err != nil {
return err
}


@ -20,9 +20,9 @@ const (
{{end}}` // The template format is not pretty but follows the markdown guide
)
func writeMarkdown(config *ffuf.Config, res []Result) error {
func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error {
if(config.OutputCreateEmptyFile && (len(res) == 0)){
if config.OutputCreateEmptyFile && (len(res) == 0) {
return nil
}
@ -40,7 +40,7 @@ func writeMarkdown(config *ffuf.Config, res []Result) error {
Keys: keywords,
}
f, err := os.Create(config.OutputFile)
f, err := os.Create(filename)
if err != nil {
return err
}


@ -27,28 +27,13 @@ const (
type Stdoutput struct {
config *ffuf.Config
Results []Result
}
type Result struct {
Input map[string][]byte `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
Url string `json:"url"`
ResultFile string `json:"resultfile"`
Host string `json:"host"`
HTMLColor string `json:"-"`
Results []ffuf.Result
}
func NewStdoutput(conf *ffuf.Config) *Stdoutput {
var outp Stdoutput
outp.config = conf
outp.Results = []Result{}
outp.Results = []ffuf.Result{}
return &outp
}
@ -137,15 +122,30 @@ func (s *Stdoutput) Banner() {
// Print matchers
for _, f := range s.config.Matchers {
printOption([]byte("Matcher"), []byte(f.Repr()))
printOption([]byte("Matcher"), []byte(f.ReprVerbose()))
}
// Print filters
for _, f := range s.config.Filters {
printOption([]byte("Filter"), []byte(f.Repr()))
printOption([]byte("Filter"), []byte(f.ReprVerbose()))
}
fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP)
}
// Reset resets the result slice
func (s *Stdoutput) Reset() {
s.Results = make([]ffuf.Result, 0)
}
// GetResults returns the result slice
func (s *Stdoutput) GetResults() []ffuf.Result {
return s.Results
}
// SetResults sets the result slice
func (s *Stdoutput) SetResults(results []ffuf.Result) {
s.Results = results
}
func (s *Stdoutput) Progress(status ffuf.Progress) {
if s.config.Quiet {
// No progress for quiet mode
@ -175,9 +175,9 @@ func (s *Stdoutput) Info(infostring string) {
fmt.Fprintf(os.Stderr, "%s", infostring)
} else {
if !s.config.Colors {
fmt.Fprintf(os.Stderr, "%s[INFO] %s\n", TERMINAL_CLEAR_LINE, infostring)
fmt.Fprintf(os.Stderr, "%s[INFO] %s\n\n", TERMINAL_CLEAR_LINE, infostring)
} else {
fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring)
fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring)
}
}
}
@ -199,14 +199,18 @@ func (s *Stdoutput) Warning(warnstring string) {
fmt.Fprintf(os.Stderr, "%s", warnstring)
} else {
if !s.config.Colors {
fmt.Fprintf(os.Stderr, "%s[WARN] %s", TERMINAL_CLEAR_LINE, warnstring)
fmt.Fprintf(os.Stderr, "%s[WARN] %s\n", TERMINAL_CLEAR_LINE, warnstring)
} else {
fmt.Fprintf(os.Stderr, "%s[%sWARN%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, warnstring)
}
}
}
func (s *Stdoutput) writeToAll(config *ffuf.Config, res []Result) error {
func (s *Stdoutput) Raw(output string) {
fmt.Fprintf(os.Stderr, "%s%s", TERMINAL_CLEAR_LINE, output)
}
func (s *Stdoutput) writeToAll(filename string, config *ffuf.Config, res []ffuf.Result) error {
var err error
var BaseFilename string = s.config.OutputFile
@ -218,37 +222,37 @@ func (s *Stdoutput) writeToAll(config *ffuf.Config, res []Result) error {
}
s.config.OutputFile = BaseFilename + ".json"
err = writeJSON(s.config, s.Results)
err = writeJSON(filename, s.config, s.Results)
if err != nil {
s.Error(err.Error())
}
s.config.OutputFile = BaseFilename + ".ejson"
err = writeEJSON(s.config, s.Results)
err = writeEJSON(filename, s.config, s.Results)
if err != nil {
s.Error(err.Error())
}
s.config.OutputFile = BaseFilename + ".html"
err = writeHTML(s.config, s.Results)
err = writeHTML(filename, s.config, s.Results)
if err != nil {
s.Error(err.Error())
}
s.config.OutputFile = BaseFilename + ".md"
err = writeMarkdown(s.config, s.Results)
err = writeMarkdown(filename, s.config, s.Results)
if err != nil {
s.Error(err.Error())
}
s.config.OutputFile = BaseFilename + ".csv"
err = writeCSV(s.config, s.Results, false)
err = writeCSV(filename, s.config, s.Results, false)
if err != nil {
s.Error(err.Error())
}
s.config.OutputFile = BaseFilename + ".ecsv"
err = writeCSV(s.config, s.Results, true)
err = writeCSV(filename, s.config, s.Results, true)
if err != nil {
s.Error(err.Error())
}
@ -257,24 +261,33 @@ func (s *Stdoutput) writeToAll(config *ffuf.Config, res []Result) error {
}
// SaveFile saves the current results to a file of a given type
func (s *Stdoutput) SaveFile(filename, format string) error {
var err error
switch format {
case "all":
err = s.writeToAll(filename, s.config, s.Results)
case "json":
err = writeJSON(filename, s.config, s.Results)
case "ejson":
err = writeEJSON(filename, s.config, s.Results)
case "html":
err = writeHTML(filename, s.config, s.Results)
case "md":
err = writeMarkdown(filename, s.config, s.Results)
case "csv":
err = writeCSV(filename, s.config, s.Results, false)
case "ecsv":
err = writeCSV(filename, s.config, s.Results, true)
}
return err
}
// Finalize gets run after all the ffuf jobs are completed
func (s *Stdoutput) Finalize() error {
var err error
if s.config.OutputFile != "" {
if s.config.OutputFormat == "all" {
err = s.writeToAll(s.config, s.Results)
} else if s.config.OutputFormat == "json" {
err = writeJSON(s.config, s.Results)
} else if s.config.OutputFormat == "ejson" {
err = writeEJSON(s.config, s.Results)
} else if s.config.OutputFormat == "html" {
err = writeHTML(s.config, s.Results)
} else if s.config.OutputFormat == "md" {
err = writeMarkdown(s.config, s.Results)
} else if s.config.OutputFormat == "csv" {
err = writeCSV(s.config, s.Results, false)
} else if s.config.OutputFormat == "ecsv" {
err = writeCSV(s.config, s.Results, true)
}
err = s.SaveFile(s.config.OutputFile, s.config.OutputFormat)
if err != nil {
s.Error(err.Error())
}
@ -288,30 +301,27 @@ func (s *Stdoutput) Result(resp ffuf.Response) {
if len(s.config.OutputDirectory) > 0 {
resp.ResultFile = s.writeResultToFile(resp)
}
// Output the result
s.printResult(resp)
// Check if we need the data later
if s.config.OutputFile != "" {
// No need to store results if we're not going to use them later
inputs := make(map[string][]byte, len(resp.Request.Input))
for k, v := range resp.Request.Input {
inputs[k] = v
}
sResult := Result{
Input: inputs,
Position: resp.Request.Position,
StatusCode: resp.StatusCode,
ContentLength: resp.ContentLength,
ContentWords: resp.ContentWords,
ContentLines: resp.ContentLines,
ContentType: resp.ContentType,
RedirectLocation: resp.GetRedirectLocation(false),
Url: resp.Request.Url,
ResultFile: resp.ResultFile,
Host: resp.Request.Host,
}
s.Results = append(s.Results, sResult)
inputs := make(map[string][]byte, len(resp.Request.Input))
for k, v := range resp.Request.Input {
inputs[k] = v
}
sResult := ffuf.Result{
Input: inputs,
Position: resp.Request.Position,
StatusCode: resp.StatusCode,
ContentLength: resp.ContentLength,
ContentWords: resp.ContentWords,
ContentLines: resp.ContentLines,
ContentType: resp.ContentType,
RedirectLocation: resp.GetRedirectLocation(false),
Url: resp.Request.Url,
ResultFile: resp.ResultFile,
Host: resp.Request.Host,
}
s.Results = append(s.Results, sResult)
// Output the result
s.PrintResult(sResult)
}
func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string {
@ -339,35 +349,35 @@ func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string {
return fileName
}
func (s *Stdoutput) printResult(resp ffuf.Response) {
func (s *Stdoutput) PrintResult(res ffuf.Result) {
if s.config.Quiet {
s.resultQuiet(resp)
s.resultQuiet(res)
} else {
if len(resp.Request.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 {
if len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 {
// Print a multi-line result (when using multiple input keywords and wordlists)
s.resultMultiline(resp)
s.resultMultiline(res)
} else {
s.resultNormal(resp)
s.resultNormal(res)
}
}
}
func (s *Stdoutput) prepareInputsOneLine(resp ffuf.Response) string {
func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string {
inputs := ""
if len(resp.Request.Input) > 1 {
for k, v := range resp.Request.Input {
if len(res.Input) > 1 {
for k, v := range res.Input {
if inSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(resp.Request.Position))
inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(res.Position))
} else {
inputs = fmt.Sprintf("%s%s : %s ", inputs, k, v)
}
}
} else {
for k, v := range resp.Request.Input {
for k, v := range res.Input {
if inSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
inputs = strconv.Itoa(resp.Request.Position)
inputs = strconv.Itoa(res.Position)
} else {
inputs = string(v)
}
@ -376,30 +386,30 @@ func (s *Stdoutput) prepareInputsOneLine(resp ffuf.Response) string {
return inputs
}
func (s *Stdoutput) resultQuiet(resp ffuf.Response) {
fmt.Println(s.prepareInputsOneLine(resp))
func (s *Stdoutput) resultQuiet(res ffuf.Result) {
fmt.Println(s.prepareInputsOneLine(res))
}
func (s *Stdoutput) resultMultiline(resp ffuf.Response) {
func (s *Stdoutput) resultMultiline(res ffuf.Result) {
var res_hdr, res_str string
res_str = "%s%s * %s: %s\n"
res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, resp.StatusCode, resp.ContentLength, resp.ContentWords, resp.ContentLines)
res_hdr = s.colorize(res_hdr, resp.StatusCode)
res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines)
res_hdr = s.colorize(res_hdr, res.StatusCode)
reslines := ""
if s.config.Verbose {
reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, resp.Request.Url)
redirectLocation := resp.GetRedirectLocation(false)
reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, res.Url)
redirectLocation := res.RedirectLocation
if redirectLocation != "" {
reslines = fmt.Sprintf("%s%s| --> | %s\n", reslines, TERMINAL_CLEAR_LINE, redirectLocation)
}
}
if resp.ResultFile != "" {
reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, resp.ResultFile)
if res.ResultFile != "" {
reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, res.ResultFile)
}
for k, v := range resp.Request.Input {
for k, v := range res.Input {
if inSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(resp.Request.Position))
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(res.Position))
} else {
// Wordlist input
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v)
@ -408,9 +418,9 @@ func (s *Stdoutput) resultMultiline(resp ffuf.Response) {
fmt.Printf("%s\n%s\n", res_hdr, reslines)
}
func (s *Stdoutput) resultNormal(resp ffuf.Response) {
res := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines)
fmt.Println(res)
func (s *Stdoutput) resultNormal(res ffuf.Result) {
resnormal := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(res), s.colorize(fmt.Sprintf("%d", res.StatusCode), res.StatusCode), res.ContentLength, res.ContentWords, res.ContentLines)
fmt.Println(resnormal)
}
func (s *Stdoutput) colorize(input string, status int64) string {