Scraper functionality (#633)

* Scraper functionality

* Handle matched-only case for scraper

* Add scraper output to different formats

* Fix the ancient merge reminders

* Load scraper rules from directory

* Scraper fixes
Commit 643f6b883f (parent 39c89344a0)
Joona Hoikkala, 2023-02-04 13:23:31 +02:00, committed by GitHub
22 changed files with 498 additions and 81 deletions


@@ -3,6 +3,7 @@
- New
  - Added a new, dynamic keyword `FFUFHASH` that generates hash from job configuration and wordlist position to map blind payloads back to the initial request.
  - New command line parameter for searching a hash: `-search FFUFHASH`
  - Data scraper functionality
- Changed
  - Multiline output prints out alphabetically sorted by keyword
  - Default configuration directories now follow `XDG_CONFIG_HOME` variable (less spam in your home directory)


@@ -37,6 +37,7 @@
noninteractive = false
quiet = false
rate = 0
scrapers = "all"
stopon403 = false
stoponall = false
stoponerrors = false

go.mod

@@ -3,6 +3,9 @@ module github.com/ffuf/ffuf
go 1.13
require (
github.com/PuerkitoBio/goquery v1.8.0
github.com/adrg/xdg v0.4.0
github.com/pelletier/go-toml v1.8.1
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pelletier/go-toml v1.9.5
golang.org/x/net v0.5.0 // indirect
)

go.sum

@@ -1,17 +1,52 @@
github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0gta/U=
github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI=
github.com/adrg/xdg v0.4.0 h1:RzRqFcjH4nE5C6oTAxhBtoE2IRyjBSa62SCbyPidvls=
github.com/adrg/xdg v0.4.0/go.mod h1:N6ag73EX4wyxeaoeHctc1mas01KZgsj5tYiAIwqJE/E=
github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM=
github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359 h1:2B5p2L5IfGiD7+b9BOoRMC6DgObAVZV+Fsp050NqXik=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=

help.go

@@ -61,7 +61,7 @@ func Usage() {
Description: "",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "search", "s", "sa", "se", "sf", "t", "v", "V"},
ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "scraperfile", "scrapers", "search", "s", "sa", "se", "sf", "t", "v", "V"},
}
u_compat := UsageSection{
Name: "COMPATIBILITY OPTIONS",
@@ -105,7 +105,7 @@ func Usage() {
flag.VisitAll(func(f *flag.Flag) {
found := false
for i, section := range sections {
if strInSlice(f.Name, section.ExpectedFlags) {
if ffuf.StrInSlice(f.Name, section.ExpectedFlags) {
sections[i].Flags = append(sections[i].Flags, UsageFlag{
Name: f.Name,
Description: f.Usage,
@@ -149,12 +149,3 @@ func Usage() {
fmt.Printf(" More information and examples: https://github.com/ffuf/ffuf\n\n")
}
func strInSlice(val string, slice []string) bool {
for _, v := range slice {
if v == val {
return true
}
}
return false
}

main.go

@@ -4,17 +4,19 @@ import (
"context"
"flag"
"fmt"
"io"
"log"
"os"
"strings"
"time"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/ffuf/ffuf/pkg/filter"
"github.com/ffuf/ffuf/pkg/input"
"github.com/ffuf/ffuf/pkg/interactive"
"github.com/ffuf/ffuf/pkg/output"
"github.com/ffuf/ffuf/pkg/runner"
"io"
"log"
"os"
"strings"
"time"
"github.com/ffuf/ffuf/pkg/scraper"
)
type multiStringFlag []string
@@ -88,6 +90,8 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
flag.StringVar(&opts.General.AutoCalibrationKeyword, "ack", opts.General.AutoCalibrationKeyword, "Autocalibration keyword")
flag.StringVar(&opts.General.AutoCalibrationStrategy, "acs", opts.General.AutoCalibrationStrategy, "Autocalibration strategy: \"basic\" or \"advanced\"")
flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file")
flag.StringVar(&opts.General.ScraperFile, "scraperfile", "", "Custom scraper file path")
flag.StringVar(&opts.General.Scrapers, "scrapers", opts.General.Scrapers, "Active scraper groups")
flag.StringVar(&opts.Filter.Mode, "fmode", opts.Filter.Mode, "Filter set operator. Either of: and, or")
flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges")
flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp")
@@ -245,6 +249,7 @@ func main() {
}
func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
var err error
job := ffuf.NewJob(conf)
var errs ffuf.Multierror
job.Input, errs = input.NewInputProvider(conf)
@@ -256,6 +261,19 @@ func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
}
// We only have stdout outputprovider right now
job.Output = output.NewOutputProviderByName("stdout", conf)
// Initialize scraper
newscraper, scraper_err := scraper.FromDir(ffuf.SCRAPERDIR, conf.Scrapers)
if scraper_err.ErrorOrNil() != nil {
errs.Add(scraper_err.ErrorOrNil())
}
job.Scraper = newscraper
if conf.ScraperFile != "" {
err = job.Scraper.AppendFromFile(conf.ScraperFile)
if err != nil {
errs.Add(err)
}
}
return job, errs.ErrorOrNil()
}
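
For reference, the two new options slot into a normal run like this (illustrative target and paths, not taken from this commit):

ffuf -u https://example.org/FUZZ -w wordlist.txt -scrapers all -scraperfile /path/to/extra-rules.json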


@@ -51,6 +51,8 @@ type Config struct {
ReplayProxyURL string `json:"replayproxyurl"`
RequestFile string `json:"requestfile"`
RequestProto string `json:"requestproto"`
ScraperFile string `json:"scraperfile"`
Scrapers string `json:"scrapers"`
SNI string `json:"sni"`
StopOn403 bool `json:"stop_403"`
StopOnAll bool `json:"stop_all"`
@@ -107,6 +109,8 @@ func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
conf.RequestFile = ""
conf.RequestProto = "https"
conf.SNI = ""
conf.ScraperFile = ""
conf.Scrapers = "all"
conf.StopOn403 = false
conf.StopOnAll = false
conf.StopOnErrors = false


@@ -49,6 +49,8 @@ func (c *Config) ToOptions() ConfigOptions {
o.General.Noninteractive = c.Noninteractive
o.General.Quiet = c.Quiet
o.General.Rate = int(c.Rate)
o.General.ScraperFile = c.ScraperFile
o.General.Scrapers = c.Scrapers
o.General.StopOn403 = c.StopOn403
o.General.StopOnAll = c.StopOnAll
o.General.StopOnErrors = c.StopOnErrors


@@ -12,4 +12,5 @@ var (
VERSION_APPENDIX = "-dev"
CONFIGDIR = filepath.Join(xdg.ConfigHome, "ffuf")
HISTORYDIR = filepath.Join(CONFIGDIR, "history")
SCRAPERDIR = filepath.Join(CONFIGDIR, "scraper")
)
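
Since CONFIGDIR follows XDG (see the changelog hunk above), scraper rule groups are read from $XDG_CONFIG_HOME/ffuf/scraper, i.e. ~/.config/ffuf/scraper on a default Linux setup.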


@@ -80,11 +80,4 @@ func configFromHistory(dirname string) (ConfigOptionsHistory, error) {
tmpOptions := ConfigOptionsHistory{}
err = json.Unmarshal(jsonOptions, &tmpOptions)
return tmpOptions, err
/*
// These are dummy values for this use case
ctx, cancel := context.WithCancel(context.Background())
conf, err := ConfigFromOptions(&tmpOptions.ConfigOptions, ctx, cancel)
job.Input, errs = input.NewInputProvider(conf)
return conf, tmpOptions.Time, err
*/
}


@@ -79,18 +79,31 @@ type OutputProvider interface {
Cycle()
}
type Result struct {
Input map[string][]byte `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
Url string `json:"url"`
Duration time.Duration `json:"duration"`
ResultFile string `json:"resultfile"`
Host string `json:"host"`
HTMLColor string `json:"-"`
type Scraper interface {
Execute(resp *Response, matched bool) []ScraperResult
AppendFromFile(path string) error
}
type ScraperResult struct {
Name string `json:"name"`
Type string `json:"type"`
Action []string `json:"action"`
Results []string `json:"results"`
}
type Result struct {
Input map[string][]byte `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
Url string `json:"url"`
Duration time.Duration `json:"duration"`
ScraperData map[string][]string `json:"scraper"`
ResultFile string `json:"resultfile"`
Host string `json:"host"`
HTMLColor string `json:"-"`
}
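
Any type satisfying the Scraper interface above can be attached to a job (see job.Scraper below). A minimal hypothetical sketch, not part of this commit, that reports the response size for matched responses:

package example

import (
	"fmt"

	"github.com/ffuf/ffuf/pkg/ffuf"
)

// lengthScraper is a hypothetical ffuf.Scraper: it emits a single result
// carrying the response size whenever the response matched.
type lengthScraper struct{}

// AppendFromFile is required by the interface; a no-op in this sketch.
func (l *lengthScraper) AppendFromFile(path string) error { return nil }

func (l *lengthScraper) Execute(resp *ffuf.Response, matched bool) []ffuf.ScraperResult {
	if !matched {
		return nil
	}
	return []ffuf.ScraperResult{{
		Name:    "content-length",
		Type:    "custom",
		Action:  []string{"output"}, // handled by handleScraperResult in job.go
		Results: []string{fmt.Sprintf("%d bytes", resp.ContentLength)},
	}}
}

// Compile-time assertion that the interface is satisfied.
var _ ffuf.Scraper = (*lengthScraper)(nil)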


@@ -18,6 +18,7 @@ type Job struct {
Input InputProvider
Runner RunnerProvider
ReplayRunner RunnerProvider
Scraper Scraper
Output OutputProvider
Jobhash string
Counter int
@@ -432,6 +433,14 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
// Handle autocalibration, must be done after the actual request to ensure sane value in req.Host
_ = j.CalibrateIfNeeded(HostURLFromRequest(req), input)
// Handle scraper actions
if j.Scraper != nil {
for _, sres := range j.Scraper.Execute(&resp, j.isMatch(resp)) {
resp.ScraperData[sres.Name] = sres.Results
j.handleScraperResult(&resp, sres)
}
}
if j.isMatch(resp) {
// Re-send request through replay-proxy if needed
if j.ReplayRunner != nil {
@@ -452,6 +461,11 @@
if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" {
j.handleGreedyRecursionJob(resp)
}
} else {
if len(resp.ScraperData) > 0 {
// print the result anyway, as scraper found something
j.Output.Result(resp)
}
}
if j.Config.Recursion && j.Config.RecursionStrategy == "default" && len(resp.GetRedirectLocation(false)) > 0 {
@@ -459,6 +473,15 @@
}
}
func (j *Job) handleScraperResult(resp *Response, sres ScraperResult) {
for _, a := range sres.Action {
switch a {
case "output":
resp.ScraperData[sres.Name] = sres.Results
}
}
}
// handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached
func (j *Job) handleGreedyRecursionJob(resp Response) {
// Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob
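
Note the control flow above: the scraper executes for every response, matched or not (individual rules can opt out of unmatched responses via their onlymatched flag), and a response that fails the matchers is still printed when a rule extracted data from it.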


@@ -58,6 +58,8 @@ type GeneralOptions struct {
Noninteractive bool `json:"noninteractive"`
Quiet bool `json:"quiet"`
Rate int `json:"rate"`
ScraperFile string `json:"scraperfile"`
Scrapers string `json:"scrapers"`
Searchhash string `json:"-"`
ShowVersion bool `toml:"-" json:"-"`
StopOn403 bool `json:"stop_on_403"`
@@ -130,6 +132,8 @@ func NewConfigOptions() *ConfigOptions {
c.General.Quiet = false
c.General.Rate = 0
c.General.Searchhash = ""
c.General.ScraperFile = ""
c.General.Scrapers = "all"
c.General.ShowVersion = false
c.General.StopOn403 = false
c.General.StopOnAll = false
@@ -247,7 +251,13 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
wl = strings.SplitN(v, ":", 2)
}
// Try to use absolute paths for wordlists
fullpath, err := filepath.Abs(wl[0])
fullpath := ""
if wl[0] != "-" {
fullpath, err = filepath.Abs(wl[0])
} else {
fullpath = wl[0]
}
if err == nil {
wl[0] = fullpath
}
@@ -456,6 +466,8 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
conf.OutputSkipEmptyFile = parseOpts.Output.OutputSkipEmptyFile
conf.IgnoreBody = parseOpts.HTTP.IgnoreBody
conf.Quiet = parseOpts.General.Quiet
conf.ScraperFile = parseOpts.General.ScraperFile
conf.Scrapers = parseOpts.General.Scrapers
conf.StopOn403 = parseOpts.General.StopOn403
conf.StopOnAll = parseOpts.General.StopOnAll
conf.StopOnErrors = parseOpts.General.StopOnErrors
@@ -540,7 +552,6 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
if parseOpts.General.Verbose && parseOpts.General.Json {
errs.Add(fmt.Errorf("Cannot have -json and -v"))
}
return &conf, errs.ErrorOrNil()
}
@@ -691,7 +702,7 @@ func ReadConfig(configFile string) (*ConfigOptions, error) {
func ReadDefaultConfig() (*ConfigOptions, error) {
// Try to create configuration directory, ignore the potential error
_ = CheckOrCreateConfigDir()
conffile := filepath.Join(CONFIGDIR, ".ffufrc")
conffile := filepath.Join(CONFIGDIR, "ffufrc")
if !FileExists(conffile) {
userhome, err := os.UserHomeDir()
if err == nil {


@@ -19,6 +19,7 @@ type Response struct {
Request *Request
Raw string
ResultFile string
ScraperData map[string][]string
Time time.Duration
}
@@ -86,5 +87,6 @@ func NewResponse(httpresp *http.Response, req *Request) Response {
resp.Cancelled = false
resp.Raw = ""
resp.ResultFile = ""
resp.ScraperData = make(map[string][]string)
return resp
}


@@ -89,6 +89,10 @@ func CheckOrCreateConfigDir() error {
return err
}
err = createConfigDir(HISTORYDIR)
if err != nil {
return err
}
err = createConfigDir(SCRAPERDIR)
return err
}
@@ -103,3 +107,12 @@ func createConfigDir(path string) error {
}
return nil
}
func StrInSlice(key string, slice []string) bool {
for _, v := range slice {
if v == key {
return true
}
}
return false
}


@@ -1,6 +1,7 @@
package output
import (
"html"
"html/template"
"os"
"time"
@@ -8,11 +9,27 @@ import (
"github.com/ffuf/ffuf/pkg/ffuf"
)
type htmlResult struct {
Input map[string]string
Position int
StatusCode int64
ContentLength int64
ContentWords int64
ContentLines int64
ContentType string
RedirectLocation string
ScraperData string
Duration time.Duration
ResultFile string
Url string
Host string
}
type htmlFileOutput struct {
CommandLine string
Time string
Keys []string
Results []ffuf.Result
Results []htmlResult
}
const (
@@ -65,7 +82,7 @@
<table id="ffufreport">
<thead>
<div style="display:none">
|result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines|
|result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines|ContentType|Duration|Resultfile|ScraperData|
</div>
<tr>
<th>Status</th>
@@ -78,8 +95,9 @@ const (
<th>Words</th>
<th>Lines</th>
<th>Type</th>
<th>Duration</th>
<th>Duration</th>
<th>Resultfile</th>
<th>Scraper data</th>
</tr>
</thead>
@@ -100,8 +118,9 @@ const (
<td>{{ $result.ContentWords }}</td>
<td>{{ $result.ContentLines }}</td>
<td>{{ $result.ContentType }}</td>
<td>{{ $result.Duration }}</td>
<td>{{ $result.Duration }}</td>
<td>{{ $result.ResultFile }}</td>
<td>{{ $result.ScraperData }}
</tr>
{{ end }}
</tbody>
@@ -187,11 +206,49 @@ func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) erro
for _, inputprovider := range config.InputProviders {
keywords = append(keywords, inputprovider.Keyword)
}
htmlResults := make([]htmlResult, 0)
for _, r := range results {
strinput := make(map[string]string)
for k, v := range r.Input {
strinput[k] = string(v)
}
strscraper := ""
for k, v := range r.ScraperData {
if len(v) > 0 {
strscraper = strscraper + "<p><b>" + html.EscapeString(k) + ":</b><br />"
firstval := true
for _, val := range v {
if !firstval {
strscraper += "<br />"
}
strscraper += html.EscapeString(val)
firstval = false
}
strscraper += "</p>"
}
}
hres := htmlResult{
Input: strinput,
Position: r.Position,
StatusCode: r.StatusCode,
ContentLength: r.ContentLength,
ContentWords: r.ContentWords,
ContentLines: r.ContentLines,
ContentType: r.ContentType,
RedirectLocation: r.RedirectLocation,
ScraperData: strscraper,
Duration: r.Duration,
ResultFile: r.ResultFile,
Url: r.Url,
Host: r.Host,
}
htmlResults = append(htmlResults, hres)
}
outHTML := htmlFileOutput{
CommandLine: config.CommandLine,
Time: ti.Format(time.RFC3339),
Results: results,
Results: htmlResults,
Keys: keywords,
}


@@ -16,18 +16,19 @@ type ejsonFileOutput struct {
}
type JsonResult struct {
Input map[string]string `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
Duration time.Duration `json:"duration"`
ResultFile string `json:"resultfile"`
Url string `json:"url"`
Host string `json:"host"`
Input map[string]string `json:"input"`
Position int `json:"position"`
StatusCode int64 `json:"status"`
ContentLength int64 `json:"length"`
ContentWords int64 `json:"words"`
ContentLines int64 `json:"lines"`
ContentType string `json:"content-type"`
RedirectLocation string `json:"redirectlocation"`
ScraperData map[string][]string `json:"scraper"`
Duration time.Duration `json:"duration"`
ResultFile string `json:"resultfile"`
Url string `json:"url"`
Host string `json:"host"`
}
type jsonFileOutput struct {
@@ -73,6 +74,7 @@ func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
ContentLines: r.ContentLines,
ContentType: r.ContentType,
RedirectLocation: r.RedirectLocation,
ScraperData: r.ScraperData,
Duration: r.Duration,
ResultFile: r.ResultFile,
Url: r.Url,


@@ -14,13 +14,13 @@ const (
Command line : ` + "`{{.CommandLine}}`" + `
Time: ` + "{{ .Time }}" + `
{{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile |
{{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- |
{{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} |
{{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile | ScraperData
{{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | :------------ |
{{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} | {{ .ScraperData }} |
{{end}}` // The template format is not pretty but follows the markdown guide
)
func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error {
func writeMarkdown(filename string, config *ffuf.Config, results []ffuf.Result) error {
ti := time.Now()
keywords := make([]string, 0)
@@ -28,10 +28,50 @@ func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) erro
keywords = append(keywords, inputprovider.Keyword)
}
htmlResults := make([]htmlResult, 0)
for _, r := range results {
strinput := make(map[string]string)
for k, v := range r.Input {
strinput[k] = string(v)
}
strscraper := ""
for k, v := range r.ScraperData {
if len(v) > 0 {
strscraper = strscraper + "<p><b>" + k + ":</b><br />"
firstval := true
for _, val := range v {
if !firstval {
strscraper += "<br />"
}
strscraper += val
firstval = false
}
strscraper += "</p>"
}
}
hres := htmlResult{
Input: strinput,
Position: r.Position,
StatusCode: r.StatusCode,
ContentLength: r.ContentLength,
ContentWords: r.ContentWords,
ContentLines: r.ContentLines,
ContentType: r.ContentType,
RedirectLocation: r.RedirectLocation,
ScraperData: strscraper,
Duration: r.Duration,
ResultFile: r.ResultFile,
Url: r.Url,
Host: r.Host,
}
htmlResults = append(htmlResults, hres)
}
outMD := htmlFileOutput{
CommandLine: config.CommandLine,
Time: ti.Format(time.RFC3339),
Results: res,
Results: htmlResults,
Keys: keywords,
}


@@ -330,6 +330,7 @@ func (s *Stdoutput) Result(resp ffuf.Response) {
ContentLines: resp.ContentLines,
ContentType: resp.ContentType,
RedirectLocation: resp.GetRedirectLocation(false),
ScraperData: resp.ScraperData,
Url: resp.Request.Url,
Duration: resp.Time,
ResultFile: resp.ResultFile,
@@ -371,7 +372,7 @@ func (s *Stdoutput) PrintResult(res ffuf.Result) {
s.resultJson(res)
case s.config.Quiet:
s.resultQuiet(res)
case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0:
case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 || len(res.ScraperData) > 0:
// Print a multi-line result (when using multiple input keywords and wordlists)
s.resultMultiline(res)
default:
@@ -383,7 +384,7 @@ func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string {
inputs := ""
if len(res.Input) > 1 {
for k, v := range res.Input {
if inSlice(k, s.config.CommandKeywords) {
if ffuf.StrInSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(res.Position))
} else {
@@ -392,7 +393,7 @@
}
} else {
for k, v := range res.Input {
if inSlice(k, s.config.CommandKeywords) {
if ffuf.StrInSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
inputs = strconv.Itoa(res.Position)
} else {
@@ -423,7 +424,7 @@ func (s *Stdoutput) resultMultiline(res ffuf.Result) {
reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, res.ResultFile)
}
for _, k := range s.fuzzkeywords {
if inSlice(k, s.config.CommandKeywords) {
if ffuf.StrInSlice(k, s.config.CommandKeywords) {
// If we're using external command for input, display the position instead of input
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(res.Position))
} else {
@@ -431,6 +432,14 @@ func (s *Stdoutput) resultMultiline(res ffuf.Result) {
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, res.Input[k])
}
}
if len(res.ScraperData) > 0 {
reslines = fmt.Sprintf("%s%s| SCR |\n", reslines, TERMINAL_CLEAR_LINE)
for k, vslice := range res.ScraperData {
for _, v := range vslice {
reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v)
}
}
}
fmt.Printf("%s\n%s\n", res_hdr, reslines)
}
@@ -472,12 +481,3 @@ func (s *Stdoutput) colorize(status int64) string {
func printOption(name []byte, value []byte) {
fmt.Fprintf(os.Stderr, " :: %-16s : %s\n", name, value)
}
func inSlice(key string, slice []string) bool {
for _, v := range slice {
if v == key {
return true
}
}
return false
}


@@ -2,6 +2,7 @@ package runner
import (
"bytes"
"compress/gzip"
"crypto/tls"
"fmt"
"io"
@@ -154,8 +155,18 @@ func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
resp.Request.Raw = string(rawreq)
resp.Raw = string(rawresp)
}
var bodyReader io.ReadCloser
if httpresp.Header.Get("Content-Encoding") == "gzip" {
bodyReader, err = gzip.NewReader(httpresp.Body)
if err != nil {
// fallback to raw data
bodyReader = httpresp.Body
}
} else {
bodyReader = httpresp.Body
}
if respbody, err := io.ReadAll(httpresp.Body); err == nil {
if respbody, err := io.ReadAll(bodyReader); err == nil {
resp.ContentLength = int64(len(string(respbody)))
resp.Data = respbody
}
@@ -165,7 +176,6 @@ func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
resp.ContentWords = int64(wordsSize)
resp.ContentLines = int64(linesSize)
resp.Time = firstByteTime
return resp, nil
}
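
This hunk is a standalone fix that also matters for scraping: gzip-encoded bodies are now decompressed before being stored in resp.Data, so rules match against plaintext rather than compressed bytes, and the raw body is kept as a fallback if the gzip stream cannot be opened.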

pkg/scraper/scraper.go (new file)

@@ -0,0 +1,168 @@
package scraper
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/PuerkitoBio/goquery"
)
type ScraperRule struct {
Name string `json:"name"`
Rule string `json:"rule"`
Target string `json:"target"`
compiledRule *regexp.Regexp
Type string `json:"type"`
OnlyMatched bool `json:"onlymatched"`
Action []string `json:"action"`
}
type ScraperGroup struct {
Rules []*ScraperRule `json:"rules"`
Name string `json:"groupname"`
Active bool `json:"active"`
}
type Scraper struct {
Rules []*ScraperRule
}
func readGroupFromFile(filename string) (ScraperGroup, error) {
data, err := os.ReadFile(filename)
if err != nil {
return ScraperGroup{Rules: make([]*ScraperRule, 0)}, err
}
sc := ScraperGroup{}
err = json.Unmarshal([]byte(data), &sc)
return sc, err
}
func FromDir(dirname string, activestr string) (ffuf.Scraper, ffuf.Multierror) {
scr := Scraper{Rules: make([]*ScraperRule, 0)}
errs := ffuf.NewMultierror()
activegrps := parseActiveGroups(activestr)
all_files, err := os.ReadDir(ffuf.SCRAPERDIR)
if err != nil {
errs.Add(err)
return &scr, errs
}
for _, filename := range all_files {
if filename.Type().IsRegular() && strings.HasSuffix(filename.Name(), ".json") {
sg, err := readGroupFromFile(filepath.Join(dirname, filename.Name()))
if err != nil {
cerr := fmt.Errorf("%s : %s", filepath.Join(dirname, filename.Name()), err)
errs.Add(cerr)
continue
}
if (sg.Active && isActive("all", activegrps)) || isActive(sg.Name, activegrps) {
for _, r := range sg.Rules {
err = r.init()
if err != nil {
cerr := fmt.Errorf("%s : %s", filepath.Join(dirname, filename.Name()), err)
errs.Add(cerr)
continue
}
scr.Rules = append(scr.Rules, r)
}
}
}
}
return &scr, errs
}
// FromFile initializes a scraper instance and reads rules from a file
func (s *Scraper) AppendFromFile(path string) error {
sg, err := readGroupFromFile(path)
if err != nil {
return err
}
for _, r := range sg.Rules {
err = r.init()
if err != nil {
continue
}
s.Rules = append(s.Rules, r)
}
return err
}
func (s *Scraper) Execute(resp *ffuf.Response, matched bool) []ffuf.ScraperResult {
res := make([]ffuf.ScraperResult, 0)
for _, rule := range s.Rules {
if !matched && rule.OnlyMatched {
// pass this rule as there was no match
continue
}
sourceData := ""
if rule.Target == "body" {
sourceData = string(resp.Data)
} else if rule.Target == "headers" {
sourceData = headerString(resp.Headers)
} else {
sourceData = headerString(resp.Headers) + string(resp.Data)
}
val := rule.Check(sourceData)
if len(val) > 0 {
res = append(res, ffuf.ScraperResult{
Name: rule.Name,
Type: rule.Type,
Action: rule.Action,
Results: val,
})
}
}
return res
}
// init initializes the scraper rule, and returns an error in case there's an error in the syntax
func (r *ScraperRule) init() error {
var err error
if r.Type == "regexp" {
r.compiledRule, err = regexp.Compile(r.Rule)
if err != nil {
return err
}
}
return err
}
func (r *ScraperRule) Check(data string) []string {
if r.Type == "regexp" {
return r.checkRegexp(data)
} else if r.Type == "query" {
return r.checkQuery(data)
}
return []string{}
}
func (r *ScraperRule) checkQuery(data string) []string {
val := make([]string, 0)
doc, err := goquery.NewDocumentFromReader(strings.NewReader(data))
if err != nil {
return []string{}
}
doc.Find(r.Rule).Each(func(i int, sel *goquery.Selection) {
val = append(val, sel.Text())
})
return val
}
func (r *ScraperRule) checkRegexp(data string) []string {
val := make([]string, 0)
if r.compiledRule != nil {
res := r.compiledRule.FindAllStringSubmatch(data, -1)
for _, grp := range res {
val = append(val, grp...)
}
return val
}
return []string{}
}
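
Tying the loader and the rule structs together, a rule group file placed in the scraper directory could look like the following (entirely illustrative; only the field names and the accepted values for target, type and action come from the code above):

{
  "groupname": "example",
  "active": true,
  "rules": [
    {
      "name": "page-title",
      "rule": "head > title",
      "target": "body",
      "type": "query",
      "onlymatched": true,
      "action": ["output"]
    },
    {
      "name": "generator-header",
      "rule": "X-Generator: (\\S+)",
      "target": "headers",
      "type": "regexp",
      "onlymatched": false,
      "action": ["output"]
    }
  ]
}

A "query" rule is a goquery (CSS) selector run against the response body; a "regexp" rule is compiled at load time, and checkRegexp flattens the full match plus any capture groups into the result list.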

pkg/scraper/util.go (new file)

@@ -0,0 +1,29 @@
package scraper
import (
"fmt"
"github.com/ffuf/ffuf/pkg/ffuf"
"strings"
)
func headerString(headers map[string][]string) string {
val := ""
for k, vslice := range headers {
for _, v := range vslice {
val += fmt.Sprintf("%s: %s\n", k, v)
}
}
return val
}
func isActive(name string, activegroups []string) bool {
return ffuf.StrInSlice(strings.ToLower(strings.TrimSpace(name)), activegroups)
}
func parseActiveGroups(activestr string) []string {
retslice := make([]string, 0)
for _, v := range strings.Split(activestr, ",") {
retslice = append(retslice, strings.ToLower(strings.TrimSpace(v)))
}
return retslice
}
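
Group activation is case-insensitive and whitespace-tolerant: -scrapers " Example, KEYS " resolves to the groups example and keys. Combined with the condition in FromDir, -scrapers all loads every group whose active field is true, while naming a group explicitly loads it even if its active field is false.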