Commit

Initial
tomnomnom committed Jun 23, 2019
commit 407f60a (0 parents)
Showing 11 changed files with 511 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
assetfinder
21 changes: 21 additions & 0 deletions README.md
@@ -0,0 +1,21 @@
# assetfinder

## Install

```
go get -u github.com/tomnomnom/assetfinder
```

## Usage

```
assetfinder [--subs-only] <domain>
```
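
For example, to print only subdomains of the target domain (using `example.com` as a placeholder):

```
assetfinder --subs-only example.com
```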

## TODO:
* http://api.passivetotal.org/api/docs/
* https://findsubdomains.com
* https://community.riskiq.com/ (?)
* https://riddler.io/
* http://www.dnsdb.org/
* https://certdb.com/api-documentation
25 changes: 25 additions & 0 deletions certspotter.go
@@ -0,0 +1,25 @@
package main

import (
"fmt"
)

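// fetchCertSpotter queries the Cert Spotter API for certificates issued
// for the given domain and returns every DNS name they cover.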
func fetchCertSpotter(domain string) ([]string, error) {
out := make([]string, 0)

fetchURL := fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain)

wrapper := []struct {
DNSNames []string `json:"dns_names"`
}{}
err := fetchJSON(fetchURL, &wrapper)
if err != nil {
return out, err
}

for _, w := range wrapper {
out = append(out, w.DNSNames...)
}

return out, nil
}
38 changes: 38 additions & 0 deletions crtsh.go
@@ -0,0 +1,38 @@
package main

import (
"encoding/json"
"fmt"
"net/http"
)

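// fetchCrtSh queries crt.sh's JSON output for certificates matching
// %.domain and collects the name_value field from each record.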
func fetchCrtSh(domain string) ([]string, error) {
resp, err := http.Get(
fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain),
)
if err != nil {
return []string{}, err
}
defer resp.Body.Close()

output := make([]string, 0)

dec := json.NewDecoder(resp.Body)

// The crt.sh API is a little funky... It returns multiple
// JSON objects with no delimiter, so you just have to keep
// attempting a decode until you hit EOF
for {
wrapper := struct {
Name string `json:"name_value"`
}{}

err := dec.Decode(&wrapper)
if err != nil {
break
}

output = append(output, wrapper.Name)
}
return output, nil
}
100 changes: 100 additions & 0 deletions facebook.go
@@ -0,0 +1,100 @@
package main

import (
"encoding/json"
"errors"
"fmt"
"net/http"
"os"
)

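// fetchFacebook queries the Facebook certificate transparency API for the
// given domain. It returns no results (and no error) if the FB_APP_ID and
// FB_APP_SECRET environment variables are not set.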
func fetchFacebook(domain string) ([]string, error) {

appId := os.Getenv("FB_APP_ID")
appSecret := os.Getenv("FB_APP_SECRET")
if appId == "" || appSecret == "" {
// fail silently because it's reasonable not to have
// the Facebook API creds
return []string{}, nil
}

accessToken, err := facebookAuth(appId, appSecret)
if err != nil {
return []string{}, err
}

domains, err := getFacebookCerts(accessToken, domain)
if err != nil {
return []string{}, err
}

return domains, nil
}

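// getFacebookCerts pages through the Graph API certificates endpoint,
// collecting the domains listed on each certificate matching *.query.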
func getFacebookCerts(accessToken, query string) ([]string, error) {
out := make([]string, 0)
fetchURL := fmt.Sprintf(
"https://graph.facebook.com/certificates?fields=domains&access_token=%s&query=*.%s",
accessToken, query,
)

for {

wrapper := struct {
Data []struct {
Domains []string `json:"domains"`
} `json:"data"`

Paging struct {
Next string `json:"next"`
} `json:"paging"`
}{}

err := fetchJSON(fetchURL, &wrapper)
if err != nil {
return out, err
}

for _, data := range wrapper.Data {
for _, d := range data.Domains {
out = append(out, d)
}
}

fetchURL = wrapper.Paging.Next
if fetchURL == "" {
break
}
}
return out, nil
}

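// facebookAuth exchanges an app ID and secret for an app access token
// using the client_credentials grant.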
func facebookAuth(appId, appSecret string) (string, error) {
authUrl := fmt.Sprintf(
"https://graph.facebook.com/oauth/access_token?client_id=%s&client_secret=%s&grant_type=client_credentials",
appId, appSecret,
)

resp, err := http.Get(authUrl)
if err != nil {
return "", err
}

defer resp.Body.Close()

dec := json.NewDecoder(resp.Body)

auth := struct {
AccessToken string `json:"access_token"`
}{}
err = dec.Decode(&auth)
if err != nil {
return "", err
}

if auth.AccessToken == "" {
return "", errors.New("no access token in Facebook API response")
}

return auth.AccessToken, nil
}
31 changes: 31 additions & 0 deletions hackertarget.go
@@ -0,0 +1,31 @@
package main

import (
"bufio"
"bytes"
"fmt"
"strings"
)

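// fetchHackerTarget queries the HackerTarget hostsearch API, which returns
// comma-separated hostname,IP lines, and collects the hostnames.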
func fetchHackerTarget(domain string) ([]string, error) {
out := make([]string, 0)

raw, err := httpGet(
fmt.Sprintf("https://api.hackertarget.com/hostsearch/?q=%s", domain),
)
if err != nil {
return out, err
}

sc := bufio.NewScanner(bytes.NewReader(raw))
for sc.Scan() {
parts := strings.SplitN(sc.Text(), ",", 2)
if len(parts) != 2 {
continue
}

out = append(out, parts[0])
}

return out, sc.Err()
}
129 changes: 129 additions & 0 deletions main.go
@@ -0,0 +1,129 @@
package main

import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"os"
"strings"
"sync"
)

func main() {
var subsOnly bool
	flag.BoolVar(&subsOnly, "subs-only", false, "Only include subdomains of search domain")
flag.Parse()

domain := flag.Arg(0)
if domain == "" {
fmt.Println("no domain specified")
return
}
domain = strings.ToLower(domain)

sources := []fetchFn{
fetchCertSpotter,
fetchHackerTarget,
fetchThreatCrowd,
fetchCrtSh,
fetchFacebook,
//fetchWayback, // A little too slow :(
fetchVirusTotal,
}

out := make(chan string)
var wg sync.WaitGroup

// call each of the source workers in a goroutine
for _, source := range sources {
wg.Add(1)
fn := source

go func() {
defer wg.Done()

names, err := fn(domain)

if err != nil {
fmt.Fprintf(os.Stderr, "err: %s\n", err)
return
}

for _, n := range names {
out <- n
}
}()
}

// close the output channel when all the workers are done
go func() {
wg.Wait()
close(out)
}()

// track what we've already printed to avoid duplicates
printed := make(map[string]bool)

for n := range out {
n = cleanDomain(n)
if _, ok := printed[n]; ok {
continue
}
if subsOnly && !strings.HasSuffix(n, domain) {
continue
}
fmt.Println(n)
printed[n] = true
}
}

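// fetchFn is the signature shared by all of the subdomain sources: each
// takes a domain and returns any names it found.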
type fetchFn func(string) ([]string, error)

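// httpGet fetches a URL and returns the raw response body.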
func httpGet(url string) ([]byte, error) {
res, err := http.Get(url)
if err != nil {
return []byte{}, err
}

raw, err := ioutil.ReadAll(res.Body)

res.Body.Close()
if err != nil {
return []byte{}, err
}

return raw, nil
}

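// cleanDomain lowercases a domain and strips any leading wildcard,
// percent sign, or dot left over from certificate name fields.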
func cleanDomain(d string) string {
d = strings.ToLower(d)

// no idea what this is, but we can't clean it ¯\_(ツ)_/¯
if len(d) < 2 {
return d
}

if d[0] == '*' || d[0] == '%' {
d = d[1:]
}

if d[0] == '.' {
d = d[1:]
}

	return d
}

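// fetchJSON fetches a URL and decodes the JSON response into wrapper.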
func fetchJSON(url string, wrapper interface{}) error {
resp, err := http.Get(url)
if err != nil {
return err
}
defer resp.Body.Close()
dec := json.NewDecoder(resp.Body)

return dec.Decode(wrapper)
}