Adding Akamai filters for data received in Shodan + allow deactivating Balboa search

This commit is contained in:
Christopher Talib 2020-02-14 11:36:47 +01:00
parent e963633c89
commit 02a014262a
7 changed files with 2460 additions and 145 deletions

View file

@ -41,6 +41,10 @@ func SendEventToKafka(conn *kafka.Conn, node models.Node) {
// ReadEventFromKafka read the event sent to Kafka and acts upon it.
func ReadEventFromKafka() {
if !viper.GetBool("kafka.activated") {
return
}
Topic := viper.GetString("kafka.topic")
Partition := viper.GetInt("kafka.partition")
Host := viper.GetString("kafka.host")
@ -67,20 +71,22 @@ func ReadEventFromKafka() {
var node models.Node
json.Unmarshal(m.Value, &node)
if len(node.ID) != 0 {
// TODO: refactor this context
ctx := context.Background()
entries, err := c.GetAllEntries(ctx, node.Data, "", "", int32(1))
if err != nil {
logrus.Error("error from balboa", err)
}
if len(entries) != 0 {
balboaNode := models.BuildBalboaNode(entries)
models.SaveBalboaNode("bnodes.json", balboaNode)
edge := models.BuildEdge("balboa", node.ID, balboaNode.ID)
models.SaveEdge(edge)
}
if viper.GetBool("balboa.activated") {
if len(node.ID) != 0 {
// TODO: refactor this context
ctx := context.Background()
entries, err := c.GetAllEntries(ctx, node.Data, "", "", int32(1))
if err != nil {
logrus.Error("error from balboa", err)
}
if len(entries) != 0 {
balboaNode := models.BuildBalboaNode(entries)
models.SaveBalboaNode("bnodes.json", balboaNode)
edge := models.BuildEdge("balboa", node.ID, balboaNode.ID)
models.SaveEdge(edge)
}
}
}
}
}

1923
filters/akamai.cidr Normal file

File diff suppressed because it is too large Load diff

54
filters/akamai.go Normal file
View file

@ -0,0 +1,54 @@
package filters
import (
"bufio"
"net"
"os"
"github.com/sirupsen/logrus"
)
// IsAkamai reports whether the given IP falls inside one of the Akamai
// CIDR ranges listed in filters/akamai.cidr (IPv4) or
// filters/akamaiv6.cidr (IPv6).
//
// Lines in the CIDR files that do not parse as CIDR (e.g. "#" comment
// lines) are skipped. An unparsable IP, a missing filter file, or a
// read error is logged and treated as "not Akamai" (false) instead of
// terminating the process.
func IsAkamai(ip net.IP) bool {
	var filename string
	switch {
	case ip.To4() != nil:
		filename = "filters/akamai.cidr"
	case ip.To16() != nil:
		filename = "filters/akamaiv6.cidr"
	default:
		logrus.Error("filters#IsAkamai#invalid ip format")
		return false
	}

	file, err := os.Open(filename)
	if err != nil {
		// Error instead of Fatal: a missing filter file should not
		// kill the whole process; fail open as "not Akamai".
		logrus.Error("filters#IsAkamai", err)
		return false
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		_, ipNet, err := net.ParseCIDR(scanner.Text())
		if err != nil {
			// Comment or malformed line; ignore it.
			continue
		}
		if ipNet.Contains(ip) {
			return true
		}
	}
	// Surface truncated reads rather than silently returning false.
	if err := scanner.Err(); err != nil {
		logrus.Error("filters#IsAkamai#scan", err)
	}
	return false
}
// Check the version of the IP address (IPv4 or IPv6).
func checkIPversion(ip string) (string, bool) {
if net.ParseIP(ip).To4() != nil {
return "IPv4", true
} else if net.ParseIP(ip).To16() != nil {
return "IPv6", true
} else {
return "", false
}
}

357
filters/akamaiv6.cidr Normal file
View file

@ -0,0 +1,357 @@
# 2001:4878::/48
# 2600:1400:7::/48
# 2600:1400:8::/48
# 2600:1400:9::/48
# 2600:1400:a::/48
# 2600:1400:b::/48
# 2600:1400:c::/48
# 2600:1401:1::/48
# 2600:1401:2::/48
# 2600:1401:3::/48
# 2600:1402:11::/48
# 2600:1402:13::/48
# 2600:1402:14::/48
# 2600:1402:6::/48
# 2600:1402:7::/48
# 2600:1402:a::/48
# 2600:1402:d::/48
# 2600:1402:e::/48
# 2600:1402:f::/48
# 2600:1403:10::/64
# 2600:1403:11::/48
# 2600:1403:12::/48
# 2600:1403:4::/48
# 2600:1403:6::/48
# 2600:1403:8::/48
# 2600:1403:9::/48
# 2600:1403:b::/48
# 2600:1403:c::/48
# 2600:1403:d::/48
# 2600:1403:e::/48
# 2600:1404:12::/48
# 2600:1404:13::/48
# 2600:1404:14::/48
# 2600:1404:15::/48
# 2600:1404:16::/48
# 2600:1404:17::/48
# 2600:1404:18::/48
# 2600:1404:1a::/48
# 2600:1404:1c::/48
# 2600:1404:1d::/48
# 2600:1404:1e::/48
# 2600:1404:1f::/48
# 2600:1404:20::/48
# 2600:1404:7::/48
# 2600:1404:8::/48
# 2600:1404:a::/48
# 2600:1404:b::/48
# 2600:1404:c::/48
# 2600:1404:d::/48
# 2600:1404:e::/48
# 2600:1404:f::/48
# 2600:1405:1::/48
# 2600:1405:2::/48
# 2600:1406:12::/48
# 2600:1406:13::/48
# 2600:1406:16::/48
# 2600:1406:19::/48
# 2600:1406:1a::/48
# 2600:1406:1b::/48
# 2600:1406:1c::/48
# 2600:1406:1f::/48
# 2600:1406:20::/48
# 2600:1406:21::/48
# 2600:1406:22::/48
# 2600:1406:27::/48
# 2600:1406:28::/48
# 2600:1406:29::/48
# 2600:1406:2a::/48
# 2600:1406:2b::/48
# 2600:1406:2c::/48
# 2600:1406:2f::/48
# 2600:1406:30::/48
# 2600:1406:31::/48
# 2600:1406:32::/48
# 2600:1406:34::/48
# 2600:1406:f::/48
# 2600:1407:10::/48
# 2600:1407:11::/48
# 2600:1407:12::/48
# 2600:1407:13::/48
# 2600:1407:15::/48
# 2600:1407:16::/48
# 2600:1407:17::/48
# 2600:1407:19::/48
# 2600:1407:8::/48
# 2600:1407:9::/48
# 2600:1407:a::/48
# 2600:1407:b::/48
# 2600:1407:c::/48
# 2600:1407:d::/48
# 2600:1407:f::/48
# 2600:1408:10::/48
# 2600:1408:12::/48
# 2600:1408:13::/48
# 2600:1408:14::/48
# 2600:1408:15::/48
# 2600:1408:16::/48
# 2600:1408:17::/48
# 2600:1408:19::/48
# 2600:1408:1a::/48
# 2600:1408:1c::/48
# 2600:1408:1d::/48
# 2600:1408:9::/48
# 2600:1408:b::/48
# 2600:1408:c::/48
# 2600:1408:d::/48
# 2600:1408:e::/48
# 2600:1409:5::/48
# 2600:1409:a::/48
# 2600:1409:b::/48
# 2600:1409:c::/48
# 2600:1409:d::/48
# 2600:1409:e::/48
# 2600:1409:f::/48
# 2600:140a:2::/48
# 2600:140a:3::/48
# 2600:140a:4::/48
# 2600:140a:5::/48
# 2600:140a:6::/48
# 2600:140a:7::/48
# 2600:140a:8::/48
# 2600:140a:9::/48
# 2600:140b:10::/48
# 2600:140b:11::/48
# 2600:140b:12::/48
# 2600:140b:13::/48
# 2600:140b:15::/48
# 2600:140b:2::/48
# 2600:140b:a::/48
# 2600:140b:b::/48
# 2600:140b:c::/48
# 2600:140b:d::/48
# 2600:140b:e::/48
# 2600:140b:f::/48
# 2600:140f:9::/48
# 2600:140f:a::/48
# 2600:1410:1::/48
# 2600:1410:2::/48
# 2600:1410:3::/48
# 2600:1415:1::/48
# 2600:1415:5::/48
# 2600:1415:7::/48
# 2600:1415:8::/48
# 2600:1415:9::/48
# 2600:1415:b::/48
# 2600:1415:c::/48
# 2600:1415:d::/48
# 2600:1416::/48
# 2600:1417:11::/48
# 2600:1417:13::/48
# 2600:1417:14::/48
# 2600:1417:15::/48
# 2600:1417:17::/48
# 2600:1417:18::/48
# 2600:1417:19::/48
# 2600:1417:1::/48
# 2600:1417:1b::/48
# 2600:1417:1c::/48
# 2600:1417:22::/48
# 2600:1417:25::/48
# 2600:1417:26::/48
# 2600:1417:27::/48
# 2600:1417:28::/48
# 2600:1417:29::/48
# 2600:1417:2a::/48
# 2600:1417:2c::/48
# 2600:1417:2d::/48
# 2600:1417:2e::/48
# 2600:1417:2f::/48
# 2600:1417:34::/48
# 2600:1417:35::/48
# 2600:1417:36::/48
# 2600:1417:37::/48
# 2600:1417:38::/48
# 2600:1417:39::/48
# 2600:1417:3::/48
# 2600:1417:3a::/48
# 2600:1417:3b::/48
# 2600:1417:3f::/48
# 2600:1417:9::/48
# 2600:1417::/48
# 2600:1417:a::/48
# 2600:1417:b::/48
# 2600:1417:c::/48
# 2600:1417:d::/48
# 2600:1417:e::/48
# 2600:1418:1::/48
# 2600:1418:2::/48
# 2600:1419:1::/48
# 2600:1419:2::/48
# 2600:1419:2::/64
# 2600:1419:3::/48
# 2600:1419:4::/48
# 2600:1419:5::/48
# 2600:1419:7::/48
# 2600:1419:8::/48
# 2600:1419:9::/48
# 2600:1419::/48
# 2600:1419:a::/48
# 2600:1419:b::/48
# 2600:141a::/48
# 2600:141b:10::/48
# 2600:141b:1::/48
# 2600:141b:2::/48
# 2600:141b:3::/48
# 2600:141b:4::/48
# 2600:141b:5::/48
# 2600:141b:6::/48
# 2600:141c:1::/48
# 2600:141c:2::/48
# 2600:141c:3::/48
# 2600:141c:4::/48
# 2600:141c::/48
# 2600:141d:1::/48
# 2600:141d::/48
# 2600:141e:1::/48
# 2600:141e:2::/48
# 2600:141e::/48
# 2600:141f:1::/48
# 2600:141f::/48
# 2600:807:320:100::/56
# 2600:807:320:200::/56
# 2600:807:320:300::/56
# 2600:807:320:400::/56
# 2600:807:320::/56
# 2a02:26f0:10::/48
# 2a02:26f0:11::/48
# 2a02:26f0:12::/48
# 2a02:26f0:13::/48
# 2a02:26f0:14::/48
# 2a02:26f0:15::/48
# 2a02:26f0:16::/48
# 2a02:26f0:17::/48
# 2a02:26f0:18::/48
# 2a02:26f0:19::/48
# 2a02:26f0:20::/48
# 2a02:26f0:21::/48
# 2a02:26f0:22::/48
# 2a02:26f0:23::/48
# 2a02:26f0:26::/48
# 2a02:26f0:27::/48
# 2a02:26f0:28::/48
# 2a02:26f0:2::/48
# 2a02:26f0:2a::/48
# 2a02:26f0:2c::/48
# 2a02:26f0:2d::/48
# 2a02:26f0:2e::/48
# 2a02:26f0:2f::/48
# 2a02:26f0:30::/48
# 2a02:26f0:32::/48
# 2a02:26f0:38::/48
# 2a02:26f0:3::/48
# 2a02:26f0:3a::/48
# 2a02:26f0:3b::/48
# 2a02:26f0:3c::/48
# 2a02:26f0:3d::/48
# 2a02:26f0:40::/48
# 2a02:26f0:41::/48
# 2a02:26f0:43::/48
# 2a02:26f0:44::/48
# 2a02:26f0:45::/48
# 2a02:26f0:46::/48
# 2a02:26f0:47::/48
# 2a02:26f0:48::/48
# 2a02:26f0:49::/48
# 2a02:26f0:4a::/48
# 2a02:26f0:4b::/48
# 2a02:26f0:4c::/48
# 2a02:26f0:4d::/48
# 2a02:26f0:4e::/48
# 2a02:26f0:4f::/48
# 2a02:26f0:50::/48
# 2a02:26f0:51::/48
# 2a02:26f0:52::/48
# 2a02:26f0:53::/48
# 2a02:26f0:54::/48
# 2a02:26f0:55::/48
# 2a02:26f0:58::/48
# 2a02:26f0:59::/48
# 2a02:26f0:5::/48
# 2a02:26f0:5a::/48
# 2a02:26f0:5b::/48
# 2a02:26f0:5c::/48
# 2a02:26f0:5d::/48
# 2a02:26f0:5f::/48
# 2a02:26f0:60::/48
# 2a02:26f0:61::/48
# 2a02:26f0:62::/48
# 2a02:26f0:63::/48
# 2a02:26f0:64::/48
# 2a02:26f0:65::/48
# 2a02:26f0:67::/48
# 2a02:26f0:68::/48
# 2a02:26f0:69::/48
# 2a02:26f0:6::/48
# 2a02:26f0:6a::/48
# 2a02:26f0:6b::/48
# 2a02:26f0:6f::/48
# 2a02:26f0:70::/48
# 2a02:26f0:71::/48
# 2a02:26f0:72::/48
# 2a02:26f0:75::/48
# 2a02:26f0:76::/48
# 2a02:26f0:78::/48
# 2a02:26f0:79::/48
# 2a02:26f0:7b::/48
# 2a02:26f0:7d::/48
# 2a02:26f0:80::/48
# 2a02:26f0:82::/48
# 2a02:26f0:8::/48
# 2a02:26f0:8a::/48
# 2a02:26f0:8b::/48
# 2a02:26f0:8c::/48
# 2a02:26f0:8d::/48
# 2a02:26f0:8e::/48
# 2a02:26f0:8f::/48
# 2a02:26f0:94::/48
# 2a02:26f0:96::/48
# 2a02:26f0:97::/48
# 2a02:26f0:98::/48
# 2a02:26f0:9::/48
# 2a02:26f0::/48
# 2a02:26f0:a0::/48
# 2a02:26f0:a1::/48
# 2a02:26f0:a4::/48
# 2a02:26f0:a6::/48
# 2a02:26f0:ab::/48
# 2a02:26f0:ac::/48
# 2a02:26f0:ad::/48
# 2a02:26f0:ae::/48
# 2a02:26f0:af::/48
# 2a02:26f0:b0::/48
# 2a02:26f0:b1::/48
# 2a02:26f0:b2::/48
# 2a02:26f0:b3::/48
# 2a02:26f0:b5::/48
# 2a02:26f0:b6::/48
# 2a02:26f0:b7::/48
# 2a02:26f0:b8::/48
# 2a02:26f0:b9::/48
# 2a02:26f0:b::/48
# 2a02:26f0:ba::/48
# 2a02:26f0:bd::/48
# 2a02:26f0:be::/48
# 2a02:26f0:c0::/48
# 2a02:26f0:c2::/48
# 2a02:26f0:c3::/48
# 2a02:26f0:c5::/48
# 2a02:26f0:c7::/48
# 2a02:26f0:c8::/48
# 2a02:26f0:c9::/48
# 2a02:26f0:c::/48
# 2a02:26f0:ca::/48
# 2a02:26f0:cb::/48
# 2a02:26f0:cc::/48
# 2a02:26f0:cd::/48
# 2a02:26f0:f::/48

186
main.go
View file

@ -8,11 +8,13 @@ import (
"time"
"github.com/CaliDog/certstream-go"
"github.com/jmoiron/jsonq"
"github.com/ns3777k/go-shodan/v4/shodan"
"github.com/segmentio/kafka-go"
"github.com/sirupsen/logrus"
"github.com/spf13/viper"
"gitlab.dcso.lolcat/LABS/styx/broker"
"gitlab.dcso.lolcat/LABS/styx/filters"
"gitlab.dcso.lolcat/LABS/styx/models"
)
@ -37,8 +39,7 @@ func main() {
fmt.Println("Starting to get data from the Internet...")
// The false flag specifies that we want heartbeat messages.
stream, errStream := certstream.CertStreamEventStream(false)
Conn, err := broker.SetUpKafkaConnecter()
conn, err := broker.SetUpKafkaConnecter()
if err != nil {
panic(err)
}
@ -51,67 +52,15 @@ func main() {
go broker.ReadEventFromKafka()
// certstream
go func() {
for {
select {
case jq := <-stream:
if data, err := models.ExtractCertFromStream(jq); err == nil {
rawNode := models.WrapCertStreamData(*data)
models.SaveCertStreamRaw("raw_certstream.json", rawNode)
certNode := models.BuildCertNode(rawNode)
models.SaveCertNode("cert_nodes.json", certNode)
mainNode := models.BuildNode("node", "certstream", certNode.ID)
models.SaveNode("nodes.json", mainNode)
edge := models.BuildEdge("certstream", rawNode.ID, mainNode.ID)
models.SaveEdge(edge)
edge = models.BuildEdge("certstream", mainNode.ID, certNode.ID)
models.SaveEdge(edge)
allDomains := data.Data.LeafCert.AllDomains
saveSingleValues(Conn, "certstream", "domain", certNode.ID, allDomains)
}
case err := <-errStream:
logrus.Error(err)
case <-stopChan:
wg.Done()
return
}
}
}()
stream, errStream := certstream.CertStreamEventStream(false)
if viper.GetBool("certstream.activated") {
go certstreamRoutine(stream, errStream, conn, stopChan, &wg)
}
// pastebin
go func() {
for {
select {
default:
pastes, err := models.QueryPastes()
if err != nil {
logrus.Panic(err)
}
for _, p := range pastes {
paste, err := models.FetchPaste(p)
if err != nil {
logrus.Error("cannot fetch paste", err)
}
fp := models.FullPaste{
Meta: p,
Full: paste,
}
res := models.BuildPasteNode(&fp)
models.SavePaste("paste_formatted.json", res)
time.Sleep(1 * time.Second)
}
time.Sleep(3 * time.Second)
case <-stopChan:
wg.Done()
return
}
}
}()
if viper.GetBool("pastebin.activated") {
go pastebinRoutine(stopChan, &wg)
}
// shodan
client := shodan.NewEnvClient(nil)
@ -121,38 +70,115 @@ func main() {
logrus.Panic(err)
}
go func() {
for {
select {
default:
banner, ok := <-ch
if !ok {
logrus.Error("channel is closed")
break
}
if viper.GetBool("shodan.activated") {
go shodanRoutine(client, ch, conn, stopChan, &wg)
}
shodanNode := models.BuildShodanNode(banner)
wg.Wait()
}
// routines
// certstreamRoutine consumes certificates from the certstream feed until
// stopChan fires. Each certificate that parses cleanly is persisted as a
// raw node, a cert node, and a main node, linked by edges; its domains
// are then handed to saveSingleValues with conn (presumably published to
// Kafka via conn — confirm against saveSingleValues). Calls wg.Done()
// exactly once, when stopChan delivers a signal.
func certstreamRoutine(stream chan jsonq.JsonQuery, errStream chan error, conn *kafka.Conn, stopChan chan os.Signal, wg *sync.WaitGroup) {
fmt.Println("certstream is activated")
for {
select {
case jq := <-stream:
// Certificates that fail to parse are silently dropped.
if data, err := models.ExtractCertFromStream(jq); err == nil {
rawNode := models.WrapCertStreamData(*data)
models.SaveCertStreamRaw("raw_certstream.json", rawNode)
certNode := models.BuildCertNode(rawNode)
models.SaveCertNode("cert_nodes.json", certNode)
mainNode := models.BuildNode("node", "certstream", certNode.ID)
models.SaveNode("nodes.json", mainNode)
// Edges: raw -> main, then main -> cert.
edge := models.BuildEdge("certstream", rawNode.ID, mainNode.ID)
models.SaveEdge(edge)
edge = models.BuildEdge("certstream", mainNode.ID, certNode.ID)
models.SaveEdge(edge)
allDomains := data.Data.LeafCert.AllDomains
saveSingleValues(conn, "certstream", "domain", certNode.ID, allDomains)
}
case err := <-errStream:
// Stream errors are logged but do not stop the routine.
logrus.Error(err)
case <-stopChan:
wg.Done()
return
}
}
}
// pastebinRoutine polls pastebin until stopChan fires: each round it
// queries the current paste listing, fetches every paste, and appends the
// formatted result to paste_formatted.json. It sleeps 1s between pastes
// and 3s between rounds to rate-limit the polling. A failed listing query
// panics via logrus.Panic; a failed individual fetch is only logged.
// Calls wg.Done() exactly once, when stopChan delivers a signal.
func pastebinRoutine(stopChan chan os.Signal, wg *sync.WaitGroup) {
fmt.Println("pastebin is activated")
for {
select {
default:
pastes, err := models.QueryPastes()
if err != nil {
logrus.Panic(err)
}
for _, p := range pastes {
paste, err := models.FetchPaste(p)
if err != nil {
// Best-effort: skip the body but still record the metadata below.
logrus.Error("cannot fetch paste", err)
}
fp := models.FullPaste{
Meta: p,
Full: paste,
}
res := models.BuildPasteNode(&fp)
models.SavePaste("paste_formatted.json", res)
time.Sleep(1 * time.Second)
}
time.Sleep(3 * time.Second)
case <-stopChan:
wg.Done()
return
}
}
}
func shodanRoutine(client *shodan.Client, shodanChan chan *shodan.HostData, conn *kafka.Conn, stopChan chan os.Signal, wg *sync.WaitGroup) {
fmt.Println("shodan is activated")
for {
select {
default:
banner, ok := <-shodanChan
if !ok {
logrus.Error("channel is closed")
break
}
shodanNode := models.BuildShodanNode(banner)
// first filter poc
if !filters.IsAkamai(shodanNode.Data.IP) {
fmt.Println("is not Akamai", shodanNode.Data.IP)
hostnames := shodanNode.Data.Hostnames
if len(hostnames) != 0 {
saveSingleValues(Conn, "shodan_stream", "hostname", shodanNode.ID, hostnames)
saveSingleValues(conn, "shodan_stream", "hostname", shodanNode.ID, hostnames)
}
domains := shodanNode.Data.Domains
if len(domains) != 0 {
saveSingleValues(Conn, "shodan_stream", "domain", shodanNode.ID, domains)
saveSingleValues(conn, "shodan_stream", "domain", shodanNode.ID, domains)
}
models.SaveShodanNode("shodan_raw.json", shodanNode)
models.SaveShodanNode("raw_shodan.json", shodanNode)
node := models.BuildNode("shodan", "shodan_stream", shodanNode.ID)
models.SaveNode("nodes.json", node)
edge := models.BuildEdge("shodan", shodanNode.ID, node.ID)
models.SaveEdge(edge)
case <-stopChan:
wg.Done()
return
} else {
fmt.Println("is akamai", shodanNode.Data.IP)
}
case <-stopChan:
wg.Done()
return
}
}()
wg.Wait()
}
}
// helpers

View file

@ -1,53 +1 @@
package parser
import (
"os"
"github.com/sirupsen/logrus"
)
// read node received on kafka
// create a node in the node file
// save domains in another file with node ID
// parallel routine
// look through domain names and, if some already exist, create the
// edge file
const (
NodesFilename = "nodes.json"
EdgesFilename = "edges.json"
)
// func ParseEvent(domains []string) {
// nodeFile, err := ioutil.ReadFile(NodesFilename)
// if err != nil {
// logrus.Error(err)
// }
// nodeDatas := []models.Node{}
// if err := json.Unmarshal(nodeFile, &nodeDatas); err != nil {
// logrus.Error(err)
// }
// for _, node := range nodeDatas {
// SaveDomains(node.Data.Data.LeafCert.AllDomains)
// }
// // saveDomains()
// // go findDomainEdges()
// }
func SaveDomains(domains []string) {
f, err := os.OpenFile("domains.txt", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
logrus.Error(err)
}
defer f.Close()
for _, d := range domains {
if _, err := f.WriteString(d + ","); err != nil {
logrus.Error(err)
}
}
}

View file

@ -2,6 +2,7 @@ package utils
import "os"
// FileExists looks for a file, if it doesn't exist, it creates it.
func FileExists(filename string) error {
_, err := os.Stat(filename)
if os.IsNotExist(err) {