Major version update

This new work implements the server and the loader as two separate
binaries, allowing the server to keep running while the IOC list is updated.

It also updates the documentation to reflect these changes.
Christopher Talib 2020-08-24 16:25:49 +02:00
parent 65ad547860
commit 84e4937f85
9 changed files with 228 additions and 71 deletions

View File

@@ -39,3 +39,9 @@
* Upsert is not optimal
* What do we do with the data so that analysts can exploit it?
* Should we store matched data in an SQL-like db?
## TDH
* patterns: the stream goes to an IP address and not a domain (so the HTTP hostname won't be interesting) => look into any canonical name resolving to an NXDOMAIN (TLS connection made directly to an IP address, no DNS) => the hostname is a DGA (there should be a way to identify this visually)
* any canonical name that is an IP address and not a domain name (see the sketch below)
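
As a rough sketch of that last pattern (not part of this commit; the helper name is made up), a check like the following would flag server names that are raw IP literals rather than domain names:

```go
package main

import (
	"fmt"
	"net"
)

// isIPLiteral reports whether a TLS server name is a raw IP address
// rather than a domain name, i.e. a connection made without DNS.
func isIPLiteral(serverName string) bool {
	return net.ParseIP(serverName) != nil
}

func main() {
	fmt.Println(isIPLiteral("203.0.113.7")) // true: IP literal, no DNS lookup
	fmt.Println(isIPLiteral("example.com")) // false: regular domain name
}
```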

View File

@@ -36,15 +36,23 @@ docker run --rm -it -p 8080:8080 -p 9080:9080 -p 8000:8000 -v ~/dgraph:/dgraph d
```sh
go get -u gitlab.dcso.lolcat/LABS/styx
cd $GOPATH/src/gitlab.dcso.lolcat/LABS/styx
go build
go build gitlab.dcso.lolcat/LABS/styx/cmd/styxd
docker-compose up -d # or the other docker command
./styx
./styxd
# build the loader helper binary
go build gitlab.dcso.lolcat/LABS/styx/cmd/iocloader
# update the IOC list while the program is already running
./iocloader
```
*Note*: if you have issues with Docker Compose, make sure everything runs on the
same subnet. Check [this](https://serverfault.com/questions/916941/configuring-docker-to-not-use-the-172-17-0-0-range) for inspiration.
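A minimal sketch of what that could look like in the `docker-compose.yml` (the subnet range here is just an example, pick one that fits your network):

```yaml
networks:
  default:
    ipam:
      driver: default
      config:
        - subnet: 172.28.0.0/16
```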
### Example configuration:
*Note*: For Pastebin, you will have to authorise your IP address when you log in through the web interface.
```
certstream:
activated: true
@@ -62,12 +70,12 @@ shodan:
## Dgraph Interface
You can connect to the Dgraph interface at this default address: localhost:8000.
You can connect to the Dgraph interface at this default address: http://localhost:8000.
There you can run GraphQL+- queries; for example, here is how to query a node.
```graphql
query {
Node(func: eq(id, "node--cde8decb-0a8b-4d19-bd77-c2decb6dab9c")) {
Node(func: eq(uid, 0x23)) {
uid
ndata
modified
@@ -106,17 +114,6 @@ query {
notBefore
notAfter
}
shodanNode {
uid
hostData {
product
ip
version
hostnames
port
html
}
}
}
}
```
@@ -158,7 +155,7 @@ query {
Dgraph also supports full text search, so you can query things like:
```
```graphql
query {
Node(func: allofterms(full, "code")) {
uid
@@ -190,8 +187,7 @@ each time you query something.
### Meta
Edges are not implemented yet. They will prove an existing relation between two
nodes of different origin.
Edges express a relation between two nodes of different origins; they are a native Dgraph feature.
Node --[Edge]-- Node
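
For illustration, an edge in Dgraph is just a predicate pointing from one node to another, so a query following such an edge would look like this (a sketch only; the `linked` predicate is hypothetical and does not exist in the schema yet):

```graphql
query {
  Node(func: eq(uid, 0x23)) {
    uid
    linked {  # hypothetical edge predicate between two nodes
      uid
      ndata
    }
  }
}
```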

View File

@@ -12,13 +12,33 @@ import (
"gitlab.dcso.lolcat/LABS/styx/models"
)
var (
Topic string
Partition int
Protocol string
Host string
Port string
)
func init() {
viper.SetConfigName("config")
viper.SetConfigType("yaml")
viper.AddConfigPath(".")
err := viper.ReadInConfig()
if err != nil {
panic(err)
}
Topic = viper.GetString("kafka.topic")
Partition = viper.GetInt("kafka.partition")
Protocol = viper.GetString("kafka.protocol")
Host = viper.GetString("kafka.host")
Port = viper.GetString("kafka.port")
}
// SetUpKafkaConnecter builds the connection to Kafka with a timeout.
func SetUpKafkaConnecter() (*kafka.Conn, error) {
Topic := viper.GetString("kafka.topic")
Partition := viper.GetInt("kafka.partition")
Protocol := viper.GetString("kafka.protocol")
Host := viper.GetString("kafka.host")
Port := viper.GetString("kafka.port")
conn, err := kafka.DialLeader(context.Background(), Protocol, Host+":"+Port, Topic, Partition)
if err != nil {
return nil, err
@@ -28,11 +48,31 @@ func SetUpKafkaConnecter() (*kafka.Conn, error) {
return conn, nil
}
// CreateTopic creates a reader on the given target topic in the Kafka broker.
// Don't forget to close the reader when you are done with it.
func CreateTopic(conn *kafka.Conn, target string) *kafka.Reader {
r := kafka.NewReader(kafka.ReaderConfig{
Brokers: []string{Host + ":" + Port},
Topic: target,
Partition: Partition,
MinBytes: 10e3,
MaxBytes: 10e6,
})
r.SetOffset(42)
return r
}
// CloseReader closes the Kafka reader.
func CloseReader(r kafka.Reader) {
r.Close()
}
// SendEventToKafka sends a node to the broker.
func SendEventToKafka(conn *kafka.Conn, node models.Node) {
conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
packaged, _ := json.Marshal(node)
_, err := conn.WriteMessages(kafka.Message{Value: packaged})
_, err := conn.WriteMessages(kafka.Message{Value: packaged, Topic: node.Type})
if err != nil {
logrus.Error(err)
}
@@ -45,10 +85,6 @@ func ReadEventFromKafka() {
return
}
Topic := viper.GetString("kafka.topic")
Partition := viper.GetInt("kafka.partition")
Host := viper.GetString("kafka.host")
Port := viper.GetString("kafka.port")
r := kafka.NewReader(kafka.ReaderConfig{
Brokers: []string{Host + ":" + Port},
Topic: Topic,
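
A minimal usage sketch for this broker package, assuming the `config.yaml` read in `init` is present; the node value and topic name are made up for illustration:

```go
package main

import (
	"context"

	"github.com/sirupsen/logrus"
	"gitlab.dcso.lolcat/LABS/styx/broker"
	"gitlab.dcso.lolcat/LABS/styx/models"
)

func main() {
	// Connect to Kafka; protocol, host, port and topic come from config.yaml.
	conn, err := broker.SetUpKafkaConnecter()
	if err != nil {
		logrus.Fatal(err)
	}

	// Publish a node. The message topic is taken from node.Type, and the
	// connection is dialed for kafka.topic, so the two should match.
	broker.SendEventToKafka(conn, models.Node{Type: "certstream"})

	// Consume from a per-target topic with a dedicated reader.
	r := broker.CreateTopic(conn, "certstream")
	defer r.Close()
	msg, err := r.ReadMessage(context.Background())
	if err != nil {
		logrus.Fatal(err)
	}
	logrus.Info(string(msg.Value))
}
```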

117
cmd/iocloader/main.go Normal file
View File

@@ -0,0 +1,117 @@
package main
import (
"bufio"
"context"
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"runtime"
"time"
"github.com/dgraph-io/dgo/v2"
"github.com/dgraph-io/dgo/v2/protos/api"
"github.com/google/uuid"
"github.com/sirupsen/logrus"
"gitlab.dcso.lolcat/LABS/styx/graph"
"gitlab.dcso.lolcat/LABS/styx/models"
)
var (
_, b, _, _ = runtime.Caller(0)
basepath = filepath.Dir(b)
)
// Result is the result from the matching query. Probably going to change.
type Result struct {
Result []models.Node `json:"Node,omitempty"`
}
func loadTargets(graphClient *dgo.Dgraph) error {
path := basepath + "/../../matcher/data/"
sliceDomain, err := ioutil.ReadDir(path)
if err != nil {
logrus.Warn("matcher#ReadDir#domains", err)
return err
}
for _, file := range sliceDomain {
logrus.Info("loading: ", file.Name(), " please wait...")
f, err := os.OpenFile(path+file.Name(), 0, 0644)
if err != nil {
logrus.Warn("matcher#OpenFile#", err)
return err
}
scanner := bufio.NewScanner(f)
for scanner.Scan() {
uuid := uuid.New().String()
t := time.Now()
rfc3339time := t.Format(time.RFC3339)
matcher := models.Match{
ID: uuid,
Timestamp: rfc3339time,
Target: scanner.Text(),
Nodes: []models.Node{},
Type: "matcher",
}
ctx := context.Background()
query := `query eq($a: string){
Node(func: eq(target, $a)){
uid
}
}`
txn := graphClient.NewTxn()
ret, err := txn.QueryWithVars(ctx, query, map[string]string{"$a": scanner.Text()})
if err != nil {
logrus.Warn(err)
}
n := Result{}
json.Unmarshal([]byte(ret.Json), &n)
// Check if the target already exists; if so, skip it instead of
// inserting the data again
if len(n.Result) == 0 {
logrus.Info("new matcher, charging...")
mu := &api.Mutation{
CommitNow: true,
}
pb, err := json.Marshal(matcher)
if err != nil {
logrus.Error(err)
return err
}
mu.SetJson = pb
txn = graphClient.NewTxn()
defer txn.Discard(ctx)
_, err = txn.Mutate(ctx, mu)
if err != nil {
logrus.Error(err)
return err
}
}
}
}
return nil
}
func main() {
logrus.Info("Initializing Dgraph for the importer...")
dgraphClient, err := graph.ConnectToDgraph(false)
if err != nil || dgraphClient == nil {
logrus.WithField("err", err).Error("error initialising the graph database")
}
logrus.Info("Loading data...")
if err := loadTargets(dgraphClient); err != nil {
logrus.WithField("err", err).Error("error loading targets")
}
logrus.Info("Done!")
}

View File

@@ -6,7 +6,6 @@ import (
"github.com/sirupsen/logrus"
"github.com/spf13/viper"
"gitlab.dcso.lolcat/LABS/styx/broker"
"gitlab.dcso.lolcat/LABS/styx/graph"
"gitlab.dcso.lolcat/LABS/styx/matcher"
"gitlab.dcso.lolcat/LABS/styx/plugins"
@@ -32,15 +31,15 @@ func main() {
os.Setenv("SHODAN_KEY", viper.GetString("shodan.key"))
logrus.Info("Starting to get data from the Internet...")
logrus.Info("Initializing Kafka...")
_, err = broker.SetUpKafkaConnecter()
if err != nil {
logrus.WithField("err", err).Error("error initialising kafka")
}
logrus.Info("done")
// logrus.Info("Initializing Kafka...")
// _, err = broker.SetUpKafkaConnecter()
// if err != nil {
// logrus.WithField("err", err).Error("error initialising kafka")
// }
// logrus.Info("done")
logrus.Info("Initializing Dgraph...")
dgraphClient, err := graph.ConnectToDgraph()
dgraphClient, err := graph.ConnectToDgraph(true)
if err != nil {
logrus.WithField("err", err).Error("error initialising the graph database")
}

View File

@@ -23,34 +23,34 @@ services:
ports:
- 8000:8000
command: dgraph-ratel
zoo1:
image: zookeeper:3.4.9
hostname: zoo1
ports:
- "2181:2181"
environment:
ZOO_MY_ID: 1
ZOO_PORT: 2181
ZOO_SERVERS: server.1=zoo1:2888:3888
volumes:
- ./zk-single-kafka-single/zoo1/data:/data
- ./zk-single-kafka-single/zoo1/datalog:/datalog
# zoo1:
# image: zookeeper:3.4.9
# hostname: zoo1
# ports:
# - "2181:2181"
# environment:
# ZOO_MY_ID: 1
# ZOO_PORT: 2181
# ZOO_SERVERS: server.1=zoo1:2888:3888
# volumes:
# - ./zk-single-kafka-single/zoo1/data:/data
# - ./zk-single-kafka-single/zoo1/datalog:/datalog
# https://github.com/simplesteph/kafka-stack-docker-compose/blob/master/zk-single-kafka-single.yml
kafka1:
image: confluentinc/cp-kafka:5.5.0
hostname: kafka1
ports:
- "9092:9092"
environment:
KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
KAFKA_BROKER_ID: 1
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
volumes:
- ./zk-single-kafka-single/kafka1/data:/var/lib/kafka/data
depends_on:
- zoo1
# kafka1:
# image: confluentinc/cp-kafka:5.5.0
# hostname: kafka1
# ports:
# - "9092:9092"
# environment:
# KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
# KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
# KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
# KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
# KAFKA_BROKER_ID: 1
# KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
# KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
# volumes:
# - ./zk-single-kafka-single/kafka1/data:/var/lib/kafka/data
# depends_on:
# - zoo1

View File

@@ -8,7 +8,7 @@ import (
"google.golang.org/grpc"
)
func ConnectToDgraph() (*dgo.Dgraph, error) {
func ConnectToDgraph(setupSchema bool) (*dgo.Dgraph, error) {
conn, err := grpc.Dial("localhost:9080", grpc.WithInsecure())
if err != nil {
return nil, err
@@ -16,9 +16,11 @@ func ConnectToDgraph() (*dgo.Dgraph, error) {
dgraphClient := dgo.NewDgraphClient(api.NewDgraphClient(conn))
err = setupDgraphSchema(dgraphClient)
if err != nil {
return nil, err
if setupSchema {
err = setupDgraphSchema(dgraphClient)
if err != nil {
return nil, err
}
}
return dgraphClient, nil

View File

@@ -1,4 +1,4 @@
package main
package styx
import (
"os"

View File

@@ -137,6 +137,7 @@ uid
logrus.Error(err)
return nil, err
}
}
}
@@ -151,6 +152,7 @@ func (m *Matcher) Run(wg *sync.WaitGroup, graphClient *dgo.Dgraph) {
logrus.Error(err)
}
logrus.Info("finished loading matcher targets")
fmt.Println(targets)
if !m.Running {
m.StoppedChan = make(chan bool)
@@ -350,7 +352,6 @@ func runShodanMatcher(target string, graphClient *dgo.Dgraph) {
}
}
`
ctx := context.Background()
txn := graphClient.NewTxn()
res, err := txn.QueryWithVars(ctx, q, map[string]string{"$a": target})