Matcher logic and IOCs

This work starts building the matcher logic into styx. For the moment,
the goal is to define IOCs and load them when the Matcher plugin is
activated.

To implement: the matcher will then run periodic queries against the
different types of nodes and index the results under its own Matcher
Dgraph node, so that by targeting a specific IOC, the user can list the
observations that have been made about it.
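
As a rough sketch only (the helper name and query shape are illustrative,
not part of this commit), listing the observations attached to one IOC
could eventually look something like this, relying on the term index added
on the target predicate below:

import (
	"context"

	"github.com/dgraph-io/dgo/v2"
)

// listObservations is a hypothetical helper: it fetches the Match entries whose
// target matches the given IOC and returns the raw JSON of the attached nodes.
func listObservations(ctx context.Context, graphClient *dgo.Dgraph, ioc string) (string, error) {
	q := `query observations($ioc: string) {
		observations(func: allofterms(target, $ioc)) {
			uid
			id
			timestamp
			nodes {
				uid
				type
			}
		}
	}`
	txn := graphClient.NewReadOnlyTxn()
	defer txn.Discard(ctx)
	resp, err := txn.QueryWithVars(ctx, q, map[string]string{"$ioc": ioc})
	if err != nil {
		return "", err
	}
	return string(resp.Json), nil
}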
This commit is contained in:
Christopher Talib 2020-05-29 11:32:55 +02:00
parent 9547aeea3f
commit 3961e79062
4 changed files with 152 additions and 59 deletions

View file

@@ -61,7 +61,7 @@ pasteNode: PasteNode
 }
 nodes: [uid] .
-target: string .
+target: string @index(term) .
 type Match {
 id: string

View file

@@ -4,7 +4,6 @@ import (
 "bufio"
 "context"
 "encoding/json"
-"fmt"
 "io/ioutil"
 "os"
 "path/filepath"
@@ -14,6 +13,7 @@ import (
 "time"
 "github.com/dgraph-io/dgo/v2"
+"github.com/dgraph-io/dgo/v2/protos/api"
 "github.com/google/uuid"
 "github.com/sirupsen/logrus"
 "github.com/spf13/viper"
@@ -25,12 +25,14 @@ var (
 basepath = filepath.Dir(b)
 )
+// Matcher is the structure for the matching logic within Styx.
 type Matcher struct {
 Running bool
 StopChan chan bool
 StoppedChan chan bool
 }
+// Initialize initialises the matcher based on the given configuration.
 func (m *Matcher) Initialize() bool {
 if !viper.GetBool("matcher.activated") {
 return false
@@ -40,6 +42,7 @@ func (m *Matcher) Initialize() bool {
 return true
 }
+// Stop gracefully stops the matching logic.
 func (m *Matcher) Stop(wg *sync.WaitGroup) {
 if m.Running {
 m.StopChan = make(chan bool)
@@ -50,76 +53,154 @@ func (m *Matcher) Stop(wg *sync.WaitGroup) {
 }
 }
+// Result is the result from the matching query. Probably going to change.
 type Result struct {
 Result []models.Node `json:"Node,omiempty"`
 }
+func loadTargets(graphClient *dgo.Dgraph) error {
+path := basepath + "/data/"
+sliceDomain, err := ioutil.ReadDir(path)
+if err != nil {
+logrus.Warn("matcher#ReadDir#domains", err)
+return err
+}
+for _, file := range sliceDomain {
+f, err := os.OpenFile(path+file.Name(), 0, 0644)
+if err != nil {
+logrus.Warn("matcher#OpenFile#", err)
+return err
+}
+scanner := bufio.NewScanner(f)
+for scanner.Scan() {
+uuid := uuid.New().String()
+t := time.Now()
+rfc3339time := t.Format(time.RFC3339)
+matcher := models.Match{
+ID: uuid,
+Timestamp: rfc3339time,
+Target: scanner.Text(),
+Nodes: []models.Node{},
+Type: "matcher",
+}
+ctx := context.Background()
+mu := &api.Mutation{
+CommitNow: true,
+}
+pb, err := json.Marshal(matcher)
+if err != nil {
+logrus.Error(err)
+return err
+}
+mu.SetJson = pb
+_, err = graphClient.NewTxn().Mutate(ctx, mu)
+if err != nil {
+logrus.Error(err)
+return err
+}
+}
+if err := scanner.Err(); err != nil {
+logrus.Error(err)
+return err
+}
+}
+return nil
+}
 // Run runs the routine trying to find matches in the ingested data.
 func (m *Matcher) Run(wg *sync.WaitGroup, graphClient *dgo.Dgraph) {
+if err := loadTargets(graphClient); err != nil {
+logrus.Error(err)
+}
+// Created nodes based on the IOCs
+// Upsert those nodes if the values are found
 if !m.Running {
-m.StoppedChan = make(chan bool)
-wg.Add(1)
-for {
-q := `query allofterms($a: string) {
-Node(func: allofterms(full, $a)) {
-uid
-id
-type
-ndata
-pasteNode {
-id
-type
-created
-modified
-fullPaste {
-full
-meta {
-full_url
-size
-expire
-title
-syntax
-user
-scrape_url
-date
-key
-}
-}
-}
-}
-}`
-ctx := context.Background()
-txn := graphClient.NewTxn()
-defer txn.Discard(ctx)
-res, err := txn.QueryWithVars(ctx, q, map[string]string{"$a": "code"})
-if err != nil {
-logrus.Warn(err)
-}
-n := Result{}
-json.Unmarshal([]byte(res.Json), &n)
-if len(n.Result) != 0 {
-// TODO: review time and id to be updated on new resulsts
-uuid := uuid.New().String()
-t := time.Now()
-rfc3339time := t.Format(time.RFC3339)
-matcher := models.Match{
-ID: uuid,
-Timestamp: rfc3339time,
-Target: "code",
-}
-for _, res := range n.Result {
-matcher.Nodes = append(matcher.Nodes, res)
-}
-fmt.Println("matcher:", matcher)
-}
-m.Running = true
-}
+// m.StoppedChan = make(chan bool)
+// wg.Add(1)
+// for {
+// q := `query allofterms($a: string) {
+// Node(func: allofterms(full, $a)) {
+// uid
+// }}`
+// ctx := context.Background()
+// txn := graphClient.NewTxn()
+// defer txn.Discard(ctx)
+// res, err := txn.QueryWithVars(ctx, q, map[string]string{"$a": "code"})
+// if err != nil {
+// logrus.Warn(err)
+// }
+// n := Result{}
+// json.Unmarshal([]byte(res.Json), &n)
+// uuid := uuid.New().String()
+// t := time.Now()
+// rfc3339time := t.Format(time.RFC3339)
+// matcher := models.Match{
+// ID: uuid,
+// Timestamp: rfc3339time,
+// Target: "code",
+// Nodes: []models.Node{},
+// Type: "matcher",
+// }
+// if len(n.Result) != 0 {
+// // TODO: review time and id to be updated on new resulsts
+// for _, res := range n.Result {
+// if len(matcher.Nodes) == 0 {
+// fmt.Println("First node appending")
+// matcher.Nodes = append(matcher.Nodes, res)
+// continue
+// }
+// for _, node := range matcher.Nodes {
+// if res.UID != node.UID {
+// fmt.Println("not there, appending...")
+// matcher.Nodes = append(matcher.Nodes, res)
+// }
+// }
+// }
+// fmt.Println("matcher:", matcher)
+
+// query := fmt.Sprintf(`
+// query {
+// node as var(func: eq(id, %s))
+// }
+// `, matcher.ID)
+// nquads := fmt.Sprintf(`uid(node) <nodes> \"%v\"`, matcher.Nodes)
+// mu := &api.Mutation{
+// Cond: fmt.Sprintf(`@if(eq(id, %s))`, matcher.ID),
+// SetNquads: []byte(nquads),
+// }
+// req := &api.Request{
+// Query: query,
+// Mutations: []*api.Mutation{mu},
+// CommitNow: true,
+// }
+// ret, err := graphClient.NewTxn().Do(ctx, req)
+// fmt.Println("#####", ret)
+// if err != nil {
+// logrus.Fatal(err)
+// }
+// }
+m.Running = true
+// }
 }
 }
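
The commented-out block above is heading toward a Dgraph upsert that attaches
newly matched nodes to an existing Match entry. Purely as an illustrative
sketch (helper name, condition, and N-Quads are assumptions, and it assumes
the id predicate is indexed for eq lookups), such a conditional upsert could
be written with the dgo v2 request API roughly like this:

import (
	"context"
	"fmt"

	"github.com/dgraph-io/dgo/v2"
	"github.com/dgraph-io/dgo/v2/protos/api"
)

// linkNodeToMatch is a hypothetical helper: it only adds the <nodes> edge when
// a Match node with the given id already exists.
func linkNodeToMatch(ctx context.Context, graphClient *dgo.Dgraph, matchID, nodeUID string) error {
	query := fmt.Sprintf(`query {
		match as var(func: eq(id, %q))
	}`, matchID)
	mu := &api.Mutation{
		// Only run the mutation if the query variable found an existing Match.
		Cond:      `@if(gt(len(match), 0))`,
		SetNquads: []byte(fmt.Sprintf("uid(match) <nodes> <%s> .", nodeUID)),
	}
	req := &api.Request{
		Query:     query,
		Mutations: []*api.Mutation{mu},
		CommitNow: true,
	}
	_, err := graphClient.NewTxn().Do(ctx, req)
	return err
}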

View file

@@ -136,9 +136,11 @@ func SaveEdge(edge *Edge) {
 // Match represents clustered results based on a target.
 type Match struct {
 ID string `json:"id,omiempty"`
+UID string `json:"uid,omiempty"`
 Nodes []Node `json:"nodes,omiempty"`
 Target string `json:"target,omiempty"`
 Timestamp string `json:"timestamp,omiempty"`
+Type string `json:"type,omiempty"`
 }
 // CertStreamRaw is a wrapper around the stream function to unmarshall the
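
The new UID field gives Match values room to carry their Dgraph-assigned uid.
As a minimal sketch (the helper name is an assumption, and the models, json,
dgo and api imports are the ones matcher.go already uses), a JSON mutation that
includes a populated uid updates that node in place rather than creating a new
one:

// updateMatch is a hypothetical helper: m.UID must already hold the node's
// uid (e.g. "0x2a") for Dgraph to treat this as an in-place update.
func updateMatch(ctx context.Context, graphClient *dgo.Dgraph, m models.Match) error {
	pb, err := json.Marshal(m)
	if err != nil {
		return err
	}
	mu := &api.Mutation{SetJson: pb, CommitNow: true}
	_, err = graphClient.NewTxn().Mutate(ctx, mu)
	return err
}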

View file

@@ -13,3 +13,13 @@ func FileExists(filename string) error {
 }
 return nil
 }
+// StringInSlice checks for the presence of a string in a slice.
+func StringInSlice(a string, list []string) bool {
+for _, b := range list {
+if b == a {
+return true
+}
+}
+return false
+}