Matcher logic and IOCs
This work starts to build the matcher logic into Styx. For the moment, the goal is to define IOCs and load them when the Matcher plugin is activated. Still to implement: the matcher will run periodic queries against different types of nodes and index the results under its own Matcher Dgraph node. Then, by targeting a specific IOC, the user will be able to list the observations that have been made of it.
This commit is contained in:
parent
9547aeea3f
commit
3961e79062
|
@ -61,7 +61,7 @@ pasteNode: PasteNode
|
|||
}
|
||||
|
||||
nodes: [uid] .
|
||||
target: string .
|
||||
target: string @index(term) .
|
||||
|
||||
type Match {
|
||||
id: string
|
||||
|
|
197
matcher/main.go
197
matcher/main.go
|
@ -4,7 +4,6 @@ import (
|
|||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
@ -14,6 +13,7 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/dgraph-io/dgo/v2"
|
||||
"github.com/dgraph-io/dgo/v2/protos/api"
|
||||
"github.com/google/uuid"
|
||||
"github.com/sirupsen/logrus"
|
||||
"github.com/spf13/viper"
|
||||
|
@ -25,12 +25,14 @@ var (
|
|||
basepath = filepath.Dir(b)
|
||||
)
|
||||
|
||||
// Matcher is the structure for the matching logic within Styx.
type Matcher struct {
	Running     bool      // true while the matching loop is active (set by Run, checked by Stop)
	StopChan    chan bool // created by Stop to signal the loop; NOTE(review): Run never selects on it — confirm wiring
	StoppedChan chan bool // created by Run; presumably signalled once the loop has fully stopped — TODO confirm
}
|
||||
|
||||
// Initialize initialises the matcher based on the given configuration.
|
||||
func (m *Matcher) Initialize() bool {
|
||||
if !viper.GetBool("matcher.activated") {
|
||||
return false
|
||||
|
@ -40,6 +42,7 @@ func (m *Matcher) Initialize() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// Stop gracefully stops the matching logic.
|
||||
func (m *Matcher) Stop(wg *sync.WaitGroup) {
|
||||
if m.Running {
|
||||
m.StopChan = make(chan bool)
|
||||
|
@ -50,76 +53,154 @@ func (m *Matcher) Stop(wg *sync.WaitGroup) {
|
|||
}
|
||||
}
|
||||
|
||||
// Result is the result from the matching query. Probably going to change.
|
||||
type Result struct {
|
||||
Result []models.Node `json:"Node,omiempty"`
|
||||
}
|
||||
|
||||
func loadTargets(graphClient *dgo.Dgraph) error {
|
||||
path := basepath + "/data/"
|
||||
|
||||
sliceDomain, err := ioutil.ReadDir(path)
|
||||
if err != nil {
|
||||
logrus.Warn("matcher#ReadDir#domains", err)
|
||||
return err
|
||||
}
|
||||
|
||||
for _, file := range sliceDomain {
|
||||
f, err := os.OpenFile(path+file.Name(), 0, 0644)
|
||||
if err != nil {
|
||||
logrus.Warn("matcher#OpenFile#", err)
|
||||
return err
|
||||
}
|
||||
scanner := bufio.NewScanner(f)
|
||||
|
||||
for scanner.Scan() {
|
||||
uuid := uuid.New().String()
|
||||
t := time.Now()
|
||||
rfc3339time := t.Format(time.RFC3339)
|
||||
matcher := models.Match{
|
||||
ID: uuid,
|
||||
Timestamp: rfc3339time,
|
||||
Target: scanner.Text(),
|
||||
Nodes: []models.Node{},
|
||||
Type: "matcher",
|
||||
}
|
||||
ctx := context.Background()
|
||||
mu := &api.Mutation{
|
||||
CommitNow: true,
|
||||
}
|
||||
|
||||
pb, err := json.Marshal(matcher)
|
||||
if err != nil {
|
||||
logrus.Error(err)
|
||||
return err
|
||||
}
|
||||
|
||||
mu.SetJson = pb
|
||||
|
||||
_, err = graphClient.NewTxn().Mutate(ctx, mu)
|
||||
if err != nil {
|
||||
logrus.Error(err)
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
if err := scanner.Err(); err != nil {
|
||||
logrus.Error(err)
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Run runs the routine trying to find matches in the ingested data.
//
// NOTE(review): work in progress — large parts of an alternative
// implementation remain below as commented-out code; kept verbatim.
func (m *Matcher) Run(wg *sync.WaitGroup, graphClient *dgo.Dgraph) {
	// Load the IOC files into Match nodes first; a failure is only logged
	// and does not prevent the matching loop from starting.
	if err := loadTargets(graphClient); err != nil {
		logrus.Error(err)
	}
	// Created nodes based on the IOCs
	// Upsert those nodes if the values are found
	if !m.Running {
		m.StoppedChan = make(chan bool)
		wg.Add(1)
		// NOTE(review): this loop has no select on m.StopChan, no break,
		// and no sleep/ticker, so it spins forever and Stop() cannot
		// interrupt it — TODO confirm intended.
		for {
			// Full-text query: every node whose `full` predicate contains
			// all terms bound to $a, expanded down to the paste metadata.
			q := `query allofterms($a: string) {
				Node(func: allofterms(full, $a)) {
					uid
					id
					type
					ndata
					pasteNode {
						id
						type
						created
						modified
						fullPaste {
							full
							meta {
								full_url
								size
								expire
								title
								syntax
								user
								scrape_url
								date
								key
							}
						}
					}
				}
			}`
			// m.StoppedChan = make(chan bool)
			// wg.Add(1)
			// for {
			// q := `query allofterms($a: string) {
			// Node(func: allofterms(full, $a)) {
			// uid
			// }}`

			ctx := context.Background()
			txn := graphClient.NewTxn()
			// NOTE(review): defer inside an infinite loop — these Discard
			// calls only run when Run returns, so transactions accumulate.
			defer txn.Discard(ctx)
			// The searched term is hard-coded to "code" for now.
			res, err := txn.QueryWithVars(ctx, q, map[string]string{"$a": "code"})
			if err != nil {
				logrus.Warn(err)
			}
			// ctx := context.Background()
			// txn := graphClient.NewTxn()
			// defer txn.Discard(ctx)
			// res, err := txn.QueryWithVars(ctx, q, map[string]string{"$a": "code"})
			// if err != nil {
			// logrus.Warn(err)
			// }

			// NOTE(review): res is dereferenced even when err != nil above;
			// res may then be nil and res.Json would panic — TODO confirm.
			// The Unmarshal error is also ignored.
			n := Result{}
			json.Unmarshal([]byte(res.Json), &n)
			if len(n.Result) != 0 {
				// TODO: review time and id to be updated on new resulsts
				uuid := uuid.New().String()
				t := time.Now()
				rfc3339time := t.Format(time.RFC3339)
				matcher := models.Match{
					ID:        uuid,
					Timestamp: rfc3339time,
					Target:    "code",
				}
				// n := Result{}
				// json.Unmarshal([]byte(res.Json), &n)
				// uuid := uuid.New().String()
				// t := time.Now()
				// rfc3339time := t.Format(time.RFC3339)
				// matcher := models.Match{
				// ID: uuid,
				// Timestamp: rfc3339time,
				// Target: "code",
				// Nodes: []models.Node{},
				// Type: "matcher",
				// }
				// if len(n.Result) != 0 {
				// // TODO: review time and id to be updated on new resulsts

				// Attach every matched node to this Match entry.
				for _, res := range n.Result {
					matcher.Nodes = append(matcher.Nodes, res)
				}
				// for _, res := range n.Result {
				// if len(matcher.Nodes) == 0 {
				// fmt.Println("First node appending")
				// matcher.Nodes = append(matcher.Nodes, res)
				// continue
				// }

				fmt.Println("matcher:", matcher)
				// for _, node := range matcher.Nodes {
				// if res.UID != node.UID {
				// fmt.Println("not there, appending...")
				// matcher.Nodes = append(matcher.Nodes, res)

			}
			// }
			// }
			// }

			m.Running = true
		}
		// fmt.Println("matcher:", matcher)

		// query := fmt.Sprintf(`
		// query {
		// node as var(func: eq(id, %s))
		// }
		// `, matcher.ID)

		// nquads := fmt.Sprintf(`uid(node) <nodes> \"%v\"`, matcher.Nodes)

		// mu := &api.Mutation{
		// Cond: fmt.Sprintf(`@if(eq(id, %s))`, matcher.ID),
		// SetNquads: []byte(nquads),
		// }

		// req := &api.Request{
		// Query: query,
		// Mutations: []*api.Mutation{mu},
		// CommitNow: true,
		// }

		// ret, err := graphClient.NewTxn().Do(ctx, req)
		// fmt.Println("#####", ret)
		// if err != nil {
		// logrus.Fatal(err)
		// }
		// }

		// NOTE(review): unreachable — the for loop above never exits.
		m.Running = true
		// }
	}

}
|
||||
|
|
|
@ -136,9 +136,11 @@ func SaveEdge(edge *Edge) {
|
|||
// Match represents clustered results based on a target.
|
||||
type Match struct {
|
||||
ID string `json:"id,omiempty"`
|
||||
UID string `json:"uid,omiempty"`
|
||||
Nodes []Node `json:"nodes,omiempty"`
|
||||
Target string `json:"target,omiempty"`
|
||||
Timestamp string `json:"timestamp,omiempty"`
|
||||
Type string `json:"type,omiempty"`
|
||||
}
|
||||
|
||||
// CertStreamRaw is a wrapper around the stream function to unmarshall the
|
||||
|
|
|
@ -13,3 +13,13 @@ func FileExists(filename string) error {
|
|||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// StringInSlice checks for the presence of a string in a slice.
// It reports true as soon as an element equal to a is found, and
// false otherwise (including for a nil or empty slice).
func StringInSlice(a string, list []string) bool {
	for i := range list {
		if list[i] == a {
			return true
		}
	}
	return false
}
|
||||
|
|
Loading…
Reference in a new issue