Working version capturing CertStream, saving raw objects, nodes and edges.

There was a problem with my editor and some code was lost; this work
fixes that and adds new features. Current state of the code:
* capturing CertStream traffic
* saving raw certstream objects in a custom wrapper
* extracting fingerprints and domains from the certstream object
* saving fingerprints and domains nodes and edges between them.
* fingerprint is linked to the raw certstream object with an edge
* saving to files with customizable names (currently hard-coded in the code)

broker:
* kafka connection and test
* no sending of data to it for the moment
This commit is contained in:
Christopher Talib 2020-01-29 12:47:01 +01:00
parent b9f0ac688c
commit b9a2e73e33
3 changed files with 54 additions and 13 deletions

10
main.go
View file

@ -24,16 +24,18 @@ func main() {
case jq := <-stream:
if data, err := models.ExtractCertFromStream(jq); err == nil {
// rawNode := models.WrapCertStreamData(*data)
// models.SaveNode(rawNode)
rawNode := models.WrapCertStreamData(*data)
models.SaveRaw("raw_certstream.json", rawNode)
fingerprintNode := models.BuildNode("certstream", "fingerprint", data.Data.LeafCert.Fingerprint)
models.SaveNode(fingerprintNode)
models.SaveNode("nodes.json", fingerprintNode)
models.BuildEdge("certstream", rawNode.ID, fingerprintNode.ID)
fmt.Println(fingerprintNode)
allDomains := data.Data.LeafCert.AllDomains
var edge *models.Edge
for _, domain := range allDomains {
domainNode := models.BuildNode("certstream", "domain", domain)
models.SaveNode(domainNode)
models.SaveNode("nodes.json", domainNode)
edge = models.BuildEdge("certstream", fingerprintNode.ID, domainNode.ID)
fmt.Println(edge)
models.SaveEdge(edge)

View file

@ -21,7 +21,18 @@ func BuildNode(flag string, dataType string, data string) *Node {
Created: rfc3339time,
Modified: rfc3339time,
}
}
// WrapCertStreamData wraps a raw CertStream payload in a CertStreamWrapper,
// stamping it with a fresh "certstream--" UUID identifier and identical
// RFC 3339 Created/Modified timestamps.
func WrapCertStreamData(data CertStreamStruct) *CertStreamWrapper {
	now := time.Now().Format(time.RFC3339)
	return &CertStreamWrapper{
		ID:       "certstream--" + uuid.New().String(),
		Type:     "certstream_raw",
		Data:     data,
		Created:  now,
		Modified: now,
	}
}
// BuildEdge builds an edge between two nodes with a given source type.
@ -37,13 +48,41 @@ func BuildEdge(source string, nodeOneUUID string, nodeTwoUUID string) *Edge {
}
}
// SaveNode saves a node to a file.
func SaveNode(node *Node) {
err := utils.FileExists("nodes.json")
func SaveRaw(filename string, data *CertStreamWrapper) {
err := utils.FileExists(filename)
if err != nil {
logrus.Error(err)
}
nodeFile, err := ioutil.ReadFile("nodes.json")
nodeFile, err := ioutil.ReadFile(filename)
if err != nil {
logrus.Error(err)
}
rawDatas := []CertStreamWrapper{}
if err := json.Unmarshal(nodeFile, &rawDatas); err != nil {
logrus.Error(err)
}
rawDatas = append(rawDatas, *data)
rawBytes, err := json.Marshal(rawDatas)
if err != nil {
logrus.Error(err)
}
err = ioutil.WriteFile(filename, rawBytes, 0644)
if err != nil {
logrus.Error(err)
}
}
// SaveNode saves a node to a file.
func SaveNode(filename string, node *Node) {
err := utils.FileExists(filename)
if err != nil {
logrus.Error(err)
}
nodeFile, err := ioutil.ReadFile(filename)
if err != nil {
logrus.Error(err)
}
@ -60,7 +99,7 @@ func SaveNode(node *Node) {
logrus.Error(err)
}
err = ioutil.WriteFile("nodes.json", nodeBytes, 0644)
err = ioutil.WriteFile(filename, nodeBytes, 0644)
if err != nil {
logrus.Error(err)
}

View file

@ -55,20 +55,20 @@ func QueryPastes() ([]PasteMeta, error) {
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
logrus.Error("Could not do requeest due to %v", err)
logrus.Error("Could not do request due to", err)
return nil, err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
logrus.Error("Could not fetch response due to %v", err)
logrus.Error("Could not fetch response due to", err)
return nil, err
}
var pastes []PasteMeta
if err := json.Unmarshal(body, &pastes); err != nil {
logrus.Error("Could not decode response due to %v, body %s", err, string(body))
logrus.Error("Could not decode response due to ", err, " body", string(body))
return nil, err
}
@ -80,7 +80,7 @@ func FetchPaste(paste PasteMeta) (string, error) {
url := paste.ScrapeURL
req, err := http.NewRequest("GET", url, nil)
if err != nil {
log.Printf("Could build request %v due to %v", req, err)
logrus.Error("Could build request", req, " due to", err)
return "", err
}