styx/models/pastebin.go
Christopher Talib b9a2e73e33 Working version capturing CertStream saving raw, nodes and edges.
There was a problem with my editor and some code was lost; this work
fixes that and adds new features. Current state of the code:
* capturing CertStream traffic
* saving raw certstream objects in a custom wrapper
* extracting fingerprints and domains from the certstream object
* saving fingerprints and domains nodes and edges between them.
* fingerprint is linked to the raw certstream object with an edge
* saving to files with customizable names (raw in code)

broker:
* kafka connection and test
* no sending of data to it for the moment
2020-01-29 12:47:01 +01:00

102 lines
2.5 KiB
Go

package models
import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"time"

	"github.com/sirupsen/logrus"
)
// PasteMeta is a set of descriptive information on a paste.
// Field names mirror the JSON returned by the pastebin scraping API
// (https://pastebin.com/api_scraping.php); all values arrive as strings.
type PasteMeta struct {
// ScrapeURL is the API endpoint that returns this paste's raw content.
ScrapeURL string `json:"scrape_url"`
// FullURL is the human-facing pastebin.com URL of the paste.
FullURL string `json:"full_url"`
// Date is the creation time; presumably a Unix timestamp string — TODO confirm against API docs.
Date string `json:"date"`
// Key is the paste's unique identifier.
Key string `json:"key"`
// Size is the paste size; presumably bytes, encoded as a string by the API.
Size string `json:"size"`
// Expire is the expiry time; "0" typically means never — TODO confirm.
Expire string `json:"expire"`
// Title is the user-supplied paste title (may be empty).
Title string `json:"title"`
// Syntax is the syntax-highlighting language tag (e.g. "text", "go").
Syntax string `json:"syntax"`
// User is the author's username (empty for guest pastes).
User string `json:"user"`
}
// PasteFull extends PasteMeta by the actual content.
// It repeats PasteMeta's fields (same JSON tags) and adds the paste body
// plus a locally-assigned capture timestamp.
type PasteFull struct {
ScrapeURL string `json:"scrape_url"`
FullURL string `json:"full_url"`
Date string `json:"date"`
Key string `json:"key"`
Size string `json:"size"`
Expire string `json:"expire"`
Title string `json:"title"`
Syntax string `json:"syntax"`
User string `json:"user"`
// Data is the raw paste content fetched from ScrapeURL.
Data string `json:"data"`
// RFC3339 is a timestamp serialized under the "time" JSON key;
// the name suggests RFC 3339 format — set by the caller, not the API.
RFC3339 string `json:"time"`
}
// Meta Information: https://pastebin.com/api_scraping.php
// Content: http://pastebin.com/api_scrape_item.php
// QueryPastes returns metadata for the last 100 public pastes from the
// pastebin scraping API (https://pastebin.com/api_scraping.php).
// It requires the caller's IP to be whitelisted by pastebin; on any
// transport, HTTP, or decode failure it logs the problem and returns a
// non-nil error with a nil slice.
func QueryPastes() ([]PasteMeta, error) {
server := "pastebin.com"
req, err := http.NewRequest("GET", fmt.Sprintf("https://%s/api_scraping.php?limit=100", server), nil)
if err != nil {
// Error, not Fatal: logrus.Fatal calls os.Exit(1), which made the
// return below unreachable and killed the whole process for a
// recoverable failure.
logrus.Error("Could not build http request", err)
return nil, err
}
req.Header.Set("Content-Type", "application/json")
// A timeout keeps a hung or slow server from blocking the caller forever.
client := &http.Client{Timeout: 30 * time.Second}
resp, err := client.Do(req)
if err != nil {
logrus.Error("Could not do request due to", err)
return nil, err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
logrus.Error("Could not fetch response due to", err)
return nil, err
}
// Non-200 responses (e.g. the "IP not whitelisted" HTML page) are not
// JSON; surface them as an explicit error instead of a decode failure.
if resp.StatusCode != http.StatusOK {
err := fmt.Errorf("unexpected status %d querying pastes: %s", resp.StatusCode, string(body))
logrus.Error(err)
return nil, err
}
var pastes []PasteMeta
if err := json.Unmarshal(body, &pastes); err != nil {
logrus.Error("Could not decode response due to ", err, " body", string(body))
return nil, err
}
// Success: return a literal nil rather than the stale err variable.
return pastes, nil
}
// FetchPaste fetches paste contents via the web API.
// It GETs paste.ScrapeURL and returns the response body as a string.
// On transport failure, a non-2xx status, or a read error it returns ""
// and a non-nil error.
func FetchPaste(paste PasteMeta) (string, error) {
url := paste.ScrapeURL
req, err := http.NewRequest("GET", url, nil)
if err != nil {
// Fixed garbled message ("Could build request" -> "Could not build request").
logrus.Error("Could not build request", req, " due to", err)
return "", err
}
// A timeout keeps a hung or slow server from blocking the caller forever.
client := &http.Client{Timeout: 30 * time.Second}
resp, err := client.Do(req)
if err != nil {
log.Printf("Could not do request %v due to %v", req, err)
return "", err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.Printf("Could not read response body %v due to %v", resp.Body, err)
return "", err
}
// Without this check a 403/404 error page would be returned as if it
// were the paste's content.
if resp.StatusCode != http.StatusOK {
return "", fmt.Errorf("unexpected status %d fetching %s", resp.StatusCode, url)
}
return string(body), nil
}