2020-01-28 16:00:56 +01:00
|
|
|
package models
|
|
|
|
|
2020-01-29 11:05:05 +01:00
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io/ioutil"
|
|
|
|
"log"
|
|
|
|
"net/http"
|
|
|
|
|
|
|
|
"github.com/sirupsen/logrus"
|
|
|
|
)
|
|
|
|
|
2020-01-28 16:00:56 +01:00
|
|
|
// PasteMeta is a set of descriptive information on a paste, as returned
// by the pastebin scraping API (see the comment block below referencing
// https://pastebin.com/api_scraping.php). All fields are strings because
// the API serializes every value as text.
type PasteMeta struct {
	ScrapeURL string `json:"scrape_url"` // API endpoint for fetching the raw paste content
	FullURL   string `json:"full_url"`   // human-facing URL of the paste
	Date      string `json:"date"`       // creation time — presumably a Unix timestamp string; confirm against the API
	Key       string `json:"key"`        // unique paste identifier
	Size      string `json:"size"`       // content size, serialized as a string
	Expire    string `json:"expire"`     // expiry time, serialized as a string
	Title     string `json:"title"`
	Syntax    string `json:"syntax"` // syntax-highlighting hint chosen by the author
	User      string `json:"user"`
}
|
|
|
|
|
|
|
|
// PasteFull extends PasteMeta by the actual content. It repeats the
// metadata fields (rather than embedding PasteMeta) so that existing
// keyed struct literals and the flat JSON layout stay unchanged, and
// adds the paste body plus an RFC 3339 rendering of the timestamp.
type PasteFull struct {
	ScrapeURL string `json:"scrape_url"` // API endpoint for fetching the raw paste content
	FullURL   string `json:"full_url"`   // human-facing URL of the paste
	Date      string `json:"date"`       // creation time — presumably a Unix timestamp string; confirm against the API
	Key       string `json:"key"`        // unique paste identifier
	Size      string `json:"size"`       // content size, serialized as a string
	Expire    string `json:"expire"`     // expiry time, serialized as a string
	Title     string `json:"title"`
	Syntax    string `json:"syntax"` // syntax-highlighting hint chosen by the author
	User      string `json:"user"`
	Data      string `json:"data"` // the paste content itself
	RFC3339   string `json:"time"` // Date converted to RFC 3339 — note the JSON key is "time", not "rfc3339"
}
|
2020-01-29 11:05:05 +01:00
|
|
|
|
|
|
|
// Meta Information: https://pastebin.com/api_scraping.php
|
|
|
|
// Content: http://pastebin.com/api_scrape_item.php
|
|
|
|
|
|
|
|
// QueryPastes returns metadata for the last 100 public pastes.
|
|
|
|
func QueryPastes() ([]PasteMeta, error) {
|
|
|
|
server := "pastebin.com"
|
|
|
|
req, err := http.NewRequest("GET", fmt.Sprintf("https://%s/api_scraping.php?limit=100", server), nil)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
logrus.Fatal("Could not build http request", err)
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
2020-01-29 12:47:01 +01:00
|
|
|
logrus.Error("Could not do request due to", err)
|
2020-01-29 11:05:05 +01:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
2020-01-29 12:47:01 +01:00
|
|
|
logrus.Error("Could not fetch response due to", err)
|
2020-01-29 11:05:05 +01:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var pastes []PasteMeta
|
|
|
|
if err := json.Unmarshal(body, &pastes); err != nil {
|
2020-01-29 12:47:01 +01:00
|
|
|
logrus.Error("Could not decode response due to ", err, " body", string(body))
|
2020-01-29 11:05:05 +01:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return pastes, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// FetchPaste fetches paste contents via the web API.
|
|
|
|
func FetchPaste(paste PasteMeta) (string, error) {
|
|
|
|
url := paste.ScrapeURL
|
|
|
|
req, err := http.NewRequest("GET", url, nil)
|
|
|
|
if err != nil {
|
2020-01-29 12:47:01 +01:00
|
|
|
logrus.Error("Could build request", req, " due to", err)
|
2020-01-29 11:05:05 +01:00
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
log.Printf("Could not do request %v due to %v", req, err)
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
log.Printf("Could not read response body %v due to %v", resp.Body, err)
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
return string(body), nil
|
|
|
|
}
|