Add compression between peers

This commit is contained in:
maride 2020-06-20 01:06:47 +02:00
parent ed6baf7753
commit 58b0820899
4 changed files with 46 additions and 25 deletions

View File

@ -3,6 +3,7 @@ package logistic
import ( import (
"archive/tar" "archive/tar"
"bytes" "bytes"
"compress/flate"
"encoding/base64" "encoding/base64"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
@ -11,7 +12,7 @@ import (
) )
// Packs a whole fuzzer directory - at least queue/, fuzz_bitmap, fuzzer_stats // Packs a whole fuzzer directory - at least queue/, fuzz_bitmap, fuzzer_stats
func PackFuzzer(fuzzerName string, directory string) []byte { func PackFuzzer(fuzzerName string, directory string) ([]byte, error) {
// Gather contents // Gather contents
contentArray := [][]byte{ contentArray := [][]byte{
[]byte(fuzzerName), []byte(fuzzerName),
@ -20,21 +21,34 @@ func PackFuzzer(fuzzerName string, directory string) []byte {
packQueueFiles(directory), packQueueFiles(directory),
} }
// Prepare FLATE compression
var flateBuffer bytes.Buffer
flateWrite, flateErr := flate.NewWriter(&flateBuffer, flate.BestCompression)
if flateErr != nil {
return nil, fmt.Errorf("unable to prepare flate compressor: %s", flateErr)
}
// Convert all parts to base64, and concat them to the packet // Convert all parts to base64, and concat them to the packet
var result []byte firstRun := true
for _, a := range contentArray { for _, a := range contentArray {
b64Buf := make([]byte, base64.StdEncoding.EncodedLen(len(a))) b64Buf := make([]byte, base64.StdEncoding.EncodedLen(len(a)))
base64.StdEncoding.Encode(b64Buf, a) base64.StdEncoding.Encode(b64Buf, a)
// Add newline char as separator // Add newline char as separator, avoiding it on the first run
result = append(result, '\n') if firstRun {
firstRun = false
// Append base64 encoded content } else {
result = append(result, b64Buf...) flateWrite.Write([]byte("\n"))
} }
// Append base64 encoded content
flateWrite.Write(b64Buf)
}
flateWrite.Close()
// Return result: a big byte array, representing concatenated base64-encoded files // Return result: a big byte array, representing concatenated base64-encoded files
return result return flateBuffer.Bytes(), nil
} }
// Reads a single file and returns it // Reads a single file and returns it

View File

@ -3,6 +3,7 @@ package logistic
import ( import (
"archive/tar" "archive/tar"
"bytes" "bytes"
"compress/flate"
"encoding/base64" "encoding/base64"
"fmt" "fmt"
"io" "io"
@ -14,8 +15,13 @@ import (
// Unpacks a raw string, creates files and stores them in the target directory. May return an error if one occurs // Unpacks a raw string, creates files and stores them in the target directory. May return an error if one occurs
func UnpackInto(raw []byte, targetDir string) error { func UnpackInto(raw []byte, targetDir string) error {
// Clean raw bytes: trim possibly leading and/or trailing newlines // Prepare FLATE decompressor
raw = bytes.Trim(raw, "\n") var flateBuffer bytes.Buffer
flateReader := flate.NewReader(&flateBuffer)
// Uncompress
flateBuffer.Write(raw)
raw, _ = ioutil.ReadAll(flateReader)
// Process raw bytes // Process raw bytes
splitted := bytes.Split(raw, []byte("\n")) splitted := bytes.Split(raw, []byte("\n"))

View File

@ -1,11 +1,11 @@
package net package net
import ( import (
"bufio"
"flag" "flag"
"fmt" "fmt"
"github.com/maride/afl-transmit/logistic" "github.com/maride/afl-transmit/logistic"
"io" "io"
"io/ioutil"
"log" "log"
"net" "net"
"strings" "strings"
@ -50,23 +50,20 @@ func handle(conn net.Conn, outputDirectory string) {
// Make sure to close connection on return // Make sure to close connection on return
defer conn.Close() defer conn.Close()
// Loop until we either hit EOF or an error
for {
// Read raw content // Read raw content
cont, contErr := bufio.NewReader(conn).ReadString('\x00') cont, contErr := ioutil.ReadAll(conn) // bufio.NewReader(conn).ReadString('\x00')
if contErr == io.EOF { if contErr == nil || contErr == io.EOF {
// We received the whole content, time to process it // We received the whole content, time to process it
unpackErr := logistic.UnpackInto([]byte(cont), outputDirectory) unpackErr := logistic.UnpackInto([]byte(cont), outputDirectory)
if unpackErr != nil { if unpackErr != nil {
log.Printf("Encountered error processing packet from %s: %s", conn.RemoteAddr().String(), unpackErr) log.Printf("Encountered error processing packet from %s: %s", conn.RemoteAddr().String(), unpackErr)
} }
return return
} else if contErr != nil { } else {
// We encountered an error on that connection // We encountered an error on that connection
log.Printf("Encountered error while reading from %s: %s", conn.RemoteAddr().String(), contErr) log.Printf("Encountered error while reading from %s: %s", conn.RemoteAddr().String(), contErr)
return return
} }
}
} }

View File

@ -31,7 +31,11 @@ func WatchFuzzers(outputDirectory string) {
for _, localFuzzDir := range localFuzzers { for _, localFuzzDir := range localFuzzers {
// Pack important parts of the fuzzer directory into a byte array // Pack important parts of the fuzzer directory into a byte array
fuzzerName := filepath.Base(localFuzzDir) fuzzerName := filepath.Base(localFuzzDir)
packedFuzzer := logistic.PackFuzzer(fuzzerName, localFuzzDir) packedFuzzer, packerErr := logistic.PackFuzzer(fuzzerName, localFuzzDir)
if packerErr != nil {
log.Printf("Failed to pack fuzzer: %s", packerErr)
continue
}
// and send it to our peers // and send it to our peers
net.SendToPeers(packedFuzzer) net.SendToPeers(packedFuzzer)