Add compression between peers

maride 2020-06-20 01:06:47 +02:00
parent ed6baf7753
commit 58b0820899
4 changed files with 46 additions and 25 deletions

View File

@@ -3,6 +3,7 @@ package logistic
 import (
 	"archive/tar"
 	"bytes"
+	"compress/flate"
 	"encoding/base64"
 	"fmt"
 	"io/ioutil"
@@ -11,7 +12,7 @@ import (
 )
 
 // Packs a whole fuzzer directory - at least queue/, fuzz_bitmap, fuzzer_stats
-func PackFuzzer(fuzzerName string, directory string) []byte {
+func PackFuzzer(fuzzerName string, directory string) ([]byte, error) {
 	// Gather contents
 	contentArray := [][]byte{
 		[]byte(fuzzerName),
@@ -20,21 +21,34 @@ func PackFuzzer(fuzzerName string, directory string) []byte {
 		packQueueFiles(directory),
 	}
 
+	// Prepare FLATE compression
+	var flateBuffer bytes.Buffer
+	flateWrite, flateErr := flate.NewWriter(&flateBuffer, flate.BestCompression)
+	if flateErr != nil {
+		return nil, fmt.Errorf("unable to prepare flate compressor: %s", flateErr)
+	}
+
 	// Convert all parts to base64, and concat them to the packet
-	var result []byte
+	firstRun := true
 	for _, a := range contentArray {
 		b64Buf := make([]byte, base64.StdEncoding.EncodedLen(len(a)))
 		base64.StdEncoding.Encode(b64Buf, a)
 
-		// Add newline char as separator
-		result = append(result, '\n')
-
-		// Append base64 encoded content
-		result = append(result, b64Buf...)
+		// Add newline char as separator, avoiding it on the first run
+		if firstRun {
+			firstRun = false
+		} else {
+			flateWrite.Write([]byte("\n"))
+		}
+
+		// Append base64 encoded content
+		flateWrite.Write(b64Buf)
 	}
 
+	flateWrite.Close()
+
 	// Return result: a big byte array, representing concatted base64-encoded files
-	return result
+	return flateBuffer.Bytes(), nil
 }
 
 // Reads a single file and returns it
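
The packet format after this change is: base64-encode each part (the fuzzer name plus the collected files), join the parts with single newline characters, and push the whole stream through a FLATE writer at best compression. The following sketch is not part of the commit; it only demonstrates that this scheme round-trips, with pack() standing in for the relevant portion of PackFuzzer and illustrative payloads in main().

package main

import (
	"bytes"
	"compress/flate"
	"encoding/base64"
	"fmt"
	"io/ioutil"
	"log"
)

// pack mirrors the scheme now used by PackFuzzer: base64-encode every part,
// separate the parts with '\n', and FLATE-compress the concatenated stream.
func pack(parts [][]byte) ([]byte, error) {
	var compressed bytes.Buffer
	writer, err := flate.NewWriter(&compressed, flate.BestCompression)
	if err != nil {
		return nil, fmt.Errorf("unable to prepare flate compressor: %s", err)
	}

	for i, part := range parts {
		if i > 0 {
			// Newline separates the parts; skipped before the first one
			writer.Write([]byte("\n"))
		}
		b64 := make([]byte, base64.StdEncoding.EncodedLen(len(part)))
		base64.StdEncoding.Encode(b64, part)
		writer.Write(b64)
	}

	// Flush the compressor so all bytes end up in the buffer
	writer.Close()
	return compressed.Bytes(), nil
}

func main() {
	packed, err := pack([][]byte{[]byte("fuzzer01"), []byte("bitmap bytes"), []byte("stats")})
	if err != nil {
		log.Fatal(err)
	}

	// Decompress again and count the parts to show the round-trip works
	raw, err := ioutil.ReadAll(flate.NewReader(bytes.NewReader(packed)))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("packed %d bytes, %d parts after decompression\n", len(packed), len(bytes.Split(raw, []byte("\n"))))
}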

View File

@@ -3,6 +3,7 @@ package logistic
 import (
 	"archive/tar"
 	"bytes"
+	"compress/flate"
 	"encoding/base64"
 	"fmt"
 	"io"
@@ -14,8 +15,13 @@ import (
 // Unpacks a raw string, creates files and stores them in the target directory. May return an error if one occurs
 func UnpackInto(raw []byte, targetDir string) error {
-	// Clean raw bytes: trim possibly leading and/or trailing newlines
-	raw = bytes.Trim(raw, "\n")
+	// Prepare FLATE decompressor
+	var flateBuffer bytes.Buffer
+	flateReader := flate.NewReader(&flateBuffer)
+
+	// Uncompress
+	flateBuffer.Write(raw)
+	raw, _ = ioutil.ReadAll(flateReader)
 
 	// Process raw bytes
 	splitted := bytes.Split(raw, []byte("\n"))
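
Note that the new decompression step discards the error from ioutil.ReadAll, so a truncated or corrupted packet would silently yield partial data. The sketch below is not from the commit: it performs the same decompression (feeding the raw bytes through bytes.NewReader instead of a bytes.Buffer) but surfaces the error; the name decompress is illustrative.

package main

import (
	"bytes"
	"compress/flate"
	"fmt"
	"io/ioutil"
	"log"
)

// decompress performs the same step UnpackInto now does, but returns
// the read error instead of discarding it.
func decompress(raw []byte) ([]byte, error) {
	flateReader := flate.NewReader(bytes.NewReader(raw))
	defer flateReader.Close()

	decompressed, err := ioutil.ReadAll(flateReader)
	if err != nil {
		return nil, fmt.Errorf("unable to decompress packet: %s", err)
	}
	return decompressed, nil
}

func main() {
	// A garbled packet is reported instead of being silently split
	if _, err := decompress([]byte("not a flate stream")); err != nil {
		log.Printf("rejected bogus packet: %s", err)
	}
}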

View File

@@ -1,11 +1,11 @@
 package net
 
 import (
-	"bufio"
 	"flag"
 	"fmt"
 	"github.com/maride/afl-transmit/logistic"
 	"io"
+	"io/ioutil"
 	"log"
 	"net"
 	"strings"
@@ -50,23 +50,20 @@ func handle(conn net.Conn, outputDirectory string) {
 	// Make sure to close connection on return
 	defer conn.Close()
 
-	// Loop until we either hit EOF or an error
-	for {
 	// Read raw content
-	cont, contErr := bufio.NewReader(conn).ReadString('\x00')
+	cont, contErr := ioutil.ReadAll(conn) // bufio.NewReader(conn).ReadString('\x00')
 
-	if contErr == io.EOF {
+	if contErr == nil || contErr == io.EOF {
 		// We received the whole content, time to process it
 		unpackErr := logistic.UnpackInto([]byte(cont), outputDirectory)
 		if unpackErr != nil {
 			log.Printf("Encountered error processing packet from %s: %s", conn.RemoteAddr().String(), unpackErr)
 		}
 
 		return
-	} else if contErr != nil {
+	} else {
 		// We encountered an error on that connection
 		log.Printf("Encountered error while reading from %s: %s", conn.RemoteAddr().String(), contErr)
 		return
 	}
-	}
 }
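
Switching from ReadString('\x00') to ioutil.ReadAll fits the new wire format: a FLATE stream is binary and may legitimately contain 0x00 bytes, so a NUL sentinel can no longer delimit a packet; instead the sender closes the connection and the listener reads until end of stream. Also note that ioutil.ReadAll reports a clean end of stream as a nil error rather than io.EOF, so the nil comparison is the branch that fires in practice. The sketch below is not from the commit; it uses net.Pipe (an in-memory connection pair) to show the read-until-close behaviour the listener now relies on.

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net"
)

func main() {
	// net.Pipe gives a synchronous in-memory connection pair
	client, server := net.Pipe()

	go func() {
		// The sender writes arbitrary (possibly binary) data, including a
		// NUL byte, and then closes; the close signals "packet complete".
		client.Write([]byte("compressed\x00payload"))
		client.Close()
	}()

	cont, contErr := ioutil.ReadAll(server)
	if contErr != nil {
		log.Fatalf("read failed: %s", contErr)
	}
	fmt.Printf("received %d bytes\n", len(cont))
}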

View File

@@ -31,7 +31,11 @@ func WatchFuzzers(outputDirectory string) {
 	for _, localFuzzDir := range localFuzzers {
 		// Pack important parts of the fuzzer directory into a byte array
 		fuzzerName := filepath.Base(localFuzzDir)
-		packedFuzzer := logistic.PackFuzzer(fuzzerName, localFuzzDir)
+		packedFuzzer, packerErr := logistic.PackFuzzer(fuzzerName, localFuzzDir)
+		if packerErr != nil {
+			log.Printf("Failed to pack fuzzer: %s", packerErr)
+			continue
+		}
 
 		// and send it to our peers
 		net.SendToPeers(packedFuzzer)
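
With PackFuzzer now returning ([]byte, error), callers such as the watchdog log the failure and skip that fuzzer instead of shipping a nil packet. The minimal caller sketch below is not from the commit; it assumes the logistic import path of this repository, and the two directory paths are placeholders to be replaced with a real sync directory and output directory.

package main

import (
	"log"

	"github.com/maride/afl-transmit/logistic"
)

func main() {
	// Pack one fuzzer directory; the paths here are illustrative only
	packed, packErr := logistic.PackFuzzer("fuzzer01", "/path/to/sync/fuzzer01")
	if packErr != nil {
		log.Fatalf("pack failed: %s", packErr)
	}

	// Feed the compressed packet straight back into the unpacker
	if unpackErr := logistic.UnpackInto(packed, "/path/to/output"); unpackErr != nil {
		log.Fatalf("unpack failed: %s", unpackErr)
	}
}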