mirror of https://github.com/maride/afl-transmit.git (synced 2025-10-11 01:56:50 +00:00)

Reduce transmitted files by tracking identical files

parent c7bf5bc91b
commit 5cb6c1af9b

README.md | 11 +++++++++++
@@ -37,3 +37,14 @@ dd if=/dev/urandom bs=32 count=1 2>/dev/null | base64 | tee transmit.key
 ```
 
 As already said, the same key must be used on all nodes.
+
+### Traffic reduction
+
+By default, *afl-transmit* avoids sending the same file multiple times, e.g. when it is present in multiple fuzzer directories.
+This greatly reduces the traffic between your nodes (in one measurement, from 621 kB down to 1.3 kB).
+Please note that there are edge cases in which you might not want that behaviour, e.g.
+- you want to preserve the queue of each fuzzer
+- you expect your fuzzers to give the same (file) name to different test cases, in which case *afl-transmit* would mistakenly assume that the files have the same *contents* and not just the same *name*
+- you don't care about traffic
+
+To disable this deduplication, pass `--no-duplicates=false` as an argument.
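The second bullet above is the key caveat: the deduplication is keyed on file *names*, not file *contents*, so two different test cases that happen to share a name are treated as one. A minimal sketch of that distinction, using hypothetical queue file names and a content hash for contrast (illustration only, not code from this commit):

```go
package main

import (
	"crypto/sha256"
	"fmt"
)

func main() {
	// Two test cases that happen to share a queue file name but differ in content.
	a := []byte("test case produced by fuzzer01")
	b := []byte("a different test case produced by fuzzer02")
	nameA, nameB := "id:000042", "id:000042" // hypothetical file names

	// Name-based deduplication would consider b a duplicate of a and skip it.
	fmt.Println("names equal: ", nameA == nameB) // true

	// A content hash would tell the two apart.
	fmt.Println("hashes equal:", sha256.Sum256(a) == sha256.Sum256(b)) // false
}
```

The packer changes implementing the name-based check live in the `logistic` package (imported by main.go below):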
@@ -4,6 +4,7 @@ import (
 	"archive/tar"
 	"bytes"
 	"compress/flate"
+	"flag"
 	"fmt"
 	"io/ioutil"
 	"log"
@@ -11,6 +12,13 @@ import (
 	"strings"
 )
 
+var noDuplicates bool
+
+// RegisterPackerFlags registers flags which are required by the packer
+func RegisterPackerFlags() {
+	flag.BoolVar(&noDuplicates, "no-duplicates", true, "Avoid transmitting the same file multiple times, e.g. because it is present in multiple fuzzers' queues")
+}
+
 // PackFuzzers packs all targeted fuzzers into a TAR - at least queue/, fuzz_bitmap, fuzzer_stats
 func PackFuzzers(fuzzers []string, fuzzerDirectory string) ([]byte, error) {
 	// Create TAR archive
@@ -21,7 +29,8 @@ func PackFuzzers(fuzzers []string, fuzzerDirectory string) ([]byte, error) {
 	// - the fuzz_bitmap file
 	// - the fuzzer_stats file
 	// - the is_main_fuzzer file if present
-	// - the queue/ directory
+	// - the queue/ directory - but avoiding duplicates
+	var pkgCont []string // list of queue files already present in the archive
 	for _, fuzzer := range fuzzers {
 		// We need full paths to read, but will write relative paths into the TAR archive
 		absFuzzerPath := fuzzerDirectory
@@ -31,7 +40,7 @@ func PackFuzzers(fuzzers []string, fuzzerDirectory string) ([]byte, error) {
 		packSingleFile(tarWriter, absFuzzerPath, relFuzzerPath, "fuzz_bitmap", false)
 		packSingleFile(tarWriter, absFuzzerPath, relFuzzerPath, "fuzzer_stats", false)
 		packSingleFile(tarWriter, absFuzzerPath, relFuzzerPath, "is_main_fuzzer", true)
-		packQueueFiles(tarWriter, absFuzzerPath, relFuzzerPath)
+		packQueueFiles(tarWriter, absFuzzerPath, relFuzzerPath, &pkgCont)
 	}
 
 	// Close TAR archive
@@ -81,7 +90,7 @@ func packSingleFile(tarWriter *tar.Writer, absPath string, relPath string, fileN
 }
 
 // Packs the files in the given directory into a tar archive
-func packQueueFiles(tarWriter *tar.Writer, absPath string, relPath string) {
+func packQueueFiles(tarWriter *tar.Writer, absPath string, relPath string, pkgCont *[]string) {
 	// Get list of queue files
 	queuePath := fmt.Sprintf("%s%c%s%cqueue", absPath, os.PathSeparator, relPath, os.PathSeparator)
 	filesInDir, readErr := ioutil.ReadDir(queuePath)
@@ -98,7 +107,29 @@ func packQueueFiles(tarWriter *tar.Writer, absPath string, relPath string, pkgCont *[]string) {
 			continue
 		}
 
+		// Check if we should care for duplicates
+		if noDuplicates && checkDuplicate(f.Name(), pkgCont) {
+			// that file is already present in the package - avoid packing it again
+			continue
+		}
+
 		// Pack into the archive
 		packSingleFile(tarWriter, absPath, relPath, fmt.Sprintf("queue%c%s", os.PathSeparator, f.Name()), false)
+
+		if noDuplicates {
+			// Append added file name to the list of things included in the package
+			*pkgCont = append(*pkgCont, f.Name())
+		}
 	}
 }
+
+// checkDuplicate checks if name is already present in pkgCont
+func checkDuplicate(name string, pkgCont *[]string) bool {
+	for _, p := range *pkgCont {
+		if p == name {
+			return true
+		}
+	}
+
+	return false
+}
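`checkDuplicate` scans `pkgCont` linearly, so packing n queue files costs O(n²) name comparisons in the worst case. That is likely fine for typical queue sizes, but a map-backed set would make each lookup O(1). A minimal sketch of that alternative (illustrative only, not part of the commit):

```go
package main

import "fmt"

// packedSet tracks queue file names that are already in the archive.
type packedSet map[string]struct{}

// seen reports whether name was packed before, and records it if not.
func (s packedSet) seen(name string) bool {
	if _, ok := s[name]; ok {
		return true
	}
	s[name] = struct{}{}
	return false
}

func main() {
	set := packedSet{}
	for _, name := range []string{"id:000001", "id:000002", "id:000001"} {
		if set.seen(name) {
			fmt.Println("skipping duplicate:", name)
			continue
		}
		fmt.Println("packing:", name)
	}
}
```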
main.go | 2 ++

@@ -3,6 +3,7 @@ package main
 import (
 	"flag"
 	"fmt"
+	"github.com/maride/afl-transmit/logistic"
 	"github.com/maride/afl-transmit/net"
 	"github.com/maride/afl-transmit/stats"
 	"github.com/maride/afl-transmit/watchdog"
@@ -20,6 +21,7 @@ func main() {
 	net.RegisterSenderFlags()
 	net.RegisterListenFlags()
 	net.RegisterCryptFlags()
+	logistic.RegisterPackerFlags()
 	RegisterGlobalFlags()
 	flag.Parse()
 
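This follows the standard Go `flag` idiom used throughout afl-transmit: each package declares its flags on the global flag set in a `Register...Flags()` function, and `main` parses the combined set exactly once after all registrations. A standalone sketch of the pattern (function name here is illustrative):

```go
package main

import (
	"flag"
	"fmt"
)

var noDuplicates bool

// registerPackerFlags plays the role of logistic.RegisterPackerFlags:
// it only declares the flag; parsing happens centrally in main.
func registerPackerFlags() {
	flag.BoolVar(&noDuplicates, "no-duplicates", true, "avoid transmitting the same file multiple times")
}

func main() {
	registerPackerFlags() // every package registers its flags first...
	flag.Parse()          // ...then the combined flag set is parsed once
	fmt.Println("no-duplicates:", noDuplicates)
}
```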