feat(d-eyes): init

This commit is contained in:
zitn
2023-11-06 16:31:16 +08:00
parent 804617ded3
commit 270bb18b98
117 changed files with 19222 additions and 0 deletions

56
filedetection/file.go Normal file
View File

@@ -0,0 +1,56 @@
package filedetection
import (
"os"
)
// File is a handle to a file on disk that can be stat'ed and hashed.
type File interface {
	// Path returns the file's path as given at construction time.
	Path() string
	// Stat returns the file's metadata from the OS.
	Stat() (os.FileInfo, error)
	// Hashes returns the hex-encoded MD5 and SHA256 digests of the content.
	Hashes() (md5sum, sha256sum string, err error)
	// EnableHashMarshalling computes and stores the hashes so they are
	// included when the value is marshalled (e.g. to JSON).
	EnableHashMarshalling() error
}
// OSFile is the on-disk implementation of File. The hash fields stay empty
// (and are omitted from JSON) until EnableHashMarshalling is called.
type OSFile struct {
	FilePath  string `json:"path"`
	MD5Sum    string `json:"MD5,omitempty"`
	SHA256Sum string `json:"SHA256,omitempty"`
}

// NewFile wraps a path in a File handle; no I/O happens until Stat/Hashes.
func NewFile(path string) File {
	return &OSFile{FilePath: path}
}
//
//func CloneFile(f File) File {
// if f == nil {
// return nil
// }
//
// osFile, ok := f.(*OSFile)
// if ok {
// return &OSFile{
// FilePath: osFile.FilePath,
// MD5Sum: osFile.MD5Sum,
// SHA256Sum: osFile.SHA256Sum,
// }
// }
// return NewFile(f.Path())
//}
// Path returns the path this OSFile was created with.
func (f *OSFile) Path() string {
	return f.FilePath
}

// Stat stats the file on disk.
func (f *OSFile) Stat() (os.FileInfo, error) {
	return os.Stat(f.FilePath)
}

// Hashes computes (or fetches from the package-level hash cache) the MD5
// and SHA256 digests of the file's content.
func (f *OSFile) Hashes() (md5sum, sha256sum string, err error) {
	return ComputeHashes(f.FilePath)
}

// EnableHashMarshalling fills MD5Sum/SHA256Sum so that subsequent JSON
// marshalling includes them.
func (f *OSFile) EnableHashMarshalling() (err error) {
	f.MD5Sum, f.SHA256Sum, err = f.Hashes()
	return
}

54
filedetection/filescan.go Normal file
View File

@@ -0,0 +1,54 @@
package filedetection
import (
"context"
"fmt"
"sync"
"github.com/hillu/go-yara/v4"
"d-eyes/output"
"d-eyes/yaraobj"
)
// StringSlice returns the file-extension filter for the given flag name.
// Currently a stub: it always returns nil, which IteratePath treats as
// "match every extension". The name argument is ignored.
func StringSlice(name string) []string {
	return nil
}
// sum counts every file visited.
// NOTE(review): package-level state — it accumulates across StartFileScan
// calls, and the scan goroutines increment it through a plain *int without
// synchronization (data race); an atomic counter would fix both.
var sum = 0

// StartFileScan walks path with `thread` worker goroutines, matching every
// non-skipped file against the compiled YARA rules and appending hits to r.
// It returns the total number of files visited.
func StartFileScan(path string, rules *yara.Rules, thread int, r *[]output.Result) int {
	iteratorCtx := context.Background()
	var pathIterator Iterator
	// StringSlice currently returns nil, i.e. no extension filter.
	fileExtensions := StringSlice("")
	pIt, err := IteratePath(iteratorCtx, path, fileExtensions)
	if err != nil {
		// Keep going: Concurrent tolerates a nil iterator, and the nil
		// check below then skips scanning entirely.
		fmt.Printf("- %s ERROR: could not initialize scanner for path, reason: %v\n", path, err)
	}
	pathIterator = Concurrent(pathIterator, pIt)
	fmt.Printf("- %s\n", path)
	if pathIterator != nil {
		defer pathIterator.Close()
		yaraScanner, err := yaraobj.NewYaraScanner(rules)
		if err != nil {
			fmt.Println("NewYaraScanner goes error !!!")
		}
		fsScanner := NewFSScanner(yaraScanner)
		wg := &sync.WaitGroup{}
		wg.Add(thread)
		for i := 0; i < thread; i++ {
			go fsScanner.Scan(pathIterator, wg, &sum, r)
		}
		wg.Wait()
	}
	return sum
}

View File

@@ -0,0 +1,68 @@
package filedetection
import (
"encoding/json"
"io"
"path/filepath"
"strings"
"sync"
"github.com/hillu/go-yara/v4"
"d-eyes/output"
"d-eyes/yaraobj"
)
// FSScanner matches files on the filesystem against a compiled YARA rule set.
type FSScanner struct {
	scanner *yaraobj.YaraScanner
}

// NewFSScanner wraps an existing YaraScanner in an FSScanner.
func NewFSScanner(scanner *yaraobj.YaraScanner) *FSScanner {
	return &FSScanner{
		scanner: scanner,
	}
}

// FSScanProgress reports the outcome of scanning a single file.
// NOTE(review): not referenced by any visible code — possibly dead.
type FSScanProgress struct {
	File    File
	Matches yara.MatchRules
	Error   error
}
// Scan drains files from it until EOF, YARA-scans each one and appends any
// match (rule meta description + path) to r. Runs as one of several workers;
// wg is released on return.
// NOTE(review): *sum is incremented from multiple goroutines without
// synchronization — data race; needs an atomic counter or a mutex.
func (s *FSScanner) Scan(it Iterator, wg *sync.WaitGroup, sum *int, r *[]output.Result) {
	defer wg.Done()
	for {
		file, err := it.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			continue
		}
		*sum++
		switch ext := strings.ToLower(filepath.Ext(file.Path())); ext {
		case ".yar":
			// Don't scan rule files themselves.
			continue
		case ".zip", ".tar.gz", ".rar", ".7z", ".gzp", ".bzp2", ".tar", ".gz", ".iso", ".vmem", ".vhd", ".qcow2", ".vmdk":
			// Skip archives and disk images.
			// NOTE(review): ".tar.gz" can never match — filepath.Ext returns
			// only the last extension (".gz", which is also listed).
			continue
		default:
			matches, err := s.scanner.ScanFile(file.Path())
			if err != nil {
				// Unreadable/unscannable file: skip it. The original left an
				// empty if-branch here (no-op), which differed from the
				// sibling platform implementation that skips explicitly.
				continue
			}
			if len(matches) != 0 {
				// Extract the description value from the first meta of the
				// first matching rule, via its JSON form.
				data := matches[0].Metas[0]
				dataType, _ := json.Marshal(data)
				parts := strings.Split(string(dataType), ":")
				if len(parts) < 3 {
					// Guard: indexing [2] unconditionally could panic on an
					// unexpected meta shape.
					continue
				}
				metaTmp := strings.Trim(parts[2], "\"}")
				resTmp := output.Result{Risk: metaTmp, RiskPath: file.Path()}
				*r = append(*r, resTmp)
			}
		}
	}
}

View File

@@ -0,0 +1,61 @@
package filedetection
import (
"encoding/json"
"fmt"
"io"
"path/filepath"
"strings"
"sync"
"github.com/gookit/color"
"d-eyes/output"
"d-eyes/yaraobj"
)
// FSScanner matches files on the filesystem against a compiled YARA rule set.
type FSScanner struct {
	scanner *yaraobj.YaraScanner
}

// NewFSScanner wraps an existing YaraScanner in an FSScanner.
func NewFSScanner(scanner *yaraobj.YaraScanner) *FSScanner {
	return &FSScanner{
		scanner: scanner,
	}
}
// Scan drains files from it until EOF, printing each path, YARA-scanning it
// and appending any match (rule meta description + path) to r. Runs as one
// of several workers; wg is released on return.
// NOTE(review): *sum is incremented from multiple goroutines without
// synchronization — data race; needs an atomic counter or a mutex.
func (s *FSScanner) Scan(it Iterator, wg *sync.WaitGroup, sum *int, r *[]output.Result) {
	defer wg.Done()
	for {
		file, err := it.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			continue
		}
		*sum++
		color.Info.Print("[INFO] D-Eyes FileScan scanning: ")
		fmt.Println(file.Path())
		switch ext := strings.ToLower(filepath.Ext(file.Path())); ext {
		case ".yar":
			// Don't scan rule files themselves.
			continue
		case ".zip", ".tar.gz", ".rar", ".7z", ".gzp", ".bzp2", ".tar", ".gz", ".iso", ".vmem", ".vhd", ".qcow2", ".vmdk":
			// Skip archives and disk images.
			// NOTE(review): ".tar.gz" can never match — filepath.Ext returns
			// only the last extension (".gz", which is also listed).
			continue
		default:
			matches, err := s.scanner.ScanFile(file.Path())
			if err != nil {
				continue
			}
			if len(matches) != 0 {
				// Extract the description value from the first meta of the
				// first matching rule, via its JSON form.
				data := matches[0].Metas[0]
				dataType, _ := json.Marshal(data)
				parts := strings.Split(string(dataType), ":")
				if len(parts) < 3 {
					// Guard: indexing [2] unconditionally could panic on an
					// unexpected meta shape.
					continue
				}
				metaTmp := strings.Trim(parts[2], "\"}")
				resTmp := output.Result{Risk: metaTmp, RiskPath: file.Path()}
				*r = append(*r, resTmp)
			}
		}
	}
}

View File

@@ -0,0 +1,236 @@
package filedetection
import (
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
)
var (
	// FilesBuffer is the capacity of the producer→consumer channel used by
	// fsIterator; the directory walker can run this far ahead of Next().
	FilesBuffer = 8
)

// Virtual/volatile filesystem roots that must never be scanned (Linux).
var skippedRoots = []string{
	"/dev",
	"/proc",
	"/sys",
	"/run",
}

// Directory names skipped wherever they appear in the tree.
var skippedDirs = []string{
	"lost+found",
}

// doScanDir reports whether the directory at path should be descended into.
// It rejects the pseudo-filesystem roots above (and anything beneath them)
// and any path segment listed in skippedDirs. Paths that cannot be resolved
// to an absolute form are not scanned.
func doScanDir(path string) bool {
	var err error
	path, err = filepath.Abs(path)
	if err != nil {
		return false
	}
	for _, root := range skippedRoots {
		// Skip the root itself and its subtree, but not mere prefix
		// lookalikes such as "/procfoo".
		if path == root || strings.HasPrefix(path, root+"/") {
			return false
		}
	}
	for _, dir := range skippedDirs {
		// BUG FIX: the original indexed path[len(dir)+1] after a Contains
		// check, which only worked when the segment sat at the path root,
		// so e.g. "/mnt/lost+found" was not skipped. Match the exact
		// segment anywhere in the path instead.
		if strings.HasSuffix(path, "/"+dir) || strings.Contains(path, "/"+dir+"/") {
			return false
		}
	}
	return true
}
// ErrSkipped is reported for files filtered out by the extension filter.
var ErrSkipped = fmt.Errorf("skipped")

// nextEntry is one item on the producer→consumer channel: a file plus the
// error (if any) encountered while producing it.
type nextEntry struct {
	File File
	Err  error
}

// fsIterator walks a directory tree breadth-first on a background goroutine
// (dirScanner) and hands results to Next via the buffered next channel.
type fsIterator struct {
	root            string
	validExtensions []string
	ctx             context.Context
	cancel          context.CancelFunc
	closed          bool
	dirs            []string
	next            chan *nextEntry
}
// IteratePath returns an Iterator over all regular files beneath path
// (which must be a directory), restricted to validExtensions when non-empty.
// Directory walking happens on a background goroutine feeding a buffered
// channel; cancel ctx or Close the iterator to stop it.
func IteratePath(ctx context.Context, path string, validExtensions []string) (Iterator, error) {
	stat, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	if !stat.IsDir() {
		return nil, fmt.Errorf("path must be a directory")
	}
	// Lowercase into a private copy: the original lowered the caller's
	// slice in place, mutating the argument.
	exts := make([]string, len(validExtensions))
	for i, e := range validExtensions {
		exts[i] = strings.ToLower(e)
	}
	it := &fsIterator{
		root:            path,
		validExtensions: exts,
		closed:          false,
		dirs:            make([]string, 1, 16),
		next:            make(chan *nextEntry, FilesBuffer),
	}
	it.dirs[0] = path
	it.ctx, it.cancel = context.WithCancel(ctx)
	go it.dirScanner()
	return it, nil
}
// Root returns the directory this iterator was started on.
func (it *fsIterator) Root() string {
	return it.root
}

// doesExtensionMatch reports whether path's lowercased, dot-free extension
// is in validExtensions; an empty filter matches everything.
func (it *fsIterator) doesExtensionMatch(path string) bool {
	// The explicit nil check was redundant (staticcheck S1009):
	// len(nil slice) == 0.
	if len(it.validExtensions) == 0 {
		return true
	}
	// filepath.Ext yields the final ".ext" (or ""), matching the original
	// last-dot-segment split.
	ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(path), "."))
	for _, vExt := range it.validExtensions {
		if ext == vExt {
			return true
		}
	}
	return false
}
// dirScanner is the producer goroutine: it pops directories off it.dirs
// breadth-first, emits every entry onto it.next, and closes the channel
// when the queue is empty or the context is cancelled.
// NOTE(review): sends on it.next can still block indefinitely if the
// consumer stops calling Next; a select on ctx.Done around each send would
// harden this.
func (it *fsIterator) dirScanner() {
	defer close(it.next)
	for {
		// BUG FIX: the original used `break` inside the select, which only
		// exits the select statement — cancellation never stopped the loop.
		select {
		case <-it.ctx.Done():
			return
		default:
		}
		if len(it.dirs) == 0 {
			break
		}
		dir := it.dirs[0]
		it.dirs = it.dirs[1:]
		// Closure so defer f.Close() runs per directory, not at goroutine exit.
		func() {
			f, err := os.Open(dir)
			if err != nil {
				it.next <- &nextEntry{
					File: NewFile(dir),
					Err:  err,
				}
				return
			}
			defer f.Close()
			for {
				contents, err := f.Readdir(1)
				if err == io.EOF {
					break
				} else if err != nil {
					it.next <- &nextEntry{
						File: NewFile(dir),
						Err:  err,
					}
					return
				}
				path := filepath.Join(dir, contents[0].Name())
				if contents[0].IsDir() {
					if doScanDir(path) {
						it.dirs = append(it.dirs, path)
					}
				} else {
					if it.doesExtensionMatch(path) {
						it.next <- &nextEntry{
							File: NewFile(path),
						}
					} else {
						// Filtered out: still reported, tagged ErrSkipped.
						it.next <- &nextEntry{
							File: NewFile(path),
							Err:  ErrSkipped,
						}
					}
				}
			}
		}()
	}
}
// Next returns the next discovered file, or io.EOF once the walk finishes
// or the iterator was closed. A nil value from the channel means the
// producer goroutine has finished and closed it.
func (it *fsIterator) Next() (File, error) {
	if it.closed {
		return nil, io.EOF
	}
	next := <-it.next
	if next == nil {
		return nil, io.EOF
	}
	return next.File, next.Err
}

// Close cancels the background walk. Safe to call more than once.
// NOTE(review): ctx.Err() is evaluated before the deferred cancel fires, so
// this returns nil unless the parent context was already done — confirm
// that is intended.
// NOTE(review): closed is read/written without synchronization — racy if
// Close and Next run on different goroutines.
func (it *fsIterator) Close() error {
	if it.closed {
		return nil
	}
	it.closed = true
	defer it.cancel()
	return it.ctx.Err()
}
// fileListIterator yields Files from a fixed, pre-built list of paths.
type fileListIterator struct {
	files []string
	i     int
}

// IterateFileList wraps an explicit list of file paths in an Iterator.
func IterateFileList(files []string) Iterator {
	return &fileListIterator{files: files, i: 0}
}

// Next returns the next file in the list, or io.EOF once exhausted.
func (it *fileListIterator) Next() (File, error) {
	if it.i >= len(it.files) {
		return nil, io.EOF
	}
	current := it.files[it.i]
	it.i++
	return NewFile(current), nil
}

// Close is a no-op; a file list holds no resources.
func (it *fileListIterator) Close() error {
	return nil
}

View File

@@ -0,0 +1,203 @@
package filedetection
import (
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
)
var (
	// FilesBuffer is the capacity of the producer→consumer channel used by
	// fsIterator; the directory walker can run this far ahead of Next().
	FilesBuffer = 8
)

// doScanDir reports whether the directory at path should be descended into.
// Windows build: there are no pseudo-filesystems to avoid, so every
// directory is scanned and the path argument is intentionally unused.
func doScanDir(path string) bool {
	return true
}

// ErrSkipped is reported for files filtered out by the extension filter.
var ErrSkipped = fmt.Errorf("skipped")

// nextEntry is one item on the producer→consumer channel: a file plus the
// error (if any) encountered while producing it.
type nextEntry struct {
	File File
	Err  error
}
// fsIterator walks a directory tree breadth-first on a background goroutine
// (dirScanner) and hands results to Next via the buffered next channel.
type fsIterator struct {
	root            string
	validExtensions []string
	ctx             context.Context
	cancel          context.CancelFunc
	closed          bool
	dirs            []string
	next            chan *nextEntry
}
// IteratePath returns an Iterator over all regular files beneath path
// (which must be a directory), restricted to validExtensions when non-empty.
// Directory walking happens on a background goroutine feeding a buffered
// channel; cancel ctx or Close the iterator to stop it.
func IteratePath(ctx context.Context, path string, validExtensions []string) (Iterator, error) {
	stat, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	if !stat.IsDir() {
		return nil, fmt.Errorf("path must be a directory")
	}
	// Lowercase into a private copy: the original lowered the caller's
	// slice in place, mutating the argument.
	exts := make([]string, len(validExtensions))
	for i, e := range validExtensions {
		exts[i] = strings.ToLower(e)
	}
	it := &fsIterator{
		root:            path,
		validExtensions: exts,
		closed:          false,
		dirs:            make([]string, 1, 16),
		next:            make(chan *nextEntry, FilesBuffer),
	}
	it.dirs[0] = path
	it.ctx, it.cancel = context.WithCancel(ctx)
	go it.dirScanner()
	return it, nil
}
// Root returns the directory this iterator was started on.
func (it *fsIterator) Root() string {
	return it.root
}

// doesExtensionMatch reports whether path's lowercased, dot-free extension
// is in validExtensions; an empty filter matches everything.
func (it *fsIterator) doesExtensionMatch(path string) bool {
	// The explicit nil check was redundant (staticcheck S1009):
	// len(nil slice) == 0.
	if len(it.validExtensions) == 0 {
		return true
	}
	// filepath.Ext yields the final ".ext" (or ""), matching the original
	// last-dot-segment split.
	ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(path), "."))
	for _, vExt := range it.validExtensions {
		if ext == vExt {
			return true
		}
	}
	return false
}
// dirScanner is the producer goroutine: it pops directories off it.dirs
// breadth-first, emits every entry onto it.next, and closes the channel
// when the queue is empty or the context is cancelled.
// NOTE(review): sends on it.next can still block indefinitely if the
// consumer stops calling Next; a select on ctx.Done around each send would
// harden this.
func (it *fsIterator) dirScanner() {
	defer close(it.next)
	for {
		// BUG FIX: the original used `break` inside the select, which only
		// exits the select statement — cancellation never stopped the loop.
		select {
		case <-it.ctx.Done():
			return
		default:
		}
		if len(it.dirs) == 0 {
			break
		}
		dir := it.dirs[0]
		it.dirs = it.dirs[1:]
		// Closure so defer f.Close() runs per directory, not at goroutine exit.
		func() {
			f, err := os.Open(dir)
			if err != nil {
				it.next <- &nextEntry{
					File: NewFile(dir),
					Err:  err,
				}
				return
			}
			defer f.Close()
			for {
				contents, err := f.Readdir(1)
				if err == io.EOF {
					break
				} else if err != nil {
					it.next <- &nextEntry{
						File: NewFile(dir),
						Err:  err,
					}
					return
				}
				path := filepath.Join(dir, contents[0].Name())
				if contents[0].IsDir() {
					if doScanDir(path) {
						it.dirs = append(it.dirs, path)
					}
				} else {
					if it.doesExtensionMatch(path) {
						it.next <- &nextEntry{
							File: NewFile(path),
						}
					} else {
						// Filtered out: still reported, tagged ErrSkipped.
						it.next <- &nextEntry{
							File: NewFile(path),
							Err:  ErrSkipped,
						}
					}
				}
			}
		}()
	}
}
// Next returns the next discovered file, or io.EOF once the walk finishes
// or the iterator was closed. A nil value from the channel means the
// producer goroutine has finished and closed it.
func (it *fsIterator) Next() (File, error) {
	if it.closed {
		return nil, io.EOF
	}
	next := <-it.next
	if next == nil {
		return nil, io.EOF
	}
	return next.File, next.Err
}

// Close cancels the background walk. Safe to call more than once.
// NOTE(review): ctx.Err() is evaluated before the deferred cancel fires, so
// this returns nil unless the parent context was already done — confirm
// that is intended.
// NOTE(review): closed is read/written without synchronization — racy if
// Close and Next run on different goroutines.
func (it *fsIterator) Close() error {
	if it.closed {
		return nil
	}
	it.closed = true
	defer it.cancel()
	return it.ctx.Err()
}
// fileListIterator yields Files from a fixed, pre-built list of paths.
type fileListIterator struct {
	files []string
	i     int
}

// IterateFileList wraps an explicit list of file paths in an Iterator.
func IterateFileList(files []string) Iterator {
	return &fileListIterator{files: files, i: 0}
}

// Next returns the next file in the list, or io.EOF once exhausted.
func (it *fileListIterator) Next() (File, error) {
	if it.i >= len(it.files) {
		return nil, io.EOF
	}
	current := it.files[it.i]
	it.i++
	return NewFile(current), nil
}

// Close is a no-op; a file list holds no resources.
func (it *fileListIterator) Close() error {
	return nil
}

87
filedetection/hashes.go Normal file
View File

@@ -0,0 +1,87 @@
package filedetection
import (
"crypto/md5"
"crypto/sha256"
"encoding/hex"
"io"
"os"
"sync"
)
var globalHasher *CachingHasher
func init() {
globalHasher = NewCachingHasher()
}
// ComputeHashes computes the md5 and sha256 hashes of a given file.
func ComputeHashes(file string) (md5sum, sha256sum string, err error) {
return globalHasher.ComputeHashes(file)
}
// multiHash bundles the two digests kept per cached file.
type multiHash struct {
	md5sum    string
	sha256sum string
}

// CachingHasher computes MD5+SHA256 digests of files and memoizes them by
// path. Safe for concurrent use.
// NOTE(review): entries are never invalidated, so a file modified after
// first being hashed keeps returning stale digests.
type CachingHasher struct {
	hashes map[string]*multiHash
	// mux guards hashes. Held by value (the zero value is ready to use —
	// no need for the pointer the original allocated); CachingHasher must
	// therefore not be copied.
	mux sync.RWMutex
}

// NewCachingHasher returns an empty, ready-to-use hash cache.
func NewCachingHasher() *CachingHasher {
	return &CachingHasher{
		hashes: map[string]*multiHash{},
	}
}

// readCache returns the cached digests for file, if present.
func (h *CachingHasher) readCache(file string) (*multiHash, bool) {
	h.mux.RLock()
	defer h.mux.RUnlock()
	hashes, ok := h.hashes[file]
	return hashes, ok
}

// writeCache stores the digests for file, overwriting any previous entry.
func (h *CachingHasher) writeCache(file string, hashes *multiHash) {
	h.mux.Lock()
	defer h.mux.Unlock()
	h.hashes[file] = hashes
}

// ComputeHashes computes the md5 and sha256 hashes of a given file in a
// single pass over its contents, caching the result by path. Failed reads
// are not cached.
func (h *CachingHasher) ComputeHashes(file string) (md5sum, sha256sum string, err error) {
	if hashes, ok := h.readCache(file); ok {
		return hashes.md5sum, hashes.sha256sum, nil
	}
	// os.Open is OpenFile(name, O_RDONLY, 0): the original's 0666 mode was
	// dead (mode only matters with O_CREATE).
	f, err := os.Open(file)
	if err != nil {
		return
	}
	defer f.Close()
	h5 := md5.New()
	h256 := sha256.New()
	// Feed both hashes from a single read of the file.
	if _, err = io.Copy(io.MultiWriter(h5, h256), f); err != nil {
		return
	}
	md5sum = hex.EncodeToString(h5.Sum(nil))
	sha256sum = hex.EncodeToString(h256.Sum(nil))
	h.writeCache(file, &multiHash{
		md5sum:    md5sum,
		sha256sum: sha256sum,
	})
	return
}

172
filedetection/iterator.go Normal file
View File

@@ -0,0 +1,172 @@
package filedetection
import (
"io"
"sync"
"github.com/targodan/go-errors"
)
// Iterator is a pull-based stream of Files. Next returns io.EOF when the
// stream is exhausted; Close releases any underlying resources.
type Iterator interface {
	Next() (File, error)
	Close() error
}

// concatIterator yields the contents of several iterators sequentially,
// advancing i as each child is exhausted.
type concatIterator struct {
	i         int
	iterators []Iterator
}
// concat merges two iterators into one that yields all of it1's files, then
// all of it2's. nil arguments are treated as empty, and existing
// concatIterators are flattened rather than nested.
// NOTE(review): flattening assumes combination happens before iteration
// starts — prepending to a partially-consumed concatIterator would shift
// its index.
func concat(it1 Iterator, it2 Iterator) Iterator {
	if it1 == nil {
		return it2
	}
	if it2 == nil {
		return it1
	}
	cit1, ok1 := it1.(*concatIterator)
	cit2, ok2 := it2.(*concatIterator)
	if ok1 && ok2 {
		cit1.iterators = append(cit1.iterators, cit2.iterators...)
		return cit1
	}
	if ok1 {
		cit1.iterators = append(cit1.iterators, it2)
		return cit1
	}
	if ok2 {
		// BUG FIX: it1 must be consumed *before* cit2's existing children;
		// the original appended it1 to the end, reversing the order.
		cit2.iterators = append([]Iterator{it1}, cit2.iterators...)
		return cit2
	}
	return &concatIterator{
		i:         0,
		iterators: []Iterator{it1, it2},
	}
}

// Concat folds any number of iterators into a single sequential Iterator
// (nil inputs are skipped; the result is nil if all inputs are nil).
func Concat(iterators ...Iterator) Iterator {
	var ret Iterator
	for _, it := range iterators {
		ret = concat(ret, it)
	}
	return ret
}
// Next yields the next file, advancing through the child iterators in
// order; io.EOF is returned only after every child is exhausted.
func (it *concatIterator) Next() (File, error) {
	for it.i < len(it.iterators) {
		f, err := it.iterators[it.i].Next()
		if err != io.EOF {
			return f, err
		}
		// Current child exhausted — move on to the next one.
		it.i++
	}
	return nil, io.EOF
}

// Close closes every child iterator, combining all errors into one.
func (it *concatIterator) Close() error {
	var err error
	for _, sub := range it.iterators {
		err = errors.NewMultiError(err, sub.Close())
	}
	return err
}
// concurrentIterator multiplexes several iterators: one consumer goroutine
// per child pushes entries into the shared channel c; wg tracks those
// goroutines so the channel can be closed once all of them finish.
type concurrentIterator struct {
	iterators []Iterator
	c         chan *nextEntry
	wg        *sync.WaitGroup
	closed    bool
}
// concurrent merges two iterators so their files are produced concurrently
// and interleaved on one channel. nil arguments are treated as empty.
// NOTE(review): appending to an already-running concurrentIterator calls
// wg.Add after the closing goroutine may already be in (or past) wg.Wait —
// a WaitGroup reuse hazard; combine iterators before consuming any of them.
func concurrent(it1 Iterator, it2 Iterator) Iterator {
	if it1 == nil {
		return it2
	}
	if it2 == nil {
		return it1
	}
	cit1, ok1 := it1.(*concurrentIterator)
	cit2, ok2 := it2.(*concurrentIterator)
	if ok1 && ok2 {
		// Each concurrentIterator already owns its own channel and closer
		// goroutine; their children cannot be re-homed safely.
		panic("cannot combine two concurrent iterators")
	}
	if ok1 {
		cit1.iterators = append(cit1.iterators, it2)
		cit1.wg.Add(1)
		go cit1.consume(len(cit1.iterators) - 1)
		return cit1
	}
	if ok2 {
		cit2.iterators = append(cit2.iterators, it1)
		cit2.wg.Add(1)
		go cit2.consume(len(cit2.iterators) - 1)
		return cit2
	}
	cit := &concurrentIterator{
		iterators: []Iterator{it1, it2},
		c:         make(chan *nextEntry),
		wg:        new(sync.WaitGroup),
	}
	cit.wg.Add(2)
	go cit.consume(0)
	go cit.consume(1)
	// Close the output channel once every consumer goroutine is done.
	go func() {
		cit.wg.Wait()
		close(cit.c)
	}()
	return cit
}

// Concurrent folds any number of iterators into one concurrently-producing
// Iterator (nil inputs are skipped; the result is nil if all are nil).
func Concurrent(iterators ...Iterator) Iterator {
	var cit Iterator
	for _, it := range iterators {
		cit = concurrent(cit, it)
	}
	return cit
}
// consume drains child iterator i onto the shared channel until io.EOF,
// forwarding non-EOF errors alongside their files.
// NOTE(review): the send can block forever if the consumer stops calling
// Next after Close — potential goroutine leak; a select on a done channel
// would fix it.
func (it *concurrentIterator) consume(i int) {
	defer it.wg.Done()
	for {
		f, err := it.iterators[i].Next()
		if err == io.EOF {
			break
		}
		it.c <- &nextEntry{
			File: f,
			Err:  err,
		}
	}
}

// Next returns the next file produced by any child, or io.EOF once the
// shared channel is closed (all children exhausted) or Close was called.
func (it *concurrentIterator) Next() (File, error) {
	if it.closed {
		return nil, io.EOF
	}
	next := <-it.c
	if next == nil {
		return nil, io.EOF
	}
	return next.File, next.Err
}

// Close closes every child iterator, combining their errors. Idempotent.
// NOTE(review): closed is not synchronized with the consumer goroutines.
func (it *concurrentIterator) Close() error {
	if it.closed {
		return nil
	}
	it.closed = true
	var err error
	for _, iterator := range it.iterators {
		err = errors.NewMultiError(err, iterator.Close())
	}
	return err
}