Browse Source

rearrange, rename, refactor

recursive
Stephen McQuay 4 years ago
parent
commit
c5cd77676e
No known key found for this signature in database GPG Key ID: 1ABF428F71BAFC3D
3 changed files with 204 additions and 188 deletions
  1. +84
    -80
      check.go
  2. +120
    -0
      hash.go
  3. +0
    -108
      main.go

+ 84
- 80
check.go View File

@@ -15,53 +15,62 @@ import (
"sync"
)
// input contains a file-ish piece of work to perform
type input struct {
// f is the stream of checksum lines to read (an opened file or stdin).
f io.ReadCloser
// err records a failure to open the source; consumers check it before using f.
err error
}
// checksum contains the path to a file, a way to hash it, and the results of
// the hash
type checksum struct {
// filename is the path of the file to hash and verify.
filename string
// hash is the algorithm instance, chosen from the digest length in parseCS.
hash hash.Hash
// checksum is the expected hex digest read from the checksum file.
checksum string
// err carries parse or scan failures through the jobs channel.
err error
}
// parseCS parses one checksum-file line ("<hex digest> <filename>") and
// infers the hash algorithm from the digest's hex length.
func parseCS(line string) (checksum, error) {
	fields := strings.Fields(line)
	if len(fields) != 2 {
		return checksum{}, fmt.Errorf("unexpected content: %d != 2", len(fields))
	}
	digest, name := fields[0], fields[1]
	// The digest length (in hex characters) identifies the algorithm.
	var h hash.Hash
	switch len(digest) {
	case 32:
		h = md5.New()
	case 40:
		h = sha1.New()
	case 64:
		h = sha256.New()
	case 128:
		h = sha512.New()
	default:
		return checksum{}, fmt.Errorf("unknown format: %q", line)
	}
	return checksum{filename: name, hash: h, checksum: digest}, nil
}
// check is the entry point for -c operation.
func check(args []string) chan error {
jobs := make(chan checksum)
// NOTE(review): the "type input" declaration below appears to be a
// removed diff line interleaved into this function by the page render;
// confirm against the actual commit — as shown this does not compile.
type input struct {
f io.ReadCloser
err error
}
// Producer: turn each argument (or stdin) into parsed checksum jobs.
go func() {
for i := range toInput(args) {
if i.err != nil {
// Propagate the open failure as a job and stop producing.
jobs <- checksum{err: i.err}
break
}
s := bufio.NewScanner(i.f)
for s.Scan() {
jobs <- parseCS(s.Text())
}
i.f.Close()
// Surface any scanner error as a final job for this source.
if s.Err() != nil {
jobs <- checksum{err: s.Err()}
}
}
close(jobs)
}()
// NOTE(review): "type work struct" below is another interleaved removed
// line; its closing brace is missing here.
type work struct {
cs checksum
err error
// Fan verification out across *ngo workers and fan the errors back in.
results := []<-chan error{}
for w := 0; w < *ngo; w++ {
results = append(results, verify(jobs))
}
return merge(results)
}
// NOTE(review): this span interleaves the removed "streams" signature with
// the added "toInput" one from a rendered diff; only one line of each
// duplicated pair belongs in the real file.
func streams(files []string) chan input {
// toInput converts args to a stream of input
func toInput(args []string) chan input {
r := make(chan input)
go func() {
for _, name := range files {
for _, name := range args {
f, err := os.Open(name)
r <- input{f, err}
}
// With no filename arguments, fall back to reading stdin.
if len(files) == 0 {
if len(args) == 0 {
r <- input{f: os.Stdin}
}
close(r)
@ -70,37 +79,59 @@ func streams(files []string) chan input {
return r
}
// NOTE(review): the two lines below are the removed header of the old
// "check" implementation, truncated by the diff hunk above.
func check(files []string) chan error {
jobs := make(chan work)
// parseCS picks apart a line from a checksum file and returns everything
// needed to perform a checksum. Problems are reported in the returned
// value's err field rather than as a separate error.
func parseCS(line string) checksum {
	fields := strings.Fields(line)
	if len(fields) != 2 {
		return checksum{err: fmt.Errorf("unexpected content: %d != 2", len(fields))}
	}
	digest, name := fields[0], fields[1]
	// The digest length (in hex characters) identifies the algorithm.
	var h hash.Hash
	switch len(digest) {
	case 32:
		h = md5.New()
	case 40:
		h = sha1.New()
	case 64:
		h = sha256.New()
	case 128:
		h = sha512.New()
	default:
		return checksum{err: fmt.Errorf("unknown format: %q", line)}
	}
	return checksum{filename: name, hash: h, checksum: digest}
}
// verify does grunt work of verifying a stream of jobs (filenames).
func verify(jobs chan checksum) chan error {
r := make(chan error)
go func() {
for stream := range streams(files) {
if stream.err != nil {
jobs <- work{err: stream.err}
break
for job := range jobs {
if job.err != nil {
log.Printf("%+v", job.err)
continue
}
s := bufio.NewScanner(stream.f)
for s.Scan() {
cs, err := parseCS(s.Text())
jobs <- work{cs, err}
fspan>, err := os.Open(job.filename)
if err != nil {
r <- err
continue
}
stream.f.Close()
if s.Err() != nil {
jobs <- work{err: s.Err()}
if _, err := io.Copy(job.hash, f); err != nil {
r <- err
continue
}
f.Close()
if fmt.Sprintf("%x", job.hash.Sum(nil)) != job.checksum {
r <- fmt.Errorf("%s: bad", job.filename)
}
}
close(jobs)
close(r)
}()
results := []<-chan error{}
for w := 0; w < *ngo; w++ {
results = append(results, verify(jobs))
}
return merge(results)
return r
}
// merge is simple error fan-in
// NOTE(review): the body of merge is elided by the diff hunk below; only
// its opening lines and closing tail are visible here.
func merge(cs []<-chan error) chan error {
out := make(chan error)
@ -124,30 +155,3 @@ func merge(cs []<-chan error) chan error {
}()
return out
}
// verify re-hashes the file named in each job and compares the digest to
// the expected checksum, reporting open/copy errors and mismatches on the
// returned channel, which is closed once jobs drains.
func verify(jobs chan work) chan error {
	r := make(chan error)
	go func() {
		for job := range jobs {
			if job.err != nil {
				// Parse/scan failures were attached upstream; log and move on.
				log.Printf("%+v", job.err)
				continue
			}
			f, err := os.Open(job.cs.filename)
			if err != nil {
				r <- fmt.Errorf("open: %v", err)
				continue
			}
			// Close the file whether or not the copy succeeds; the original
			// leaked the descriptor when io.Copy returned an error.
			_, err = io.Copy(job.cs.hash, f)
			f.Close()
			if err != nil {
				r <- err
				continue
			}
			if fmt.Sprintf("%x", job.cs.hash.Sum(nil)) != job.cs.checksum {
				r <- fmt.Errorf("%s: bad", job.cs.filename)
			}
		}
		close(r)
	}()
	return r
}

+ 120
- 0
hash.go View File

@@ -0,0 +1,120 @@
package main
import (
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"crypto/sha512"
"fmt"
"hash"
"io"
"os"
"sync"
)
// result is a message or error payload
type result struct {
// msg holds a formatted "<hex digest> <filename>" line on success.
msg string
// err holds an open/copy failure for the corresponding file.
err error
}
// hashr exists so that we can make a thing that can return valid hash.Hash
// interfaces.
type hashr func() hash.Hash
// hsh figures out which hash algo to use (from the -a flag) and
// distributes the work of hashing files across *ngo workers. With no
// files it hashes stdin synchronously, prints the digest, and returns a
// nil channel — callers must tolerate nil.
func hsh(files []string) chan result {
	var h hashr
	switch *algo {
	case "sha1", "1":
		h = sha1.New
	case "sha256", "256":
		h = sha256.New
	case "sha512", "512":
		h = sha512.New
	case "md5":
		h = md5.New
	default:
		r := make(chan result)
		go func() {
			r <- result{err: fmt.Errorf("unsupported algorithm: %v", *algo)}
			// Close so consumers ranging over the channel terminate instead
			// of blocking forever after the single error (original never
			// closed r on this path).
			close(r)
		}()
		return r
	}
	if len(files) == 0 {
		// No file arguments: hash stdin in-line.
		hsh := h()
		if _, err := io.Copy(hsh, os.Stdin); err != nil {
			fmt.Fprintf(os.Stderr, "%v\n", err)
			os.Exit(1)
		}
		fmt.Printf("%x -\n", hsh.Sum(nil))
		return nil
	}
	// Feed one job per filename, then fan the workers' results back in.
	jobs := make(chan checksum)
	go func() {
		for _, name := range files {
			jobs <- checksum{filename: name}
		}
		close(jobs)
	}()
	res := make([]<-chan result, 0, *ngo)
	for w := 0; w < *ngo; w++ {
		res = append(res, compute(h, jobs))
	}
	return rmerge(res)
}
// compute is the checksumming workhorse: it hashes each file named by an
// incoming job with a hash built by h, reusing one hash state per worker,
// and emits "<hex digest> <filename>" results (or errors) on the returned
// channel, which is closed when jobs drains.
func compute(h hashr, jobs chan checksum) chan result {
	sum := h()
	out := make(chan result)
	go func() {
		defer close(out)
		for job := range jobs {
			f, err := os.Open(job.filename)
			if err != nil {
				out <- result{err: err}
				continue
			}
			// Reset the shared hash state before each file.
			sum.Reset()
			_, err = io.Copy(sum, f)
			f.Close()
			if err != nil {
				out <- result{err: err}
				continue
			}
			out <- result{msg: fmt.Sprintf("%x %s", sum.Sum(nil), job.filename)}
		}
	}()
	return out
}
// rmerge implements fan-in: it funnels every value from the given result
// channels onto one output channel, closing it once all inputs drain.
func rmerge(cs []<-chan result) chan result {
	merged := make(chan result)
	var wg sync.WaitGroup
	wg.Add(len(cs))
	for _, ch := range cs {
		// Pass ch explicitly so each goroutine drains its own channel.
		go func(ch <-chan result) {
			defer wg.Done()
			for v := range ch {
				merged <- v
			}
		}(ch)
	}
	// Close the output only after every forwarder has finished.
	go func() {
		wg.Wait()
		close(merged)
	}()
	return merged
}

+ 0
- 108
main.go View File

@@ -1,17 +1,10 @@
package main
import (
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"crypto/sha512"
"flag"
"fmt"
"hash"
"io"
"os"
"runtime"
"sync"
)
// algo selects the hash algorithm via the -a flag; hsh maps its value
// ("md5", "sha1"/"1", "sha256"/"256", "sha512"/"512") to a constructor.
var algo = flag.String("a", "sha1", "algorithm to use")
@@ -46,104 +39,3 @@ func main() {
}
}
}
// hashr is a constructor for fresh hash.Hash values, letting hsh hand each
// compute worker its own hash state.
type hashr func() hash.Hash
// hsh figures out which hash algorithm to use (from the -a flag) and
// distributes the work of hashing files across *ngo workers. With no
// files it hashes stdin synchronously, prints the digest, and returns a
// nil channel — callers must tolerate nil.
func hsh(files []string) chan result {
	var h hashr
	switch *algo {
	case "sha1", "1":
		h = sha1.New
	case "sha256", "256":
		h = sha256.New
	case "sha512", "512":
		h = sha512.New
	case "md5":
		h = md5.New
	default:
		r := make(chan result)
		go func() {
			r <- result{err: fmt.Errorf("unsupported algorithm: %v", *algo)}
			// Close so consumers ranging over the channel terminate instead
			// of blocking forever after the single error (original never
			// closed r on this path).
			close(r)
		}()
		return r
	}
	if len(files) == 0 {
		// No file arguments: hash stdin in-line.
		hsh := h()
		if _, err := io.Copy(hsh, os.Stdin); err != nil {
			fmt.Fprintf(os.Stderr, "%v\n", err)
			os.Exit(1)
		}
		fmt.Printf("%x -\n", hsh.Sum(nil))
		return nil
	}
	// Feed one job per filename, then fan the workers' results back in.
	jobs := make(chan work)
	go func() {
		for _, name := range files {
			jobs <- work{cs: checksum{filename: name}}
		}
		close(jobs)
	}()
	res := make([]<-chan result, 0, *ngo)
	for w := 0; w < *ngo; w++ {
		res = append(res, compute(h, jobs))
	}
	return rmerge(res)
}
// result pairs a formatted "<hex digest> <filename>" message with any
// error encountered while hashing that file; compute sets exactly one.
type result struct {
msg string
err error
}
// compute hashes each file named by an incoming job with a hash built by
// h, reusing one hash state per worker, and emits "<hex digest> <filename>"
// results (or errors) on the returned channel, closed when jobs drains.
func compute(h hashr, jobs chan work) chan result {
	sum := h()
	out := make(chan result)
	go func() {
		defer close(out)
		for j := range jobs {
			f, err := os.Open(j.cs.filename)
			if err != nil {
				out <- result{err: err}
				continue
			}
			// Reset the shared hash state before each file.
			sum.Reset()
			_, err = io.Copy(sum, f)
			f.Close()
			if err != nil {
				out <- result{err: err}
				continue
			}
			out <- result{msg: fmt.Sprintf("%x %s", sum.Sum(nil), j.cs.filename)}
		}
	}()
	return out
}
// rmerge implements fan-in: it funnels every value from the given result
// channels onto one output channel, closing it once all inputs drain.
func rmerge(cs []<-chan result) chan result {
	merged := make(chan result)
	var wg sync.WaitGroup
	wg.Add(len(cs))
	for _, ch := range cs {
		// Pass ch explicitly so each goroutine drains its own channel.
		go func(ch <-chan result) {
			defer wg.Done()
			for v := range ch {
				merged <- v
			}
		}(ch)
	}
	// Close the output only after every forwarder has finished.
	go func() {
		wg.Wait()
		close(merged)
	}()
	return merged
}

Loading…
Cancel
Save