add missing link cleanup subcommand.

Stephen McQuay 2016-07-09 21:40:14 -07:00
parent ceb4206787
commit 1abff34b3d
No known key found for this signature in database
GPG Key ID: 1ABF428F71BAFC3D
4 changed files with 118 additions and 2 deletions

@@ -111,6 +111,31 @@ func Parse(in <-chan string) <-chan Media {
	return out
}

// MissingLink detects whether each value coming from medias is a duplicate
// file rather than a hard link to the content store.
func MissingLink(medias <-chan Media, root string) (<-chan Media, <-chan error) {
	out := make(chan Media)
	errs := make(chan error)
	go func() {
		for m := range medias {
			var d, c os.FileInfo
			var err error
			if d, err = os.Stat(m.Path); err != nil {
				// Report and skip; without both stats we can't tell
				// whether this file is already linked.
				errs <- err
				continue
			}
			if c, err = os.Stat(m.Content(root)); err != nil {
				errs <- err
				continue
			}
			if !os.SameFile(d, c) {
				out <- m
			}
		}
		close(errs)
		close(out)
	}()
	return out, errs
}

// Move calls Move on each Media on input chan. It is the first step in the
// pipeline after fan-in.
func Move(in <-chan Media, root string) <-chan error {
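
For context on the check above: os.SameFile compares file identity (device and inode on Unix), not file contents, which is what lets MissingLink tell a genuine hard link into the content store apart from a byte-for-byte copy. A minimal, self-contained sketch of that behavior (the file names and temp dir are invented for illustration):

```go
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
)

func main() {
	// Throwaway demo files in a temp dir.
	dir, err := ioutil.TempDir("", "samefile")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)

	orig := filepath.Join(dir, "orig")
	link := filepath.Join(dir, "link")
	dup := filepath.Join(dir, "dup")

	// One real file, a hard link to it, and a byte-identical copy of it.
	if err := ioutil.WriteFile(orig, []byte("same bytes"), 0644); err != nil {
		log.Fatal(err)
	}
	if err := os.Link(orig, link); err != nil {
		log.Fatal(err)
	}
	if err := ioutil.WriteFile(dup, []byte("same bytes"), 0644); err != nil {
		log.Fatal(err)
	}

	oi, _ := os.Stat(orig)
	li, _ := os.Stat(link)
	di, _ := os.Stat(dup)

	fmt.Println(os.SameFile(oi, li)) // true: the link shares the inode
	fmt.Println(os.SameFile(oi, di)) // false: same bytes, different file
}
```

The second, false case is exactly what MissingLink emits on its output channel so that clean can replace the copy with a hard link.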

cmd/am/clean.go (new file, 74 additions)

@@ -0,0 +1,74 @@
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"runtime"
	"sync"

	"mcquay.me/arrange"
)

func clean(dir string) error {
	dateDir := filepath.Join(dir, "date")
	if _, err := os.Stat(dateDir); os.IsNotExist(err) {
		return fmt.Errorf("couldn't find 'date' dir in %q", dir)
	}

	work := arrange.Source(dateDir)

	streams := []<-chan arrange.Media{}
	errs := []<-chan error{}

	workers := runtime.NumCPU()
	if *cores != 0 {
		workers = *cores
	}
	for w := 0; w < workers; w++ {
		s, e := arrange.MissingLink(arrange.Parse(work), dir)
		streams = append(streams, s)
		errs = append(errs, e)
	}

	// Collect errors concurrently; done closes once every worker's error
	// channel has drained.
	var err error
	done := make(chan struct{})
	go func() {
		defer close(done)
		for e := range eMerge(errs) {
			log.Printf("%+v", e)
			err = fmt.Errorf("%v, %v", err, e)
		}
	}()

	for m := range arrange.Merge(streams) {
		log.Printf("%q > %q", m.Path, m.Content(dir))
		if err := os.Remove(m.Path); err != nil {
			log.Printf("%+v", err)
			// If the duplicate couldn't be removed, don't try to link
			// over it.
			continue
		}
		if err := os.Link(m.Content(dir), m.Path); err != nil {
			log.Printf("%+v", err)
		}
	}

	<-done
	return err
}

func eMerge(cs []<-chan error) <-chan error {
	out := make(chan error)
	var wg sync.WaitGroup

	output := func(c <-chan error) {
		for n := range c {
			out <- n
		}
		wg.Done()
	}

	// Register every channel before any forwarding goroutine can call Done.
	wg.Add(len(cs))
	for _, c := range cs {
		go output(c)
	}

	go func() {
		wg.Wait()
		close(out)
	}()
	return out
}
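
eMerge is the piece that decides when the combined error stream ends, so it is worth pinning down its fan-in behavior: every error from every input channel should be forwarded, and the merged channel should close only after all inputs close. A test-style sketch of that check follows; the file (a hypothetical clean_test.go in the same package) is not part of this commit:

```go
package main

import (
	"errors"
	"testing"
)

// TestEMerge feeds three already-closed error channels into eMerge and
// checks that all of their errors arrive on the single merged channel.
func TestEMerge(t *testing.T) {
	cs := make([]<-chan error, 0, 3)
	for i := 0; i < 3; i++ {
		c := make(chan error, 1)
		c <- errors.New("boom")
		close(c)
		cs = append(cs, c)
	}

	got := 0
	for range eMerge(cs) {
		got++
	}
	if got != 3 {
		t.Fatalf("forwarded %d errors, want 3", got)
	}
}
```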

@@ -7,8 +7,9 @@ import (
	"os"
)

const usage = "am <arr|clean|help> [flags]"
const arrUsage = "am arr [-h|-cores=N] <in> <out>"
const cleanUsage = "am clean [-h|-cores=N] <directory>"

type stats struct {
	total int
@@ -41,6 +42,17 @@ func main() {
			fmt.Fprintf(os.Stderr, "problem arranging media: %v\n", err)
			os.Exit(1)
		}
	case "c", "cl", "clean":
		args := flag.Args()
		if len(args) != 1 {
			fmt.Fprintf(os.Stderr, "%s\n", cleanUsage)
			os.Exit(1)
		}
		dir := args[0]
		if err := clean(dir); err != nil {
			fmt.Fprintf(os.Stderr, "problem cleaning: %v\n", err)
			os.Exit(1)
		}
	default:
		fmt.Fprintf(os.Stderr, "%s\n", usage)
		os.Exit(1)

@@ -25,7 +25,7 @@ func (m Media) Move(root string) error {
	}
	defer f.Close()

	content := m.Content(root)

	if _, err := os.Stat(content); !os.IsNotExist(err) {
		return Dup{content}
@@ -67,3 +67,8 @@ func (m Media) Move(root string) error {
	// return os.Symlink(rel, name)
	return os.Link(content, name)
}

// Content returns the content-address path starting at root.
func (m Media) Content(root string) string {
	return filepath.Join(root, "content", m.Hash[:2], m.Hash[2:]+m.Extension)
}
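
To make the content-addressed layout concrete: Content peels the first two characters off the hash as a directory name and re-attaches the original extension, so everything fans out under <root>/content/. A small sketch (the hash, extension, and root path are made up; it assumes Hash and Extension are the plain exported string fields used above):

```go
package main

import (
	"fmt"

	"mcquay.me/arrange"
)

func main() {
	// Hypothetical values, just to show where Content points.
	m := arrange.Media{Hash: "deadbeefcafe", Extension: ".jpg"}
	fmt.Println(m.Content("/srv/media"))
	// Output: /srv/media/content/de/adbeefcafe.jpg
}
```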