Commit

Initial commit
naftalibeder committed Aug 15, 2024
0 parents commit 1ecc32e
Showing 41 changed files with 2,381 additions and 0 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/deploy.yml
@@ -0,0 +1,31 @@
name: Deploy

on:
  push:
    branches: ["main", "chore/deploy"]
  pull_request:
    branches: ["main"]

jobs:
  deploy:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.4"
      - name: Run tests
        run: go test -v ./...
      - name: Build
        run: |
          GOOS=darwin GOARCH=amd64 go build -v -o macos
          GOOS=linux GOARCH=amd64 go build -v -o linux
          GOOS=windows GOARCH=amd64 go build -v -o windows
      - name: Create release
        run: |
          gh release create $tag -t $tag
          gh release upload $tag ./macos ./linux ./windows
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          tag: "0.0.1"
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
.DS_Store
150 changes: 150 additions & 0 deletions console/console.go
@@ -0,0 +1,150 @@
package console

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
	"testing"
	"time"

	"golang.org/x/term"
)

type phase = int

const (
	PhaseCounting       phase = 0
	PhaseAnalyzing      phase = 1
	PhaseConvertingImgs phase = 2
	PhaseConvertingVids phase = 3
	PhaseComplete       phase = 4
)

const (
	headerColWd = 5
)

var (
	writer = bufio.NewWriter(os.Stdout)
	termFD int
	width  int
	output map[phase]([][]string)
)

func Start() {
	if testing.Testing() {
		return
	}

	termFD = int(os.Stdin.Fd())
	output = map[int][][]string{}
}

func Update(phase int, entry [][]string) {
	if testing.Testing() {
		return
	}

	wd, _, err := term.GetSize(termFD)
	if err != nil {
		log.Fatal(err)
	}
	width = wd

	retreat := true

	if phase == PhaseCounting {
		output[PhaseCounting] = entry
	} else if phase == PhaseAnalyzing {
		output[PhaseAnalyzing] = entry
	} else if phase == PhaseConvertingImgs {
		output[PhaseConvertingImgs] = entry
	} else if phase == PhaseConvertingVids {
		output[PhaseConvertingVids] = entry
	} else if phase == PhaseComplete {
		output[PhaseComplete] = entry
		retreat = false
	}

	print(phase, retreat)
}

func print(phase int, retreat bool) {
	checkIfCompleted := func(p int) string {
		if phase >= p {
			return "[x]"
		} else {
			return "[ ]"
		}
	}

	rows := [][]string{}
	rows = append(rows, []string{checkIfCompleted(PhaseCounting), "Counting files"})
	rows = append(rows, output[PhaseCounting]...)
	rows = append(rows, []string{checkIfCompleted(PhaseAnalyzing), "Analyzing files"})
	rows = append(rows, output[PhaseAnalyzing]...)
	rows = append(rows, []string{checkIfCompleted(PhaseConvertingImgs), "Converting images"})
	rows = append(rows, output[PhaseConvertingImgs]...)
	rows = append(rows, []string{checkIfCompleted(PhaseConvertingVids), "Converting videos"})
	rows = append(rows, output[PhaseConvertingVids]...)
	rows = append(rows, []string{checkIfCompleted(PhaseComplete), "Complete"})
	rows = append(rows, output[PhaseComplete]...)

	write(rows, retreat)
}

func write(rows [][]string, retreat bool) {
	lineCt := 0
	widths := []int{headerColWd, width - headerColWd - 1}

	// Functions.

	addNewLn := func() {
		start := "\r"     // Return the cursor to the start of the line.
		clear := "\033[K" // Clear from the cursor to the end of the line.
		fmt.Fprint(writer, "\n"+start+clear)
		lineCt++
	}
	addRowDivider := func() {
		fmt.Fprint(writer, "+"+strings.Repeat("-", width-2)+"+")
		addNewLn()
	}
	addCellDivider := func(i int, t string) {
		ct := widths[i] - len(t)
		if ct >= 2 {
			fmt.Fprint(writer, strings.Repeat(" ", ct-1))
		}
	}

	// Content.

	addRowDivider()
	for _, cols := range rows {
		if cols[0] == "-" {
			addRowDivider()
		} else {
			fmt.Fprint(writer, "| ")
			for i, cell := range cols {
				fmt.Fprint(writer, cell)
				addCellDivider(i, cell)
			}
			fmt.Fprint(writer, "|")
			addNewLn()
		}
	}
	addRowDivider()

	// Print and clean up.

	writer.Flush()

	if retreat {
		// Move the cursor back up over everything just printed, so the next
		// update redraws the table in place.
		fmt.Printf("\033[%dA", lineCt)
	}
}

func GetElapsedStr(start time.Time) string {
	return time.Since(start).Truncate(time.Second).String()
}
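
For context, a minimal usage sketch of the package above, driven strictly through the API shown in this file (Start, Update, the Phase* constants, and GetElapsedStr). The caller package, the file count, and the progress strings are illustrative and not part of this commit.

package main

import (
	"fmt"
	"time"

	"detaku/console"
)

func main() {
	// Initialize terminal state (a no-op when running under `go test`).
	console.Start()

	// Each Update call replaces the stored rows for one phase, redraws the
	// whole checklist, and (for non-final phases) moves the cursor back up
	// so the next call overwrites it in place.
	start := time.Now()
	console.Update(console.PhaseCounting, [][]string{
		{"", "- In progress..."},
	})

	// ...counting work happens here...

	console.Update(console.PhaseCounting, [][]string{
		{"", fmt.Sprintf("- Found %d files", 42)},
		{"", "- " + console.GetElapsedStr(start) + " elapsed"},
	})
}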
168 changes: 168 additions & 0 deletions detaku/analyzeDir.go
@@ -0,0 +1,168 @@
package detaku

import (
	"fmt"
	"io/fs"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"detaku/console"
	"detaku/types"
	"detaku/utils"
)

type AnalyzeDirResult struct {
	ImgFileInfoMap   types.FileInfoMap
	VidFileInfoMap   types.FileInfoMap
	SupplFileInfoMap types.FileInfoMap
}

type AnalyzeFileJob struct {
	Path string
}

type AnalyzeFileResult struct {
	Path          string
	MediaKind     types.MediaKind
	MediaFileInfo types.FileInfo
	SupplFileInfo types.FileInfo
	Ext           string
	Err           error
}

func analyzeDir(srcDir string) (AnalyzeDirResult, error) {
	// Get total file count.

	sectionStart := time.Now()
	console.Update(console.PhaseCounting, [][]string{
		{"", "- In progress (may take several minutes)..."},
	})

	out, err := exec.Command("bash", "-c", fmt.Sprintf("find '%s' -type f | wc -l", srcDir)).CombinedOutput()
	if err != nil {
		return AnalyzeDirResult{}, fmt.Errorf("failed to get file count: %s, %s", string(out), err)
	}
	totalFileCt, err := strconv.Atoi(strings.TrimSpace(string(out)))
	if err != nil {
		return AnalyzeDirResult{}, fmt.Errorf("failed to convert file count to int: %s", err)
	}

	console.Update(console.PhaseCounting, [][]string{
		{"", fmt.Sprintf("- Found %d files", totalFileCt)},
		{"", "- " + console.GetElapsedStr(sectionStart) + " elapsed"},
	})

	// Get a more precise list of usable files.

	usableFilesMap := map[string]bool{}
	_ = filepath.WalkDir(srcDir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if d.IsDir() {
			return nil
		}

		fileName := d.Name()
		if strings.HasPrefix(fileName, ".") {
			return nil
		}

		usableFilesMap[path] = true
		return nil
	})

	// Set up worker pool to handle file analysis jobs.

	jobCt := len(usableFilesMap)
	jobs := make(chan AnalyzeFileJob, jobCt)
	results := make(chan AnalyzeFileResult, jobCt)

	// Initialize all workers.
	const workerCt = 10
	for i := 0; i < workerCt; i++ {
		go runAnalyzeFileJob(jobs, results)
	}
	defer close(jobs)

	// Populate jobs.
	for path := range usableFilesMap {
		job := AnalyzeFileJob{
			Path: path,
		}
		jobs <- job
	}

	// Read results from file analysis.

	sectionStart = time.Now()

	imgFileInfoMap := types.FileInfoMap{}
	imgExtCtMap := types.ExtCtMap{}

	vidFileInfoMap := types.FileInfoMap{}
	vidExtCtMap := types.ExtCtMap{}
	vidTotalDurationSec := 0
	vidTotalDuration := ""
	vidsNeedReEncodeCt := 0

	// A lookup table recording the existence of each supplementary json file.
	// Each file is keyed by its full path.
	var supplFileInfoMap = types.FileInfoMap{}

	walkedFileCt := 0
	for i := 0; i < jobCt; i++ {
		result := <-results
		walkedFileCt++

		// Add result to counter maps.

		if result.MediaKind == types.Image {
			imgFileInfoMap[result.Path] = result.MediaFileInfo
			imgExtCtMap[result.Ext]++
		} else if result.MediaKind == types.Video {
			vidFileInfoMap[result.Path] = result.MediaFileInfo
			vidExtCtMap[result.Ext]++
			vidTotalDurationSec += int(result.MediaFileInfo.VidInfo.DurationSec)
			dur, _ := time.ParseDuration(fmt.Sprintf("%ds", vidTotalDurationSec))
			vidTotalDuration = dur.String()
			if result.MediaFileInfo.VidInfo.NeedsReEncode {
				vidsNeedReEncodeCt++
			}
		}
		supplFileInfoMap[result.Path] = result.SupplFileInfo

		// Tell us about it.

		imgExtsSorted := utils.SortedListFromCt(imgExtCtMap)
		imgExtsDisp := ""
		if len(imgExtCtMap) > 0 {
			imgExtsDisp = fmt.Sprintf("(%s)", imgExtsSorted)
		}

		vidExtsSorted := utils.SortedListFromCt(vidExtCtMap)
		vidExtsDisp := ""
		if len(vidExtCtMap) > 0 {
			vidExtsDisp = fmt.Sprintf("(%s)", vidExtsSorted)
		}

		console.Update(console.PhaseAnalyzing, [][]string{
			{"", fmt.Sprintf("- Analyzed %d/%d files", walkedFileCt, totalFileCt)},
			{"", fmt.Sprintf("- Images: %d %s", len(imgFileInfoMap), imgExtsDisp)},
			{"", fmt.Sprintf("- Videos: %d %s (%s, %d need re-encoding)", len(vidFileInfoMap), vidExtsDisp, vidTotalDuration, vidsNeedReEncodeCt)},
			{"", fmt.Sprintf("- Supplementary files: %d", len(supplFileInfoMap))},
			{"", "- " + console.GetElapsedStr(sectionStart) + " elapsed"},
		})
	}

	result := AnalyzeDirResult{
		ImgFileInfoMap:   imgFileInfoMap,
		VidFileInfoMap:   vidFileInfoMap,
		SupplFileInfoMap: supplFileInfoMap,
	}
	return result, nil
}
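
The worker function runAnalyzeFileJob is defined in a file that is not part of this excerpt. Purely as an illustration of the fan-out/fan-in contract the loop above relies on — each worker drains the jobs channel and sends exactly one result per job, since analyzeDir reads exactly jobCt results — a hypothetical worker could be shaped like this; it is not the repository's actual implementation.

// Hypothetical sketch only: the real runAnalyzeFileJob lives outside this
// hunk. It just shows the channel contract analyzeDir assumes.
func runAnalyzeFileJob(jobs <-chan AnalyzeFileJob, results chan<- AnalyzeFileResult) {
	for job := range jobs {
		// Placeholder: the actual worker inspects the file at job.Path and
		// fills in MediaKind, MediaFileInfo, SupplFileInfo, Ext, and Err.
		results <- AnalyzeFileResult{
			Path: job.Path,
		}
	}
}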
