summaryrefslogtreecommitdiff
path: root/cmd/booktopipeline/main.go
blob: 7254d783f7c7ad70f10324e676060ad48e54f85b (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
// Copyright 2019 Nick White.
// Use of this source code is governed by the GPLv3
// license that can be found in the LICENSE file.

// booktopipeline uploads a book to cloud storage and adds the name
// to a queue ready to be processed by the bookpipeline tool.
package main

import (
	"flag"
	"fmt"
	"log"
	"os"
	"path/filepath"

	"rescribe.xyz/bookpipeline"

	"rescribe.xyz/bookpipeline/internal/pipeline"
)

// usage is the help text printed by flag.Usage; it documents the
// positional arguments and how the destination queue is chosen.
// NOTE: this string is emitted verbatim at runtime, so its wording
// must not be changed casually.
const usage = `Usage: booktopipeline [-c conn] [-t training] [-prebinarised] [-notbinarised] [-v] bookdir [bookname]

Uploads the book in bookdir to the S3 'inprogress' bucket and adds it
to the 'preprocess' or 'wipeonly' SQS queue. The queue to send to is
autodetected based on the number of .jpg and .png files; more .jpg
than .png means it will be presumed to be not binarised, and it will
go to the 'preprocess' queue. The queue can be manually selected by
using the flags -prebinarised (for the wipeonly queue) or
-notbinarised (for the preprocess queue).

If bookname is omitted the last part of the bookdir is used.
`

// NullWriter is an io.Writer that discards everything written to it,
// used to silence the verbose logger when -v is not given.
type NullWriter bool

// Write drops p and reports its full length as successfully written.
func (w NullWriter) Write(p []byte) (n int, err error) {
	n = len(p)
	return n, nil
}

// verboselog receives progress messages; it writes to stdout when -v
// is set and to a NullWriter otherwise (configured in main).
var verboselog *log.Logger

// main parses the command line, uploads the images in bookdir to cloud
// storage, and adds the book name to the preprocess or wipeonly queue.
func main() {
	verbose := flag.Bool("v", false, "Verbose")
	conntype := flag.String("c", "aws", "connection type ('aws' or 'local')")
	wipeonly := flag.Bool("prebinarised", false, "Prebinarised: only preprocessing will be to wipe")
	dobinarise := flag.Bool("notbinarised", false, "Not binarised: all preprocessing will be done including binarisation")
	training := flag.String("t", "", "Training to use (training filename without the .traineddata part)")

	flag.Usage = func() {
		// Fprint, not Fprintf: usage is plain text, not a format
		// string, and must not be interpreted as one (go vet printf).
		fmt.Fprint(flag.CommandLine.Output(), usage)
		flag.PrintDefaults()
	}
	flag.Parse()
	// Exactly bookdir, optionally followed by bookname; anything else
	// is a usage error. (Previously up to 3 args were accepted.)
	if flag.NArg() < 1 || flag.NArg() > 2 {
		flag.Usage()
		return
	}

	bookdir := flag.Arg(0)
	var bookname string
	// An explicit bookname is the second positional argument; fall
	// back to the last path component of bookdir. (The previous
	// NArg() > 2 check meant a supplied bookname was never used.)
	if flag.NArg() > 1 {
		bookname = flag.Arg(1)
	} else {
		bookname = filepath.Base(bookdir)
	}

	if *verbose {
		verboselog = log.New(os.Stdout, "", log.LstdFlags)
	} else {
		// Discard verbose output when -v is not set.
		var n NullWriter
		verboselog = log.New(n, "", log.LstdFlags)
	}

	var conn pipeline.Pipeliner
	switch *conntype {
	case "aws":
		conn = &bookpipeline.AwsConn{Region: "eu-west-2", Logger: verboselog}
	case "local":
		conn = &bookpipeline.LocalConn{Logger: verboselog}
	default:
		log.Fatalln("Unknown connection type")
	}
	err := conn.Init()
	if err != nil {
		log.Fatalln("Failed to set up cloud connection:", err)
	}

	// Pick a queue from the jpg/png ratio in bookdir; explicit flags
	// below override the autodetected choice.
	qid := pipeline.DetectQueueType(bookdir, conn)
	if *wipeonly {
		qid = conn.WipeQueueId()
	}
	if *dobinarise {
		qid = conn.PreQueueId()
	}

	verboselog.Println("Checking that all images are valid in", bookdir)
	err = pipeline.CheckImages(bookdir)
	if err != nil {
		log.Fatalln(err)
	}

	verboselog.Println("Uploading images from", bookdir)
	err = pipeline.UploadImages(bookdir, bookname, conn)
	if err != nil {
		log.Fatalln(err)
	}

	// A training name rides along in the queue message, separated by
	// a space, for the pipeline worker to pick up.
	if *training != "" {
		bookname = bookname + " " + *training
	}
	err = conn.AddToQueue(qid, bookname)
	if err != nil {
		log.Fatalln("Error adding book to queue:", err)
	}

	var qname string
	if qid == conn.PreQueueId() {
		qname = "preprocess"
	} else {
		qname = "wipeonly"
	}

	fmt.Println("Uploaded book to queue", qname)
}