package main
import (
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"path"
"strings"
)
// usage is the help text printed by flag.Usage when the program is
// invoked without a URL argument (see main).
const usage = `Usage: iiifdownloader url

Downloads all pages from a IIIF server.
`

// bnfPrefix identifies Gallica (BnF) book URLs, currently the only
// supported IIIF source.
const bnfPrefix = `https://gallica.bnf.fr/ark:/`
// filesAreIdentical reports whether the files named fn1 and fn2 have
// byte-for-byte identical contents. It reads both files fully into
// memory, so it is only suitable for modestly sized files (page images).
func filesAreIdentical(fn1, fn2 string) (bool, error) {
	f1, err := os.Open(fn1)
	if err != nil {
		return false, fmt.Errorf("Error opening file %s: %v\n", fn1, err)
	}
	defer f1.Close()
	b1, err := ioutil.ReadAll(f1)
	if err != nil {
		return false, fmt.Errorf("Error reading file %s: %v\n", fn1, err)
	}

	f2, err := os.Open(fn2)
	if err != nil {
		return false, fmt.Errorf("Error opening file %s: %v\n", fn2, err)
	}
	defer f2.Close()
	b2, err := ioutil.ReadAll(f2)
	if err != nil {
		return false, fmt.Errorf("Error reading file %s: %v\n", fn2, err)
	}

	// Differing lengths can never be identical. Checking this first also
	// fixes two bugs in the original comparison loop: a file that was a
	// prefix of the other compared as "identical", and a longer first
	// file caused an index-out-of-range panic.
	if len(b1) != len(b2) {
		return false, nil
	}
	for i := range b1 {
		if b1[i] != b2[i] {
			return false, nil
		}
	}
	return true, nil
}
// main parses the book URL from the command line, derives the per-page
// IIIF image URL pattern, and downloads pages sequentially into a
// directory named after the book until the server signals the end.
func main() {
	flag.Usage = func() {
		fmt.Fprint(flag.CommandLine.Output(), usage)
		flag.PrintDefaults()
	}
	flag.Parse()
	if flag.NArg() < 1 {
		flag.Usage()
		return
	}

	url := flag.Arg(0)

	var bookdir string
	var pgurlStart, pgurlEnd string
	// Fallback URL pieces for pages that the primary IIIF endpoint
	// refuses to serve (empty when no fallback exists).
	var pgurlAltStart, pgurlAltEnd string
	// noPgNums means the page count is unknown up front, so pages are
	// fetched sequentially until an end-of-book condition is detected.
	var noPgNums bool

	switch {
	case strings.HasPrefix(url, bnfPrefix):
		f := strings.Split(url[len(bnfPrefix):], "/")
		if len(f) < 2 {
			log.Fatalln("Failed to extract BNF book ID from URL")
		}
		bookid := f[0] + "/" + f[1]
		bookdir = f[0] + "-" + f[1]
		pgurlStart = "https://gallica.bnf.fr/iiif/ark:/" + bookid + "/f"
		pgurlEnd = "/full/full/0/native.jpg"
		noPgNums = true
		// BNF don't have all pages available from IIIF, but they do have
		// the missing ones in less good quality from an alternative URL.
		pgurlAltStart = "https://gallica.bnf.fr/ark:/" + bookid + "/f"
		pgurlAltEnd = ".highres"
	default:
		log.Fatalln("Error: generic IIIF downloading not supported yet")
	}

	err := os.MkdirAll(bookdir, 0777)
	if err != nil {
		log.Fatalf("Error creating book dir: %v\n", err)
	}

	if noPgNums {
		pgnum := 0
		for {
			pgnum++
			fmt.Printf("Downloading page %d\n", pgnum)

			fn := path.Join(bookdir, fmt.Sprintf("%04d.jpg", pgnum))
			_, err = os.Stat(fn)
			if err == nil || os.IsExist(err) {
				fmt.Printf("Skipping already present page %d\n", pgnum)
				continue
			}

			u := fmt.Sprintf("%s%d%s", pgurlStart, pgnum, pgurlEnd)
			resp, err := http.Get(u)
			if err != nil {
				log.Fatalf("Error downloading page %d, %s: %v\n", pgnum, u, err)
			}
			switch {
			case resp.StatusCode == http.StatusNotFound:
				resp.Body.Close()
				fmt.Printf("Got 404, assuming end of pages, for page %d, %s\n", pgnum, u)
				return
			case resp.StatusCode != http.StatusOK:
				fmt.Printf("Error downloading page %d, %s: HTTP Code %s\n", pgnum, u, resp.Status)
				// Done with the failed response; close before retrying.
				resp.Body.Close()
				if pgurlAltStart == "" && pgurlAltEnd == "" {
					log.Fatalln("No alternative URL to try, book failed (or ended, hopefully)")
				}
				fmt.Printf("Trying to redownload page %d at lower quality\n", pgnum)
				u = fmt.Sprintf("%s%d%s", pgurlAltStart, pgnum, pgurlAltEnd)
				resp, err = http.Get(u)
				if err != nil {
					log.Fatalf("Error downloading page %d, %s: %v\n", pgnum, u, err)
				}
				if resp.StatusCode != http.StatusOK {
					log.Fatalf("Error downloading page %d, %s: HTTP Code %s\n", pgnum, u, resp.Status)
				}
			}

			f, err := os.Create(fn)
			if err != nil {
				log.Fatalf("Error creating file %s: %v\n", fn, err)
			}
			_, err = io.Copy(f, resp.Body)
			if err != nil {
				log.Fatalf("Error writing file %s: %v\n", fn, err)
			}
			// Close explicitly rather than with defer: deferred closes in
			// this unbounded loop would only run when main returns, leaking
			// one connection and one file handle per page.
			resp.Body.Close()
			f.Close()

			// Check that the last two downloaded files aren't identical, as this
			// can happen when there are no more pages to download.
			if pgnum == 1 {
				continue
			}
			fn2 := path.Join(bookdir, fmt.Sprintf("%04d.jpg", pgnum-1))
			identical, err := filesAreIdentical(fn, fn2)
			if err != nil {
				log.Fatalf("Error checking for files being identical: %v\n", err)
			}
			if identical {
				fmt.Println("Last 2 pages were identical, looks like it's the end of the book")
				if err = os.Remove(fn); err != nil {
					log.Fatalf("Error removing duplicate page %s: %v", fn, err)
				}
				if err = os.Remove(fn2); err != nil {
					log.Fatalf("Error removing duplicate page %s: %v", fn2, err)
				}
				return
			}
		}
	}
}