-
Notifications
You must be signed in to change notification settings - Fork 0
/
parser.go
413 lines (368 loc) · 10.9 KB
/
parser.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
// Copyright 2017 Tomas Machalek <tomas.machalek@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vertigo
import (
"bufio"
"compress/gzip"
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"regexp"
"strings"
"github.com/rs/zerolog/log"
"golang.org/x/text/encoding/charmap"
"golang.org/x/text/transform"
)
var (
	// vertCmdSplit splits a dynamically-generated vertical file
	// specification (an InputFilePath starting with "|") into the
	// command name and its arguments on whitespace.
	vertCmdSplit = regexp.MustCompile(`\s+`)
)
const (
	// channelChunkSize is the number of parsed lines grouped into one
	// send over the producer/consumer channel.
	channelChunkSize = 250000 // changing the value affects performance (10k...300k ~ 15%)

	// logProgressEachNthDefault is the default progress-logging period
	// (in input lines), used when ParserConf.LogProgressEachNth is unset.
	logProgressEachNthDefault = 1000000

	// Identifiers of the line types a vertical file may contain.
	LineTypeToken   = "token"
	LineTypeStruct  = "struct"
	LineTypeIgnored = "ignored"

	// Identifiers of the available structural-attribute accumulator
	// strategies (see createStructAttrAccumulator).
	AccumulatorTypeStack = "stack"
	AccumulatorTypeComb  = "comb"
	AccumulatorTypeNil   = "nil"

	// Names of the input character sets understood by GetCharmapByName.
	CharsetISO8859_1   = "iso-8859-1"
	CharsetISO8859_2   = "iso-8859-2"
	CharsetISO8859_3   = "iso-8859-3"
	CharsetISO8859_4   = "iso-8859-4"
	CharsetISO8859_5   = "iso-8859-5"
	CharsetISO8859_6   = "iso-8859-6"
	CharsetISO8859_7   = "iso-8859-7"
	CharsetISO8859_8   = "iso-8859-8"
	CharsetWindows1250 = "windows-1250"
	CharsetWindows1251 = "windows-1251"
	CharsetWindows1252 = "windows-1252"
	CharsetWindows1253 = "windows-1253"
	CharsetWindows1254 = "windows-1254"
	CharsetWindows1255 = "windows-1255"
	CharsetWindows1256 = "windows-1256"
	CharsetWindows1257 = "windows-1257"
	CharsetWindows1258 = "windows-1258"
	CharsetUTF_8       = "utf-8"
)
// --------------------------------------------------------

// ParserConf contains configuration parameters for
// vertical file parser.
type ParserConf struct {

	// Source vertical file (either a plain text file or a gzip one).
	// A value starting with "|" is interpreted as an external command
	// whose standard output produces the vertical data
	// (see ParseVerticalFile).
	InputFilePath string `json:"inputFilePath"`

	// Encoding is the name of the input character set (e.g. "utf-8",
	// "iso-8859-2"); see GetCharmapByName for accepted values. An
	// empty value is treated as utf-8.
	Encoding string `json:"encoding"`

	// FilterArgs contains filtering rules evaluated against each token
	// via Token.MatchesFilter. NOTE(review): the exact semantics of the
	// nested slices are defined by MatchesFilter (not visible here) —
	// confirm against that implementation.
	FilterArgs [][][]string `json:"filterArgs"`

	// StructAttrAccumulator selects the structural-attribute
	// accumulation strategy: "stack", "comb" or "nil".
	StructAttrAccumulator string `json:"structAttrAccumulator"`

	// LogProgressEachNth defines how often (in input lines) progress
	// is logged; zero or negative means the default of 1000000.
	LogProgressEachNth int `json:"logProgressEachNth"`
}
// LoadConfig loads the configuration from a JSON file.
// In case of an error the program exits with panic.
func LoadConfig(path string) *ParserConf {
	rawData, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}
	conf := new(ParserConf)
	if err := json.Unmarshal(rawData, conf); err != nil {
		panic(err)
	}
	return conf
}
// ------

// structAttrAccumulator collects structural attributes (e.g. <doc>,
// <p>) encountered while traversing a vertical file.
type structAttrAccumulator interface {

	// Begin registers an opening structure element.
	Begin(value *Structure) error

	// End handles a closing element of the named structure and returns
	// the structure it closes (or an error, e.g. on mismatched nesting
	// — NOTE(review): exact error conditions depend on the concrete
	// implementation, confirm against stack/comb/nil variants).
	End(name string) (*Structure, error)

	// GetAttrs returns the currently accumulated structural attributes.
	// NOTE(review): the key format is implementation-defined here —
	// confirm against the concrete accumulators.
	GetAttrs() map[string]string

	// Size returns the current number of accumulated entries.
	Size() int
}
// --------------------------------------------------------

// LineProcessor describes an object able to handle
// Vertigo's parsing events.
type LineProcessor interface {

	// ProcToken is called each time the parser encounters a positional
	// attribute. In case parsing produces an error, it is passed to the
	// function without stopping the whole process.
	// In case the function returns an error, the parser stops
	// (in the simplest case it can be even the error it receives).
	ProcToken(token *Token, line int, err error) error

	// ProcStruct is called each time parser encounters a structure opening
	// element (e.g. <doc>). In case parsing produces an error, it is passed
	// to the function without stopping the whole process.
	// In case the function returns an error, the parser stops.
	ProcStruct(strc *Structure, line int, err error) error

	// ProcStructClose is called each time parser encounters a structure
	// closing element (e.g. </doc>). In case parsing produces an error,
	// it is passed to the function without stopping the whole process.
	// In case the function returns an error, the parser stops.
	ProcStructClose(strc *StructureClose, line int, err error) error
}
// ----

// procItem is a single parsed input line handed from the reading
// goroutine to the processing loop in parseVerticalFromScanner.
type procItem struct {
	// idx is the zero-based line number within the input.
	idx int
	// value holds the parsed representation of the line; the consumer
	// type-switches it over *Token, *Structure and *StructureClose.
	value interface{}
	// err is the parsing error attached to this particular line, if any.
	err error
}
// --------------------------------------------------------

// createStructAttrAccumulator instantiates the structural-attribute
// accumulator identified by ident ("stack", "comb" or "nil"). An
// unknown identifier produces an error.
func createStructAttrAccumulator(ident string) (structAttrAccumulator, error) {
	var acc structAttrAccumulator
	switch ident {
	case AccumulatorTypeStack:
		acc = newStack()
	case AccumulatorTypeComb:
		acc = newStructAttrs()
	case AccumulatorTypeNil:
		acc = newNilStructAttrs()
	default:
		return nil, fmt.Errorf("unknown accumulator type \"%s\"", ident)
	}
	return acc, nil
}
// SupportedCharsets returns a list of names of
// character sets.
func SupportedCharsets() []string {
return []string{CharsetISO8859_2, CharsetUTF_8, CharsetWindows1250}
}
// GetCharmapByName returns a proper Charmap instance based
// on provided encoding name. The name detection is case
// insensitive (e.g. utf-8 is the same as UTF-8). For "utf-8"
// — or an empty name, which is assumed to mean utf-8 — a nil
// Charmap is returned, signalling that no conversion is needed
// (see importString). An unsupported charset name produces an
// error.
func GetCharmapByName(name string) (*charmap.Charmap, error) {
	switch strings.ToLower(name) {
	case CharsetISO8859_1:
		return charmap.ISO8859_1, nil
	case CharsetISO8859_2:
		return charmap.ISO8859_2, nil
	case CharsetISO8859_3:
		return charmap.ISO8859_3, nil
	case CharsetISO8859_4:
		return charmap.ISO8859_4, nil
	case CharsetISO8859_5:
		return charmap.ISO8859_5, nil
	case CharsetISO8859_6:
		return charmap.ISO8859_6, nil
	case CharsetISO8859_7:
		return charmap.ISO8859_7, nil
	case CharsetISO8859_8:
		return charmap.ISO8859_8, nil
	case CharsetWindows1250:
		return charmap.Windows1250, nil
	case CharsetWindows1251:
		return charmap.Windows1251, nil
	case CharsetWindows1252:
		return charmap.Windows1252, nil
	case CharsetWindows1253:
		return charmap.Windows1253, nil
	case CharsetWindows1254:
		return charmap.Windows1254, nil
	case CharsetWindows1255:
		return charmap.Windows1255, nil
	case CharsetWindows1256:
		return charmap.Windows1256, nil
	case CharsetWindows1257:
		return charmap.Windows1257, nil
	case CharsetWindows1258:
		return charmap.Windows1258, nil
	case CharsetUTF_8:
		// utf-8 input needs no conversion
		return nil, nil
	case "":
		log.Warn().Msg("No charset specified, assuming utf-8")
		return nil, nil
	default:
		return nil, fmt.Errorf("unsupported charset '%s'", name)
	}
}
// importString converts s from the character set described by ch into
// a UTF-8 string. A nil ch means the input is already utf-8 and s is
// returned unchanged.
func importString(s string, ch *charmap.Charmap) string {
	if ch == nil {
		return s
	}
	ans, _, err := transform.String(ch.NewDecoder(), s)
	if err != nil {
		// Fix for the original TODO: the decoding error was silently
		// dropped. We keep returning whatever transform produced
		// (possibly a partial result) so callers behave as before,
		// but the problem is no longer invisible.
		log.Warn().Err(err).Msg("failed to decode input string using the configured charmap")
	}
	return ans
}
func openInputFile(path string) (io.Reader, error) {
f, err := os.Open(path)
if err != nil {
return nil, fmt.Errorf("failed to open input file: %w", err)
}
finfo, err := f.Stat()
if err != nil {
return nil, fmt.Errorf("failed to open input file: %w", err)
}
if !finfo.Mode().IsRegular() {
return nil, fmt.Errorf("failed to open input file: path %s is not a regular file", path)
}
var rd io.Reader
if strings.HasSuffix(path, ".gz") {
rd, err = gzip.NewReader(f)
if err != nil {
return nil, fmt.Errorf("failed to open input file: %w", err)
}
} else {
rd = f
}
return rd, nil
}
// ParseVerticalFile processes a corpus vertical file
// line by line and applies a custom LineProcessor on
// them. The processing is parallelized in the sense
// that reading a file into lines and processing of
// the lines runs in different goroutines. But the
// function as a whole behaves synchronously - i.e.
// once it returns a value, the processing is finished.
//
// Two input modes are supported: a regular (possibly gzipped) file,
// or — when InputFilePath starts with "|" — an external command whose
// stdout is parsed instead.
func ParseVerticalFile(ctx context.Context, conf *ParserConf, lproc LineProcessor) error {
	chm, chErr := GetCharmapByName(conf.Encoding)
	if chErr != nil {
		return chErr
	}
	if chm != nil {
		log.Info().
			Str("inputCharset", chm.String()).
			Msgf("Configured conversion from input charset")
	}
	if strings.HasPrefix(conf.InputFilePath, "|") {
		// Dynamically generated vertical: run the command after the pipe
		// symbol and parse its stdout.
		// NOTE(review): the whitespace split means script[0] is expected to
		// be the bare "|" (i.e. "| cmd arg ..."); a spec written as
		// "|cmd arg" would make script[1] an argument and fail — confirm
		// the intended input format.
		script := vertCmdSplit.Split(conf.InputFilePath, -1)
		if len(script) < 2 {
			return fmt.Errorf("failed to parse vertical file: invalid dynamically generated vertical file specification")
		}
		cmd := exec.Command(script[1], script[2:]...)
		cmd.Env = os.Environ()
		rd, err := cmd.StdoutPipe()
		if err != nil {
			return fmt.Errorf("failed to parse vertical file: %w", err)
		}
		brd := bufio.NewScanner(rd)
		if err = cmd.Start(); err != nil {
			return fmt.Errorf("failed to parse vertical file: %w", err)
		}
		if err = parseVerticalFromScanner(ctx, brd, chm, conf, lproc); err != nil {
			return fmt.Errorf("failed to parse vertical file: %w", err)
		}
		// Wait reaps the child and surfaces its exit status.
		if err := cmd.Wait(); err != nil {
			return fmt.Errorf("failed to parse vertical file: %w", err)
		}
	} else {
		// Plain (or gzipped) file input.
		// NOTE(review): the reader returned by openInputFile is never
		// closed here; the underlying descriptor lives until process
		// exit — confirm this is acceptable for callers.
		rd, err := openInputFile(conf.InputFilePath)
		if err != nil {
			return err
		}
		brd := bufio.NewScanner(rd)
		if err = parseVerticalFromScanner(ctx, brd, chm, conf, lproc); err != nil {
			return err
		}
	}
	return nil
}
// parseVerticalFromScanner reads the vertical data from brd line by
// line in a producer goroutine, converting each line from the charmap
// chm (nil means utf-8 / no conversion), and dispatches the parsed
// lines to lproc in the calling goroutine. It returns the first error
// reported by lproc, a scanner read error, or nil on success.
func parseVerticalFromScanner(
	ctx context.Context,
	brd *bufio.Scanner,
	chm *charmap.Charmap,
	conf *ParserConf,
	lproc LineProcessor,
) error {
	ch := make(chan []procItem)
	chunk := make([]procItem, channelChunkSize)
	// stop tells the producer goroutine to quit when this function
	// returns early (e.g. a LineProcessor error). Fix: the channel was
	// previously created and closed but never consumed, so an early
	// return left the producer blocked forever on its channel send.
	stop := make(chan struct{})
	defer close(stop)
	stack, err := createStructAttrAccumulator(conf.StructAttrAccumulator)
	if err != nil {
		return err
	}
	logProgressEachNth := logProgressEachNthDefault
	if conf.LogProgressEachNth > 0 {
		logProgressEachNth = conf.LogProgressEachNth
	}
	// scanErr is written by the producer before it closes ch; the close
	// happens-before the consumer's range loop terminates, so reading
	// it after the loop is race-free.
	var scanErr error
	go func() {
		defer close(ch)
		i := 0
		lineNum := 0
		tokenNum := 0
		for {
			select {
			case <-ctx.Done():
				log.Info().Msg("forcibly stopped processing")
				return
			case <-stop:
				return
			default:
				if !brd.Scan() {
					// Fix: a read error was previously swallowed silently.
					scanErr = brd.Err()
					if i > 0 {
						select {
						case ch <- chunk[:i]:
						case <-stop:
						}
					}
					return
				}
				line, parseErr := parseLine(importString(brd.Text(), chm), stack)
				if tok, isTok := line.(*Token); isTok {
					tok.Idx = tokenNum
					tokenNum++
				}
				chunk[i] = procItem{idx: lineNum, value: line, err: parseErr}
				i++
				if i == channelChunkSize {
					i = 0
					select {
					case ch <- chunk:
					case <-stop:
						return
					}
					chunk = make([]procItem, channelChunkSize)
				}
				if lineNum > 0 && lineNum%logProgressEachNth == 0 {
					log.Info().
						Int("numProcessed", lineNum).
						Msg("chunk of lines processed")
				}
				lineNum++
			}
		}
	}()
	for items := range ch {
		for _, item := range items {
			var procErr error
			switch val := item.value.(type) {
			case *Token:
				if val.MatchesFilter(conf.FilterArgs) {
					// Fix: the per-line parse error (item.err) is now
					// forwarded; previously the stale accumulator-creation
					// err (always nil at this point) was passed instead,
					// so token parse errors never reached the processor.
					procErr = lproc.ProcToken(val, item.idx, item.err)
				}
			case *Structure:
				procErr = lproc.ProcStruct(val, item.idx, item.err)
			case *StructureClose:
				procErr = lproc.ProcStructClose(val, item.idx, item.err)
			}
			if procErr != nil {
				// deferred close(stop) unblocks the producer goroutine
				return procErr
			}
		}
	}
	if scanErr != nil {
		return fmt.Errorf("error reading vertical data: %w", scanErr)
	}
	log.Info().Int("metadataStackSize", stack.Size()).Msg("Parsing done")
	return nil
}
// ParseVerticalFileNoGoRo is just for benchmarking purposes.
// It processes the vertical file synchronously, without the
// producer/consumer goroutines used by ParseVerticalFile, calling
// lproc.ProcToken for each token line. It panics when the input
// file cannot be opened.
func ParseVerticalFileNoGoRo(conf *ParserConf, lproc LineProcessor) {
	f, err := os.Open(conf.InputFilePath)
	if err != nil {
		panic(err)
	}
	// fix: the file handle was previously never closed
	defer f.Close()
	rd := bufio.NewScanner(f)
	stack := newStack()
	i := 0
	for rd.Scan() {
		token, err := parseLine(rd.Text(), stack)
		if tToken, ok := token.(*Token); ok {
			lproc.ProcToken(tToken, i, err)
		}
		i++
	}
	// fix: a scanner read error was previously ignored; being a
	// benchmark helper, it is logged rather than panicking.
	if err := rd.Err(); err != nil {
		log.Error().Err(err).Msg("error reading vertical file")
	}
	log.Info().
		Int("metadataStackSize", stack.Size()).
		Msg("Parsing done")
}