
Commit 2e37c69

Author: Rolf Martin-Hoster

Merge branch 'master' of https://github.com/percona/go-mysql into thread_id_support

2 parents 24e7829 + fcc0266, commit 2e37c69

File tree: 7 files changed (+515, -383 lines)

event/aggregator.go

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@ func (a *Aggregator) Finalize() Result {
         class.Finalize(a.rateLimit)
         class.UniqueQueries = 1
         if class.Example != nil && class.Example.Ts != "" {
-            if t, err := time.Parse("060102 15:04:05", class.Example.Ts); err != nil {
+            if t, err := time.Parse("2006-01-02 15:04:05", class.Example.Ts); err != nil {
                 class.Example.Ts = ""
             } else {
                 class.Example.Ts = t.Add(a.utcOffset).Format("2006-01-02 15:04:05")
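Because event/class.go (below) now stores Example.Ts pre-formatted as "2006-01-02 15:04:05" in UTC, Finalize has to parse it back with that same layout before applying the aggregator's UTC offset; the old "060102 15:04:05" layout no longer matches. A minimal standalone sketch of that round trip (the timestamp and offset values are made up, and the names are not from the package):

package main

import (
	"fmt"
	"time"
)

func main() {
	// What Class.AddEvent now writes: a UTC timestamp in the new layout (value made up).
	exampleTs := "2015-10-27 11:48:27"
	utcOffset := -5 * time.Hour // stands in for the aggregator's configured offset

	// Mirrors what Finalize does: parse in the same layout, shift, re-format.
	if t, err := time.Parse("2006-01-02 15:04:05", exampleTs); err != nil {
		fmt.Println("unparseable; Example.Ts would be cleared")
	} else {
		fmt.Println(t.Add(utcOffset).Format("2006-01-02 15:04:05")) // 2015-10-27 06:48:27
	}
}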

event/aggregator_test.go

Lines changed: 3 additions & 1 deletion
@@ -46,7 +46,9 @@ func aggregateSlowLog(input, output string, utcOffset time.Duration, examples bo
     if err != nil {
         l.Fatal(err)
     }
-    p := parser.NewSlowLogParser(file, log.Options{})
+    opt := log.Options{}
+    opt.DefaultLocation = time.UTC
+    p := parser.NewSlowLogParser(file, opt)
     go p.Start()
     a := event.NewAggregator(examples, utcOffset, 10)
     for e := range p.EventChan() {
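The test helper pins DefaultLocation to time.UTC because old-format timestamps carry no zone information, so without it the parsed time.Time values, and therefore the expected results, would depend on the time zone of the machine running the tests. A small standalone illustration of that dependence (the timestamp and the fixed zone below are made up for the example):

package main

import (
	"fmt"
	"time"
)

func main() {
	// The old slow-log timestamp format has no zone information, so the same
	// "# Time:" line parses to different instants in different locations.
	// A fixed UTC-5 zone stands in for whatever the host's local zone might be.
	loc := time.FixedZone("UTC-5", -5*60*60)
	utc, _ := time.ParseInLocation("060102 15:04:05", "151027 11:48:27", time.UTC)
	local, _ := time.ParseInLocation("060102 15:04:05", "151027 11:48:27", loc)
	fmt.Println(utc.Unix() - local.Unix()) // -18000: same log line, five hours apart
}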

event/class.go

Lines changed: 4 additions & 1 deletion
@@ -100,7 +100,10 @@ func (c *Class) AddEvent(e *log.Event, outlier bool) {
             } else {
                 c.Example.Query = e.Query
             }
-            c.Example.Ts = e.Ts
+            if !e.Ts.IsZero() {
+                // todo use time.RFC3339Nano instead
+                c.Example.Ts = e.Ts.UTC().Format("2006-01-02 15:04:05")
+            }
         }
     }
 }

log/log.go

Lines changed: 10 additions & 4 deletions
@@ -20,17 +20,22 @@
 // like max Query_time. See also percona.com/go-mysql/event/.
 package log
 
+import (
+    "time"
+)
+
 // An event is a query like "SELECT col FROM t WHERE id = 1", some metrics like
 // Query_time (slow log) or SUM_TIMER_WAIT (Performance Schema), and other
 // metadata like default database, timestamp, etc. Metrics and metadata are not
 // guaranteed to be defined--and frequently they are not--but at minimum an
 // event is expected to define the query and Query_time metric. Other metrics
 // and metadata vary according to MySQL version, distro, and configuration.
 type Event struct {
-    Offset uint64 // byte offset in file at which event starts
-    Ts     string // raw timestamp of event
-    Admin  bool   // true if Query is admin command
-    Query  string // SQL query or admin command
+    Offset    uint64    // byte offset in file at which event starts
+    OffsetEnd uint64    // byte offset in file at which event ends
+    Ts        time.Time // timestamp of event
+    Admin     bool      // true if Query is admin command
+    Query     string    // SQL query or admin command
     User      string
     Host      string
     Db        string
@@ -56,6 +61,7 @@ type Options struct {
     StartOffset        uint64          // byte offset in file at which to start parsing
     FilterAdminCommand map[string]bool // admin commands to ignore
     Debug              bool            // print trace info to STDOUT
+    DefaultLocation    *time.Location  // DefaultLocation to assume for logs in MySQL < 5.7 format.
 }
 
 // A LogParser sends events to a channel.
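Event.Ts changes from a raw string to a time.Time, the struct gains OffsetEnd, and Options gains DefaultLocation. A hedged sketch of constructing and reading an event with the new fields (all values are made up; it assumes this branch of the package is importable under its usual path):

package main

import (
	"fmt"
	"time"

	"github.com/percona/go-mysql/log"
)

func main() {
	// Build an event by hand with the new field types (values are made up).
	ev := log.NewEvent()
	ev.Query = "SELECT col FROM t WHERE id = 1"
	ev.Offset = 0
	ev.OffsetEnd = 214
	ev.Ts, _ = time.ParseInLocation("2006-01-02 15:04:05", "2015-10-27 11:48:27", time.UTC)

	if !ev.Ts.IsZero() { // a zero time.Time now means no "# Time:" line was seen
		fmt.Println(ev.Ts.Format(time.RFC3339)) // 2015-10-27T11:48:27Z
	}
	fmt.Println(ev.OffsetEnd - ev.Offset) // the event's length in bytes within the log file
}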

log/slow/parser.go

Lines changed: 32 additions & 18 deletions
@@ -27,20 +27,24 @@ import (
     "regexp"
     "strconv"
     "strings"
+    "time"
 
     "github.com/percona/go-mysql/log"
 )
 
 // Regular expressions to match important lines in slow log.
-var timeRe = regexp.MustCompile(`Time: (\S+\s{1,2}\S+)`)
-var userRe = regexp.MustCompile(`User@Host: ([^\[]+|\[[^[]+\]).*?@ (\S*) \[(.*)\]`)
-var idRe = regexp.MustCompile(`(Id|Thread_id): +([0-9]*)`)
-var schema = regexp.MustCompile(`Schema: +(.*?) +Last_errno:`)
-var headerRe = regexp.MustCompile(`^#\s+[A-Z]`)
-var metricsRe = regexp.MustCompile(`(\w+): (\S+|\z)`)
-var adminRe = regexp.MustCompile(`command: (.+)`)
-var setRe = regexp.MustCompile(`^SET (?:last_insert_id|insert_id|timestamp)`)
-var useRe = regexp.MustCompile(`^(?i)use `)
+var (
+    timeRe    = regexp.MustCompile(`Time: (\S+\s{1,2}\S+)`)
+    timeNewRe = regexp.MustCompile(`Time:\s+(\d{4}-\d{2}-\d{2}\S+)`)
+    userRe    = regexp.MustCompile(`User@Host: ([^\[]+|\[[^[]+\]).*?@ (\S*) \[(.*)\]`)
+    idRe      = regexp.MustCompile(`(Id|Thread_id): +([0-9]*)`)
+    schema    = regexp.MustCompile(`Schema: +(.*?) +Last_errno:`)
+    headerRe  = regexp.MustCompile(`^#\s+[A-Z]`)
+    metricsRe = regexp.MustCompile(`(\w+): (\S+|\z)`)
+    adminRe   = regexp.MustCompile(`command: (.+)`)
+    setRe     = regexp.MustCompile(`^SET (?:last_insert_id|insert_id|timestamp)`)
+    useRe     = regexp.MustCompile(`^(?i)use `)
+)
 
 // A SlowLogParser parses a MySQL slow log. It implements the LogParser interface.
 type SlowLogParser struct {
@@ -55,12 +59,17 @@ type SlowLogParser struct {
     queryLines uint64
     bytesRead  uint64
     lineOffset uint64
+    endOffset  uint64
     stopped    bool
     event      *log.Event
 }
 
 // NewSlowLogParser returns a new SlowLogParser that reads from the open file.
 func NewSlowLogParser(file *os.File, opt log.Options) *SlowLogParser {
+    if opt.DefaultLocation == nil {
+        // Old MySQL format assumes time is taken from SYSTEM.
+        opt.DefaultLocation = time.Local
+    }
     p := &SlowLogParser{
         file: file,
         opt:  opt,
@@ -136,12 +145,6 @@ SCANNER_LOOP:
         lineLen := uint64(len(line))
         p.bytesRead += lineLen
         p.lineOffset = p.bytesRead - lineLen
-        if p.lineOffset != 0 {
-            // @todo Need to get clear on why this is needed;
-            // it does make the value correct; an off-by-one issue
-            p.lineOffset += 1
-        }
-
         if p.opt.Debug {
             fmt.Println()
             l.Printf("+%d line: %s", p.lineOffset, line)
@@ -181,6 +184,7 @@ SCANNER_LOOP:
     }
 
     if !p.stopped && p.queryLines > 0 {
+        p.endOffset = p.bytesRead
        p.sendEvent(false, false)
     }
 
@@ -214,10 +218,16 @@ func (p *SlowLogParser) parseHeader(line string) {
             l.Println("time")
         }
         m := timeRe.FindStringSubmatch(line)
-        if len(m) < 2 {
-            return
+        if len(m) == 2 {
+            p.event.Ts, _ = time.ParseInLocation("060102 15:04:05", m[1], p.opt.DefaultLocation)
+        } else {
+            m = timeNewRe.FindStringSubmatch(line)
+            if len(m) == 2 {
+                p.event.Ts, _ = time.ParseInLocation(time.RFC3339Nano, m[1], p.opt.DefaultLocation)
+            } else {
+                return
+            }
         }
-        p.event.Ts = m[1]
         if userRe.MatchString(line) {
             if p.opt.Debug {
                 l.Println("user (bad format)")
@@ -300,6 +310,7 @@ func (p *SlowLogParser) parseQuery(line string) {
         }
         p.inHeader = true
         p.inQuery = false
+        p.endOffset = p.lineOffset
         p.sendEvent(true, false)
         p.parseHeader(line)
         return
@@ -352,6 +363,7 @@ func (p *SlowLogParser) parseAdmin(line string) {
         if p.opt.Debug {
             l.Println("not filtered")
         }
+        p.endOffset = p.bytesRead
         p.sendEvent(false, false)
     } else {
         p.inHeader = false
@@ -364,6 +376,8 @@ func (p *SlowLogParser) sendEvent(inHeader bool, inQuery bool) {
         l.Println("send event")
     }
 
+    p.event.OffsetEnd = p.endOffset
+
     // Make a new event and reset our metadata.
     defer func() {
         p.event = log.NewEvent()
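With this change the parser recognizes both slow-log timestamp styles: the pre-5.7 header, parsed with the "060102 15:04:05" layout in opt.DefaultLocation (time.Local when the caller leaves it nil), and the newer RFC3339Nano header used by MySQL 5.7+, which carries its own offset. A standalone sketch of the two branches (the log lines below are made up; the regexes are copied from the diff so the snippet runs on its own):

package main

import (
	"fmt"
	"regexp"
	"time"
)

// Copied from the parser so this sketch is self-contained.
var (
	timeRe    = regexp.MustCompile(`Time: (\S+\s{1,2}\S+)`)
	timeNewRe = regexp.MustCompile(`Time:\s+(\d{4}-\d{2}-\d{2}\S+)`)
)

func main() {
	defaultLocation := time.UTC // stands in for opt.DefaultLocation
	lines := []string{
		"# Time: 151027 11:48:27",             // pre-5.7 header (made up)
		"# Time: 2015-10-27T11:48:27.123456Z", // 5.7+ header (made up)
	}
	for _, line := range lines {
		var ts time.Time
		if m := timeRe.FindStringSubmatch(line); len(m) == 2 {
			// Old format: no zone in the text, so DefaultLocation decides the instant.
			ts, _ = time.ParseInLocation("060102 15:04:05", m[1], defaultLocation)
		} else if m := timeNewRe.FindStringSubmatch(line); len(m) == 2 {
			// New format: RFC3339Nano carries its own offset, so the location is not used.
			ts, _ = time.ParseInLocation(time.RFC3339Nano, m[1], defaultLocation)
		}
		fmt.Println(ts.UTC()) // both lines resolve to 2015-10-27 11:48:27(.123456) +0000 UTC
	}
}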
