perf: optimize job update refresh performance
samuelncui committed Oct 5, 2023
1 parent 973cbb9 commit b7c075b
Showing 17 changed files with 811 additions and 398 deletions.
12 changes: 6 additions & 6 deletions apis/converts.go
@@ -69,12 +69,12 @@ func convertJobs(jobs ...*executor.Job) []*entity.Job {
converted := make([]*entity.Job, 0, len(jobs))
for _, job := range jobs {
converted = append(converted, &entity.Job{
Id: job.ID,
Status: job.Status,
Priority: job.Priority,
CreateTime: job.CreateTime.Unix(),
UpdateTime: job.UpdateTime.Unix(),
State: job.State,
Id: job.ID,
Status: job.Status,
Priority: job.Priority,
CreateTimeNs: job.CreateTime.UnixNano(),
UpdateTimeNs: job.UpdateTime.UnixNano(),
State: job.State,
})
}
return converted
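The timestamp fields now carry UnixNano values instead of Unix seconds. For reference, a consumer turns them back into time.Time with the standard library — a minimal sketch, assuming job is an *entity.Job returned by the API:

	createTime := time.Unix(0, job.GetCreateTimeNs()) // nanosecond precision survives the round trip
	updateTime := time.Unix(0, job.GetUpdateTimeNs())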
2 changes: 1 addition & 1 deletion apis/job_get_log.go
@@ -28,5 +28,5 @@ func (api *API) JobGetLog(ctx context.Context, req *entity.JobGetLogRequest) (*e
return nil, fmt.Errorf("read log fail, %w", err)
}

return &entity.JobGetLogReply{Logs: buf}, nil
return &entity.JobGetLogReply{Logs: buf, Offset: req.GetOffset() + int64(len(buf))}, nil
}
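The reply now reports the next read offset (the request offset plus the bytes returned), which lets a caller tail a job's log incrementally instead of re-downloading it. A rough caller-side sketch — the request construction is omitted because its fields, other than the offset, are not shown in this diff:

	reply, err := api.JobGetLog(ctx, req) // req carries the offset returned by the previous call
	if err != nil {
		return err
	}
	logBuf = append(logBuf, reply.Logs...) // logBuf is an assumed caller-side buffer
	offset = reply.Offset                  // remember this and send it on the next poll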
6 changes: 6 additions & 0 deletions apis/job_list.go
@@ -21,6 +21,12 @@ func (api *API) JobList(ctx context.Context, req *entity.JobListRequest) (*entit
return nil, err
}
return &entity.JobListReply{Jobs: convertJobs(jobs...)}, nil
case *entity.JobListRequest_RecentlyUpdate:
jobs, err := api.exe.ListRecentlyUpdateJob(ctx, param.RecentlyUpdate)
if err != nil {
return nil, err
}
return &entity.JobListReply{Jobs: convertJobs(jobs...)}, nil
default:
return nil, fmt.Errorf("unexpected param, %T", req.Param)
}
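A caller reaches the new branch by filling the recently_update variant of the request oneof. A minimal sketch with the generated Go types (the since/limit values are illustrative):

	since := time.Now().Add(-time.Minute).UnixNano()
	limit := int64(20)
	reply, err := api.JobList(ctx, &entity.JobListRequest{
		Param: &entity.JobListRequest_RecentlyUpdate{
			RecentlyUpdate: &entity.JobRecentlyUpdateFilter{
				UpdateSinceNs: &since, // only jobs updated after this instant
				Limit:         &limit,
			},
		},
	})
	if err != nil {
		return err
	}
	// reply.Jobs holds the matching jobs, oldest update first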
4 changes: 4 additions & 0 deletions entity/job.go
@@ -5,6 +5,10 @@ import (
"database/sql/driver"
)

const (
JobStatusVisible = 128
)

var (
_ = sql.Scanner(&JobParam{})
_ = driver.Valuer(&JobParam{})
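The new constant splits the status space: values below 128 are treated as visible in default listings (FAILED sits just below the threshold at 127), while DELETED lands at 255 and is filtered out. A hypothetical helper that mirrors the SQL filter used later in ListJob:

	// isVisible reports whether a job should appear in default listings,
	// mirroring the "status < JobStatusVisible" condition in ListJob.
	func isVisible(status JobStatus) bool {
		return int64(status) < JobStatusVisible
	}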
291 changes: 186 additions & 105 deletions entity/job.pb.go

Large diffs are not rendered by default.

14 changes: 11 additions & 3 deletions entity/job.proto
@@ -12,15 +12,17 @@ enum JobStatus {
PROCESSING = 3;
COMPLETED = 4;

FAILED = 255;
FAILED = 127;

DELETED = 255;
}

message Job {
int64 id = 1;
JobStatus status = 2;
int64 priority = 3;
int64 create_time = 4;
int64 update_time = 5;
int64 create_time_ns = 4;
int64 update_time_ns = 5;

JobState state = 17;
}
@@ -58,6 +60,12 @@ message JobFilter {
optional int64 offset = 34;
}

message JobRecentlyUpdateFilter {
optional int64 update_since_ns = 1;

optional int64 limit = 33;
}

message JobDisplay {
oneof display {
job_archive.JobArchiveDisplay archive = 1;
459 changes: 245 additions & 214 deletions entity/service.pb.go

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions entity/service.proto
@@ -113,6 +113,7 @@ message JobListRequest {
oneof param {
JobMGetRequest mget = 1;
job.JobFilter list = 2;
job.JobRecentlyUpdateFilter recently_update = 3;
}
}

@@ -163,6 +164,7 @@ message JobGetLogRequest {

message JobGetLogReply {
bytes logs = 1;
int64 offset = 2;
}

message SourceListRequest {
67 changes: 64 additions & 3 deletions entity/utils.go
@@ -1,13 +1,34 @@
package entity

import (
"bytes"
"database/sql/driver"
"fmt"
"io"

"github.com/klauspost/compress/zstd"
"github.com/modern-go/reflect2"
"github.com/samuelncui/yatm/tools"
"google.golang.org/protobuf/proto"
)

const (
compressThreshold = 1024
)

var (
magicHeaderV2 = []byte{0xff, 'y', 'm', '\x02'}

zstdEncoderPool = tools.NewPool(func() *zstd.Encoder {
encoder, _ := zstd.NewWriter(nil) // there will be no error without options
return encoder
})
zstdDecoderPool = tools.NewPool(func() *zstd.Decoder {
decoder, _ := zstd.NewReader(nil) // there will be no error without options
return decoder
})
)

// Scan implement database/sql.Scanner
func Scan(dst proto.Message, src interface{}) error {
typ := reflect2.TypeOf(dst).(reflect2.PtrType).Elem()
@@ -24,22 +45,62 @@ func Scan(dst proto.Message, src interface{}) error {
default:
return fmt.Errorf("process define extra scanner, unexpected type for i18n, %T", v)
}

if len(buf) == 0 {
return nil
}

if bytes.HasPrefix(buf, magicHeaderV2) {
decoder := zstdDecoderPool.Get()

err := decoder.Reset(bytes.NewBuffer(buf[len(magicHeaderV2):]))
if err != nil {
return fmt.Errorf("zstd reset decoder fail, %w", err)
}

buf, err = io.ReadAll(decoder)
if err != nil {
return fmt.Errorf("zstd read decoder fail, %w", err)
}

decoder.Reset(nil)
zstdDecoderPool.Put(decoder)
}

if err := proto.Unmarshal(buf, dst); err != nil {
return fmt.Errorf("process define extra scanner, json unmarshal fail, %w", err)
return fmt.Errorf("process define extra scanner, protobuf unmarshal fail, %w", err)
}

return nil
}

// Value implement database/sql/driver.Valuer
func Value(src proto.Message) (driver.Value, error) {
buf, err := proto.Marshal(src)
if err != nil {
return nil, fmt.Errorf("process define extra valuer, json marshal fail, %w", err)
return nil, fmt.Errorf("process define extra valuer, protobuf marshal fail, %w", err)
}

if len(buf) <= compressThreshold {
return buf, nil
}

buffer := bytes.NewBuffer(make([]byte, 0, len(buf)))
buffer.Write(magicHeaderV2)

encoder := zstdEncoderPool.Get()
encoder.Reset(buffer)
_, err = encoder.Write(buf)
if err != nil {
return nil, fmt.Errorf("zstd write to encoder fail, %w", err)
}
err = encoder.Close()
if err != nil {
return nil, fmt.Errorf("zstd close encoder fail, %w", err)
}

buf = buffer.Bytes()
encoder.Reset(nil)
zstdEncoderPool.Put(encoder)

return buf, nil
}
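In short, the new storage format keeps payloads of up to 1024 bytes as plain protobuf and prefixes larger ones with the magic bytes 0xff 'y' 'm' 0x02 followed by a zstd frame; Scan recognises the prefix and decompresses transparently, so rows written before this change still load. A rough round-trip sketch, assuming the type switch in Scan (not fully shown in this hunk) accepts the []byte that Value produces:

	state := &JobState{} // any message stored through these helpers
	raw, err := Value(state)
	if err != nil {
		return err
	}
	restored := &JobState{}
	if err := Scan(restored, raw); err != nil {
		return err
	}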
39 changes: 36 additions & 3 deletions executor/job.go
@@ -22,7 +22,7 @@ type Job struct {
State *entity.JobState

CreateTime time.Time
UpdateTime time.Time
UpdateTime time.Time `gorm:"index:idx_update_time"`
}

func (j *Job) BeforeUpdate(tx *gorm.DB) error {
@@ -56,9 +56,18 @@ func (e *Executor) CreateJob(ctx context.Context, job *Job, param *entity.JobPar
}

func (e *Executor) DeleteJobs(ctx context.Context, ids ...int64) error {
if r := e.db.WithContext(ctx).Delete(ModelJob, ids); r.Error != nil {
return fmt.Errorf("delete job fail, err= %w", r.Error)
jobs, err := e.MGetJob(ctx, ids...)
if err != nil {
return fmt.Errorf("mget jobs fail")
}

for _, job := range jobs {
job.Status = entity.JobStatus_DELETED
if r := e.db.WithContext(ctx).Save(job); r.Error != nil {
return fmt.Errorf("delete job write db fail, id= %d err= %w", job.ID, r.Error)
}
}

return nil
}

@@ -120,6 +129,8 @@ func (e *Executor) ListJob(ctx context.Context, filter *entity.JobFilter) ([]*Jo
db := e.db.WithContext(ctx)
if filter.Status != nil {
db = db.Where("status = ?", *filter.Status)
} else {
db = db.Where("status < ?", entity.JobStatusVisible)
}

if filter.Limit != nil {
@@ -140,3 +151,25 @@ func (e *Executor) ListJob(ctx context.Context, filter *entity.JobFilter) ([]*Jo

return jobs, nil
}

func (e *Executor) ListRecentlyUpdateJob(ctx context.Context, filter *entity.JobRecentlyUpdateFilter) ([]*Job, error) {
db := e.db.WithContext(ctx)
if filter.UpdateSinceNs != nil {
db = db.Where("update_time > ?", time.Unix(0, *filter.UpdateSinceNs))
}

if filter.Limit != nil {
db = db.Limit(int(*filter.Limit))
} else {
db = db.Limit(20)
}

db = db.Order("update_time ASC")

jobs := make([]*Job, 0, 20)
if r := db.Find(&jobs); r.Error != nil {
return nil, fmt.Errorf("list jobs fail, err= %w", r.Error)
}

return jobs, nil
}
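Combined with the new index on update_time, this gives callers a cheap change feed: keep the largest update timestamp seen so far and ask only for rows newer than it. A minimal polling sketch against the signatures above (exe is an *Executor; the poll interval and error handling are placeholders):

	var cursorNs int64 // zero means "from the beginning"
	for {
		filter := &entity.JobRecentlyUpdateFilter{UpdateSinceNs: &cursorNs}
		jobs, err := exe.ListRecentlyUpdateJob(ctx, filter)
		if err != nil {
			return err
		}
		for _, job := range jobs {
			if ns := job.UpdateTime.UnixNano(); ns > cursorNs {
				cursorNs = ns // results come back in update_time ASC order
			}
			// refresh whatever view or cache depends on this job
		}
		time.Sleep(time.Second)
	}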
11 changes: 6 additions & 5 deletions frontend/src/api.ts
@@ -4,6 +4,9 @@ import { ServiceClient, File, SourceFile, Tape, Position } from "./entity";

import moment from "moment";

export const MODE_DIR = 2147483648n; // d: is a directory
export const JOB_STATUS_VISIBLE = 128;

const apiBase: string = (() => {
const base = (window as any).apiBase as string;
if (!base || base === "%%API_BASE%%") {
@@ -16,8 +19,6 @@ export const fileBase: string = (() => {
return apiBase.replace("/services", "/files");
})();

export const ModeDir = 2147483648n; // d: is a directory

export const Root: FileData = {
id: "0",
name: "Root",
@@ -38,7 +39,7 @@ export const cli = new ServiceClient(transport);

export function convertFiles(files: Array<File>): FileData[] {
return files.map((file) => {
const isDir = (file.mode & ModeDir) > 0;
const isDir = (file.mode & MODE_DIR) > 0;

return {
id: getID(file),
@@ -58,7 +59,7 @@ export function convertSourceFiles(files: Array<SourceFile>): FileData[] {

export function convertSourceFiles(files: Array<SourceFile>): FileData[] {
return files.map((file) => {
const isDir = (file.mode & ModeDir) > 0;
const isDir = (file.mode & MODE_DIR) > 0;

return {
id: getID(file),
@@ -99,7 +100,7 @@ export function convertTapes(tapes: Array<Tape>): FileData[] {

export function convertPositions(positions: Array<Position>): FileData[] {
return positions.map((posi) => {
const isDir = (posi.mode & ModeDir) > 0;
const isDir = (posi.mode & MODE_DIR) > 0;
const name = isDir ? splitPath(posi.path.slice(0, -1)) : splitPath(posi.path);

return {