Initial commit

This commit is contained in:
Donny
2019-04-22 20:46:32 +08:00
commit 49ab8aadd1
25441 changed files with 4055000 additions and 0 deletions

View File

@@ -0,0 +1,29 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_test",
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/admin/main/laser/cmd:all-srcs",
"//app/admin/main/laser/conf:all-srcs",
"//app/admin/main/laser/dao:all-srcs",
"//app/admin/main/laser/http:all-srcs",
"//app/admin/main/laser/model:all-srcs",
"//app/admin/main/laser/service:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,61 @@
#### laser-admin
##### version 1.3.1
> 1.修复稿件统计日期查询左右均闭
##### version 1.3.0
> 1.稿件代码查询SQL优化,避免慢查询
> 2.csv下载优化
> 3.计算代码优化,复用和内聚
##### version 1.2.0
> 1.【数据监控】定时发布的视频/稿件数据拆解
##### version 1.1.0
> 1.稿件一二三回查移区统计
##### version 1.0.12
> 1.删除稿件数据可视化的代码
##### version 1.0.11
> 1.增加频道回查操作数据
##### version 1.0.10
> 1.修复2Map取交集错误
##### version 1.0.9
> 1.增加展示审核员每小时的稿件审核量
##### version 1.0.8
> 1.修复查询实时视频审核吞吐量逻辑
##### version 1.0.7
> 1.日志缓存管理端同步更新
##### version 1.0.6
> 1.增加查看和CSV下载视频稿件审核单个审核员的实时吞吐量数据
##### version 1.0.5
> 1.修复SQL语法错误 无法删除任务和编辑-500错误
##### version 1.0.4
> 1.修复dao从orm换成原生sql后引发的添加任务bug
##### version 1.0.3
> 1.type_id逗号分隔
> 2.增加排行,搜索,动态,粉丝动态禁止统计指标
##### version 1.0.2
> 1.热修复查询上报记录SQL错误
> 2.修改任务上报管理端接口的逻辑
##### version 1.0.1
> 1.改变传参形式uid逗号分隔
##### version 1.0.0
> 1.统计查询:稿件流程数据可视化和稿件回查项目
##### version 0.0.1
> 1.初始化laser-admin应用
> 2.微调业务逻辑并增加username业务字段
> 3.根据主站新工程规范改造成BladeMaster

View File

@@ -0,0 +1,11 @@
# Owner
haoguanwei
shencen
wangzhe01
# Author
shencen
yanjinbin
# Reviewer
shencen

View File

@@ -0,0 +1,16 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- haoguanwei
- shencen
- wangzhe01
- yanjinbin
labels:
- admin
- admin/main/laser
- main
options:
no_parent_owners: true
reviewers:
- shencen
- yanjinbin

View File

@@ -0,0 +1,10 @@
#### laser-admin
##### 项目简介
> laser-admin
##### 编译环境
> 请使用 golang v1.8.x 以上版本编译执行
##### 依赖包
> 1.公共包go-common

View File

@@ -0,0 +1,43 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_binary",
"go_library",
)
go_binary(
name = "cmd",
embed = [":go_default_library"],
tags = ["automanaged"],
)
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = ["laser-admin.toml"],
importpath = "go-common/app/admin/main/laser/cmd",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/laser/conf:go_default_library",
"//app/admin/main/laser/http:go_default_library",
"//library/log:go_default_library",
"//library/os/signal:go_default_library",
"//library/syscall:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,93 @@
# This is a TOML document. Boom.
[log]
dir = "/data/log/laser-admin/"
[bm]
addr = "0.0.0.0:7500"
maxListen = 10
timeout = "1000s"
[httpClient]
key = "6aa4286456d16b97"
secret = "351cf022e1ae8296109c3c524faafcc8"
dial = "50ms"
timeout = "10s"
keepAlive = "60s"
timer = 16
[httpClient.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[host]
manager = "http://uat-manager.bilibili.co"
[mysql]
addr = "172.16.33.205:3306"
dsn = "yanjinbin:HXjFexIsMilwDARW3ZCKP1fEGymdbzBV@tcp(172.16.33.205:3306)/bilibili_laser?timeout=500s&readTimeout=500s&writeTimeout=500s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
idleTimeout = "4h"
queryTimeout = "500s"
execTimeout = "500s"
tranTimeout = "500s"
[auth]
managerHost = "http://uat-manager.bilibili.co"
dashboardHost = "http://dashboard-mng.bilibili.co"
dashboardCaller = "manager-go"
[auth.DsHTTPClient]
key = "manager-go"
secret = "949bbb2dd3178252638c2407578bc7ad"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
[auth.DsHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[auth.MaHTTPClient]
key = "f6433799dbd88751"
secret = "36f8ddb1806207fe07013ab6a77a3935"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
[auth.MaHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[auth.session]
sessionIDLength = 32
cookieLifeTime = 1800
cookieName = "laser-go"
domain = ".bilibili.co"
[auth.session.Memcache]
name = "go-business/auth"
proto = "unix"
addr = "/tmp/uat-laser-auth-mc.sock"
active = 10
idle = 5
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[memcache]
[memcache.laser]
name = "creative/archive"
proto = "tcp"
addr = "172.16.0.148:11211"
idle = 5
active = 10
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
expire = "6h"

View File

@@ -0,0 +1,39 @@
package main
import (
"flag"
"os"
"go-common/app/admin/main/laser/conf"
"go-common/app/admin/main/laser/http"
"go-common/library/log"
"go-common/library/os/signal"
"go-common/library/syscall"
)
// main boots the laser-admin HTTP service: it loads configuration,
// initialises logging and the blademaster HTTP server, then blocks
// waiting for termination/reload signals.
func main() {
	flag.Parse()
	// Configuration must be available before anything else starts.
	if err := conf.Init(); err != nil {
		panic(err)
	}
	log.Init(conf.Conf.Log)
	defer log.Close()
	http.Init(conf.Conf)
	log.Info("laser-admin start")
	// Wait for OS signals; buffered so a signal sent while we are busy
	// handling the previous one is not lost.
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for {
		s := <-c
		log.Info("laser-admin get a signal %s", s.String())
		switch s {
		// NOTE(review): SIGSTOP cannot be caught by a process; listing it
		// here is harmless but has no effect — confirm intent.
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGSTOP, syscall.SIGINT:
			log.Info("laser-admin exit")
			return
		case syscall.SIGHUP:
			// TODO reload
		default:
			return
		}
	}
}

View File

@@ -0,0 +1,38 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["conf.go"],
importpath = "go-common/app/admin/main/laser/conf",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/cache/memcache:go_default_library",
"//library/conf:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/http/blademaster/middleware/permit:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,106 @@
package conf
import (
"flag"
"errors"
"github.com/BurntSushi/toml"
"go-common/library/cache/memcache"
"go-common/library/conf"
"go-common/library/database/sql"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/permit"
xtime "go-common/library/time"
)
// Package-level configuration state.
var (
	// ConfPath is the local config file path (set by the -conf flag);
	// when empty, configuration is fetched from the config center.
	ConfPath string
	// Conf is the live application configuration.
	Conf = &Config{}
	// client is the config-center client used for remote loading/watching.
	client *conf.Client
)

// Config app meta config.
type Config struct {
	// base
	// ELK
	Log *log.Config
	// Auth
	Auth *permit.Config
	// http
	BM *bm.ServerConfig
	// mysql
	Mysql *sql.Config
	// http client
	HTTPClient *bm.ClientConfig
	// host
	Host *Host
	// Memcache
	Memcache *Memcache
}

// Memcache holds the laser memcache pool config plus its item TTL.
type Memcache struct {
	Laser struct {
		*memcache.Config
		// Expire is the TTL applied to cached task-info items.
		Expire xtime.Duration
	}
}

// Host groups upstream service base URLs.
type Host struct {
	// Manager is the base URL of the manager admin API.
	Manager string
}
// init registers the -conf flag before flag.Parse runs in main.
func init() {
	flag.StringVar(&ConfPath, "conf", "", "default config path")
}

// Init loads configuration: from the local file when -conf was given,
// otherwise from the remote config center.
func Init() (err error) {
	if ConfPath != "" {
		return local()
	}
	return remote()
}

// local decodes the TOML file at ConfPath into Conf.
func local() (err error) {
	_, err = toml.DecodeFile(ConfPath, &Conf)
	return
}
// remote initialises the config-center client, performs the initial load,
// and then watches for change events, reloading the config on each one.
func remote() (err error) {
	if client, err = conf.New(); err != nil {
		return
	}
	if err = load(); err != nil {
		return
	}
	go func() {
		for range client.Event() {
			log.Info("config reload")
			// Capture load()'s error locally: by the time this goroutine
			// runs, remote() has returned and the outer err is always nil,
			// so the original code logged nil instead of the real failure.
			if e := load(); e != nil {
				log.Error("config reload error (%v)", e)
			}
		}
	}()
	return
}
// load fetches the current TOML payload from the config center, decodes it
// into a fresh Config, and swaps it into Conf on success.
func load() (err error) {
	var (
		cf      string
		ok      bool
		tmpConf *Config
	)
	if cf, ok = client.Toml2(); !ok {
		return errors.New("load config center error")
	}
	if _, err = toml.Decode(cf, &tmpConf); err != nil {
		return errors.New("could not decode config")
	}
	// Decode into a temporary first so a half-parsed payload never
	// corrupts the live configuration.
	// NOTE(review): *Conf = *tmpConf is not synchronized against readers
	// of Conf fields — confirm whether concurrent reload is a concern here.
	*Conf = *tmpConf
	return
}

View File

@@ -0,0 +1,65 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"dao_test.go",
"memcache_test.go",
"stat_test.go",
"task_log_test.go",
"task_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/laser/conf:go_default_library",
"//app/admin/main/laser/model:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"dao.go",
"memcache.go",
"stat.go",
"task.go",
"task_log.go",
],
importpath = "go-common/app/admin/main/laser/dao",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/laser/conf:go_default_library",
"//app/admin/main/laser/model:go_default_library",
"//library/cache/memcache:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/xstr:go_default_library",
"//vendor/github.com/pkg/errors:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,42 @@
package dao
import (
"context"
"go-common/app/admin/main/laser/conf"
"go-common/library/cache/memcache"
"go-common/library/database/sql"
bm "go-common/library/net/http/blademaster"
"time"
)
// Dao bundles every storage/transport handle the admin service needs:
// the laser MySQL database, the memcache pool (with item TTL), and an
// HTTP client for calling the manager admin API.
type Dao struct {
	c          *conf.Config
	laserDB    *sql.DB
	mc         *memcache.Pool
	mcExpire   int32 // memcache item TTL in seconds, derived from config
	HTTPClient *bm.Client
}

// New builds a Dao from config, wiring up MySQL, memcache and HTTP client.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		c:       c,
		laserDB: sql.NewMySQL(c.Mysql),
		mc:      memcache.NewPool(c.Memcache.Laser.Config),
		// Config expresses the TTL as a duration; memcache wants seconds.
		mcExpire:   int32(time.Duration(c.Memcache.Laser.Expire) / time.Second),
		HTTPClient: bm.NewClient(c.HTTPClient),
	}
	return
}

// Ping check db connection.
func (d *Dao) Ping(c context.Context) (err error) {
	return d.laserDB.Ping(c)
}

// Close dao resources.
// NOTE(review): only the DB is closed; the memcache pool is left open —
// confirm whether that is intentional.
func (d *Dao) Close(c context.Context) (err error) {
	return d.laserDB.Close()
}

View File

@@ -0,0 +1,35 @@
package dao
import (
"flag"
"go-common/app/admin/main/laser/conf"
"os"
"testing"
)
var (
	// d is the package-wide Dao under test, initialised in TestMain.
	d *Dao
)

// TestMain wires up config (remote config-center in deploy environments,
// local toml when UT_LOCAL_TEST is set) and builds the shared Dao before
// running the package tests.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "mobile.studio.laser-admin")
		flag.Set("conf_token", "25911b439f4636ce9083f91c4882dffa")
		flag.Set("tree_id", "19167")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	}
	if os.Getenv("UT_LOCAL_TEST") != "" {
		flag.Set("conf", "../../cmd/laser-admin.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,66 @@
package dao
import (
"context"
"go-common/app/admin/main/laser/model"
"go-common/library/cache/memcache"
"go-common/library/log"
"strconv"
)
// _prefix namespaces all task-info entries in memcache.
const (
	_prefix = "taskinfo_"
)

// keyTaskInfo derives the memcache key for the task info of the given mid.
func keyTaskInfo(mid int64) string {
	key := []byte(_prefix)
	key = strconv.AppendInt(key, mid, 10)
	return string(key)
}
// TaskInfoCache fetches the cached TaskInfo for mid.
// A cache miss is not an error: both ti and err are nil in that case.
func (d *Dao) TaskInfoCache(c context.Context, mid int64) (ti *model.TaskInfo, err error) {
	var (
		conn = d.mc.Get(c)
		r    *memcache.Item
	)
	defer conn.Close()
	r, err = conn.Get(keyTaskInfo(mid))
	if err != nil {
		if err == memcache.ErrNotFound {
			// Miss: report "not found" as (nil, nil) to the caller.
			err = nil
		} else {
			log.Error("conn Get2(%d) error(%v)", mid, err)
		}
		return
	}
	if err = conn.Scan(r, &ti); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", r.Value, err)
		// Never return a half-decoded value alongside an error.
		ti = nil
	}
	return
}
// AddTaskInfoCache stores ti in memcache under mid's key, JSON-encoded,
// with the configured TTL (d.mcExpire seconds).
func (d *Dao) AddTaskInfoCache(c context.Context, mid int64, ti *model.TaskInfo) (err error) {
	var (
		key = keyTaskInfo(mid)
	)
	conn := d.mc.Get(c)
	defer conn.Close()
	if err = conn.Set(&memcache.Item{Key: key, Object: ti, Flags: memcache.FlagJSON, Expiration: d.mcExpire}); err != nil {
		log.Error("memcache.Set(%v) error(%v)", key, err)
	}
	return
}
// RemoveTaskInfoCache evicts mid's TaskInfo entry from memcache
// (used to invalidate after task mutations).
func (d *Dao) RemoveTaskInfoCache(c context.Context, mid int64) (err error) {
	var (
		key = keyTaskInfo(mid)
	)
	conn := d.mc.Get(c)
	defer conn.Close()
	if err = conn.Delete(key); err != nil {
		log.Error("memcache.Delete(%v) error(%v)", key, err)
	}
	return
}

View File

@@ -0,0 +1,68 @@
package dao
import (
"context"
"go-common/app/admin/main/laser/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaokeyTaskInfo checks the key builder returns a value.
func TestDaokeyTaskInfo(t *testing.T) {
	convey.Convey("keyTaskInfo", t, func(ctx convey.C) {
		var (
			mid = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyTaskInfo(mid)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoTaskInfoCache smoke-tests the cache read path (no assertions;
// generated scaffolding only verifies it does not panic).
func TestDaoTaskInfoCache(t *testing.T) {
	convey.Convey("TaskInfoCache", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			mid = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.TaskInfoCache(c, mid)
			ctx.Convey("Then err should be nil.ti should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoAddTaskInfoCache smoke-tests the cache write path.
func TestDaoAddTaskInfoCache(t *testing.T) {
	convey.Convey("AddTaskInfoCache", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			mid = int64(0)
			ti  = &model.TaskInfo{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.AddTaskInfoCache(c, mid, ti)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoRemoveTaskInfoCache smoke-tests cache eviction.
func TestDaoRemoveTaskInfoCache(t *testing.T) {
	convey.Convey("RemoveTaskInfoCache", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			mid = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.RemoveTaskInfoCache(c, mid)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,151 @@
package dao
import (
"context"
"fmt"
"time"
"go-common/app/admin/main/laser/model"
"go-common/library/ecode"
"go-common/library/log"
"go-common/library/xstr"
"net/url"
)
const (
	// Manager admin API endpoints for uname<->uid resolution.
	_UrlUnames = "/x/admin/manager/users/unames"
	_UrlUids   = "/x/admin/manager/users/uids"
	// Query templates; the trailing %s receives extra AND-filters built
	// from int64 lists (see StatArchiveStat and friends).
	_queryArchiveStatSQL       = " SELECT stat_date, business, stat_type, typeid, uid, stat_value FROM archive_stat WHERE stat_date = '%s' AND business = %d %s "
	_queryArchiveAuditCargoSQL = " SELECT uid, stat_date, receive_value, audit_value FROM archive_audit_cargo_hour %s "
	_queryArchiveStatStreamSQL = " SELECT stat_time, business, stat_type, typeid, uid, stat_value FROM archive_stat_stream WHERE stat_time = '%s' AND business = %d %s "
)
// StatArchiveStat loads per-day archive stat rows for a business, optionally
// filtered by stat types, type ids and uids. statDate is truncated to the day.
//
// The filter fragments are built from int64 slices via xstr.JoinInts, so the
// interpolated SQL contains only digits and commas (no injection from these).
func (d *Dao) StatArchiveStat(c context.Context, business int, typeIDS []int64, uids []int64, statTypes []int64, statDate time.Time) (statNodes []*model.StatNode, err error) {
	var queryStmt string
	if len(statTypes) != 0 {
		// Keyword casing normalized to match the sibling filters below.
		queryStmt += fmt.Sprintf(" AND stat_type IN ( %s ) ", xstr.JoinInts(statTypes))
	}
	if len(typeIDS) != 0 {
		queryStmt += fmt.Sprintf(" AND typeid IN ( %s ) ", xstr.JoinInts(typeIDS))
	}
	if len(uids) != 0 {
		queryStmt += fmt.Sprintf(" AND uid IN ( %s ) ", xstr.JoinInts(uids))
	}
	rows, err := d.laserDB.Query(c, fmt.Sprintf(_queryArchiveStatSQL, statDate.Format("2006-01-02"), business, queryStmt))
	if err != nil {
		log.Error("d.laserDB.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := &model.StatNode{}
		if err = rows.Scan(&item.StatDate, &item.Business, &item.StatType, &item.TypeID, &item.UID, &item.StatValue); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return
		}
		statNodes = append(statNodes, item)
	}
	// Surface any error the driver deferred during iteration; otherwise a
	// partially-read result set would be returned as if it were complete.
	err = rows.Err()
	return
}
// QueryArchiveCargo returns per-hour audit/receive counters for the given
// statTime (matched to the second), optionally restricted to uids.
func (d *Dao) QueryArchiveCargo(c context.Context, statTime time.Time, uids []int64) (items []*model.CargoDetail, err error) {
	whereStmt := fmt.Sprintf(" WHERE stat_date = '%s' ", statTime.Format("2006-01-02 15:04:05"))
	if len(uids) != 0 {
		// xstr.JoinInts yields only digits/commas, so the interpolation is safe.
		uidStr := xstr.JoinInts(uids)
		whereStmt = whereStmt + fmt.Sprintf(" AND uid in ( %s ) ", uidStr)
	}
	rows, err := d.laserDB.Query(c, fmt.Sprintf(_queryArchiveAuditCargoSQL, whereStmt))
	if err != nil {
		log.Error("d.laserDB.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := &model.CargoDetail{}
		if err = rows.Scan(&item.UID, &item.StatDate, &item.ReceiveValue, &item.AuditValue); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return
		}
		items = append(items, item)
	}
	// Propagate any deferred iteration error (previously silently dropped).
	err = rows.Err()
	return
}
// GetUIDByNames resolves a comma-separated uname list to uids via the
// manager admin HTTP API (/x/admin/manager/users/uids).
func (d *Dao) GetUIDByNames(c context.Context, unames string) (res map[string]int64, err error) {
	var param = url.Values{}
	param.Set("unames", unames)
	var httpRes struct {
		Code int `json:"code"`
		Data map[string]int64 `json:"data"`
		Message string `json:"message"`
	}
	err = d.HTTPClient.Get(c, d.c.Host.Manager+_UrlUids, "", param, &httpRes)
	if err != nil {
		log.Error("d.client.Get(%s) error(%v)", d.c.Host.Manager+_UrlUids+"?"+param.Encode(), err)
		return
	}
	if httpRes.Code != ecode.OK.Code() {
		// NOTE(review): a non-OK business code is only logged, never turned
		// into an error; callers receive whatever Data holds (likely nil)
		// with err == nil. Confirm this best-effort behavior is intended.
		log.Error("url(%s) error(%v), code(%d), message(%s)", d.c.Host.Manager+_UrlUids+"?"+param.Encode(), err, httpRes.Code, httpRes.Message)
	}
	res = httpRes.Data
	return
}
// GetUNamesByUids resolves uids to usernames via the manager admin HTTP API
// (/x/admin/manager/users/unames).
func (d *Dao) GetUNamesByUids(c context.Context, uids []int64) (res map[int64]string, err error) {
	var param = url.Values{}
	var uidStr = xstr.JoinInts(uids)
	param.Set("uids", uidStr)
	var httpRes struct {
		Code int `json:"code"`
		Data map[int64]string `json:"data"`
		Message string `json:"message"`
	}
	err = d.HTTPClient.Get(c, d.c.Host.Manager+_UrlUnames, "", param, &httpRes)
	if err != nil {
		log.Error("d.client.Get(%s) error(%v)", d.c.Host.Manager+_UrlUnames+"?"+param.Encode(), err)
		return
	}
	// NOTE(review): compares against literal 0 while the sibling
	// GetUIDByNames uses ecode.OK.Code(); same value today, but the two
	// should agree. As above, a non-zero code is logged but not returned.
	if httpRes.Code != 0 {
		log.Error("url(%s) error(%v), code(%d), message(%s)", d.c.Host.Manager+_UrlUnames+"?"+param.Encode(), err, httpRes.Code, httpRes.Message)
	}
	res = httpRes.Data
	return
}
// StatArchiveStatStream is the streaming (per stat_time) counterpart of
// StatArchiveStat, reading from archive_stat_stream with the same optional
// stat-type/typeid/uid filters.
func (d *Dao) StatArchiveStatStream(c context.Context, business int, typeIDS []int64, uids []int64, statTypes []int64, statDate time.Time) (statNodes []*model.StatNode, err error) {
	var queryStmt string
	if len(statTypes) != 0 {
		// Keyword casing normalized for consistency with the other filters.
		queryStmt += fmt.Sprintf(" AND stat_type IN ( %s ) ", xstr.JoinInts(statTypes))
	}
	if len(typeIDS) != 0 {
		queryStmt += fmt.Sprintf(" AND typeid IN ( %s ) ", xstr.JoinInts(typeIDS))
	}
	if len(uids) != 0 {
		queryStmt += fmt.Sprintf(" AND uid IN ( %s ) ", xstr.JoinInts(uids))
	}
	rows, err := d.laserDB.Query(c, fmt.Sprintf(_queryArchiveStatStreamSQL, statDate.Format("2006-01-02"), business, queryStmt))
	if err != nil {
		log.Error("d.laserDB.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		node := &model.StatNode{}
		if err = rows.Scan(&node.StatDate, &node.Business, &node.StatType, &node.TypeID, &node.UID, &node.StatValue); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return
		}
		statNodes = append(statNodes, node)
	}
	// Surface any deferred iteration error (previously silently dropped).
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,93 @@
package dao
import (
"context"
"testing"
"time"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoStatArchiveStat smoke-tests the daily stat query with empty filters
// (generated scaffolding; no assertions beyond not panicking).
func TestDaoStatArchiveStat(t *testing.T) {
	convey.Convey("StatArchiveStat", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			typeIDS   = []int64{}
			uids      = []int64{}
			statTypes = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			d.StatArchiveStat(c, business, typeIDS, uids, statTypes, statDate)
			convCtx.Convey("Then err should be nil.statNodes should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// TestDaoQueryArchiveCargo smoke-tests the hourly cargo query.
func TestDaoQueryArchiveCargo(t *testing.T) {
	convey.Convey("QueryArchiveCargo", t, func(convCtx convey.C) {
		var (
			c        = context.Background()
			statTime = time.Now()
			uids     = []int64{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			d.QueryArchiveCargo(c, statTime, uids)
			convCtx.Convey("Then err should be nil.items should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// TestDaoGetUIDByNames smoke-tests the uname->uid HTTP resolution.
func TestDaoGetUIDByNames(t *testing.T) {
	convey.Convey("GetUIDByNames", t, func(convCtx convey.C) {
		var (
			c      = context.Background()
			unames = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			d.GetUIDByNames(c, unames)
			convCtx.Convey("Then err should be nil.res should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// TestDaoGetUNamesByUids smoke-tests the uid->uname HTTP resolution.
func TestDaoGetUNamesByUids(t *testing.T) {
	convey.Convey("GetUNamesByUids", t, func(convCtx convey.C) {
		var (
			c    = context.Background()
			uids = []int64{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			d.GetUNamesByUids(c, uids)
			convCtx.Convey("Then err should be nil.res should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// TestDaoStatArchiveStatStream smoke-tests the streaming stat query.
func TestDaoStatArchiveStatStream(t *testing.T) {
	convey.Convey("StatArchiveStatStream", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			typeIDS   = []int64{}
			uids      = []int64{}
			statTypes = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			d.StatArchiveStatStream(c, business, typeIDS, uids, statTypes, statDate)
			convCtx.Convey("Then err should be nil.statNodes should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,102 @@
package dao
import (
"go-common/library/log"
"context"
"fmt"
"github.com/pkg/errors"
"go-common/app/admin/main/laser/model"
"go-common/library/database/sql"
)
const (
	// Task queries. is_deleted = 0 filters out soft-deleted rows; state 0
	// means the task is still pending.
	_findMIDTaskSQL       = " SELECT id, admin_id, username, mid, log_date, contact_email, source_type, platform, state, is_deleted, ctime, mtime FROM task WHERE mid = ? AND state = ? and is_deleted = 0"
	_queryTaskInfoByIDSQL = " SELECT mid, log_date, source_type, platform FROM task WHERE state = 0 AND is_deleted = 0 AND id = ? "
	_insertTaskSQL        = " INSERT INTO task (mid, admin_id, username, log_date, contact_email, platform, source_type) VALUES (?, ?, ?, ?, ?, ?, ?) "
	// Delete is a soft delete that also records who performed it.
	_deleteTaskSQL = " UPDATE task SET is_deleted = 1 , username = ? , admin_id = ? WHERE id = ? AND is_deleted = 0 "
	// The %s/%d placeholders below are filled by the dao layer from
	// service-built fragments (see QueryTask/UpdateTask).
	_countTaskSQL  = " SELECT count(*) FROM task WHERE %s "
	_queryTaskSQL  = " SELECT id, admin_id, username, mid, log_date, contact_email, source_type, platform, state, is_deleted, ctime, mtime FROM task WHERE %s ORDER BY %s LIMIT %d,%d "
	_updateTaskSQL = " UPDATE task SET %s WHERE id = ? AND is_deleted = 0 AND state = ? "
)
// AddTask inserts a new pending task for mid and returns the new row id.
func (d *Dao) AddTask(ctx context.Context, mid int64, username string, adminID int64, logDate string, contactEmail string, platform int, sourceType int) (lastInsertID int64, err error) {
	res, err := d.laserDB.Exec(ctx, _insertTaskSQL, mid, adminID, username, logDate, contactEmail, platform, sourceType)
	if err != nil {
		log.Error("d.AddTask() error(%v)", err)
		// Return immediately: res is nil when Exec fails, and the original
		// code fell through to res.LastInsertId(), panicking with a nil
		// pointer dereference on every insert error.
		return
	}
	return res.LastInsertId()
}
// FindTask fetches the single non-deleted task for mid in the given state.
// No matching row is not an error: it returns (nil, nil).
func (d *Dao) FindTask(context context.Context, mid int64, state int) (t *model.Task, err error) {
	t = &model.Task{}
	row := d.laserDB.QueryRow(context, _findMIDTaskSQL, mid, state)
	if err = row.Scan(&t.ID, &t.AdminID, &t.Username, &t.MID, &t.LogDate, &t.ContactEmail, &t.SourceType, &t.Platform, &t.State, &t.IsDeleted, &t.CTime, &t.MTime); err != nil {
		if err == sql.ErrNoRows {
			// Absence is reported as (nil, nil), mirroring the cache layer.
			t = nil
			err = nil
			return
		}
		log.Error("row.Scan() error(%v)", err)
	}
	return
}
// QueryTaskInfoByIDSQL loads the TaskInfo projection (mid, log date, source
// type, platform) of a pending, non-deleted task by its id.
// No matching row is not an error: it returns (nil, nil).
func (d *Dao) QueryTaskInfoByIDSQL(c context.Context, id int64) (t *model.TaskInfo, err error) {
	t = &model.TaskInfo{}
	row := d.laserDB.QueryRow(c, _queryTaskInfoByIDSQL, id)
	if err = row.Scan(&t.MID, &t.LogDate, &t.SourceType, &t.Platform); err != nil {
		if err == sql.ErrNoRows {
			t = nil
			err = nil
		} else {
			err = errors.WithStack(err)
			log.Error("row.Scan() error(%v)", err)
		}
	}
	return
}
// DeleteTask soft-deletes a task by id, recording the operator's
// username and admin id on the row.
func (d *Dao) DeleteTask(ctx context.Context, taskID int64, username string, adminID int64) (err error) {
	if _, err = d.laserDB.Exec(ctx, _deleteTaskSQL, username, adminID, taskID); err != nil {
		log.Error("d.DeleteTask() error(%v)", err)
	}
	return
}
// UpdateTask applies the service-built SET fragment to a task that is
// still in the given state and not soft-deleted.
func (d *Dao) UpdateTask(ctx context.Context, taskID int64, state int, updateStmt string) (err error) {
	if _, err = d.laserDB.Exec(ctx, fmt.Sprintf(_updateTaskSQL, updateStmt), taskID, state); err != nil {
		log.Error("d.UpdateTask() error(%v)", err)
	}
	return
}
// QueryTask returns tasks matching the service-built WHERE fragment together
// with the total match count, ordered by sort and paginated by offset/limit.
//
// NOTE(review): queryStmt and sort are interpolated into the SQL verbatim;
// they must come from trusted, validated input in the service layer.
func (d *Dao) QueryTask(ctx context.Context, queryStmt string, sort string, offset int, limit int) (tasks []*model.Task, count int64, err error) {
	row := d.laserDB.QueryRow(ctx, fmt.Sprintf(_countTaskSQL, queryStmt))
	if err = row.Scan(&count); err != nil {
		return
	}
	rows, err := d.laserDB.Query(ctx, fmt.Sprintf(_queryTaskSQL, queryStmt, sort, offset, limit))
	if err != nil {
		log.Error("d.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := &model.Task{}
		if err = rows.Scan(&item.ID, &item.AdminID, &item.Username, &item.MID, &item.LogDate, &item.ContactEmail, &item.SourceType, &item.Platform, &item.State, &item.IsDeleted, &item.CTime, &item.MTime); err != nil {
			return
		}
		tasks = append(tasks, item)
	}
	// Report any iteration error the driver deferred (previously dropped).
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,32 @@
package dao
import (
"context"
"fmt"
"go-common/app/admin/main/laser/model"
)
const (
	// %s receives a service-built WHERE fragment (may be empty).
	_countTaskLogSQL = "SELECT count(*) FROM task_log %s"
	// NOTE(review): SELECT * is scanned into 9 fixed fields in QueryTaskLog;
	// any schema change to task_log will break the Scan. Prefer an explicit
	// column list.
	_queryTaskLogSQL = "SELECT * FROM task_log %s ORDER BY %s LIMIT %d, %d"
)
// QueryTaskLog returns task logs matching the service-built WHERE fragment
// plus the total row count, ordered by sort and paginated by offset/limit.
//
// NOTE(review): queryStmt and sort are interpolated into the SQL verbatim;
// callers must ensure they are built from trusted, validated input.
func (d *Dao) QueryTaskLog(ctx context.Context, queryStmt string, sort string, offset int, limit int) (taskLogs []*model.TaskLog, count int64, err error) {
	row := d.laserDB.QueryRow(ctx, fmt.Sprintf(_countTaskLogSQL, queryStmt))
	if err = row.Scan(&count); err != nil {
		return
	}
	rows, err := d.laserDB.Query(ctx, fmt.Sprintf(_queryTaskLogSQL, queryStmt, sort, offset, limit))
	if err != nil {
		return
	}
	// Close the result set when done; the original never closed it and
	// leaked a pooled connection on every call.
	defer rows.Close()
	for rows.Next() {
		t := &model.TaskLog{}
		if err = rows.Scan(&t.ID, &t.TaskID, &t.MID, &t.Build, &t.Platform, &t.TaskState, &t.Reason, &t.CTime, &t.MTime); err != nil {
			return
		}
		taskLogs = append(taskLogs, t)
	}
	// Surface any iteration error the driver deferred.
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,26 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoQueryTaskLog smoke-tests the task-log listing query with an empty
// WHERE fragment (generated scaffolding; no output assertions).
func TestDaoQueryTaskLog(t *testing.T) {
	convey.Convey("QueryTaskLog", t, func(ctx convey.C) {
		var (
			c         = context.Background()
			queryStmt = ""
			sort      = "ctime"
			offset    = int(1)
			limit     = int(10)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.QueryTaskLog(c, queryStmt, sort, offset, limit)
			ctx.Convey("Then err should be nil.taskLogs,count should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,112 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoAddTask smoke-tests task insertion with fixed sample data
// (generated scaffolding; no output assertions).
func TestDaoAddTask(t *testing.T) {
	convey.Convey("AddTask", t, func(ctx convey.C) {
		var (
			c            = context.Background()
			mid          = int64(2233)
			username     = "yanjinbin"
			adminID      = int64(479)
			logDate      = "2018-11-28 20:19:09"
			contactEmail = "yanjinbin@qq.com"
			platform     = int(1)
			sourceType   = int(1)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.AddTask(c, mid, username, adminID, logDate, contactEmail, platform, sourceType)
			ctx.Convey("Then err should be nil.lastInsertID should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoFindTask smoke-tests task lookup by mid/state.
func TestDaoFindTask(t *testing.T) {
	convey.Convey("FindTask", t, func(ctx convey.C) {
		var (
			c     = context.Background()
			mid   = int64(0)
			state = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.FindTask(c, mid, state)
			ctx.Convey("Then err should be nil.no should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoQueryTaskInfoByIDSQL smoke-tests task-info lookup by id.
func TestDaoQueryTaskInfoByIDSQL(t *testing.T) {
	convey.Convey("QueryTaskInfoByIDSQL", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			id = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.QueryTaskInfoByIDSQL(c, id)
			ctx.Convey("Then err should be nil.no should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoDeleteTask asserts soft-deleting a nonexistent task id succeeds.
func TestDaoDeleteTask(t *testing.T) {
	convey.Convey("DeleteTask", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			taskID   = int64(0)
			username = ""
			adminID  = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.DeleteTask(c, taskID, username, adminID)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}

// TestDaoUpdateTask smoke-tests the task update path.
// NOTE(review): updateStmt is empty, so the generated SQL is syntactically
// invalid ("SET  WHERE"); this case exercises only the error path.
func TestDaoUpdateTask(t *testing.T) {
	convey.Convey("UpdateTask", t, func(ctx convey.C) {
		var (
			c          = context.Background()
			taskID     = int64(0)
			state      = int(0)
			updateStmt = ""
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.UpdateTask(c, taskID, state, updateStmt)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}

// TestDaoQueryTask smoke-tests the task listing query.
func TestDaoQueryTask(t *testing.T) {
	convey.Convey("QueryTask", t, func(ctx convey.C) {
		var (
			c         = context.Background()
			queryStmt = ""
			sort      = ""
			offset    = int(0)
			limit     = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.QueryTask(c, queryStmt, sort, offset, limit)
			ctx.Convey("Then err should be nil.tasks,count should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,45 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"csv.go",
"http.go",
"stat.go",
"task.go",
"task_log.go",
],
importpath = "go-common/app/admin/main/laser/http",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/laser/conf:go_default_library",
"//app/admin/main/laser/model:go_default_library",
"//app/admin/main/laser/service:go_default_library",
"//library/ecode:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/http/blademaster/middleware/permit:go_default_library",
"//library/xstr:go_default_library",
"//vendor/github.com/pkg/errors:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,47 @@
package http
import (
"fmt"
"net/http"
"github.com/pkg/errors"
)
// CSVContentType is the Content-Type emitted for CSV downloads; the blank
// assignment statically asserts that CSV satisfies Render.
var (
	CSVContentType = []string{"application/csv"}
	_              Render = CSV{}
)

// Render http response render.
type Render interface {
	// Render writes the payload body to the response.
	Render(http.ResponseWriter) error
	// WriteContentType writes the payload's content-type headers.
	WriteContentType(w http.ResponseWriter)
}
// CSV is a downloadable CSV payload: raw bytes plus the filename stem
// used in the Content-Disposition header.
type CSV struct {
	Content []byte
	Title   string
}

// WriteContentType sets the CSV content-type and attachment headers.
func (j CSV) WriteContentType(w http.ResponseWriter) {
	writeContentType(w, CSVContentType, j.Title)
}
func writeContentType(w http.ResponseWriter, value []string, title string) {
header := w.Header()
if val := header["Content-Type"]; len(val) == 0 {
header["Content-Type"] = value
}
header["Content-Disposition"] = append(header["Content-Disposition"], fmt.Sprintf("attachment; filename=\"%s\".csv", title))
}
// Render writes the CSV content bytes to the response body.
// (The original comment said "(JSON)"; this renderer emits CSV.)
func (j CSV) Render(w http.ResponseWriter) (err error) {
	if _, err = w.Write(j.Content); err != nil {
		err = errors.WithStack(err)
	}
	return
}

View File

@@ -0,0 +1,76 @@
package http
import (
"net/http"
"go-common/app/admin/main/laser/conf"
"go-common/app/admin/main/laser/service"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/permit"
)
var (
	// svc is the shared business-service instance used by all handlers.
	svc *service.Service
	// authSrc gates every /x/admin/laser route behind permit auth.
	authSrc *permit.Permit
)

// Init wires up the service layer, auth middleware and HTTP routes, then
// starts the blademaster server. It panics if the server fails to start,
// since the process is useless without it.
func Init(c *conf.Config) {
	svc = service.New(c)
	authSrc = permit.New(c.Auth)
	engine := bm.DefaultServer(c.BM)
	initRouter(engine)
	if err := engine.Start(); err != nil {
		log.Error("engine.Start error(%v)", err)
		panic(err)
	}
}
// initRouter registers all laser-admin routes under /x/admin/laser; every
// route (except the health check) passes through the permit auth middleware.
func initRouter(e *bm.Engine) {
	e.Ping(ping)
	laser := e.Group("/x/admin/laser", authSrc.Verify())
	{
		// Task CRUD.
		task := laser.Group("/task")
		{
			task.POST("/add", addTask)
			task.GET("/list", queryTask)
			task.POST("/edit", updateTask)
			task.POST("/delete", deleteTask)
		}
		// Task execution/report logs.
		taskLog := laser.Group("/task_log")
		{
			taskLog.GET("/list", queryTaskLog)
		}
		// Archive recheck statistics.
		recheck := laser.Group("/archive/stat")
		{
			recheck.GET("/panel", recheckPanel)
			recheck.GET("/user", recheckUser)
			recheck.GET("/123_recheck", recheck123)
		}
		// Per-auditor archive audit throughput ("cargo").
		cargo := laser.Group("/archive/cargo")
		{
			cargo.GET("/audit/csv", auditCargoCsv)
			cargo.GET("/auditors", auditorCargo)
		}
		// Channel/tag recheck statistics.
		tag := laser.Group("/archive/tag")
		{
			tag.GET("/recheck", tagRecheck)
		}
		// Video audit statistics (scheduled vs non-scheduled publish).
		video := laser.Group("/video/stat")
		{
			video.GET("/random_video", randomVideo)
			video.GET("/random_video/csv", csvRandomVideo)
			video.GET("/fixed_video", fixedVideo)
			video.GET("/fixed_video/csv", csvFixedVideo)
		}
	}
}
// ping is the health-check endpoint: it replies 503 when the service layer
// (e.g. its datastore connection) fails the liveness probe.
func ping(c *bm.Context) {
	if err := svc.Ping(c); err != nil {
		// FIX: include the underlying error in the log line; it was
		// previously discarded, making outages hard to diagnose.
		log.Error("laser-admin service ping error(%v)", err)
		c.AbortWithStatus(http.StatusServiceUnavailable)
	}
}

View File

@@ -0,0 +1,275 @@
package http
import (
"fmt"
"go-common/library/ecode"
bm "go-common/library/net/http/blademaster"
"go-common/library/xstr"
"net/http"
"sort"
"time"
)
// recheckPanel returns the archive-recheck panel data: per-day stat views
// filtered by comma-separated archive type ids, auditor uname(s) and a
// [start_date, end_date] range (unix seconds).
func recheckPanel(c *bm.Context) {
	v := new(struct {
		TypeIDStr string `form:"type_id"` // comma-separated type ids
		StartDate int64  `form:"start_date"`
		EndDate   int64  `form:"end_date"`
		UName     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	recheckViews, err := svc.ArchiveRecheck(c, typeIDS, v.UName, v.StartDate, v.EndDate)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	// Newest day first.
	sort.Slice(recheckViews, func(i, j int) bool {
		return recheckViews[i].Date > recheckViews[j].Date
	})
	c.JSON(recheckViews, nil)
}
// recheckUser returns per-auditor recheck operation stats (counts and
// response times) for the given type ids, unames and date range.
func recheckUser(c *bm.Context) {
	v := new(struct {
		TypeIDStr string `form:"type_id"` // comma-separated type ids
		StartDate int64  `form:"start_date"`
		EndDate   int64  `form:"end_date"`
		UName     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	recheckViews, err := svc.UserRecheck(c, typeIDS, v.UName, v.StartDate, v.EndDate)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	// Newest day first.
	sort.Slice(recheckViews, func(i, j int) bool {
		return recheckViews[i].Date > recheckViews[j].Date
	})
	c.JSON(recheckViews, nil)
}
// auditCargoCsv streams the per-auditor archive cargo (received/audited)
// stats as a CSV attachment named "<start>~<end>-<uname>".
func auditCargoCsv(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		Uname     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	content, err := svc.CsvAuditCargo(c, v.StartDate, v.EndDate, v.Uname)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	c.Render(http.StatusOK, CSV{
		Content: content,
		Title:   fmt.Sprintf("%s~%s-%s", time.Unix(v.StartDate, 0).Format("2006/01/02_15"), time.Unix(v.EndDate, 0).Format("2006/01/02_15"), v.Uname),
	})
}
// auditorCargo returns the per-auditor archive cargo stats as JSON, newest
// day first.
func auditorCargo(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		Uname     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	// The second return value (CSV line width) is only needed for downloads.
	wrappers, _, err := svc.AuditorCargoList(c, v.StartDate, v.EndDate, v.Uname)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	sort.Slice(wrappers, func(i, j int) bool {
		return wrappers[i].Date > wrappers[j].Date
	})
	c.JSON(wrappers, nil)
}
// tagRecheck returns channel/tag recheck stats for the given unames and
// date range, newest day first.
func tagRecheck(c *bm.Context) {
	v := new(struct {
		Uname     string `form:"uname"`
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	tagViews, err := svc.TagRecheck(c, v.StartDate, v.EndDate, v.Uname)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	sort.Slice(tagViews, func(i, j int) bool {
		return tagViews[i].Date > tagViews[j].Date
	})
	c.JSON(tagViews, nil)
}
// recheck123 returns the first/second/third recheck move-in/move-out stats
// for the given type ids and date range, newest day first.
func recheck123(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		TypeIDStr string `form:"type_id"` // comma-separated type ids
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	recheckViews, err := svc.Recheck123(c, v.StartDate, v.EndDate, typeIDS)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	sort.Slice(recheckViews, func(i, j int) bool {
		return recheckViews[i].Date > recheckViews[j].Date
	})
	c.JSON(recheckViews, nil)
}
// randomVideo returns non-scheduled video audit stats broken down per
// auditor and day, newest day first.
func randomVideo(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		TypeIDStr string `form:"type_id"` // comma-separated type ids
		Uname     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	// The second return value (CSV line width) is only needed for downloads.
	statViewExts, _, err := svc.RandomVideo(c, v.StartDate, v.EndDate, typeIDS, v.Uname)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	sort.Slice(statViewExts, func(i, j int) bool {
		return statViewExts[i].Date > statViewExts[j].Date
	})
	c.JSON(statViewExts, nil)
}
// fixedVideo returns scheduled-publish video audit stats broken down per
// auditor and day, newest day first.
func fixedVideo(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		TypeIDStr string `form:"type_id"` // comma-separated type ids
		Uname     string `form:"uname"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	// The second return value (CSV line width) is only needed for downloads.
	statViewExts, _, err := svc.FixedVideo(c, v.StartDate, v.EndDate, typeIDS, v.Uname)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	sort.Slice(statViewExts, func(i, j int) bool {
		return statViewExts[i].Date > statViewExts[j].Date
	})
	c.JSON(statViewExts, nil)
}
// csvFixedVideo streams the scheduled-publish video audit stats as a CSV
// attachment.
func csvFixedVideo(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		Uname     string `form:"uname"`
		TypeIDStr string `form:"type_id"` // comma-separated type ids
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	content, err := svc.CsvFixedVideoAudit(c, v.StartDate, v.EndDate, v.Uname, typeIDS)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	c.Render(http.StatusOK, CSV{
		Content: content,
		Title:   fmt.Sprintf("%s_%s~%s-%s", "(定时发布)视频审核操作数据", time.Unix(v.StartDate, 0).Format("2006-01-02"), time.Unix(v.EndDate, 0).Format("2006-01-02"), v.Uname),
	})
}
// csvRandomVideo streams the non-scheduled video audit stats as a CSV
// attachment.
func csvRandomVideo(c *bm.Context) {
	v := new(struct {
		StartDate int64  `form:"stime"` // unix seconds
		EndDate   int64  `form:"etime"` // unix seconds
		Uname     string `form:"uname"`
		TypeIDStr string `form:"type_id"` // comma-separated type ids
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var typeIDS []int64
	if typeIDS, err = xstr.SplitInts(v.TypeIDStr); err != nil {
		c.JSON(nil, err)
		return
	}
	content, err := svc.CsvRandomVideoAudit(c, v.StartDate, v.EndDate, v.Uname, typeIDS)
	if err != nil {
		c.JSON(nil, err)
		return
	}
	c.Render(http.StatusOK, CSV{
		Content: content,
		Title:   fmt.Sprintf("%s_%s~%s-%s", "(非定时)视频审核操作数据", time.Unix(v.StartDate, 0).Format("2006-01-02"), time.Unix(v.EndDate, 0).Format("2006-01-02"), v.Uname),
	})
}

View File

@@ -0,0 +1,179 @@
package http
import (
"regexp"
"strconv"
"go-common/app/admin/main/laser/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
const (
	// emailRegex loosely validates e-mail addresses: an alphanumeric local
	// part with optional '-'/'.' separators, a dotted alphanumeric domain
	// and a TLD of two or more letters.
	emailRegex = "^([a-z0-9A-Z]+[-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$"
)
// addTask creates a laser log-upload task for uploader mid. All form fields
// are mandatory; the operator identity comes from the manager "username" and
// "uid" cookies set by the permit middleware.
func addTask(c *bm.Context) {
	req := c.Request
	v := new(struct {
		MID          int64  `form:"mid" validate:"required"`
		LogDate      int64  `form:"log_date" validate:"required"`
		ContactEmail string `form:"contact_email" validate:"required"`
		Platform     int    `form:"platform" validate:"required"`
		SourceType   int    `form:"source_type" validate:"required"`
	})
	err := c.Bind(v)
	if err != nil {
		// Bind already wrote the error response.
		return
	}
	if v.LogDate <= 0 || v.MID <= 0 || !checkEmail(v.ContactEmail) || v.Platform <= 0 || v.SourceType <= 0 {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	userCookie, err := req.Cookie("username")
	if err != nil {
		// FIX: a missing operator cookie is an auth failure; reply with
		// ecode.Unauthorized (as deleteTask does) instead of leaking the raw
		// cookie error to the client.
		c.JSON(nil, ecode.Unauthorized)
		return
	}
	username := userCookie.Value
	uidCookie, err := req.Cookie("uid")
	var adminID int64
	if err != nil {
		adminID = 0
	} else {
		adminID, err = strconv.ParseInt(uidCookie.Value, 10, 64)
		if err != nil {
			// FIX: a malformed uid cookie is a bad request (consistent with
			// deleteTask) rather than a raw error payload.
			c.JSON(nil, ecode.RequestErr)
			return
		}
	}
	err = svc.AddTask(c, v.MID, username, adminID, v.LogDate, v.ContactEmail, v.Platform, v.SourceType)
	if err != nil {
		log.Error("svc.AddTask() error(%v)", err)
	}
	c.JSON(nil, err)
}
// emailRE is compiled once at package init. The previous implementation
// called regexp.MatchString, which re-compiles the pattern on every call and
// silently swallowed the (impossible for a fixed valid pattern) compile error.
var emailRE = regexp.MustCompile(emailRegex)

// checkEmail reports whether emailAddr looks like a valid e-mail address
// according to emailRegex. The empty string is rejected.
func checkEmail(emailAddr string) (match bool) {
	if emailAddr == "" {
		return false
	}
	return emailRE.MatchString(emailAddr)
}
// deleteTask soft-deletes the task identified by task_id; the operator is
// identified via the manager "uid"/"username" cookies.
func deleteTask(c *bm.Context) {
	req := c.Request
	v := new(struct {
		TaskID int64 `form:"task_id"`
	})
	err := c.Bind(v)
	if err != nil {
		// Bind already wrote the error response.
		return
	}
	// FIX: reject a missing or non-positive task id up front instead of
	// passing an invalid id down to the delete statement.
	if v.TaskID <= 0 {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	uidCookie, err := req.Cookie("uid")
	var adminID int64
	if err != nil {
		adminID = 0
	} else {
		adminID, err = strconv.ParseInt(uidCookie.Value, 10, 64)
		if err != nil {
			c.JSON(nil, ecode.RequestErr)
			return
		}
	}
	userCookie, err := req.Cookie("username")
	if err != nil {
		c.JSON(nil, ecode.Unauthorized)
		return
	}
	username := userCookie.Value
	if err = svc.DeleteTask(c, v.TaskID, username, adminID); err != nil {
		log.Error("svc.DeleteTask() error(%v)", err)
		c.JSON(nil, err)
		return
	}
	c.JSON(nil, nil)
}
// queryTask lists tasks filtered by mid, log-date range, source type,
// platform and state, paged and optionally sorted.
func queryTask(c *bm.Context) {
	v := new(struct {
		Mid          int64  `form:"mid"`
		LogDateStart int64  `form:"log_date_start"`
		LogDateEnd   int64  `form:"log_date_end"`
		SourceType   int    `form:"source_type"`
		Platform     int    `form:"platform"`
		State        int    `form:"state"`
		SortBy       string `form:"sort"`
		PageNo       int    `form:"page_no"`
		PageSize     int    `form:"page_size"`
	})
	err := c.Bind(v)
	if err != nil {
		// Bind already wrote the error response.
		return
	}
	tasks, count, err := svc.QueryTask(c, v.Mid, v.LogDateStart, v.LogDateEnd, v.SourceType, v.Platform, v.State, v.SortBy, v.PageNo, v.PageSize)
	if err != nil {
		log.Error("svc.QueryTask() error(%v)", err)
		c.JSON(nil, err)
		return
	}
	pager := &model.TaskPager{
		PageSize: v.PageSize,
		PageNo:   v.PageNo,
		Total:    count,
		Items:    tasks,
	}
	c.JSON(pager, nil)
}
// updateTask edits an existing task; all task fields are mandatory and the
// operator identity comes from the manager "uid"/"username" cookies.
func updateTask(c *bm.Context) {
	req := c.Request
	v := new(struct {
		TaskID       int64  `form:"task_id" validate:"required"`
		MID          int64  `form:"mid" validate:"required"`
		LogDate      int64  `form:"log_date" validate:"required"`
		ContactEmail string `form:"contact_email" validate:"required"`
		SourceType   int    `form:"source_type" validate:"required"`
		Platform     int    `form:"platform" validate:"required"`
	})
	err := c.Bind(v)
	if err != nil {
		// Bind already wrote the error response.
		return
	}
	// ContactEmail is required by Bind, so in practice the format is always
	// checked; the empty-string guard only matters if the tag ever changes.
	if v.ContactEmail != "" && !checkEmail(v.ContactEmail) {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	uidCookie, err := req.Cookie("uid")
	var adminID int64
	if err != nil {
		// Missing uid cookie is tolerated; the operator falls back to id 0.
		adminID = 0
	} else {
		adminID, err = strconv.ParseInt(uidCookie.Value, 10, 64)
		if err != nil {
			c.JSON(nil, err)
			return
		}
	}
	userCookie, err := req.Cookie("username")
	if err != nil {
		c.JSON(nil, err)
		return
	}
	username := userCookie.Value
	err = svc.UpdateTask(c, username, adminID, v.TaskID, v.MID, v.LogDate, v.ContactEmail, v.SourceType, v.Platform)
	if err != nil {
		log.Error("svc.UpdateTask() error(%v)", err)
	}
	c.JSON(nil, err)
}

View File

@@ -0,0 +1,39 @@
package http
import (
"go-common/app/admin/main/laser/model"
"go-common/library/ecode"
"go-common/library/log"
"go-common/library/net/http/blademaster"
)
// queryTaskLog lists task report logs filtered by mid, task id, platform and
// task state, paged and optionally sorted.
func queryTaskLog(c *blademaster.Context) {
	v := new(struct {
		MID       int64  `form:"mid"`
		TaskID    int64  `form:"task_id"`
		Platform  int    `form:"platform"`
		TaskState int    `form:"task_state"`
		Sortby    string `form:"sort"`
		PageNo    int    `form:"page_no"`
		PageSize  int    `form:"page_size"`
	})
	err := c.Bind(v)
	if err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	logs, count, err := svc.QueryTaskLog(c, v.MID, v.TaskID, v.Platform, v.TaskState, v.Sortby, v.PageNo, v.PageSize)
	if err != nil {
		log.Error("svc.QueryTaskLog() error(%v)", err)
		c.JSON(nil, err)
		return
	}
	pager := &model.TaskLogPager{
		PageNo:   v.PageNo,
		PageSize: v.PageSize,
		Items:    logs,
		Total:    count,
	}
	c.JSON(pager, nil)
}

View File

@@ -0,0 +1,43 @@
package model
import (
xtime "go-common/library/time"
)
// ArchiveAuditCargoHour mirrors table archive_audit_cargo_hour: one row per
// auditor (uid) per stat hour.
type ArchiveAuditCargoHour struct {
	ID           int64      `json:"id"`
	UID          int64      `json:"uid"`
	StatDate     xtime.Time `json:"stat_date"` // hour bucket
	ReceiveValue int64      `json:"receive_value"` // archives received for audit
	AuditValue   int64      `json:"audit_value"`   // archives actually audited
	Ctime        xtime.Time `json:"ctime"`
	Mtime        xtime.Time `json:"mtime"`
	State        int        `json:"state"`
}

// CargoDetail is one auditor/hour archive-audit detail row.
type CargoDetail struct {
	UID          int64      `json:"uid"`
	StatDate     xtime.Time `json:"stat_date"`
	ReceiveValue int64      `json:"receive_value"`
	AuditValue   int64      `json:"audit_value"`
}

// CargoItem holds the audit volume of one bucket: received (still auditing)
// versus done (audited).
type CargoItem struct {
	ReceiveValue int64 `json:"auditing"`
	AuditValue   int64 `json:"audited"`
}

// CargoView is the per-day JSON shape agreed with the web front end:
// a "2006-01-02" date string plus an hour-of-day -> item map.
type CargoView struct {
	Date string             `json:"date"`
	Data map[int]*CargoItem `json:"data"`
}

// CargoViewWrapper tags a CargoView with the auditor's username for the
// per-auditor cargo listing.
type CargoViewWrapper struct {
	Username string `json:"username"`
	*CargoView
}

View File

@@ -0,0 +1,169 @@
package model
import (
xtime "go-common/library/time"
)
// ArchiveStat mirrors table archive_stat: one aggregate row per
// business/stat-type/type-id/group/uid/date combination.
type ArchiveStat struct {
	ID        int64      `json:"id"`
	Business  int        `json:"business"`
	StatType  int        `json:"stat_type"`
	TypeID    int        `json:"typeid"`
	GroupID   int        `json:"group_id"`
	UID       int64      `json:"uid"`
	StatDate  xtime.Time `json:"stat_date"`
	Content   string     `json:"content"`
	Ctime     xtime.Time `json:"ctime"`
	Mtime     xtime.Time `json:"mtime"`
	State     int        `json:"state"`
	StatValue int64      `json:"stat_value"`
}

// StatNode is the minimum-dimension stat value loaded from archive_stat.
type StatNode struct {
	StatDate  xtime.Time `json:"stat_date"`
	Business  int        `json:"business"`
	StatType  int        `json:"stat_type"`
	TypeID    int        `json:"typeid"`
	UID       int64      `json:"uid"`
	StatValue int64      `json:"stat_value"`
}

// CsvMetaNode describes one CSV column: position, header name and the stat
// data code the column is filled from.
type CsvMetaNode struct {
	Index    int
	Name     string
	DataCode int
}

// StatItem is one element of the stat view JSON model.
type StatItem struct {
	DataCode int   `json:"data_code"`
	Value    int64 `json:"value"`
}

// StatView is the common per-day stat view JSON model.
type StatView struct {
	Date  int64       `json:"date"`
	Stats []*StatItem `json:"stats"`
}

// StatItemExt groups one auditor's stat items (StatItem extension).
type StatItemExt struct {
	Uname string      `json:"uname"`
	Stats []*StatItem `json:"stat"`
}

// StatViewExt is a per-day stat view broken down by auditor (StatView extension).
type StatViewExt struct {
	Date  int64          `json:"date"`
	Wraps []*StatItemExt `json:"stats"`
}

const (
	// business field enum values.

	// ArchiveRecheck is the archive recheck project.
	ArchiveRecheck = 1
	// TagRecheck is the archive channel/tag recheck project.
	TagRecheck = 2
	// RandomVideoAudit is non-scheduled video audit operation data.
	RandomVideoAudit = 3
	// FixedVideoAudit is scheduled-publish video audit operation data.
	FixedVideoAudit = 4

	// stat_type field enum values (statistic indicators).

	// TotalArchive is the total archive count.
	TotalArchive = 1
	// TotalOper is the total operated-archive count.
	TotalOper = 2
	// ReCheck is the reject (sent-back) count.
	ReCheck = 3
	// Lock is the locked count.
	Lock = 4
	// ThreeLimit is the "three limits" restriction count.
	ThreeLimit = 5
	// FirstCheck is the first-recheck archive count.
	FirstCheck = 6
	// SecondCheck is the second-recheck archive count.
	SecondCheck = 7
	// ThirdCheck is the third-recheck archive count.
	ThirdCheck = 8
	// TotalOperFrequency is the total number of operations.
	TotalOperFrequency = 9
	// FirstCheckOper is the first-recheck operation count.
	FirstCheckOper = 10
	// SecondCheckOper is the second-recheck operation count.
	SecondCheckOper = 11
	// ThirdCheckOper is the third-recheck operation count.
	ThirdCheckOper = 12
	// FirstCheckTime is the first-recheck total response time.
	FirstCheckTime = 13
	// SecondCheckTime is the second-recheck total response time.
	SecondCheckTime = 14
	// ThirdCheckTime is the third-recheck total response time.
	ThirdCheckTime = 15
	// FirstAvgTime is the first-recheck average response time (derived).
	FirstAvgTime = 16
	// SecondAvgTime is the second-recheck average response time (derived).
	SecondAvgTime = 17
	// ThirdAvgTime is the third-recheck average response time (derived).
	ThirdAvgTime = 18
	// NoRankArchive is the ranking-forbidden count.
	NoRankArchive = 19
	// NoIndexArchive is the dynamic-feed-forbidden count.
	NoIndexArchive = 20
	// NoRecommendArchive is the recommendation-forbidden count.
	NoRecommendArchive = 21
	// NoPushArchive is the fan-feed-push-forbidden count.
	NoPushArchive = 22
	// TagRecheckTotalTime is the total tag-recheck time.
	TagRecheckTotalTime = 23
	// TagRecheckTotalCount is the total channel-recheck operation count.
	TagRecheckTotalCount = 24
	// TagChangeCount is the count of archives whose tags changed.
	TagChangeCount = 25
	// TagRecheckAvgTime is the average tag-save operation time (derived).
	TagRecheckAvgTime = 26
	// TotalVideo is the total operated video count.
	TotalVideo = 27
	// TotalVideoOper is the total video operation count.
	TotalVideoOper = 28
	// OpenVideo is the open-for-browsing video count.
	OpenVideo = 29
	// OpenVideoOper is the open-for-browsing operation count.
	OpenVideoOper = 30
	// VipAccessVideo is the members-only-visible video count.
	VipAccessVideo = 31
	// VipAccessVideoOper is the members-only-visible operation count.
	VipAccessVideoOper = 32
	// RejectVideo is the rejected video count.
	RejectVideo = 33
	// RejectVideoOper is the reject operation count.
	RejectVideoOper = 34
	// LockVideo is the locked video count.
	LockVideo = 35
	// LockVideoOper is the lock operation count.
	LockVideoOper = 36
	// PassVideoTotalDuration is the total duration of passed videos.
	PassVideoTotalDuration = 37
	// FailVideoTotalDuration is the total duration of rejected videos.
	FailVideoTotalDuration = 38
	// WaitAuditAvgTime is the average submit-to-pending response time.
	WaitAuditAvgTime = 39
	// WaitAuditDuration is the total submit-to-pending time.
	WaitAuditDuration = 40
	// WaitAuditOper is the submit-to-pending event count.
	WaitAuditOper = 41

	// value type enum values.

	// NumValue marks a plain count value.
	NumValue = 1
	// TimeValue marks a value measured in seconds.
	TimeValue = 2
)

View File

@@ -0,0 +1,42 @@
package model
import (
xtime "go-common/library/time"
)
// ArchiveStatStream mirrors the streaming archive stat table.
// NOTE(review): the original comment said "table archive_stat"; the type
// name suggests archive_stat_stream — confirm against the schema.
type ArchiveStatStream struct {
	ID        int64      `json:"id"`
	Business  int        `json:"business"`
	StatType  int        `json:"stat_type"`
	TypeID    int        `json:"typeid"`
	GroupID   int        `json:"group_id"`
	UID       int        `json:"uid"` // NOTE(review): int here vs int64 elsewhere — confirm intended width
	StatTime  xtime.Time `json:"stat_time"`
	StatValue int64      `json:"stat_value"`
	Ctime     xtime.Time `json:"ctime"`
	Mtime     xtime.Time `json:"mtime"`
	State     int        `json:"state"`
}

const (
	// business field enum values.

	// Recheck123 is the archive 1st/2nd/3rd recheck move-in/move-out project.
	Recheck123 = 1

	// stat_type field enum values.
	// The ALL_CAPS names are kept as-is for compatibility with existing
	// callers, although Go convention would be FirstRecheckIn etc.

	// FIRST_RECHECK_IN is first-recheck move-in.
	FIRST_RECHECK_IN = 1
	// FIRST_RECHECK_OUT is first-recheck move-out.
	FIRST_RECHECK_OUT = 2
	// SECOND_RECHECK_IN is second-recheck move-in.
	SECOND_RECHECK_IN = 3
	// SECOND_RECHECK_OUT is second-recheck move-out.
	SECOND_RECHECK_OUT = 4
	// THIRD_RECHECK_IN is third-recheck move-in.
	THIRD_RECHECK_IN = 5
	// THIRD_RECHECK_OUT is third-recheck move-out.
	THIRD_RECHECK_OUT = 6
)

View File

@@ -0,0 +1,35 @@
# Bazel build for the laser model package; maintained by the BUILD generator
# ("automanaged").
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

go_library(
    name = "go_default_library",
    srcs = [
        "ArchiveAuditCargoHour.go",
        "ArchiveStat.go",
        "ArchiveStatStream.go",
        "task.go",
        "task_log.go",
    ],
    importpath = "go-common/app/admin/main/laser/model",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = ["//library/time:go_default_library"],
)

# Source filegroups consumed by Bazel aggregation targets.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,38 @@
package model
import (
xtime "go-common/library/time"
)
// Task is a laser log-upload task: it asks the client of uploader MID to
// upload the logs of LogDate, notifying ContactEmail.
type Task struct {
	ID           int64      `json:"id"`
	AdminID      int64      `json:"admin_id"` // manager uid who created the task
	Username     string     `json:"username"` // manager username
	MID          int64      `json:"mid"`
	LogDate      xtime.Time `json:"log_date"`
	ContactEmail string     `json:"contact_email"`
	SourceType   int        `json:"source_type"`
	Platform     int        `json:"platform"`
	State        int        `json:"state"`
	IsDeleted    int        `json:"is_deleted"` // soft-delete flag
	CTime        xtime.Time `json:"ctime"`
	MTime        xtime.Time `json:"mtime"`
}

// TaskPager is one page of tasks plus paging metadata.
type TaskPager struct {
	Total    int64   `json:"total"`
	PageNo   int     `json:"page_no" default:"1"`
	PageSize int     `json:"page_size" default:"20"`
	Items    []*Task `json:"items"`
}

// TaskInfo is the value cached in memcache under key mid.
type TaskInfo struct {
	MID        int64
	LogDate    xtime.Time
	SourceType int
	Platform   int
	// Empty marks a placeholder entry — presumably a negative-cache flag
	// for mids without a pending task; confirm against the dao layer.
	Empty bool
}

View File

@@ -0,0 +1,26 @@
package model
import (
xtime "go-common/library/time"
)
// TaskLog records one client report against a task: the reporting build,
// platform, resulting task state and an optional failure reason.
type TaskLog struct {
	ID        int64      `json:"id"`
	TaskID    int64      `json:"task_id"`
	MID       int64      `json:"mid"`
	Build     string     `json:"build"` // client build reporting the task
	Platform  int        `json:"platform"`
	TaskState int        `json:"task_state"`
	Reason    string     `json:"reason"` // failure reason, if any
	CTime     xtime.Time `json:"ctime"`
	MTime     xtime.Time `json:"mtime"`
}

// TaskLogPager is one page of task logs plus paging metadata.
type TaskLogPager struct {
	Total    int64      `json:"total"`
	PageNo   int        `json:"page_no" default:"1"`
	PageSize int        `json:"page_size" default:"20"`
	Items    []*TaskLog `json:"items"`
}

View File

@@ -0,0 +1,61 @@
# Bazel build for the laser service package; maintained by the BUILD
# generator ("automanaged").
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

go_test(
    name = "go_default_test",
    srcs = [
        "archive_stat_test.go",
        "csv_test.go",
        "service_test.go",
        "task_log_test.go",
        "task_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/admin/main/laser/conf:go_default_library",
        "//app/admin/main/laser/model:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

go_library(
    name = "go_default_library",
    srcs = [
        "archive_stat.go",
        "csv.go",
        "service.go",
        "task.go",
        "task_log.go",
    ],
    importpath = "go-common/app/admin/main/laser/service",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/admin/main/laser/conf:go_default_library",
        "//app/admin/main/laser/dao:go_default_library",
        "//app/admin/main/laser/model:go_default_library",
        "//library/log:go_default_library",
        "//library/time:go_default_library",
    ],
)

# Source filegroups consumed by Bazel aggregation targets.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,615 @@
package service
import (
"context"
"sort"
"time"
"go-common/app/admin/main/laser/model"
)
const (
	// ALLNAME is the pseudo username labelling the video-audit
	// "everyone" overview row.
	ALLNAME = "ALL全体总览"
	// ALLUID is the pseudo uid paired with ALLNAME.
	ALLUID = -1
)
// ArchiveRecheck aggregates archive-recheck flow stats day by day over
// [startDate, endDate] (unix seconds, both ends inclusive), optionally
// filtered by archive type ids and comma-separated auditor unames.
func (s *Service) ArchiveRecheck(c context.Context, typeIDS []int64, unames string, startDate int64, endDate int64) (recheckViews []*model.StatView, err error) {
	start := time.Unix(startDate, 0)
	end := time.Unix(endDate, 0)
	var uids []int64
	var res map[string]int64
	if len(unames) != 0 {
		// Resolve usernames to uids for the dao filter.
		res, err = s.dao.GetUIDByNames(c, unames)
		if err != nil {
			return
		}
		for _, uid := range res {
			uids = append(uids, uid)
		}
	}
	// Every indicator code shown on the recheck panel.
	statTypes := []int64{model.TotalArchive, model.TotalOper, model.ReCheck, model.Lock,
		model.ThreeLimit, model.FirstCheck, model.SecondCheck, model.ThirdCheck, model.NoRankArchive, model.NoIndexArchive, model.NoRecommendArchive, model.NoPushArchive,
		model.FirstCheckOper, model.FirstCheckTime, model.SecondCheckOper,
		model.SecondCheckTime, model.ThirdCheckOper, model.ThirdCheckTime}
	var statViews []*model.StatView
	// One dao query per day across the closed date range.
	for !start.After(end) {
		statViews, err = s.dailyStatArchiveRecheck(c, model.ArchiveRecheck, typeIDS, statTypes, uids, start)
		if err != nil {
			return
		}
		recheckViews = append(recheckViews, statViews...)
		start = start.AddDate(0, 0, 1)
	}
	return
}
// UserRecheck aggregates per-auditor recheck operation stats (operation
// counts and response times) day by day over [startDate, endDate] (unix
// seconds, both ends inclusive), optionally filtered by archive type ids
// and comma-separated auditor unames.
func (s *Service) UserRecheck(c context.Context, typeIDS []int64, unames string, startDate int64, endDate int64) (
	recheckViews []*model.StatView, err error) {
	start := time.Unix(startDate, 0)
	end := time.Unix(endDate, 0)
	var uids []int64
	var res map[string]int64
	if len(unames) != 0 {
		// Resolve usernames to uids for the dao filter.
		res, err = s.dao.GetUIDByNames(c, unames)
		if err != nil {
			return
		}
		for _, uid := range res {
			uids = append(uids, uid)
		}
	}
	// FIX: each stat type is listed exactly once. The previous slice
	// repeated the *CheckOper codes, bloating the SQL IN clause without
	// changing the result set.
	statTypes := []int64{model.TotalOperFrequency,
		model.FirstCheckOper, model.FirstCheckTime,
		model.SecondCheckOper, model.SecondCheckTime,
		model.ThirdCheckOper, model.ThirdCheckTime}
	var statViews []*model.StatView
	// One dao query per day across the closed date range.
	for !start.After(end) {
		statViews, err = s.dailyStatArchiveRecheck(c, model.ArchiveRecheck, typeIDS, statTypes, uids, start)
		if err != nil {
			return
		}
		recheckViews = append(recheckViews, statViews...)
		start = start.AddDate(0, 0, 1)
	}
	return
}
// dailyStatArchiveRecheck loads one day of archive-recheck aggregates and
// converts them into StatView models; an empty day yields a nil slice
// without error.
func (s *Service) dailyStatArchiveRecheck(c context.Context, business int, typeIDS []int64, statTypes []int64, uids []int64, statDate time.Time) (statViews []*model.StatView, err error) {
	mediateView, err := s.dailyArchiveStat(c, business, typeIDS, statTypes, uids, statDate)
	if err != nil || len(mediateView) == 0 {
		return
	}
	statViews = makeUpArchiveRecheck(mediateView)
	return
}
// avgCheckItem derives an average-response-time stat item (avgCode) from
// one day's total time (timeCode) and operation count (operCode) entries.
// The second return is false when either source entry is missing; a zero
// operation count yields an average of 0 rather than dividing by zero.
func avgCheckItem(stats map[int]int64, operCode, timeCode, avgCode int) (*model.StatItem, bool) {
	oper, ok1 := stats[operCode]
	total, ok2 := stats[timeCode]
	if !ok1 || !ok2 {
		return nil, false
	}
	item := &model.StatItem{DataCode: avgCode}
	if oper != 0 {
		// Integer division: averages truncate, as before.
		item.Value = total / oper
	}
	return item, true
}

// makeUpArchiveRecheck converts the per-day/per-type aggregate map into
// StatView JSON models, deriving the first/second/third-check average
// response times from the corresponding totals.
// FIX: the derivation logic was previously triplicated verbatim; it is now
// driven by a table of (operCode, timeCode, avgCode) triples.
func makeUpArchiveRecheck(mediateView map[int64]map[int]int64) (statViews []*model.StatView) {
	avgSpecs := [][3]int{
		{model.FirstCheckOper, model.FirstCheckTime, model.FirstAvgTime},
		{model.SecondCheckOper, model.SecondCheckTime, model.SecondAvgTime},
		{model.ThirdCheckOper, model.ThirdCheckTime, model.ThirdAvgTime},
	}
	for date, stats := range mediateView {
		var recheckItems []*model.StatItem
		// Derived averages first, then the raw counters — same order as before.
		for _, spec := range avgSpecs {
			if item, ok := avgCheckItem(stats, spec[0], spec[1], spec[2]); ok {
				recheckItems = append(recheckItems, item)
			}
		}
		for code, value := range stats {
			recheckItems = append(recheckItems, &model.StatItem{DataCode: code, Value: value})
		}
		statViews = append(statViews, &model.StatView{Date: date, Stats: recheckItems})
	}
	return
}
// dailyArchiveStat fetches one day's archive stat rows and folds them into
// a date -> statType -> summed-value map; an empty day yields a nil map
// without error.
func (s *Service) dailyArchiveStat(c context.Context, business int, typeIDS []int64, statTypes []int64, uids []int64, statDate time.Time) (mediateView map[int64]map[int]int64, err error) {
	statNodes, err := s.dao.StatArchiveStat(c, business, typeIDS, uids, statTypes, statDate)
	if err != nil || len(statNodes) == 0 {
		return
	}
	mediateView = make(map[int64]map[int]int64)
	for _, node := range statNodes {
		day := node.StatDate.Time().Unix()
		perType, ok := mediateView[day]
		if !ok {
			perType = make(map[int]int64)
			mediateView[day] = perType
		}
		perType[node.StatType] += node.StatValue
	}
	return
}
// TagRecheck aggregates channel/tag recheck stats day by day over
// [startDate, endDate] (unix seconds, both ends inclusive), optionally
// filtered by comma-separated auditor unames.
func (s *Service) TagRecheck(c context.Context, startDate int64, endDate int64, unames string) (tagViews []*model.StatView, err error) {
	start := time.Unix(startDate, 0)
	end := time.Unix(endDate, 0)
	var uids []int64
	var uname2uid map[string]int64
	if len(unames) != 0 {
		// Resolve usernames to uids for the dao filter.
		uname2uid, err = s.dao.GetUIDByNames(c, unames)
		if err != nil {
			return
		}
		for _, uid := range uname2uid {
			uids = append(uids, uid)
		}
	}
	// FIX: each stat type listed exactly once. The previous slice repeated
	// TagRecheckTotalCount and TagRecheckTotalTime, bloating the SQL IN
	// clause without changing the result set.
	statTypes := []int64{model.TagRecheckTotalTime, model.TagRecheckTotalCount, model.TagChangeCount}
	var statViews []*model.StatView
	// One dao query per day across the closed date range.
	for !start.After(end) {
		statViews, err = s.dailyStatTagRecheck(c, model.TagRecheck, statTypes, uids, start)
		if err != nil {
			return
		}
		tagViews = append(tagViews, statViews...)
		start = start.AddDate(0, 0, 1)
	}
	return
}
// dailyStatTagRecheck loads one day of tag-recheck aggregates (no type-id
// filter) and converts them into StatView models; an empty day yields a nil
// slice without error.
func (s *Service) dailyStatTagRecheck(c context.Context, business int, statTypes []int64, uids []int64, statDate time.Time) (statViews []*model.StatView, err error) {
	mediateView, err := s.dailyArchiveStat(c, business, []int64{}, statTypes, uids, statDate)
	if err != nil || len(mediateView) == 0 {
		return
	}
	statViews = makeUpTagRecheck(mediateView)
	return
}
// makeUpTagRecheck converts the per-day/per-type aggregate map into StatView
// models, deriving the average tag-save time from the total time and
// operation count (0 when the count is 0).
func makeUpTagRecheck(mediateView map[int64]map[int]int64) (statViews []*model.StatView) {
	for date, stats := range mediateView {
		var items []*model.StatItem
		count, hasCount := stats[model.TagRecheckTotalCount]
		total, hasTotal := stats[model.TagRecheckTotalTime]
		if hasCount && hasTotal {
			avg := &model.StatItem{DataCode: model.TagRecheckAvgTime}
			if count != 0 {
				// Integer division: the average truncates.
				avg.Value = total / count
			}
			items = append(items, avg)
		}
		for code, value := range stats {
			items = append(items, &model.StatItem{DataCode: code, Value: value})
		}
		statViews = append(statViews, &model.StatView{Date: date, Stats: items})
	}
	return
}
// Recheck123 aggregates first/second/third recheck move-in/move-out stats
// day by day over [startDate, endDate] (unix seconds, both ends inclusive),
// optionally filtered by archive type ids. No uid filter applies.
func (s *Service) Recheck123(c context.Context, startDate int64, endDate int64, typeIDS []int64) (recheckView []*model.StatView, err error) {
	start := time.Unix(startDate, 0)
	end := time.Unix(endDate, 0)
	emptyUids := []int64{}
	statTypes := []int64{model.FIRST_RECHECK_IN, model.FIRST_RECHECK_OUT, model.SECOND_RECHECK_IN, model.SECOND_RECHECK_OUT, model.THIRD_RECHECK_IN, model.THIRD_RECHECK_OUT}
	var statViews []*model.StatView
	// One dao query per day across the closed date range.
	for !start.After(end) {
		statViews, err = s.dailyStatArchiveStreamStat(c, model.Recheck123, typeIDS, emptyUids, statTypes, start)
		if err != nil {
			return
		}
		recheckView = append(recheckView, statViews...)
		start = start.AddDate(0, 0, 1)
	}
	return
}
// dailyStatArchiveStreamStat loads one day of archive stream stat rows and
// folds them into per-day StatView models (date -> statType -> summed value).
// FIX: the business parameter was previously ignored — the dao call
// hard-coded model.Recheck123. It now honors the caller's value; this is
// backward-compatible because the only current caller passes model.Recheck123.
func (s *Service) dailyStatArchiveStreamStat(c context.Context, business int, typeIDS []int64, uids []int64, statTypes []int64, statDate time.Time) (statViews []*model.StatView, err error) {
	statNodes, err := s.dao.StatArchiveStatStream(c, business, typeIDS, uids, statTypes, statDate)
	if err != nil || len(statNodes) == 0 {
		return
	}
	// date -> statType -> summed value.
	mediateView := make(map[int64]map[int]int64)
	for _, v := range statNodes {
		day := v.StatDate.Time().Unix()
		perType, ok := mediateView[day]
		if !ok {
			perType = make(map[int]int64)
			mediateView[day] = perType
		}
		perType[v.StatType] += v.StatValue
	}
	for k1, v1 := range mediateView {
		var statItems []*model.StatItem
		for k2, v2 := range v1 {
			statItems = append(statItems, &model.StatItem{DataCode: k2, Value: v2})
		}
		statViews = append(statViews, &model.StatView{Date: k1, Stats: statItems})
	}
	return
}
// wrap regroups an hour-timestamp-keyed cargo map into per-day CargoView
// rows ("2006-01-02" date string -> hour-of-day -> merged item), the JSON
// shape agreed with the web front end to reduce its post-processing work.
func wrap(cargoMap map[int64]*model.CargoItem) (views []*model.CargoView) {
	byDay := make(map[string]map[int]*model.CargoItem)
	for ts, item := range cargoMap {
		statDate := time.Unix(ts, 0)
		day := statDate.Format("2006-01-02")
		hour := statDate.Hour()
		hours, ok := byDay[day]
		if !ok {
			hours = make(map[int]*model.CargoItem)
			byDay[day] = hours
		}
		if cur, ok := hours[hour]; ok {
			cur.ReceiveValue += item.ReceiveValue
			cur.AuditValue += item.AuditValue
		} else {
			hours[hour] = &model.CargoItem{
				ReceiveValue: item.ReceiveValue,
				AuditValue:   item.AuditValue,
			}
		}
	}
	for day, hours := range byDay {
		views = append(views, &model.CargoView{
			Date: day,
			Data: hours,
		})
	}
	return
}
// CsvAuditCargo renders the per-auditor archive cargo audit data as CSV
// bytes for download.
func (s *Service) CsvAuditCargo(c context.Context, startDate int64, endDate int64, unames string) (res []byte, err error) {
	wrappers, lineWidth, err := s.AuditorCargoList(c, startDate, endDate, unames)
	if err != nil {
		return
	}
	data := formatAuditCargo(wrappers, lineWidth)
	return FormatCSV(data)
}
// AuditorCargoList queries per-auditor archive audit cargo (received/audited
// counts) hour by hour over [startDate, endDate] (unix seconds). When unames
// is empty, every auditor found in the range is included and resolved back
// to a username. lineWidth counts the distinct (uid, hour) cells and is used
// to size CSV rows.
func (s *Service) AuditorCargoList(c context.Context, startDate int64, endDate int64, unames string) (wrappers []*model.CargoViewWrapper, lineWidth int, err error) {
	start := time.Unix(startDate, 0)
	end := time.Unix(endDate, 0)
	var uids []int64
	var name2uid map[string]int64
	uid2name := make(map[int64]string)
	if len(unames) != 0 {
		// Restrict the query to the requested auditors.
		name2uid, err = s.dao.GetUIDByNames(c, unames)
		if err != nil {
			return
		}
		for name, uid := range name2uid {
			uids = append(uids, uid)
			uid2name[uid] = name
		}
	}
	// Fetch hour buckets one hour at a time across the whole range.
	var items, itemsBlock []*model.CargoDetail
	for !start.After(end) {
		itemsBlock, err = s.dao.QueryArchiveCargo(c, start, uids)
		if err != nil {
			return
		}
		items = append(items, itemsBlock...)
		start = start.Add(time.Hour * 1)
	}
	if len(items) == 0 {
		return
	}
	// uid -> hour timestamp -> merged cargo item.
	mediateViews := make(map[int64]map[int64]*model.CargoItem)
	uidMap := make(map[int64]bool)
	for _, v := range items {
		k1 := v.UID
		k2 := v.StatDate.Time().Unix()
		uidMap[k1] = true
		if v1, ok := mediateViews[k1]; ok {
			if v2, ok := v1[k2]; ok {
				v2.ReceiveValue = v2.ReceiveValue + v.ReceiveValue
				v2.AuditValue = v2.AuditValue + v.AuditValue
				mediateViews[k1][k2] = v2
			} else {
				// New (uid, hour) cell.
				lineWidth = lineWidth + 1
				mediateViews[k1][k2] = &model.CargoItem{
					ReceiveValue: v.ReceiveValue,
					AuditValue:   v.AuditValue,
				}
			}
		} else {
			lineWidth = lineWidth + 1
			mediateViews[k1] = map[int64]*model.CargoItem{
				k2: {
					ReceiveValue: v.ReceiveValue,
					AuditValue:   v.AuditValue,
				},
			}
		}
	}
	if len(unames) == 0 {
		// No uname filter was given: resolve the usernames of every uid seen.
		for uid := range uidMap {
			uids = append(uids, uid)
		}
		uid2name, err = s.dao.GetUNamesByUids(c, uids)
		if err != nil {
			return
		}
	}
	// Wrap each auditor's hour map into per-day views tagged with the username.
	for k, v := range mediateViews {
		cargoViews := wrap(v)
		for _, v := range cargoViews {
			wrappers = append(wrappers, &model.CargoViewWrapper{
				Username:  uid2name[k],
				CargoView: v,
			})
		}
	}
	return
}
// CsvRandomVideoAudit is download random video audit statistic data by csv file type.
// Rows are ordered newest date first before rendering.
func (s *Service) CsvRandomVideoAudit(c context.Context, startDate int64, endDate int64, unames string, typeIDS []int64) (res []byte, err error) {
	statViewExts, lineWidth, err := s.RandomVideo(c, startDate, endDate, typeIDS, unames)
	if err != nil {
		return
	}
	// descending by stat date
	sort.Slice(statViewExts, func(i, j int) bool { return statViewExts[i].Date > statViewExts[j].Date })
	return FormatCSV(formatVideoAuditStat(statViewExts, lineWidth))
}
// CsvFixedVideoAudit is download fixed video audit statistic data by csv file type.
// Rows are ordered newest date first before rendering.
func (s *Service) CsvFixedVideoAudit(c context.Context, startDate int64, endDate int64, unames string, typeIDS []int64) (res []byte, err error) {
	statViewExts, lineWidth, err := s.FixedVideo(c, startDate, endDate, typeIDS, unames)
	if err != nil {
		return
	}
	// descending by stat date
	sort.Slice(statViewExts, func(i, j int) bool { return statViewExts[i].Date > statViewExts[j].Date })
	return FormatCSV(formatVideoAuditStat(statViewExts, lineWidth))
}
// RandomVideo is stat random video type.
// It collects daily random-video audit stats over the closed date range
// [startDate, endDate]; lineWidth is the total row count across all days.
func (s *Service) RandomVideo(c context.Context, startDate int64, endDate int64, typeIDS []int64, uname string) (statViewExts []*model.StatViewExt, lineWidth int, err error) {
	end := time.Unix(endDate, 0)
	for cur := time.Unix(startDate, 0); !cur.After(end); cur = cur.AddDate(0, 0, 1) {
		var (
			viewExts []*model.StatViewExt
			width    int
		)
		if viewExts, width, err = s.videoAudit(c, model.RandomVideoAudit, cur, typeIDS, uname); err != nil {
			return
		}
		lineWidth += width
		statViewExts = append(statViewExts, viewExts...)
	}
	return
}
// FixedVideo is stat fixed video type.
// It collects daily fixed-video audit stats over the closed date range
// [startDate, endDate]; lineWidth is the total row count across all days.
func (s *Service) FixedVideo(c context.Context, startDate int64, endDate int64, typeIDS []int64, uname string) (statViewExts []*model.StatViewExt, lineWidth int, err error) {
	end := time.Unix(endDate, 0)
	for cur := time.Unix(startDate, 0); !cur.After(end); cur = cur.AddDate(0, 0, 1) {
		var (
			viewExts []*model.StatViewExt
			width    int
		)
		if viewExts, width, err = s.videoAudit(c, model.FixedVideoAudit, cur, typeIDS, uname); err != nil {
			return
		}
		lineWidth += width
		statViewExts = append(statViewExts, viewExts...)
	}
	return
}
// videoAudit loads one day's audit stat rows for the given business type and
// converts them into per-auditor stat views.
// When unames is empty the query is unfiltered and an "ALL" aggregate row is
// requested from statNode2ViewExt (needAll == true); otherwise only the named
// auditors are queried and no aggregate row is produced.
func (s *Service) videoAudit(c context.Context, business int, statDate time.Time, typeIDS []int64, unames string) (viewExts []*model.StatViewExt, lineWidth int, err error) {
	var uids []int64
	var res map[string]int64
	needAll := true
	if len(unames) != 0 {
		needAll = false
		res, err = s.dao.GetUIDByNames(c, unames)
		if err != nil {
			return
		}
		for _, uid := range res {
			uids = append(uids, uid)
		}
	}
	// empty statTypes slice means "all stat types" at the dao layer —
	// TODO confirm against dao.StatArchiveStat
	statNodes, err := s.dao.StatArchiveStat(c, business, typeIDS, uids, []int64{}, statDate)
	if err != nil || len(statNodes) == 0 {
		// no rows is not an error: return empty views
		return
	}
	return s.statNode2ViewExt(c, statNodes, needAll)
}
// statNode2ViewExt regroups flat stat rows into per-day, per-auditor views.
// Grouping keys: k1 = stat date (unix seconds), k2 = auditor uid, k3 = stat
// type code. When needAll is true an aggregate pseudo-auditor row (ALLUID,
// displayed as ALLNAME and sorted first) is maintained alongside the per-uid
// rows. lineWidth counts the (date, uid) rows produced, which callers use to
// pre-size CSV output. It also derives WaitAuditAvgTime from the accumulated
// wait duration and wait count.
func (s *Service) statNode2ViewExt(c context.Context, statNodes []*model.StatNode, needAll bool) (statViewsExts []*model.StatViewExt, lineWidth int, err error) {
	// mediateViews: date -> uid -> stat code -> accumulated value
	mediateViews := make(map[int64]map[int64]map[int]int64)
	uidMap := make(map[int64]bool)
	var uids []int64
	for _, v := range statNodes {
		k1 := v.StatDate.Time().Unix()
		k2 := v.UID
		k3 := v.StatType
		newValue := v.StatValue
		uidMap[k2] = true
		if v1, ok := mediateViews[k1]; ok {
			if needAll {
				// maintain the ALL aggregate alongside per-uid rows
				if allV2, ok := v1[ALLUID]; ok {
					allV2[k3] += newValue
				} else {
					lineWidth++
					v1[ALLUID] = map[int]int64{k3: newValue}
				}
			}
			if v2, ok := v1[k2]; ok {
				v2[k3] += newValue
			} else {
				lineWidth++
				v1[k2] = map[int]int64{k3: newValue}
			}
		} else {
			lineWidth++
			mediateViews[k1] = map[int64]map[int]int64{k2: {k3: newValue}}
			if needAll {
				lineWidth++
				// BUGFIX: the original assigned a brand-new date map here
				// (mediateViews[k1] = map[...]{ALLUID: ...}), discarding the
				// per-uid entry created just above; insert the aggregate
				// entry into the existing date map instead.
				mediateViews[k1][ALLUID] = map[int]int64{k3: newValue}
			}
		}
	}
	// resolve uid -> username for display
	for uid := range uidMap {
		uids = append(uids, uid)
	}
	uid2name, err := s.dao.GetUNamesByUids(c, uids)
	if err != nil {
		return
	}
	if needAll {
		uid2name[ALLUID] = ALLNAME
	}
	// derive average wait time = total wait duration / number of waits
	// (missing codes read as 0, matching the original scan over v2)
	for _, v1 := range mediateViews {
		for _, v2 := range v1 {
			numeratorValue := v2[model.WaitAuditDuration]
			denominatorValue := v2[model.WaitAuditOper]
			if denominatorValue == 0 {
				v2[model.WaitAuditAvgTime] = 0
			} else {
				v2[model.WaitAuditAvgTime] = numeratorValue / denominatorValue
			}
		}
	}
	for k1, v1 := range mediateViews {
		var wraps []*model.StatItemExt
		for k2, v2 := range v1 {
			var statItems []*model.StatItem
			for k3, v3 := range v2 {
				statItems = append(statItems, &model.StatItem{
					DataCode: k3,
					Value:    v3,
				})
			}
			wraps = append(wraps, &model.StatItemExt{
				Uname: uid2name[k2],
				Stats: statItems,
			})
		}
		// the ALL aggregate row sorts first, then ascending by username
		sort.Slice(wraps, func(i, j int) bool {
			if wraps[i].Uname == ALLNAME {
				return true
			}
			if wraps[j].Uname == ALLNAME {
				return false
			}
			return wraps[i].Uname < wraps[j].Uname
		})
		statViewsExts = append(statViewsExts, &model.StatViewExt{
			Date:  k1,
			Wraps: wraps,
		})
	}
	return
}

View File

@@ -0,0 +1,337 @@
package service
import (
"context"
"go-common/app/admin/main/laser/model"
"testing"
"time"
"github.com/smartystreets/goconvey/convey"
)
// The tests below are goconvey smoke tests: each one calls a service method
// with zero-value arguments and asserts nothing, so they only verify that the
// call completes without panicking against the environment configured in the
// package's init(). NOTE(review): return values and errors are intentionally
// unchecked — these do not validate behavior, only basic wiring.

// Smoke test for Service.ArchiveRecheck with zero-value inputs.
func TestServiceArchiveRecheck(t *testing.T) {
	convey.Convey("ArchiveRecheck", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			typeIDS   = []int64{}
			unames    = ""
			startDate = int64(0)
			endDate   = int64(0)
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.ArchiveRecheck(c, typeIDS, unames, startDate, endDate)
			convCtx.Convey("Then err should be nil.recheckViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.UserRecheck with zero-value inputs.
func TestServiceUserRecheck(t *testing.T) {
	convey.Convey("UserRecheck", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			typeIDS   = []int64{}
			unames    = ""
			startDate = int64(0)
			endDate   = int64(0)
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.UserRecheck(c, typeIDS, unames, startDate, endDate)
			convCtx.Convey("Then err should be nil.recheckViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported dailyStatArchiveRecheck helper.
func TestServiceDailyStatArchiveRecheck(t *testing.T) {
	convey.Convey("dailyStatArchiveRecheck", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			typeIDS   = []int64{}
			statTypes = []int64{}
			uids      = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.dailyStatArchiveRecheck(c, business, typeIDS, statTypes, uids, statDate)
			convCtx.Convey("Then err should be nil.statViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for makeUpArchiveRecheck with a nil input map.
func TestServicemakeUpArchiveRecheck(t *testing.T) {
	convey.Convey("makeUpArchiveRecheck", t, func(convCtx convey.C) {
		var (
			mediateView map[int64]map[int]int64
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			makeUpArchiveRecheck(mediateView)
			convCtx.Convey("Then statViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported dailyArchiveStat helper.
func TestServiceDailyStatArchiveStat(t *testing.T) {
	convey.Convey("dailyArchiveStat", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			typeIDS   = []int64{}
			statTypes = []int64{}
			uids      = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.dailyArchiveStat(c, business, typeIDS, statTypes, uids, statDate)
			convCtx.Convey("Then err should be nil.mediateView should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.TagRecheck with zero-value inputs.
func TestServiceTagRecheck(t *testing.T) {
	convey.Convey("TagRecheck", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			unames    = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.TagRecheck(c, startDate, endDate, unames)
			convCtx.Convey("Then err should be nil.tagViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported dailyStatTagRecheck helper.
func TestServiceDailyStatTagRecheck(t *testing.T) {
	convey.Convey("dailyStatTagRecheck", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			statTypes = []int64{}
			uids      = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.dailyStatTagRecheck(c, business, statTypes, uids, statDate)
			convCtx.Convey("Then err should be nil.statViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for makeUpTagRecheck with a nil input map.
func TestServicemakeUpTagRecheck(t *testing.T) {
	convey.Convey("makeUpTagRecheck", t, func(convCtx convey.C) {
		var (
			mediateView map[int64]map[int]int64
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			makeUpTagRecheck(mediateView)
			convCtx.Convey("Then statViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.Recheck123 with zero-value inputs.
func TestServiceRecheck123(t *testing.T) {
	convey.Convey("Recheck123", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			typeIDS   = []int64{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.Recheck123(c, startDate, endDate, typeIDS)
			convCtx.Convey("Then err should be nil.recheckView should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported dailyStatArchiveStreamStat helper.
func TestServiceDailyStatArchiveStreamStat(t *testing.T) {
	convey.Convey("dailyStatArchiveStreamStat", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			business  = int(0)
			typeIDS   = []int64{}
			uids      = []int64{}
			statTypes = []int64{}
			statDate  = time.Now()
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.dailyStatArchiveStreamStat(c, business, typeIDS, uids, statTypes, statDate)
			convCtx.Convey("Then err should be nil.statViews should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the wrap helper with a nil input map.
func TestServicewrap(t *testing.T) {
	convey.Convey("wrap", t, func(convCtx convey.C) {
		var (
			cargoMap map[int64]*model.CargoItem
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			wrap(cargoMap)
			convCtx.Convey("Then views should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.CsvAuditCargo with zero-value inputs.
func TestServiceCsvAuditCargo(t *testing.T) {
	convey.Convey("CsvAuditCargo", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			unames    = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.CsvAuditCargo(c, startDate, endDate, unames)
			convCtx.Convey("Then err should be nil.res should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.AuditorCargoList with zero-value inputs.
func TestServiceAuditorCargoList(t *testing.T) {
	convey.Convey("AuditorCargoList", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			unames    = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.AuditorCargoList(c, startDate, endDate, unames)
			convCtx.Convey("Then err should be nil.wrappers,lineWidth should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.CsvRandomVideoAudit with zero-value inputs.
func TestServiceCsvRandomVideoAudit(t *testing.T) {
	convey.Convey("CsvRandomVideoAudit", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			unames    = ""
			typeIDS   = []int64{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.CsvRandomVideoAudit(c, startDate, endDate, unames, typeIDS)
			convCtx.Convey("Then err should be nil.res should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.CsvFixedVideoAudit with zero-value inputs.
func TestServiceCsvFixedVideoAudit(t *testing.T) {
	convey.Convey("CsvFixedVideoAudit", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			unames    = ""
			typeIDS   = []int64{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.CsvFixedVideoAudit(c, startDate, endDate, unames, typeIDS)
			convCtx.Convey("Then err should be nil.res should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.RandomVideo with zero-value inputs.
func TestServiceRandomVideo(t *testing.T) {
	convey.Convey("RandomVideo", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			typeIDS   = []int64{}
			uname     = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.RandomVideo(c, startDate, endDate, typeIDS, uname)
			convCtx.Convey("Then err should be nil.statViewExts,lineWidth should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for Service.FixedVideo with zero-value inputs.
func TestServiceFixedVideo(t *testing.T) {
	convey.Convey("FixedVideo", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			startDate = int64(0)
			endDate   = int64(0)
			typeIDS   = []int64{}
			uname     = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.FixedVideo(c, startDate, endDate, typeIDS, uname)
			convCtx.Convey("Then err should be nil.statViewExts,lineWidth should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported videoAudit helper.
func TestServiceVideoAudit(t *testing.T) {
	convey.Convey("videoAudit", t, func(convCtx convey.C) {
		var (
			c        = context.Background()
			business = int(0)
			statDate = time.Now()
			typeIDS  = []int64{}
			unames   = ""
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.videoAudit(c, business, statDate, typeIDS, unames)
			convCtx.Convey("Then err should be nil.viewExts,lineWidth should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for the unexported statNode2ViewExt helper.
func TestServicestatNode2ViewExt(t *testing.T) {
	convey.Convey("statNode2ViewExt", t, func(convCtx convey.C) {
		var (
			c         = context.Background()
			statNodes = []*model.StatNode{}
			needALL   = false
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			s.statNode2ViewExt(c, statNodes, needALL)
			convCtx.Convey("Then err should be nil.statViewsExts,lineWidth should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,110 @@
package service
import (
"bytes"
"encoding/csv"
"fmt"
"go-common/app/admin/main/laser/model"
"strconv"
"time"
)
var (
	// csvMetaNodes drives the CSV layout of the video-audit stat exports:
	// Index is the column position, Name the header cell, and DataCode the
	// stat code whose value fills that column. DataCodes 25/26 are synthetic
	// codes for the date and operator columns (filled from the row key, not
	// from stat values).
	csvMetaNodes = []model.CsvMetaNode{
		{Index: 0, Name: "日期", DataCode: 25},
		{Index: 1, Name: "操作人", DataCode: 26},
		{Index: 2, Name: "总操视频量", DataCode: model.TotalVideo},
		{Index: 3, Name: "总操作次数", DataCode: model.TotalVideoOper},
		{Index: 4, Name: "开放浏视频量", DataCode: model.OpenVideo},
		{Index: 5, Name: "开放浏览操作次数", DataCode: model.OpenVideoOper},
		{Index: 6, Name: "会员可视频量", DataCode: model.VipAccessVideo},
		{Index: 7, Name: "会员可见操作次数", DataCode: model.VipAccessVideoOper},
		{Index: 8, Name: "打视频量", DataCode: model.RejectVideo},
		{Index: 9, Name: "打回操作次数", DataCode: model.RejectVideoOper},
		{Index: 10, Name: "锁视频量", DataCode: model.LockVideo},
		{Index: 11, Name: "锁定操作次数", DataCode: model.LockVideoOper},
		{Index: 12, Name: "通过视频总时长", DataCode: model.PassVideoTotalDuration},
		{Index: 13, Name: "未通过视频总时长", DataCode: model.FailVideoTotalDuration},
		{Index: 14, Name: "视频提交到进入待审平均响应时间", DataCode: model.WaitAuditAvgTime},
		{Index: 15, Name: "视频提交到进入待审时间", DataCode: model.WaitAuditDuration},
		{Index: 16, Name: "视频提交到进入待审次数", DataCode: model.WaitAuditOper},
	}
)
// FormatCSV format to csv data.
// A UTF-8 BOM is prepended when there is at least one record so spreadsheet
// applications (notably Excel) detect the encoding correctly.
func FormatCSV(records [][]string) (data []byte, err error) {
	var buf bytes.Buffer
	if len(records) > 0 {
		// UTF-8 byte order mark
		buf.WriteString("\xEF\xBB\xBF")
	}
	if err = csv.NewWriter(&buf).WriteAll(records); err != nil {
		return
	}
	return buf.Bytes(), nil
}
// formatAuditCargo flattens per-auditor cargo views into CSV records with a
// fixed header row (username, hour slot, received count, audited count).
// lineWidth is the caller's row-count hint and is used only to pre-size the
// result. The original pre-allocated data to exactly lineWidth+1 and indexed
// with a running counter, which panicked when the hint understated the real
// row count and emitted blank CSV lines when it overstated it (the hint is
// counted per unix timestamp upstream while rows here are per (date, hour));
// building with append is immune to both.
func formatAuditCargo(wrappers []*model.CargoViewWrapper, lineWidth int) (data [][]string) {
	if len(wrappers) == 0 {
		return
	}
	data = make([][]string, 0, lineWidth+1)
	data = append(data, []string{"username", "审核时间段", "接收量", "完成量"})
	for _, w := range wrappers {
		for hour, item := range w.Data {
			data = append(data, []string{
				w.Username,
				fmt.Sprintf("%s %d:00:00", w.Date, hour),
				strconv.FormatInt(item.ReceiveValue, 10),
				strconv.FormatInt(item.AuditValue, 10),
			})
		}
	}
	return
}
// formatVideoAuditStat renders per-day, per-auditor video audit stats as CSV
// records. The header row and the column position of every stat come from
// csvMetaNodes (DataCode -> column index); columns 0 and 1 hold the date and
// the auditor name. lineWidth is only a capacity hint.
//
// Fixes over the original:
//   - rows had date/uname prepended to an already full-width slice, producing
//     rowHeight+2 cells against a rowHeight-wide header (two stray trailing
//     empty columns on every record); rows are now exactly header-width.
//   - rows are appended instead of index-assigned, so a lineWidth mismatch
//     can no longer leave nil (blank) records.
//   - durations render zero-padded as H:MM:SS ("1:02:03" instead of "1:2:3").
func formatVideoAuditStat(statViewExts []*model.StatViewExt, lineWidth int) (data [][]string) {
	if lineWidth <= 0 {
		return
	}
	rowHeight := len(csvMetaNodes)
	titles := make([]string, rowHeight)
	cursorMap := make(map[int]int)
	for _, node := range csvMetaNodes {
		titles[node.Index] = node.Name
		cursorMap[node.DataCode] = node.Index
	}
	data = make([][]string, 0, lineWidth+1)
	data = append(data, titles)
	for _, view := range statViewExts {
		date := time.Unix(view.Date, 0).Format("2006-01-02")
		for _, w := range view.Wraps {
			row := make([]string, rowHeight)
			row[0] = date
			row[1] = w.Uname
			for _, stat := range w.Stats {
				cursor, ok := cursorMap[stat.DataCode]
				if !ok {
					continue
				}
				switch stat.DataCode {
				case model.WaitAuditAvgTime, model.WaitAuditDuration, model.PassVideoTotalDuration, model.FailVideoTotalDuration:
					// duration values are seconds; render as H:MM:SS
					row[cursor] = fmt.Sprintf("%d:%02d:%02d", stat.Value/3600, stat.Value%3600/60, stat.Value%60)
				default:
					row[cursor] = strconv.FormatInt(stat.Value, 10)
				}
			}
			data = append(data, row)
		}
	}
	return
}

View File

@@ -0,0 +1,52 @@
package service
import (
"go-common/app/admin/main/laser/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// Goconvey smoke tests for the CSV helpers: each calls the helper with empty
// inputs and asserts nothing, verifying only that the call does not panic.
// NOTE(review): outputs are unchecked — these do not validate formatting.

// Smoke test for FormatCSV with an empty record set.
func TestServiceFormatCSV(t *testing.T) {
	convey.Convey("FormatCSV", t, func(convCtx convey.C) {
		var (
			records = [][]string{}
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			FormatCSV(records)
			convCtx.Convey("Then err should be nil.data should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for formatAuditCargo with empty inputs.
func TestServiceformatAuditCargo(t *testing.T) {
	convey.Convey("formatAuditCargo", t, func(convCtx convey.C) {
		var (
			wrappers  = []*model.CargoViewWrapper{}
			lineWidth = int(0)
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			formatAuditCargo(wrappers, lineWidth)
			convCtx.Convey("Then data should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

// Smoke test for formatVideoAuditStat with empty inputs.
func TestServiceformatVideoAuditStat(t *testing.T) {
	convey.Convey("formatVideoAuditStat", t, func(convCtx convey.C) {
		var (
			statViewExts = []*model.StatViewExt{}
			lineWidth    = int(0)
		)
		convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
			formatVideoAuditStat(statViewExts, lineWidth)
			convCtx.Convey("Then data should not be nil.", func(convCtx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,33 @@
package service
import (
"context"
"go-common/app/admin/main/laser/conf"
"go-common/app/admin/main/laser/dao"
)
// Service struct wires the laser-admin business logic to its configuration
// and data-access layer.
type Service struct {
	conf *conf.Config // application configuration
	dao  *dao.Dao     // data-access layer (DB / cache)
}

// New is new instance of Service built from the given configuration.
func New(c *conf.Config) (s *Service) {
	s = &Service{
		conf: c,
		dao:  dao.New(c),
	}
	return
}

// Ping is check dao connected; used by the health-check endpoint.
func (s *Service) Ping(c context.Context) (err error) {
	return s.dao.Ping(c)
}

// Close is close dao connection; called on graceful shutdown.
func (s *Service) Close() (err error) {
	return s.dao.Close(context.TODO())
}

View File

@@ -0,0 +1,32 @@
package service
import (
"flag"
"go-common/app/admin/main/laser/conf"
"os"
)
var (
	// s is the package-wide Service instance shared by every test.
	s *Service
)

// init bootstraps the test configuration: when DEPLOY_ENV is set it pulls
// config from the remote config center (uat), otherwise it loads the local
// toml file next to the binary.
// NOTE(review): the conf_token below is a credential committed in source —
// consider moving it to an environment variable.
func init() {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "mobile.studio.laser-admin")
		flag.Set("conf_token", "25911b439f4636ce9083f91c4882dffa")
		flag.Set("tree_id", "19167")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../cmd/laser-admin.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	s = New(conf.Conf)
}

View File

@@ -0,0 +1,124 @@
package service
import (
"fmt"
"strings"
"time"
"context"
"go-common/app/admin/main/laser/model"
"go-common/library/log"
xtime "go-common/library/time"
)
// AddTask is add a log task.
// It refuses to add when an unfinished task (state 0) for the same mid
// already exists, and refreshes the task-info cache only after the row has
// been inserted successfully.
func (s *Service) AddTask(ctx context.Context, mid int64, username string, adminID int64, logDate int64, contactEmail string, platform int, sourceType int) (err error) {
	t, err := s.dao.FindTask(ctx, mid, 0)
	if err != nil {
		return
	}
	if t != nil {
		err = fmt.Errorf("存在该 mid:%d 未完成的任务,请先删除后再添加", mid)
		log.Error("s.AddTask() error(%v)", err)
		return
	}
	// BUGFIX: the layout was "2006-01-02 15:03:04", which puts the 12-hour
	// hour in the minute slot and drops seconds entirely; the Go reference
	// layout is "15:04:05".
	if _, err = s.dao.AddTask(ctx, mid, username, adminID, time.Unix(logDate, 0).Format("2006-01-02 15:04:05"), contactEmail, platform, sourceType); err != nil {
		// BUGFIX: previously the cache was refreshed even when the insert
		// failed, leaving a cache entry for a task that does not exist.
		return
	}
	// best-effort async cache refresh
	// NOTE(review): ctx is a request context and may be canceled before the
	// goroutine runs — confirm the dao tolerates that.
	go s.dao.AddTaskInfoCache(ctx, mid, &model.TaskInfo{
		MID:        mid,
		LogDate:    xtime.Time(logDate),
		SourceType: sourceType,
		Platform:   platform,
		Empty:      false,
	})
	return
}
// DeleteTask is delete a log task and evicts its task-info cache entry.
func (s *Service) DeleteTask(ctx context.Context, taskID int64, username string, adminID int64) (err error) {
	t, err := s.dao.QueryTaskInfoByIDSQL(ctx, taskID)
	if err != nil {
		return
	}
	// Guard: the dao may return (nil, nil) when the task id does not exist —
	// the original dereferenced t unconditionally and could panic.
	// TODO confirm QueryTaskInfoByIDSQL's not-found contract.
	if t != nil {
		go s.dao.RemoveTaskInfoCache(ctx, t.MID)
	}
	return s.dao.DeleteTask(ctx, taskID, username, adminID)
}
// QueryTask is query Task by query params.
// Zero-valued filters (mid, sourceType, platform, dates) are omitted from the
// WHERE clause; state is always applied. Paging defaults to the first 20 rows
// when pageNo/pageSize are not positive.
func (s *Service) QueryTask(ctx context.Context, mid int64, logDateStart int64, logDateEnd int64, sourceType int, platform int, state int, sortBy string, pageNo int, pageSize int) (tasks []*model.Task, count int64, err error) {
	var wherePairs []string
	wherePairs = append(wherePairs, "is_deleted = 0")
	wherePairs = append(wherePairs, fmt.Sprintf("state = %d", state))
	if sourceType > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("source_type = %d", sourceType))
	}
	if platform > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("platform = %d", platform))
	}
	if mid > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("mid = %d", mid))
	}
	// BUGFIX: the layout was "2006-01-02 15:03:04" (12-hour hour in the
	// minute slot, no seconds); the Go reference layout is "15:04:05".
	layout := "2006-01-02 15:04:05"
	if logDateStart > 0 && logDateEnd > 0 {
		start := time.Unix(logDateStart, 0).Format(layout)
		end := time.Unix(logDateEnd, 0).Format(layout)
		wherePairs = append(wherePairs, fmt.Sprintf(" log_date between '%s' and '%s' ", start, end))
	} else if logDateStart > 0 {
		start := time.Unix(logDateStart, 0).Format(layout)
		wherePairs = append(wherePairs, fmt.Sprintf("log_date >= '%s' ", start))
	}
	var queryStmt string
	if len(wherePairs) > 0 {
		queryStmt = strings.Join(wherePairs, " AND ")
	}
	sort := buildSortStmt(sortBy)
	var limit, offset int
	if pageNo > 0 && pageSize > 0 {
		offset = (pageNo - 1) * pageSize
		limit = pageSize
	} else {
		offset = 0
		limit = 20
	}
	return s.dao.QueryTask(ctx, queryStmt, sort, offset, limit)
}
// buildSortStmt translates the API sort parameter into an ORDER BY fragment.
// An empty sortBy defaults to "mtime Desc"; a leading '-' requests descending
// order on the named column; any other value yields an empty fragment
// (ascending sort is not supported by this API).
func buildSortStmt(sortBy string) (sort string) {
	switch {
	case sortBy == "":
		return "mtime Desc"
	case strings.HasPrefix(sortBy, "-"):
		return sortBy[1:] + " Desc"
	default:
		return ""
	}
}
// UpdateTask is update undone task where state = 0.
// All listed fields are overwritten; admin_id and username are updated only
// when provided. The task-info cache is refreshed best-effort in parallel.
func (s *Service) UpdateTask(ctx context.Context, username string, adminID int64, taskID int64, mid int64, logDate int64, contactEmail string, sourceType int, platform int) (err error) {
	var updatePairs []string
	updatePairs = append(updatePairs, fmt.Sprintf("source_type = %d", sourceType))
	updatePairs = append(updatePairs, fmt.Sprintf("platform = %d", platform))
	updatePairs = append(updatePairs, fmt.Sprintf("mid = %d", mid))
	// BUGFIX: the layout was "2006-01-02 15:03:04" (12-hour hour in the
	// minute slot, no seconds); the Go reference layout is "15:04:05".
	updatePairs = append(updatePairs, fmt.Sprintf("log_date = '%s' ", time.Unix(logDate, 0).Format("2006-01-02 15:04:05")))
	// NOTE(review): contactEmail and username are interpolated into SQL
	// unescaped — if they can contain quotes this is an injection risk;
	// parameterize in the dao layer.
	updatePairs = append(updatePairs, fmt.Sprintf("contact_email = '%s'", contactEmail))
	if adminID > 0 {
		updatePairs = append(updatePairs, fmt.Sprintf("admin_id = %d", adminID))
	}
	if len(username) != 0 {
		updatePairs = append(updatePairs, fmt.Sprintf("username = '%s' ", username))
	}
	updateStmt := strings.Join(updatePairs, ", ")
	// 0 = undone: only unfinished tasks may be edited
	state := 0
	// These parameters are always sent by the frontend (下列参数前端必传),
	// so the cache entry can be rebuilt from them directly.
	go s.dao.AddTaskInfoCache(ctx, mid, &model.TaskInfo{
		MID:        mid,
		LogDate:    xtime.Time(logDate),
		SourceType: sourceType,
		Platform:   platform,
		Empty:      false,
	})
	return s.dao.UpdateTask(ctx, taskID, state, updateStmt)
}

View File

@@ -0,0 +1,42 @@
package service
import (
"fmt"
"strings"
"context"
"go-common/app/admin/main/laser/model"
)
// QueryTaskLog is query the finished task.
// Zero-valued filters are omitted from the WHERE clause. Paging defaults to
// the first 20 rows when pageNo/pageSize are not positive.
func (s *Service) QueryTaskLog(ctx context.Context, mid int64, taskID int64, platform int, taskState int, sortBy string, pageNo int, pageSize int) (logs []*model.TaskLog, count int64, err error) {
	var wherePairs []string
	if mid > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("mid = %d", mid))
	}
	if taskID > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("task_id = %d", taskID))
	}
	if platform > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("platform = %d", platform))
	}
	if taskState > 0 {
		wherePairs = append(wherePairs, fmt.Sprintf("task_state = %d", taskState))
	}
	var queryStmt string
	if len(wherePairs) > 0 {
		queryStmt = "WHERE " + strings.Join(wherePairs, " AND ")
	}
	sort := buildSortStmt(sortBy)
	var limit, offset int
	// BUGFIX: the condition was pageNo >= 0, so pageNo == 0 slipped past the
	// default branch and produced offset = -pageSize (an invalid negative
	// SQL offset); require pageNo > 0, matching QueryTask.
	if pageNo > 0 && pageSize > 0 {
		offset = (pageNo - 1) * pageSize
		limit = pageSize
	} else {
		offset = 0
		limit = 20
	}
	logs, count, err = s.dao.QueryTaskLog(ctx, queryStmt, sort, offset, limit)
	return
}

View File

@@ -0,0 +1,29 @@
package service
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// Smoke test for Service.QueryTaskLog: calls the method with zero-value
// arguments and asserts nothing, verifying only that it does not panic.
// NOTE(review): the returned logs/count/err are unchecked.
func TestServiceQueryTaskLog(t *testing.T) {
	convey.Convey("QueryTaskLog", t, func(ctx convey.C) {
		var (
			c         = context.Background()
			mid       = int64(0)
			taskID    = int64(0)
			platform  = int(0)
			taskState = int(0)
			sortBy    = ""
			pageNo    = int(0)
			pageSize  = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			s.QueryTaskLog(c, mid, taskID, platform, taskState, sortBy, pageNo, pageSize)
			ctx.Convey("Then err should be nil.logs,count should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

View File

@@ -0,0 +1,103 @@
package service
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
)
// Goconvey smoke tests for the task service: each calls a method with
// zero-value arguments and asserts nothing, verifying only that the call
// completes without panicking. NOTE(review): results/errors are unchecked,
// except buildSortStmt whose output is asserted non-nil.

// Smoke test for Service.AddTask with zero-value inputs.
func TestServiceAddTask(t *testing.T) {
	convey.Convey("AddTask", t, func(ctx convey.C) {
		var (
			c            = context.Background()
			mid          = int64(0)
			username     = ""
			adminID      = int64(0)
			logDate      = int64(0)
			contactEmail = ""
			platform     = int(0)
			sourceType   = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			s.AddTask(c, mid, username, adminID, logDate, contactEmail, platform, sourceType)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}

// Smoke test for Service.DeleteTask with zero-value inputs.
func TestServiceDeleteTask(t *testing.T) {
	convey.Convey("DeleteTask", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			taskID   = int64(0)
			username = ""
			adminID  = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			s.DeleteTask(c, taskID, username, adminID)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}

// Smoke test for Service.QueryTask with zero-value inputs.
func TestServiceQueryTask(t *testing.T) {
	convey.Convey("QueryTask", t, func(ctx convey.C) {
		var (
			c            = context.Background()
			mid          = int64(0)
			logDateStart = int64(0)
			logDateEnd   = int64(0)
			sourceType   = int(0)
			platform     = int(0)
			state        = int(0)
			sortBy       = ""
			pageNo       = int(0)
			pageSize     = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			s.QueryTask(c, mid, logDateStart, logDateEnd, sourceType, platform, state, sortBy, pageNo, pageSize)
			ctx.Convey("Then err should be nil.tasks,count should not be nil.", func(ctx convey.C) {
			})
		})
	})
}

// buildSortStmt("") returns the default sort; only the non-nil shape of the
// result is asserted here.
func TestServicebuildSortStmt(t *testing.T) {
	convey.Convey("buildSortStmt", t, func(ctx convey.C) {
		var (
			sortBy = ""
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			sort := buildSortStmt(sortBy)
			ctx.Convey("Then sort should not be nil.", func(ctx convey.C) {
				ctx.Convey(sort, convey.ShouldNotBeNil)
			})
		})
	})
}

// Smoke test for Service.UpdateTask with zero-value inputs.
func TestServiceUpdateTask(t *testing.T) {
	convey.Convey("UpdateTask", t, func(ctx convey.C) {
		var (
			c            = context.Background()
			username     = ""
			adminID      = int64(0)
			taskID       = int64(0)
			mid          = int64(0)
			logDate      = int64(0)
			contactEmail = ""
			sourceType   = int(0)
			platform     = int(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			s.UpdateTask(c, username, adminID, taskID, mid, logDate, contactEmail, sourceType, platform)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			})
		})
	})
}