
Logging, files

huangyan 10 months ago
parent
commit
078922e21c
9 changed files with 1546 additions and 32 deletions
  1. configs/config.yaml  +3 -0
  2. configs/setting.go  +46 -0
  3. global/setting.go  +22 -0
  4. go.mod  +7 -7
  5. go.sum  +1269 -1
  6. main.go  +36 -24
  7. model/model.go  +16 -0
  8. simple_zap/simple_zap.go  +134 -0
  9. utils/md5.go  +13 -0

+ 3 - 0
configs/config.yaml

@@ -0,0 +1,3 @@
+server:
+  # run mode: debug, release
+  port: ":6203"

+ 46 - 0
configs/setting.go

@@ -0,0 +1,46 @@
+package configs
+
+import "github.com/spf13/viper"
+
+type Settings struct {
+	vp *viper.Viper
+}
+
+var sections = make(map[string]interface{})
+
+// NewSetting loads the configuration from configs/config.yaml.
+func NewSetting() (*Settings, error) {
+	vp := viper.New()
+	vp.SetConfigName("config")
+	vp.AddConfigPath("configs")
+	vp.SetConfigType("yaml")
+	err := vp.ReadInConfig()
+	if err != nil {
+		return nil, err
+	}
+	s := &Settings{vp}
+	return s, nil
+}
+
+// ReadSection unmarshals the named section into v and remembers it for later reloads.
+func (s *Settings) ReadSection(k string, v interface{}) error {
+	err := s.vp.UnmarshalKey(k, v)
+	if err != nil {
+		return err
+	}
+	if _, ok := sections[k]; !ok {
+		sections[k] = v
+	}
+	return nil
+}
+
+// ReloadAllSection re-reads every section previously loaded with ReadSection.
+func (s *Settings) ReloadAllSection() error {
+	for k, v := range sections {
+		err := s.ReadSection(k, v)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
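
Nothing in this commit calls ReloadAllSection yet. A typical way to drive it, shown here purely as a sketch and not as part of the commit, is viper's file watcher; the method below would live in this file and needs "github.com/fsnotify/fsnotify" added to the import block:

    // WatchSettingChange reloads every cached section whenever config.yaml changes on disk.
    // Sketch only; not part of this commit.
    func (s *Settings) WatchSettingChange() {
        go func() {
            s.vp.WatchConfig()
            s.vp.OnConfigChange(func(in fsnotify.Event) {
                _ = s.ReloadAllSection()
            })
        }()
    }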

+ 22 - 0
global/setting.go

@@ -0,0 +1,22 @@
+package global
+
+import "emqx.io/grpc/exhook/configs"
+
+// Server holds the server section of the configuration.
+type Server struct {
+	Port string `json:"port"`
+}
+
+var (
+	ServerSetting *Server
+)
+
+// SetupSetting reads the configuration into the package-level globals.
+func SetupSetting() error {
+	s, err := configs.NewSetting()
+	if err != nil {
+		return err
+	}
+	err = s.ReadSection("Server", &ServerSetting)
+	if err != nil {
+		return err
+	}
+	return nil
+}

+ 7 - 7
go.mod

@@ -5,11 +5,11 @@ go 1.11
 replace emqx.io/grpc/exhook => ./
 
 require (
-	github.com/bytedance/sonic v1.11.6 // indirect
-	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
-	github.com/golang/protobuf v1.5.0
-	github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
-	github.com/pkg/errors v0.9.1 // indirect
-	google.golang.org/grpc v1.36.0
-	google.golang.org/protobuf v1.27.1
+	github.com/bytedance/sonic v1.11.6
+	github.com/patrickmn/go-cache v2.1.0+incompatible
+	github.com/pkg/errors v0.9.1
+	github.com/spf13/viper v1.18.2
+	go.uber.org/zap v1.27.0
+	google.golang.org/grpc v1.59.0
+	google.golang.org/protobuf v1.31.0
 )

File diff suppressed because it is too large
+ 1269 - 1
go.sum


+ 36 - 24
main.go

@@ -2,11 +2,16 @@ package main
 
 import (
 	"context"
+	"emqx.io/grpc/exhook/global"
+	"emqx.io/grpc/exhook/model"
 	pb "emqx.io/grpc/exhook/protobuf"
-	"encoding/json"
+	"emqx.io/grpc/exhook/simple_zap"
+	exhook "emqx.io/grpc/exhook/utils"
 	"fmt"
+	"github.com/bytedance/sonic"
 	"github.com/patrickmn/go-cache"
 	"github.com/pkg/errors"
+	"go.uber.org/zap"
 	"google.golang.org/grpc"
 	"log"
 	"net"
@@ -14,11 +19,14 @@ import (
 	"time"
 )
 
-const (
-	port = ":9000"
-)
+func init() {
+	err := global.SetupSetting()
+	if err != nil {
+		log.Fatalf("init setting err: %v", err)
+	}
+}
 
-var cacheInstance = cache.New(5*time.Minute, 10*time.Minute)
+var cacheInstance = cache.New(1*time.Minute, 2*time.Minute)
 
 type server struct {
 	pb.UnimplementedHookProviderServer
@@ -32,36 +40,35 @@ func (s *server) OnProviderLoaded(ctx context.Context, in *pb.ProviderLoadedRequ
 }
 
 func (s *server) OnMessagePublish(ctx context.Context, in *pb.MessagePublishRequest) (*pb.ValuedResponse, error) {
-	log.Printf("[DEBUG] OnMessagePublish: %s", in.Message.Topic)
+	logger := simple_zap.WithCtx(ctx)
 	topic := strings.TrimSuffix(in.GetMessage().GetTopic(), "/")
 	payload := in.GetMessage().GetPayload()
 
 	if payload == nil || len(payload) >= 1000 {
+		logger.Info("消息体为空或大于等于1000字节", zap.String("key", topic))
 		return nil, errors.New("消息体为空或大于等于1000字节")
 	}
 
-	var jsonPayload map[string]interface{}
-	if err := json.Unmarshal(payload, &jsonPayload); err != nil {
-		return nil, errors.Wrap(err, "json解析失败")
-	}
-
-	typeVal, ok := jsonPayload["type"].(float64)
-	if !ok {
-		return nil, errors.New("json中'type'字段解析失败")
+	var jsonPayload model.T
+	err := sonic.Unmarshal(payload, &jsonPayload)
+	if err != nil {
+		logger.Info("json解析失败", zap.String("key", topic))
+		return nil, errors.Wrap(err, topic+"json解析失败")
 	}
+	if jsonPayload.Type == 2 {
+		key := fmt.Sprintf("%s-%v", topic, jsonPayload.Data)
+		data := fmt.Sprintf("%s-%v", topic, jsonPayload)
 
-	if int(typeVal) == 2 {
-		key := fmt.Sprintf("%s-%v", topic, jsonPayload["data"])
-		log.Printf("缓存中键的值: %s", key)
-		if _, found := cacheInstance.Get(key); found {
+		// hash the key with MD5 so the cache entry stays short
+		md5 := exhook.MD5(key)
+		if _, found := cacheInstance.Get(md5); found {
 			return discardMessagePublish(ctx, in, func(response *pb.ValuedResponse) error {
-				log.Printf("丢弃重复消息")
+				logger.Warn("消息重复被丢弃", zap.String("key", data))
 				return nil
 			})
 		}
-		cacheInstance.Set(key, "alarm", cache.DefaultExpiration)
+		cacheInstance.Set(md5, "alarm", cache.DefaultExpiration)
 	}
-
 	// forward the message as normal
 	return &pb.ValuedResponse{
 		Type: pb.ValuedResponse_STOP_AND_RETURN,
@@ -72,6 +79,10 @@ func (s *server) OnMessagePublish(ctx context.Context, in *pb.MessagePublishRequ
 }
 
 func discardMessagePublish(ctx context.Context, in *pb.MessagePublishRequest, responseWriter func(*pb.ValuedResponse) error) (*pb.ValuedResponse, error) {
+	// validate the input to avoid a nil-pointer dereference
+	if in == nil || in.Message == nil {
+		return nil, errors.New("输入参数不能为空")
+	}
 	emptyPayload := []byte{}
 	newMsg := &pb.Message{
 		Id:      in.Message.Id,
@@ -90,20 +101,21 @@ func discardMessagePublish(ctx context.Context, in *pb.MessagePublishRequest, re
 	}
 
 	if err := responseWriter(reply); err != nil {
+		simple_zap.Logger.Error("发送响应时出错", zap.String("key", reply.GetMessage().GetTopic()))
 		return nil, errors.Wrap(err, "发送响应时出错")
 	}
-
+	// the discard response was written successfully
 	return reply, nil
 }
 
 func main() {
-	lis, err := net.Listen("tcp", port)
+	lis, err := net.Listen("tcp", global.ServerSetting.Port)
 	if err != nil {
 		log.Fatalf("failed to listen: %v", err)
 	}
 	s := grpc.NewServer()
 	pb.RegisterHookProviderServer(s, &server{})
-	log.Println("Started gRPC server on", port)
+	log.Println("Started gRPC server on", global.ServerSetting.Port)
 	if err := s.Serve(lis); err != nil {
 		log.Fatalf("failed to serve: %v", err)
 	}
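
For reference, cache.New(1*time.Minute, 2*time.Minute) gives every entry a default TTL of one minute and sweeps expired entries every two minutes, so a repeated type-2 message is only discarded if it arrives within a minute of the one that seeded the cache. A standalone sketch of that behaviour (timings shortened so it runs quickly; not part of the commit):

    package main

    import (
        "fmt"
        "time"

        "github.com/patrickmn/go-cache"
    )

    func main() {
        // same pattern as OnMessagePublish, with a shorter TTL for illustration
        c := cache.New(2*time.Second, 4*time.Second)
        c.Set("topic-md5", "alarm", cache.DefaultExpiration)

        _, found := c.Get("topic-md5")
        fmt.Println("within TTL, duplicate suppressed:", found) // true

        time.Sleep(3 * time.Second)
        _, found = c.Get("topic-md5")
        fmt.Println("after TTL, forwarded again:", found) // false
    }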

+ 16 - 0
model/model.go

@@ -0,0 +1,16 @@
+package model
+
+type T struct {
+	Type int    `json:"type"`
+	Mid  int    `json:"mid"`
+	Dut  int    `json:"dut"`
+	Data []data `json:"data"`
+}
+type data struct {
+	Id  int     `json:"id"`
+	Tp  int     `json:"tp"`
+	T   float64 `json:"t"`
+	H   float64 `json:"h"`
+	Ut  int     `json:"ut"`
+	Fut int     `json:"fut"`
+}
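
The commit does not document what the individual fields mean. Purely for illustration (all values made up), a payload of this shape is what OnMessagePublish decodes; type 2 selects the de-duplication branch:

    package main

    import (
        "fmt"

        "emqx.io/grpc/exhook/model"
        "github.com/bytedance/sonic"
    )

    func main() {
        // illustrative payload only; field values are not taken from the commit
        payload := []byte(`{"type":2,"mid":1,"dut":60,"data":[{"id":1,"tp":1,"t":23.5,"h":41.2,"ut":1717401600,"fut":0}]}`)

        var msg model.T
        if err := sonic.Unmarshal(payload, &msg); err != nil {
            panic(err)
        }
        fmt.Printf("type=%d data=%v\n", msg.Type, msg.Data)
    }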

+ 134 - 0
simple_zap/simple_zap.go

@@ -0,0 +1,134 @@
+package simple_zap
+
+import (
+	"context"
+	"fmt"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+	"log"
+	"os"
+	"sync"
+	"time"
+)
+
+type ctxKey string
+
+const loggerCtxKey ctxKey = "baozhida"
+
+var (
+	Logger *zap.Logger
+	mutex  sync.RWMutex
+)
+
+func init() {
+	// make sure the log directory exists
+	if err := os.MkdirAll("./log", 0755); err != nil {
+		log.Fatalf("Failed to create log directory: %v", err)
+	}
+
+	// open a separate log file for each level
+	levelFiles, err := setupLevelBasedLogFiles()
+	if err != nil {
+		log.Fatalf("Failed to setup level-based log files: %v", err)
+	}
+
+	// build one zap core per level
+	var cores []zapcore.Core
+	encoderConfig := zap.NewProductionEncoderConfig()
+	encoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
+
+	for level, file := range levelFiles {
+		core := zapcore.NewCore(
+			zapcore.NewJSONEncoder(encoderConfig),
+			zapcore.AddSync(file),
+			level, // this core accepts entries at this level and above
+		)
+		cores = append(cores, core)
+	}
+
+	// combine all cores with a Tee so a single logger writes to every matching file
+	core := zapcore.NewTee(cores...)
+
+	Logger = zap.New(core)
+	startDailyLogFileSwitcher()
+}
+func setupLevelBasedLogFiles() (map[zapcore.Level]*os.File, error) {
+	levelFiles := make(map[zapcore.Level]*os.File)
+	levels := []zapcore.Level{zap.DebugLevel, zap.InfoLevel, zap.WarnLevel, zap.ErrorLevel, zap.DPanicLevel, zap.PanicLevel, zap.FatalLevel}
+
+	for _, level := range levels {
+		today := time.Now().Format("2006-01-02")
+		logFile := fmt.Sprintf("./log/%s-%s.log", level.String(), today)
+		file, err := os.OpenFile(logFile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
+		if err != nil {
+			return nil, fmt.Errorf("failed to open log file for level %s: %w", level.String(), err)
+		}
+		levelFiles[level] = file
+	}
+
+	return levelFiles, nil
+}
+
+// startDailyLogFileSwitcher starts a ticker that checks once a day whether the log file needs rotating.
+func startDailyLogFileSwitcher() {
+	ticker := time.NewTicker(24 * time.Hour)
+	go func() {
+		for range ticker.C {
+			switchLogFile()
+		}
+	}()
+}
+
+// switchLogFile switches to a new log file when the current one exceeds the size limit or the date has changed.
+func switchLogFile() {
+	mutex.Lock()
+	defer mutex.Unlock()
+
+	// look up the current log file; if it is missing, there is nothing to rotate yet
+	currentLogFile := fmt.Sprintf("./log/app-%s.log", time.Now().Format("2006-01-02"))
+	info, err := os.Stat(currentLogFile)
+	if err != nil {
+		Logger.Error("Error getting current log file info", zap.Error(err))
+		return
+	}
+
+	// rotate if the file exceeds 1 GB or the date has changed
+	if info.Size() >= 1<<30 || time.Now().Format("2006-01-02") != info.ModTime().Format("2006-01-02") {
+		// name the new file after the next day's date
+		tomorrow := time.Now().AddDate(0, 0, 1).Format("2006-01-02")
+		newLogFile := fmt.Sprintf("./log/app-%s.log", tomorrow)
+
+		// flush buffered entries before switching (Sync does not close the file)
+		Logger.Sync()
+
+		// open the new log file
+		newFile, err := os.OpenFile(newLogFile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
+		if err != nil {
+			Logger.Error("Error opening new log file", zap.Error(err))
+			return
+		}
+
+		// point zap's output at the new file
+		Logger = zap.New(zapcore.NewCore(
+			zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
+			zapcore.AddSync(newFile),
+			zap.DebugLevel,
+		))
+
+		// record that the log file has been switched
+		Logger.Info("Switched to new log file", zap.String("file", newLogFile))
+	}
+}
+
+// NewCtx returns a context carrying a logger enriched with the given fields (key-value pairs attached to every entry).
+func NewCtx(ctx context.Context, fields ...zapcore.Field) context.Context {
+	return context.WithValue(ctx, loggerCtxKey, Logger.With(fields...))
+}
+
+// WithCtx returns the logger stored in the context (e.g. one carrying a traceId field); it falls back to the global Logger.
+func WithCtx(ctx context.Context) *zap.Logger {
+	if ctx == nil {
+		return Logger
+	}
+	ctxLogger, ok := ctx.Value(loggerCtxKey).(*zap.Logger)
+	if ok {
+		return ctxLogger
+	}
+	return Logger
+}
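
For illustration only (not part of this commit), the two context helpers are used like this; the trace_id field name is a made-up example:

    package main

    import (
        "context"

        "emqx.io/grpc/exhook/simple_zap"
        "go.uber.org/zap"
    )

    func main() {
        // attach a request-scoped logger to the context
        ctx := simple_zap.NewCtx(context.Background(), zap.String("trace_id", "abc123"))

        // anywhere downstream, retrieve it; falls back to the global Logger if none was attached
        logger := simple_zap.WithCtx(ctx)
        logger.Info("handling message", zap.String("topic", "sensor/1"))
    }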

+ 13 - 0
utils/md5.go

@@ -0,0 +1,13 @@
+package exhook
+
+import (
+	"crypto/md5"
+	"encoding/hex"
+)
+
+// MD5 returns the hex-encoded MD5 digest of s; it is used to shorten cache keys, not for security.
+func MD5(s string) string {
+	bytes := []byte(s)
+	sum := md5.Sum(bytes)
+	toString := hex.EncodeToString(sum[:])
+	return toString
+}
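
For reference (not part of this commit), the helper always yields a fixed 32-character hex digest, which is what keeps the cache keys built in OnMessagePublish short regardless of payload size:

    package main

    import (
        "fmt"

        exhook "emqx.io/grpc/exhook/utils"
    )

    func main() {
        // a long de-duplication key like the ones built in main.go (value is illustrative)
        key := "device/alarm-[{1 1 23.5 41.2 1717401600 0}]"
        fmt.Println(exhook.MD5(key))      // 32-character hex string
        fmt.Println(len(exhook.MD5(key))) // 32
    }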

Some files were not shown because too many files were changed in this diff