From 6b59a711b9af0825858c408cdba95102b8b51cb3 Mon Sep 17 00:00:00 2001
From: zhangqian <zhangqian@123.com>
Date: Fri, 13 Dec 2024 15:46:44 +0800
Subject: [PATCH] 结果表增加联合唯一索引,防止重复报警

---
 models/gather_model.go |   96 +++++++++++++++++++++++++++++++++++++----------
 1 files changed, 75 insertions(+), 21 deletions(-)

diff --git a/models/gather_model.go b/models/gather_model.go
index 909c01f..59187ce 100644
--- a/models/gather_model.go
+++ b/models/gather_model.go
@@ -4,6 +4,7 @@
 	"bytes"
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"github.com/elastic/go-elasticsearch/v6"
 	"log"
@@ -29,8 +30,15 @@
 	Task           *db.ModelTask
 }
 
-func (m *GatherModel) Init(task *db.ModelTask) error {
+type ProcessedRecord struct {
+	UniqueKey string    // unique identifier for a processed record
+	Timestamp time.Time // time the record was processed (used for expiry)
+}
 
+func (m *GatherModel) Init(task *db.ModelTask) error {
+	if len(task.DomainUnitIds) == 0 {
+		return errors.New("empty domain set")
+	}
 	orgIds, areaIds, err := service.GetOrgIdsAndAreaIdsByDomainUnitIds(task.DomainUnitIds)
 	if err != nil {
 		return err
@@ -63,7 +71,22 @@
 	AppearInterval int    `gorm:"type:int;" json:"appearInterval"` //鍑虹幇闂撮殧锛屽崟浣嶄负绉�
 }
 
+var (
+	processed        map[string]ProcessedRecord         // already-processed records, keyed by UniqueKey
+	cleanupThreshold = time.Now().Add(-100 * time.Hour) // retention window: keep only the last ~100 hours of records. NOTE(review): evaluated once at init, so the cutoff never advances while the process runs — confirm this is intended.
+)
+
+func init() {
+	processed = make(map[string]ProcessedRecord)
+}
 func (m *GatherModel) Run() error {
+	// 娓呯悊杩囨湡鐨勮褰�
+	for key, record := range processed {
+		if record.Timestamp.Before(cleanupThreshold) {
+			delete(processed, key)
+		}
+	}
+
 	records, err := queryElasticsearch(db.GetEsClient(), m)
 	if err != nil {
 		log.Fatalf("Failed to query Elasticsearch: %v", err)
@@ -73,7 +96,27 @@
 		return nil
 	}
 
-	aggregation, err := analyzeAndAggregate(records)
+	newRecords := make([]*GatherRecord, 0)
+
+	// Deduplication: keep only records not seen before.
+	for _, record := range records {
+		// 鐢熸垚鍞竴鏍囪瘑
+		uniqueKey := fmt.Sprintf("%s-%s", record.DocumentNumber, record.PicDate)
+
+		// Skip records that have already been processed.
+		if _, exists := processed[uniqueKey]; exists {
+			continue
+		}
+
+		// Record this key as processed.
+		processed[uniqueKey] = ProcessedRecord{
+			UniqueKey: uniqueKey,
+			Timestamp: time.Now(),
+		}
+		newRecords = append(newRecords, record)
+	}
+
+	aggregation, err := analyzeAndAggregate(newRecords)
 	if err != nil {
 		log.Fatalf("Failed to analyze and aggregate data: %v", err)
 	}
@@ -89,18 +132,24 @@
 		return err
 	}
 	event := strings.Join(typeNames, ",")
-	for location, persons := range aggregation {
+	for lt, persons := range aggregation {
+		if persons.Size() == 0 {
+			continue
+		}
+		personIds := persons.Elements()
 		result := &db.ModelTaskResults{
-			Title:       m.Task.Name,
-			Event:       fmt.Sprintf("%s/%d浜�", event, len(persons)),
-			ModelID:     m.Task.ModelID,
-			ModelTaskID: m.Task.ID,
-			CommunityId: location.CommunityId,
-			OrgID:       location.OrgId,
-			ObjectIds:   strings.Join(persons.Elements(), ","),
-			Location:    location.Location,
-			Building:    location.Building,
-			Floor:       location.Floor,
+			Title:         m.Task.Name,
+			Event:         fmt.Sprintf("%s/%d浜�", event, len(persons)),
+			ModelID:       m.Task.ModelID,
+			ModelTaskID:   m.Task.ID,
+			CommunityId:   lt.CommunityId,
+			OrgID:         lt.OrgId,
+			ObjectIds:     strings.Join(personIds, ","),
+			Location:      lt.Location,
+			Building:      lt.Building,
+			Floor:         lt.Floor,
+			PicDate:       lt.Time,
+			FirstPersonID: personIds[0],
 		}
 		results = append(results, result)
 	}
@@ -113,7 +162,7 @@
 	return nil
 }
 
-func queryElasticsearch(esClient *elasticsearch.Client, gatherModel *GatherModel) ([]GatherRecord, error) {
+func queryElasticsearch(esClient *elasticsearch.Client, gatherModel *GatherModel) ([]*GatherRecord, error) {
 	var buf bytes.Buffer
 	now := time.Now()
 	start := now.Add(-time.Duration(gatherModel.DaysWindow) * 24 * time.Hour)
@@ -282,7 +331,7 @@
 	}
 
 	// 瑙f瀽鑱氬悎缁撴灉
-	var records []GatherRecord
+	var records []*GatherRecord
 	if aggs, ok := result["aggregations"].(map[string]interface{}); ok {
 		if orgBuckets, ok := aggs["orgs"].(map[string]interface{})["buckets"].([]interface{}); ok {
 			for _, orgBucket := range orgBuckets {
@@ -307,7 +356,7 @@
 										if gatherEvents, ok := floorBucket.(map[string]interface{})["gather_events"].(map[string]interface{})["buckets"].([]interface{}); ok {
 											for _, eventBucket := range gatherEvents {
 												key := int64(eventBucket.(map[string]interface{})["key"].(float64)) / 1000 // 灏嗘绉掕浆鎹负绉�
-												timestamp := time.Unix(key, 0).Format("2006-01-02 15:04:05")
+												timestamp := time.Unix(key, 0).UTC().Format("2006-01-02 15:04:05")
 
 												// 瑙f瀽浜哄憳
 												if peopleBuckets, ok := eventBucket.(map[string]interface{})["people"].(map[string]interface{})["buckets"].([]interface{}); ok {
@@ -315,7 +364,7 @@
 														documentNumber := person.(map[string]interface{})["key"].(string)
 
 														// 鏋勫缓 GatherRecord 缁撴瀯浣�
-														record := GatherRecord{
+														record := &GatherRecord{
 															PicDate:        timestamp,
 															DocumentNumber: documentNumber,
 															CommunityId:    communityId,
@@ -344,16 +393,17 @@
 	return records, nil
 }
 
-type GatherLocation struct {
+type GatherLocationTime struct {
 	CommunityId string
 	OrgId       string
 	Building    string
 	Floor       string
 	Location    string
+	Time        string
 }
 
-func analyzeAndAggregate(records []GatherRecord) (map[GatherLocation]set.StringSet, error) {
-	aggregation := make(map[GatherLocation]set.StringSet)
+func analyzeAndAggregate(records []*GatherRecord) (map[GatherLocationTime]set.StringSet, error) {
+	aggregation := make(map[GatherLocationTime]set.StringSet)
 	domainIds := set.NewStringSet()
 	for _, record := range records {
 		domainIds.Add(record.CommunityId)
@@ -365,16 +415,20 @@
 	}
 
 	for _, record := range records {
+		if record.DocumentNumber == "" {
+			continue
+		}
 		if domains[record.CommunityId] == nil {
 			continue
 		}
 
-		location := GatherLocation{
+		location := GatherLocationTime{
 			CommunityId: record.CommunityId,
 			OrgId:       record.OrgId,
 			Building:    record.Building,
 			Floor:       record.Floor,
 			Location:    fmt.Sprintf("%s%s%s", domains[record.CommunityId].Name, record.Building, record.Floor),
+			Time:        record.PicDate,
 		}
 		if aggregation[location] == nil {
 			aggregation[location] = set.NewStringSet()

--
Gitblit v1.8.0