
	//logger.Debugf("task %s last result %d", m.Task.Name, len(lastFilter))
	document_number_list = []string{}
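	// Collect the ids that passed the key filter and carry each entry's PersonType
	// over into document_number_map, keyed by Id.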
	for _, i := range keyFilter {
		//logger.Debugf("keyFilter %s ------------- %s", i.Id, i.PersonType)
		document_number_list = append(document_number_list, i.Id)
		person := document_number_map[i.Id]
		person.PersonType = i.PersonType
		document_number_map[i.Id] = person
	}
| | | logger.Debugf("document_number_list %s last result %s", m.Task.Name, document_number_list) |
| | | //logger.Debugf("document_number_map %s last result %s", m.Task.Name, document_number_map) |
| | | records, err := queryEsLocation(db.GetEsClient(), m, document_number_list) |
	if err != nil {
		return err
	}
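	// For each aggregated location record, collect the PersonType of every matched
	// document number; lastAppearanceTime keeps the value of the last entry seen.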
	for _, record := range records {
		tagTypes = []string{}
		for _, personId := range record.DocumentNumbers {
			logger.Debugf("document_number_map[personId] %+v", document_number_map[personId])
			tagTypes = append(tagTypes, document_number_map[personId].PersonType)
			lastAppearanceTime = document_number_map[personId].LastAppearanceTime
		}

		results = append(results, result)
	}
	logger.Debugf("task %s last filter result %d", m.Task.Name, len(results))
	return service.SaveLocationTaskResults(results)
}

func (m *LocationModel) KeepAlive() error {

func queryEsLocation(esClient *elasticsearch.Client, locationModel *LocationModel, documentNumbers []string) ([]*LocationRecord, error) {
	var buf bytes.Buffer
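	// The query window covers the last locationModel.Duration days, ending now.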
	nowTime := time.Now()
	startTime := nowTime.Add(-time.Duration(locationModel.Duration) * 24 * time.Hour)

	// Build the query filter conditions.
	var filters []map[string]interface{}

		}
		filters = append(filters, addrParams)
	}
	// Note: adding each per-day window directly to the bool "filter" clause (logical AND)
	// would require a hit to fall inside every daily window at once, so the windows are
	// combined in a "should" clause (logical OR) instead.
| | | logger.Debugf("filters--------------------------------------- %s", filters) |
	var queryTimes []map[string]interface{}
	for date := startTime; date.Before(nowTime); date = date.Add(24 * time.Hour) {
		start := time.Date(date.Year(), date.Month(), date.Day(), locationModel.StartTime, 0, 0, 0, date.Location())
		end := time.Date(date.Year(), date.Month(), date.Day(), locationModel.EndTime, 0, 0, 0, date.Location())

		queryTimes = append(queryTimes, map[string]interface{}{
			"range": map[string]interface{}{
				"picDate": map[string]interface{}{
					"gte": start.Format(time.DateTime), // "2006-01-02 15:04:05"
					"lte": end.Format(time.DateTime),
				},
			},
		})
	}
	filters = append(filters, map[string]interface{}{
		"bool": map[string]interface{}{
			"should": queryTimes,
		},
	})
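	// Each loop iteration above contributes one clause of the form (illustrative values,
	// with StartTime/EndTime used as hours of the day):
	//   {"range": {"picDate": {"gte": "2024-01-02 08:00:00", "lte": "2024-01-02 18:00:00"}}}
	// so the bool/should wrapper matches hits that fall inside any of the daily windows.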
| | | //logger.Debugf("filters--------------------------------------- %s", filters) |
	query := map[string]interface{}{
		"query": map[string]interface{}{
			"bool": map[string]interface{}{

								"threshold": locationModel.Appearances,
							},
						},
					},
				},
				"pic_date": map[string]interface{}{ // added: aggregate by picDate within each bucket
					"terms": map[string]interface{}{
						"field": "picDate",
						"size": 10000,
					},
				},
				"document_numbers": map[string]interface{}{ // added: aggregate by documentNumber within each bucket

	if err := json.NewDecoder(res.Body).Decode(&result); err != nil {
		return nil, fmt.Errorf("error parsing response body: %s", err)
	}
	logger.Debugf("result %v", result)
	// Parse the aggregation results.
	var records []*LocationRecord
	if aggs, ok := result["aggregations"].(map[string]interface{}); ok {

					persons = append(persons, docNumBucket.(map[string]interface{})["key"].(string))
				}
			}

			var PicDate string
			if docNumBuckets, ok := floorBucket.(map[string]interface{})["pic_date"].(map[string]interface{})["buckets"].([]interface{}); ok {
				for _, docNumBucket := range docNumBuckets {
					PicDate = docNumBucket.(map[string]interface{})["key"].(string)
				}
			}
			logger.Debugf("floor %v persons %v appearCount %v", floor, persons, appearCount)
| | | record := &LocationRecord{ |
| | | //PicDate: timestamp, |
| | | PicDate: PicDate, |
| | | DocumentNumbers: persons, |
| | | CommunityId: communityId, |
| | | Building: building, |