Added the file name in *vars for csv, fwv, xml, flatstore and partial_csv event readers. Closes #407

This commit is contained in:
Trial97
2020-03-17 17:11:11 +02:00
committed by Dan Christian Bogos
parent e0eaf2f9ee
commit f4e9d39558
6 changed files with 24 additions and 27 deletions

View File

@@ -138,7 +138,7 @@ func (rdr *CSVFileER) processFile(fPath, fName string) (err error) {
rowNr := 0 // This counts the rows in the file, not really number of CDRs
evsPosted := 0
timeStart := time.Now()
reqVars := make(map[string]interface{})
reqVars := map[string]interface{}{utils.FileName: fName}
for {
var record []string
if record, err = csvReader.Read(); err != nil {

View File

@@ -58,7 +58,7 @@ func NewFWVFileERER(cfg *config.CGRConfig, cfgIdx int,
return fwvER, nil
}
// XMLFileER implements EventReader interface for .xml files
// FWVFileER implements EventReader interface for .fwv files
type FWVFileER struct {
sync.RWMutex
cgrCfg *config.CGRConfig
@@ -139,7 +139,7 @@ func (rdr *FWVFileER) processFile(fPath, fName string) (err error) {
rowNr := 0 // This counts the rows in the file, not really number of CDRs
evsPosted := 0
timeStart := time.Now()
reqVars := make(map[string]interface{})
reqVars := map[string]interface{}{utils.FileName: fName}
for {
var hasHeader, hasTrailer bool
@@ -255,13 +255,13 @@ func (rdr *FWVFileER) setLineLen(file *os.File, hasHeader, hasTrailer bool) erro
lastLineSize = len(readBytes)
}
if hasTrailer {
if fi, err := file.Stat(); err != nil {
fi, err := file.Stat()
if err != nil {
utils.Logger.Err(fmt.Sprintf("<%s> Row 0, error: cannot get file stats: %s", utils.ERs, err.Error()))
return err
} else {
rdr.trailerOffset = fi.Size() - int64(lastLineSize)
rdr.trailerLenght = int64(lastLineSize)
}
rdr.trailerOffset = fi.Size() - int64(lastLineSize)
rdr.trailerLenght = int64(lastLineSize)
}
if _, err := file.Seek(0, 0); err != nil {

View File

@@ -137,7 +137,7 @@ func (rdr *XMLFileER) processFile(fPath, fName string) (err error) {
rowNr := 0 // This counts the rows in the file, not really number of CDRs
evsPosted := 0
timeStart := time.Now()
reqVars := make(map[string]interface{})
reqVars := map[string]interface{}{utils.FileName: fName}
for _, xmlElmt := range xmlElmts {
rowNr++ // increment the rowNr after checking if it's not the end of file
agReq := agents.NewAgentRequest(

View File

@@ -145,7 +145,7 @@ func (rdr *FlatstoreER) processFile(fPath, fName string) (err error) {
rowNr := 0 // This counts the rows in the file, not really number of CDRs
evsPosted := 0
timeStart := time.Now()
reqVars := make(map[string]interface{})
reqVars := map[string]interface{}{utils.FileName: fName}
for {
var record []string
if record, err = csvReader.Read(); err != nil {
@@ -159,8 +159,9 @@ func (rdr *FlatstoreER) processFile(fPath, fName string) (err error) {
} else {
pr, err := NewUnpairedRecord(record, rdr.Config().Timezone, fName)
if err != nil {
fmt.Sprintf("<%s> Converting row : <%s> to unpairedRecord , ignoring due to error: <%s>",
utils.ERs, record, err.Error())
utils.Logger.Warning(
fmt.Sprintf("<%s> Converting row : <%s> to unpairedRecord , ignoring due to error: <%s>",
utils.ERs, record, err.Error()))
continue
}
if val, has := rdr.cache.Get(pr.OriginID); !has {
@@ -170,8 +171,9 @@ func (rdr *FlatstoreER) processFile(fPath, fName string) (err error) {
pair := val.(*UnpairedRecord)
record, err = pairToRecord(pair, pr)
if err != nil {
fmt.Sprintf("<%s> Merging unpairedRecords : <%s> and <%s> to record , ignoring due to error: <%s>",
utils.ERs, utils.ToJSON(pair), utils.ToJSON(pr), err.Error())
utils.Logger.Warning(
fmt.Sprintf("<%s> Merging unpairedRecords : <%s> and <%s> to record , ignoring due to error: <%s>",
utils.ERs, utils.ToJSON(pair), utils.ToJSON(pr), err.Error()))
continue
}
rdr.cache.Remove(pr.OriginID)
@@ -234,7 +236,7 @@ func NewUnpairedRecord(record []string, timezone string, fileName string) (*Unpa
return pr, nil
}
// This is a partial record received from Flatstore, can be INVITE or BYE and it needs to be paired in order to produce duration
// UnpairedRecord is a partial record received from Flatstore, can be INVITE or BYE and it needs to be paired in order to produce duration
type UnpairedRecord struct {
Method string // INVITE or BYE
OriginID string // Copute here the OriginID

View File

@@ -152,7 +152,7 @@ func (rdr *PartialCSVFileER) processFile(fPath, fName string) (err error) {
rowNr := 0 // This counts the rows in the file, not really number of CDRs
evsPosted := 0
timeStart := time.Now()
reqVars := make(map[string]interface{})
reqVars := map[string]interface{}{utils.FileName: fName}
for {
var record []string
if record, err = csvReader.Read(); err != nil {
@@ -181,7 +181,7 @@ func (rdr *PartialCSVFileER) processFile(fPath, fName string) (err error) {
}
// take OriginID and OriginHost to compose CGRID
orgId, err := agReq.CGRRequest.FieldAsString([]string{utils.OriginID})
orgID, err := agReq.CGRRequest.FieldAsString([]string{utils.OriginID})
if err == utils.ErrNotFound {
utils.Logger.Warning(
fmt.Sprintf("<%s> Missing <OriginID> field for row <%d> , <%s>",
@@ -195,7 +195,7 @@ func (rdr *PartialCSVFileER) processFile(fPath, fName string) (err error) {
utils.ERs, rowNr, record))
continue
}
cgrID := utils.Sha1(orgId, orgHost)
cgrID := utils.Sha1(orgID, orgHost)
// take Partial field from NavigableMap
partial, _ := agReq.CGRRequest.FieldAsString([]string{utils.Partial})
if val, has := rdr.cache.Get(cgrID); !has {
@@ -218,10 +218,9 @@ func (rdr *PartialCSVFileER) processFile(fPath, fName string) (err error) {
sTime, _ := origCgrEvs[i].FieldAsTime(utils.SetupTime, agReq.Timezone)
sTime2, _ := origCgrEvs[j].FieldAsTime(utils.SetupTime, agReq.Timezone)
return sTime.Before(sTime2)
} else {
aTime2, _ := origCgrEvs[j].FieldAsTime(utils.AnswerTime, agReq.Timezone)
return aTime.Before(aTime2)
}
aTime2, _ := origCgrEvs[j].FieldAsTime(utils.AnswerTime, agReq.Timezone)
return aTime.Before(aTime2)
})
// compose the CGREvent from slice
cgrEv := new(utils.CGREvent)
@@ -261,10 +260,6 @@ func (rdr *PartialCSVFileER) processFile(fPath, fName string) (err error) {
return
}
const (
PartialRecordsSuffix = "partial"
)
func (rdr *PartialCSVFileER) dumpToFile(itmID string, value interface{}) {
origCgrEvs := value.([]*utils.CGREvent)
for _, origCgrEv := range origCgrEvs {
@@ -348,10 +343,9 @@ func (rdr *PartialCSVFileER) postCDR(itmID string, value interface{}) {
sTime, _ := origCgrEvs[i].FieldAsTime(utils.SetupTime, rdr.Config().Timezone)
sTime2, _ := origCgrEvs[j].FieldAsTime(utils.SetupTime, rdr.Config().Timezone)
return sTime.Before(sTime2)
} else {
aTime2, _ := origCgrEvs[j].FieldAsTime(utils.AnswerTime, rdr.Config().Timezone)
return aTime.Before(aTime2)
}
aTime2, _ := origCgrEvs[j].FieldAsTime(utils.AnswerTime, rdr.Config().Timezone)
return aTime.Before(aTime2)
})
// compose the CGREvent from slice
cgrEv := &utils.CGREvent{

View File

@@ -705,6 +705,7 @@ const (
MetaCost = "*cost"
MetaGroup = "*group"
InternalRPCSet = "InternalRPCSet"
FileName = "FileName"
)
// Migrator Action