Added tests for DynRSRParser

Author: Trial97
Date: 2020-07-02 12:41:38 +03:00
Committed by: Dan Christian Bogos
Parent: f9454bfb4c
Commit: ce6266f9fa
5 changed files with 128 additions and 21 deletions

View File

@@ -122,6 +122,7 @@ func (prsrs RSRParsers) GetRule() (out string) {
return
}
// Compile parses Rules string and repopulates other fields
func (prsrs RSRParsers) Compile() (err error) {
for _, prsr := range prsrs {
if err = prsr.Compile(); err != nil {
@@ -214,7 +215,7 @@ func (prsr *RSRParser) Compile() (err error) {
}
prsr.dynRules = dynrules
prsr.dynIdxStart = dynIdxStart
prsr.dynIdxEnd = dynIdxStart + dynIdxEnd
prsr.dynIdxEnd = dynIdxStart + dynIdxEnd + 1
return
}
}
@@ -300,7 +301,7 @@ func (prsr *RSRParser) ParseDataProvider(dP utils.DataProvider) (out string, err
return
}
var dynRSR *RSRParser
if dynRSR, err = NewRSRParser(prsr.Rules[:prsr.dynIdxStart] + dynPath + prsr.Rules[prsr.dynIdxEnd+1:]); err != nil {
if dynRSR, err = NewRSRParser(prsr.Rules[:prsr.dynIdxStart] + dynPath + prsr.Rules[prsr.dynIdxEnd:]); err != nil {
return
}
return dynRSR.ParseDataProvider(dP)
@@ -319,10 +320,10 @@ func (prsr *RSRParser) ParseDataProviderWithInterfaces(dP utils.DataProvider) (o
return
}
var dynRSR *RSRParser
if dynRSR, err = NewRSRParser(prsr.Rules[:prsr.dynIdxStart] + dynPath + prsr.Rules[prsr.dynIdxEnd+1:]); err != nil {
if dynRSR, err = NewRSRParser(prsr.Rules[:prsr.dynIdxStart] + dynPath + prsr.Rules[prsr.dynIdxEnd:]); err != nil {
return
}
return dynRSR.ParseDataProviderWithInterfaces(dP)
return dynRSR.ParseDataProvider(dP)
}
var outIface interface{}
if outIface, err = utils.DPDynamicInterface(prsr.path, dP); err != nil {
@@ -341,14 +342,21 @@ func NewDynRSRParser(parsersRules string, sep string, idxStart, idxEnd int) (prs
sepIdx := strings.Index(parsersRules[idxEnd:], sep)
if sepIdx == -1 { // no other separator found, so this is the last rule
sepIdx = len(parsersRules)
} else {
sepIdx += idxEnd
}
dynRuleStr := parsersRules[lastSepIdxBDyn+1 : sepIdx] // this should contain the rule with the dynamic information
var dynrules RSRParsers
if dynrules, err = NewRSRParsers(parsersRules[idxStart+1:idxEnd], sep); err != nil {
return
}
if lastSepIdxBDyn == -1 {
idxEnd++
} else {
idxStart = idxStart - lastSepIdxBDyn - 1
idxEnd -= lastSepIdxBDyn
}
// add
prsrs = append(prsrs, &RSRParser{
Rules: dynRuleStr,
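
A note on the index changes in this file: with the +1 added in Compile, dynIdxStart points at the opening '<' and dynIdxEnd now points one past the closing '>', so the parse methods can splice the resolved dynamic path back into the rule with plain slicing and the old dynIdxEnd+1 offset is dropped. A minimal sketch of that splice, using a hypothetical rebuildRule helper rather than the actual CGRateS code:

package main

import "fmt"

// rebuildRule splices the value produced by the inner dynamic rule back into
// the outer rule string. idxStart points at '<' and idxEnd points one past
// '>', matching the convention after the +1 fix above.
// Hypothetical illustration only, not the CGRateS implementation.
func rebuildRule(rule, dynPath string, idxStart, idxEnd int) string {
	return rule[:idxStart] + dynPath + rule[idxEnd:]
}

func main() {
	rule := "~*req.<~*req.CGRID;~*req.RunID;-Cost>"
	// Suppose the inner parsers already resolved to this concrete suffix:
	dynPath := "cgridUniq*default-Cost"
	// '<' sits at index 6 and the rule is 37 bytes long, so idxEnd is 37,
	// exactly the dynIdxEnd value the updated test below expects.
	fmt.Println(rebuildRule(rule, dynPath, 6, 37))
	// Output: ~*req.cgridUniq*default-Cost
}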

View File

@@ -72,6 +72,14 @@ func TestRSRParserCompile(t *testing.T) {
} else if !reflect.DeepEqual(ePrsr, prsr) {
t.Errorf("expecting: %+v, received: %+v", ePrsr, prsr)
}
prsr = &RSRParser{
Rules: "~*req.Field{*}",
}
expErr := "invalid converter value in string: <*>, err: unsupported converter definition: <*>"
if err := prsr.Compile(); err == nil || err.Error() != expErr {
t.Fatal(err)
}
}
func TestRSRParserConstant(t *testing.T) {
@@ -330,7 +338,7 @@ func TestRSRParserDynamic(t *testing.T) {
dynRules: NewRSRParsersMustCompile("~*req.CGRID;~*req.RunID;-Cost", ";"),
dynIdxStart: 6,
dynIdxEnd: 36,
dynIdxEnd: 37,
}
prsr := &RSRParser{
Rules: "~*req.<~*req.CGRID;~*req.RunID;-Cost>",
@@ -353,4 +361,95 @@ func TestRSRParserDynamic(t *testing.T) {
} else if out != "10" {
t.Errorf("Expected 10 received: %q", out)
}
prsr = &RSRParser{
Rules: "~*req.<~*req.CGRID;~*req.RunID;-Cost{*}>",
}
expErr := "invalid converter value in string: <*>, err: unsupported converter definition: <*>"
if err := prsr.Compile(); err == nil || err.Error() != expErr {
t.Fatal(err)
}
}
func TestRSRParserDynamic2(t *testing.T) {
prsr, err := NewRSRParsers("~*req.<~*req.CGRID;~*req.RunID;-Cos>t;s", ";")
if err != nil {
t.Fatal(err)
}
dP := utils.MapStorage{
utils.MetaReq: utils.MapStorage{
utils.CGRID: "cgridUniq",
utils.RunID: utils.MetaDefault,
"cgridUniq*default-Cost": 10,
},
}
if out, err := prsr.ParseDataProvider(dP); err != nil {
t.Error(err)
} else if out != "10s" {
t.Errorf("Expected 10s received: %q", out)
}
prsr, err = NewRSRParsers("2.;~*req.<~*req.CGRID;~*req.RunID;-Cos>t;s", ";")
if err != nil {
t.Fatal(err)
}
if out, err := prsr.ParseDataProvider(dP); err != nil {
t.Error(err)
} else if out != "2.10s" {
t.Errorf("Expected 2.10s received: %q", out)
}
prsr, err = NewRSRParsers("2.;~*req.<~*req.CGRID;~*req.RunID;-Cost>", ";")
if err != nil {
t.Fatal(err)
}
if out, err := prsr.ParseDataProvider(dP); err != nil {
t.Error(err)
} else if out != "2.10" {
t.Errorf("Expected 2.10 received: %q", out)
}
}
func TestRSRParserDynamic3(t *testing.T) {
prsr, err := NewRSRParsers("2.;~*req.<~*req.CGRID;~*req.RunID>-Cost;-;~*req.<~*req.UnitField>", ";")
if err != nil {
t.Fatal(err)
}
dP := utils.MapStorage{
utils.MetaReq: utils.MapStorage{
utils.CGRID: "cgridUniq",
utils.RunID: utils.MetaDefault,
"cgridUniq*default-Cost": 10,
"UnitField": "Unit",
"Unit": "MB",
"IP": "127.0.0.1",
},
}
if out, err := prsr.ParseDataProvider(dP); err != nil {
t.Error(err)
} else if out != "2.10-MB" {
t.Errorf("Expected 2.10-MB received: %q", out)
}
prsr, err = NewRSRParsers("2.{*};~*req.<~*req.CGRID;~*req.RunID;-Cos>t;-;~*req.<~*req.UnitField>", ";")
expErr := "invalid converter value in string: <*>, err: unsupported converter definition: <*>"
if err == nil || err.Error() != expErr {
t.Fatal(err)
}
prsr, err = NewRSRParsers("2.;~*req.<~*req.CGRID;~*req.RunID;-Cos>t;-;~*req.Unit{*}", ";")
if err == nil || err.Error() != expErr {
t.Fatal(err)
}
prsr, err = NewRSRParsers("2.;~*req.<~*req.CGRID;~*req.RunID;-Cos>t;-;~*req.<~*req.UnitField{*}>", ";")
if err == nil || err.Error() != expErr {
t.Fatal(err)
}
}
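
The new tests above rely on one subtlety: the ';' characters inside '<...>' belong to the inner dynamic rule and are not rule separators, which is why "~*req.<~*req.CGRID;~*req.RunID;-Cos>t;s" compiles to two rules (the dynamic one plus the constant "s") and, against the test's data map, yields "10s". A rough, hypothetical sketch of that bracket-aware split (the real parser achieves the same effect through the index bookkeeping in NewDynRSRParser):

package main

import "fmt"

// splitRules splits a rules string on sep while treating everything inside
// '<'...'>' as part of the inner dynamic rule, mirroring the behaviour the
// tests above exercise. Hypothetical helper, not the CGRateS API.
func splitRules(rules string, sep byte) (out []string) {
	depth, start := 0, 0
	for i := 0; i < len(rules); i++ {
		switch rules[i] {
		case '<':
			depth++
		case '>':
			depth--
		case sep:
			if depth == 0 {
				out = append(out, rules[start:i])
				start = i + 1
			}
		}
	}
	return append(out, rules[start:])
}

func main() {
	fmt.Println(splitRules("~*req.<~*req.CGRID;~*req.RunID;-Cos>t;s", ';'))
	// Output: [~*req.<~*req.CGRID;~*req.RunID;-Cos>t s]
}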

View File

@@ -151,11 +151,11 @@ func (cP *csvProvider) FieldAsInterface(fldPath []string) (data interface{}, err
} else if fldPath[0] != utils.MetaReq {
return nil, fmt.Errorf("invalid prefix for : %s", fldPath)
}
if cfgFieldIdx, err := strconv.Atoi(fldPath[len(fldPath)-1]); err != nil || len(cP.req) <= cfgFieldIdx {
var cfgFieldIdx int
if cfgFieldIdx, err = strconv.Atoi(fldPath[len(fldPath)-1]); err != nil || len(cP.req) <= cfgFieldIdx {
return nil, fmt.Errorf("Ignoring record: %v with error : %+v", cP.req, err)
} else {
data = cP.req[cfgFieldIdx]
}
data = cP.req[cfgFieldIdx]
cP.cache.Set(fldPath, data)
return
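
The csvProvider change above is a control-flow cleanup: cfgFieldIdx is declared before the if so the error path can return early and the success path no longer needs an else after a return. The same shape in a self-contained, hypothetical helper (names are illustrative, not the CGRateS API):

package main

import (
	"fmt"
	"strconv"
)

// fieldAt resolves a CSV column by its numeric index, returning early on a
// bad index so the happy path stays un-indented, as in the refactor above.
func fieldAt(rec []string, field string) (string, error) {
	var idx int
	var err error
	if idx, err = strconv.Atoi(field); err != nil || len(rec) <= idx {
		return "", fmt.Errorf("ignoring record: %v with error: %+v", rec, err)
	}
	return rec[idx], nil
}

func main() {
	rec := []string{"cgridUniq", "*default", "10"}
	fmt.Println(fieldAt(rec, "2")) // 10 <nil>
}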

View File

@@ -704,7 +704,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
var cachePartition string
switch loaderType {
case utils.MetaAttributes:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: AttributeProfileID: %s",
@@ -723,7 +723,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
case utils.MetaResources:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: ResourceProfileID: %s",
@@ -746,7 +746,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaFilters:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: Filter: %s",
@@ -764,7 +764,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaStats:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: StatsQueueProfileID: %s",
@@ -786,7 +786,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaThresholds:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: ThresholdProfileID: %s",
@@ -808,7 +808,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaRoutes:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: RouteProfileID: %s",
@@ -826,7 +826,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaChargers:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: ChargerProfileID: %s",
@@ -844,7 +844,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaDispatchers:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: DispatcherProfileID: %s",
@@ -862,7 +862,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
}
}
case utils.MetaDispatcherHosts:
for tntID, _ := range lds {
for tntID := range lds {
if ldr.dryRun {
utils.Logger.Info(
fmt.Sprintf("<%s-%s> DRY_RUN: DispatcherHostID: %s",
@@ -931,7 +931,7 @@ func (ldr *Loader) removeLoadedData(loaderType string, lds map[string][]LoaderDa
return
}
case utils.MetaRemove:
for tntID, _ := range lds {
for tntID := range lds {
if err = ldr.connMgr.Call(ldr.cacheConns, nil,
utils.CacheSv1RemoveItem, &utils.ArgsGetCacheItemWithArgDispatcher{
ArgsGetCacheItem: utils.ArgsGetCacheItem{
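
The loader.go changes in this file are a mechanical cleanup: when only the map keys are needed, the blank value variable in a range clause is redundant, and tools such as gofmt -s and staticcheck flag the "for tntID, _ := range lds" form. A small illustration with made-up data:

package main

import "fmt"

func main() {
	lds := map[string][]string{"cgrates.org:ATTR_1": nil}
	// "for tntID, _ := range lds" works but the blank variable is redundant;
	// ranging over the keys alone is the idiomatic form used above.
	for tntID := range lds {
		fmt.Println(tntID)
	}
}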

View File

@@ -161,13 +161,13 @@ func testLoaderCheckAttributes(t *testing.T) {
ActivationInterval: &utils.ActivationInterval{
ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC)},
Attributes: []*engine.Attribute{
&engine.Attribute{
{
FilterIDs: []string{"*string:~*req.Field1:Initial"},
Path: utils.MetaReq + utils.NestingSep + "Field1",
Type: utils.MetaVariable,
Value: config.NewRSRParsersMustCompile("Sub1", utils.INFIELD_SEP),
},
&engine.Attribute{
{
FilterIDs: []string{},
Path: utils.MetaReq + utils.NestingSep + "Field2",
Type: utils.MetaVariable,
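
The test-file change above is another gofmt -s style simplification: inside a []*engine.Attribute composite literal, each element may drop the redundant &engine.Attribute prefix. A small, self-contained illustration with a stand-in Attribute type (not the real engine.Attribute):

package main

import "fmt"

// Attribute is a stand-in for engine.Attribute, for illustration only.
type Attribute struct {
	Path  string
	Value string
}

func main() {
	// Inside a []*Attribute literal the element type (and the &) may be
	// elided, which is the simplification applied in the diff above.
	attrs := []*Attribute{
		{Path: "*req.Field1", Value: "Sub1"},
		{Path: "*req.Field2", Value: "Sub2"},
	}
	fmt.Println(len(attrs), attrs[0].Path)
}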