Implement ExportToFolder + add test fixes #1529

Repository: https://github.com/cgrates/cgrates.git (mirror)
Committed by: Dan Christian Bogos
Parent: f1029faca0
Commit: 0cba2f4dd4
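
Below is a minimal client-side sketch (not part of the commit) of how the new ExportToFolder call could be driven over JSON-RPC. The listen address, the "APIerSv1.ExportToFolder" method string and the argument struct are assumptions that mirror the integration test added further down; treat them as illustrative only.

package main

import (
	"log"
	"net/rpc/jsonrpc"
)

// ArgExportToFolder mirrors utils.ArgExportToFolder as used by the new API:
// a destination folder plus an optional list of items to export.
type ArgExportToFolder struct {
	Path  string
	Items []string
}

func main() {
	// 2012 is assumed here as the engine's JSON-RPC listen port.
	client, err := jsonrpc.Dial("tcp", "localhost:2012")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	var reply string
	args := &ArgExportToFolder{
		Path: "/tmp/tp/", // the folder is created if it does not exist
		// leaving Items empty exports the default set (attributes, chargers,
		// dispatchers, filters, resources, stats, suppliers, thresholds, ...)
	}
	if err := client.Call("APIerSv1.ExportToFolder", args, &reply); err != nil {
		log.Fatal(err)
	}
	log.Printf("reply: %s", reply) // "OK" on success
}
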
@@ -1369,6 +1369,9 @@ func (apiV1 *APIerSv1) ExportToFolder(arg *utils.ArgExportToFolder, reply *strin
		arg.Items = []string{utils.MetaAttributes, utils.MetaChargers, utils.MetaDispatchers, utils.MetaDispatcherHosts,
			utils.MetaFilters, utils.MetaResources, utils.MetaStats, utils.MetaSuppliers, utils.MetaThresholds}
	}
	if _, err := os.Stat(arg.Path); os.IsNotExist(err) {
		os.Mkdir(arg.Path, os.ModeDir)
	}
	for _, item := range arg.Items {
		switch item {
		case utils.MetaAttributes:
@@ -1599,6 +1602,120 @@ func (apiV1 *APIerSv1) ExportToFolder(arg *utils.ArgExportToFolder, reply *strin
				}
			}
			csvWriter.Flush()
		case utils.MetaStatS:
			prfx := utils.StatQueueProfilePrefix
			keys, err := apiV1.DataManager.DataDB().GetKeysForPrefix(prfx)
			if err != nil {
				return err
			}
			if len(keys) == 0 { // if we don't find items we skip
				continue
			}
			f, err := os.Create(path.Join(arg.Path, utils.StatsCsv))
			if err != nil {
				return err
			}
			defer f.Close()

			csvWriter := csv.NewWriter(f)
			csvWriter.Comma = utils.CSV_SEP
			//write the header of the file
			if err := csvWriter.Write(engine.TpStats{}.CSVHeader()); err != nil {
				return err
			}
			for _, key := range keys {
				tntID := strings.SplitN(key[len(prfx):], utils.InInFieldSep, 2)
				stsPrf, err := apiV1.DataManager.GetStatQueueProfile(tntID[0], tntID[1],
					true, false, utils.NonTransactional)
				if err != nil {
					return err
				}
				for _, model := range engine.APItoModelStats(
					engine.StatQueueProfileToAPI(stsPrf)) {
					if record, err := engine.CsvDump(model); err != nil {
						return err
					} else if err := csvWriter.Write(record); err != nil {
						return err
					}
				}
			}
			csvWriter.Flush()
		case utils.MetaSuppliers:
			prfx := utils.SupplierProfilePrefix
			keys, err := apiV1.DataManager.DataDB().GetKeysForPrefix(prfx)
			if err != nil {
				return err
			}
			if len(keys) == 0 { // if we don't find items we skip
				continue
			}
			f, err := os.Create(path.Join(arg.Path, utils.SuppliersCsv))
			if err != nil {
				return err
			}
			defer f.Close()

			csvWriter := csv.NewWriter(f)
			csvWriter.Comma = utils.CSV_SEP
			//write the header of the file
			if err := csvWriter.Write(engine.TpSuppliers{}.CSVHeader()); err != nil {
				return err
			}
			for _, key := range keys {
				tntID := strings.SplitN(key[len(prfx):], utils.InInFieldSep, 2)
				spp, err := apiV1.DataManager.GetSupplierProfile(tntID[0], tntID[1],
					true, false, utils.NonTransactional)
				if err != nil {
					return err
				}
				for _, model := range engine.APItoModelTPSuppliers(
					engine.SupplierProfileToAPI(spp)) {
					if record, err := engine.CsvDump(model); err != nil {
						return err
					} else if err := csvWriter.Write(record); err != nil {
						return err
					}
				}
			}
			csvWriter.Flush()
		case utils.MetaThresholds:
			prfx := utils.ThresholdProfilePrefix
			keys, err := apiV1.DataManager.DataDB().GetKeysForPrefix(prfx)
			if err != nil {
				return err
			}
			if len(keys) == 0 { // if we don't find items we skip
				continue
			}
			f, err := os.Create(path.Join(arg.Path, utils.ThresholdsCsv))
			if err != nil {
				return err
			}
			defer f.Close()

			csvWriter := csv.NewWriter(f)
			csvWriter.Comma = utils.CSV_SEP
			//write the header of the file
			if err := csvWriter.Write(engine.TpThresholds{}.CSVHeader()); err != nil {
				return err
			}
			for _, key := range keys {
				tntID := strings.SplitN(key[len(prfx):], utils.InInFieldSep, 2)
				thPrf, err := apiV1.DataManager.GetThresholdProfile(tntID[0], tntID[1],
					true, false, utils.NonTransactional)
				if err != nil {
					return err
				}
				for _, model := range engine.APItoModelTPThreshold(
					engine.ThresholdProfileToAPI(thPrf)) {
					if record, err := engine.CsvDump(model); err != nil {
						return err
					} else if err := csvWriter.Write(record); err != nil {
						return err
					}
				}
			}
			csvWriter.Flush()
		}
	}
	*reply = utils.OK

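Each case added in the hunk above repeats the same sequence: list the DataDB keys for the prefix, create the CSV file, write the header, convert every profile through its new XToAPI helper into CSV models and dump them as records. A condensed, standalone sketch of that file-writing pattern follows; the package name, the helper name and the ',' separator are illustrative stand-ins (the commit itself uses utils.CSV_SEP and the engine conversions), not part of the commit.

package export

import (
	"encoding/csv"
	"os"
	"path"
)

// writeProfileCSV creates a CSV file in dir, writes the header line and then
// every record, mirroring the create/header/dump/flush sequence used for the
// stats, suppliers and thresholds cases above.
func writeProfileCSV(dir, fileName string, header []string, records [][]string) error {
	f, err := os.Create(path.Join(dir, fileName))
	if err != nil {
		return err
	}
	defer f.Close()

	w := csv.NewWriter(f)
	w.Comma = ',' // stands in for utils.CSV_SEP
	if err := w.Write(header); err != nil {
		return err
	}
	for _, rec := range records {
		if err := w.Write(rec); err != nil {
			return err
		}
	}
	w.Flush()
	return w.Error()
}
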
@@ -1297,6 +1297,13 @@ func ResourceProfileToAPI(rp *ResourceProfile) (tpRL *utils.TPResourceProfile) {

type TpStats []*TpStat

// CSVHeader return the header for csv fields as a slice of string
func (tps TpStats) CSVHeader() (result []string) {
	return []string{"#" + utils.Tenant, utils.ID, utils.FilterIDs, utils.ActivationIntervalString,
		utils.QueueLength, utils.TTL, utils.MinItems, utils.MetricIDs, utils.MetricFilterIDs,
		utils.Stored, utils.Blocker, utils.Weight, utils.ThresholdIDs}
}

func (models TpStats) AsTPStats() (result []*utils.TPStatProfile) {
	filterMap := make(map[string]utils.StringMap)
	thresholdMap := make(map[string]utils.StringMap)
@@ -1490,8 +1497,62 @@ func APItoStats(tpST *utils.TPStatProfile, timezone string) (st *StatQueueProfil
	return st, nil
}

func StatQueueProfileToAPI(st *StatQueueProfile) (tpST *utils.TPStatProfile) {
	tpST = &utils.TPStatProfile{
		Tenant: st.Tenant,
		ID: st.ID,
		FilterIDs: make([]string, len(st.FilterIDs)),
		ActivationInterval: new(utils.TPActivationInterval),
		QueueLength: st.QueueLength,
		Metrics: make([]*utils.MetricWithFilters, len(st.Metrics)),
		Blocker: st.Blocker,
		Stored: st.Stored,
		Weight: st.Weight,
		MinItems: st.MinItems,
		ThresholdIDs: make([]string, len(st.ThresholdIDs)),
	}
	for i, metric := range st.Metrics {
		tpST.Metrics[i] = &utils.MetricWithFilters{
			MetricID: metric.MetricID,
		}
		if len(metric.FilterIDs) != 0 {
			tpST.Metrics[i].FilterIDs = make([]string, len(metric.FilterIDs))
			for j, fltr := range metric.FilterIDs {
				tpST.Metrics[i].FilterIDs[j] = fltr
			}
		}

	}
	if st.TTL != time.Duration(0) {
		tpST.TTL = st.TTL.String()
	}
	for i, fli := range st.FilterIDs {
		tpST.FilterIDs[i] = fli
	}
	for i, fli := range st.ThresholdIDs {
		tpST.ThresholdIDs[i] = fli
	}

	if st.ActivationInterval != nil {
		if !st.ActivationInterval.ActivationTime.IsZero() {
			tpST.ActivationInterval.ActivationTime = st.ActivationInterval.ActivationTime.Format(time.RFC3339)
		}
		if !st.ActivationInterval.ExpiryTime.IsZero() {
			tpST.ActivationInterval.ExpiryTime = st.ActivationInterval.ExpiryTime.Format(time.RFC3339)
		}
	}
	return
}

type TpThresholds []*TpThreshold

// CSVHeader return the header for csv fields as a slice of string
func (tps TpThresholds) CSVHeader() (result []string) {
	return []string{"#" + utils.Tenant, utils.ID, utils.FilterIDs, utils.ActivationIntervalString,
		utils.MaxHits, utils.MinHits, utils.MinSleep,
		utils.Blocker, utils.Weight, utils.ActionIDs, utils.Async}
}

func (tps TpThresholds) AsTPThreshold() (result []*utils.TPThresholdProfile) {
	mst := make(map[string]*utils.TPThresholdProfile)
	filterMap := make(map[string]utils.StringMap)
@@ -1669,6 +1730,40 @@ func APItoThresholdProfile(tpTH *utils.TPThresholdProfile, timezone string) (th
	return th, nil
}

func ThresholdProfileToAPI(th *ThresholdProfile) (tpTH *utils.TPThresholdProfile) {
	tpTH = &utils.TPThresholdProfile{
		Tenant: th.Tenant,
		ID: th.ID,
		FilterIDs: make([]string, len(th.FilterIDs)),
		ActivationInterval: new(utils.TPActivationInterval),
		MaxHits: th.MaxHits,
		MinHits: th.MinHits,
		Blocker: th.Blocker,
		Weight: th.Weight,
		ActionIDs: make([]string, len(th.ActionIDs)),
		Async: th.Async,
	}
	if th.MinSleep != time.Duration(0) {
		tpTH.MinSleep = th.MinSleep.String()
	}
	for i, fli := range th.FilterIDs {
		tpTH.FilterIDs[i] = fli
	}
	for i, fli := range th.ActionIDs {
		tpTH.ActionIDs[i] = fli
	}

	if th.ActivationInterval != nil {
		if !th.ActivationInterval.ActivationTime.IsZero() {
			tpTH.ActivationInterval.ActivationTime = th.ActivationInterval.ActivationTime.Format(time.RFC3339)
		}
		if !th.ActivationInterval.ExpiryTime.IsZero() {
			tpTH.ActivationInterval.ExpiryTime = th.ActivationInterval.ExpiryTime.Format(time.RFC3339)
		}
	}
	return
}

type TpFilterS []*TpFilter

// CSVHeader return the header for csv fields as a slice of string
@@ -1800,6 +1895,16 @@ func FilterToTPFilter(f *Filter) (tpFltr *utils.TPFilterProfile) {

type TpSuppliers []*TpSupplier

// CSVHeader return the header for csv fields as a slice of string
func (tps TpSuppliers) CSVHeader() (result []string) {
	return []string{"#" + utils.Tenant, utils.ID, utils.FilterIDs, utils.ActivationIntervalString,
		utils.Sorting, utils.SortingParameters, utils.SupplierID, utils.SupplierFilterIDs,
		utils.SupplierAccountIDs, utils.SupplierRatingplanIDs, utils.SupplierResourceIDs,
		utils.SupplierStatIDs, utils.SupplierWeight, utils.SupplierBlocker,
		utils.SupplierParameters, utils.Weight,
	}
}

func (tps TpSuppliers) AsTPSuppliers() (result []*utils.TPSupplierProfile) {
	filtermap := make(map[string]utils.StringMap)
	mst := make(map[string]*utils.TPSupplierProfile)
@@ -2015,6 +2120,48 @@ func APItoSupplierProfile(tpSPP *utils.TPSupplierProfile, timezone string) (spp
	return spp, nil
}

func SupplierProfileToAPI(spp *SupplierProfile) (tpSPP *utils.TPSupplierProfile) {
	tpSPP = &utils.TPSupplierProfile{
		Tenant: spp.Tenant,
		ID: spp.ID,
		FilterIDs: make([]string, len(spp.FilterIDs)),
		ActivationInterval: new(utils.TPActivationInterval),
		Sorting: spp.Sorting,
		SortingParameters: make([]string, len(spp.SortingParameters)),
		Suppliers: make([]*utils.TPSupplier, len(spp.Suppliers)),
		Weight: spp.Weight,
	}

	for i, supp := range spp.Suppliers {
		tpSPP.Suppliers[i] = &utils.TPSupplier{
			ID: supp.ID,
			FilterIDs: supp.FilterIDs,
			AccountIDs: supp.AccountIDs,
			RatingPlanIDs: supp.RatingPlanIDs,
			ResourceIDs: supp.ResourceIDs,
			StatIDs: supp.StatIDs,
			Weight: supp.Weight,
			Blocker: supp.Blocker,
			SupplierParameters: supp.SupplierParameters,
		}
	}
	for i, fli := range spp.FilterIDs {
		tpSPP.FilterIDs[i] = fli
	}
	for i, fli := range spp.SortingParameters {
		tpSPP.SortingParameters[i] = fli
	}
	if spp.ActivationInterval != nil {
		if !spp.ActivationInterval.ActivationTime.IsZero() {
			tpSPP.ActivationInterval.ActivationTime = spp.ActivationInterval.ActivationTime.Format(time.RFC3339)
		}
		if !spp.ActivationInterval.ExpiryTime.IsZero() {
			tpSPP.ActivationInterval.ExpiryTime = spp.ActivationInterval.ExpiryTime.Format(time.RFC3339)
		}
	}
	return
}

type TPAttributes []*TPAttribute

// CSVHeader return the header for csv fields as a slice of string

@@ -741,6 +741,59 @@ func TestAPItoTPStats(t *testing.T) {
	}
}

func TestStatQueueProfileToAPI(t *testing.T) {
	expected := &utils.TPStatProfile{
		Tenant: "cgrates.org",
		ID: "Stats1",
		FilterIDs: []string{"FLTR_1"},
		ActivationInterval: &utils.TPActivationInterval{ActivationTime: "2014-07-29T15:00:00Z"},
		QueueLength: 100,
		TTL: "1s",
		Metrics: []*utils.MetricWithFilters{
			&utils.MetricWithFilters{
				MetricID: "*sum#BalanceValue",
			},
			&utils.MetricWithFilters{
				MetricID: "*average#BalanceValue",
			},
			&utils.MetricWithFilters{
				MetricID: "*tcc",
			},
		},
		MinItems: 1,
		ThresholdIDs: []string{"THRESH1", "THRESH2"},
		Weight: 20.0,
	}
	sqPrf := &StatQueueProfile{
		Tenant: "cgrates.org",
		ID: "Stats1",
		QueueLength: 100,
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
		Metrics: []*MetricWithFilters{
			&MetricWithFilters{
				MetricID: "*sum#BalanceValue",
			},
			&MetricWithFilters{
				MetricID: "*average#BalanceValue",
			},
			&MetricWithFilters{
				MetricID: "*tcc",
			},
		},
		TTL: time.Duration(1 * time.Second),
		ThresholdIDs: []string{"THRESH1", "THRESH2"},
		FilterIDs: []string{"FLTR_1"},
		Weight: 20.0,
		MinItems: 1,
	}

	if rcv := StatQueueProfileToAPI(sqPrf); !reflect.DeepEqual(expected, rcv) {
		t.Errorf("Expecting: %+v,\n received: %+v", utils.ToJSON(expected), utils.ToJSON(rcv))
	}
}

func TestAPItoModelStats(t *testing.T) {
	tpS := &utils.TPStatProfile{
		TPid: "TPS1",
@@ -1076,6 +1129,39 @@ func TestAPItoTPThreshold(t *testing.T) {
	}
}

func TestThresholdProfileToAPI(t *testing.T) {
	expected := &utils.TPThresholdProfile{
		Tenant: "cgrates.org",
		ID: "TH1",
		FilterIDs: []string{"FilterID1", "FilterID2"},
		ActivationInterval: &utils.TPActivationInterval{ActivationTime: "2014-07-29T15:00:00Z"},
		MaxHits: 12,
		MinHits: 10,
		MinSleep: "1s",
		Weight: 20.0,
		ActionIDs: []string{"WARN3"},
	}

	thPrf := &ThresholdProfile{
		Tenant: "cgrates.org",
		ID: "TH1",
		FilterIDs: []string{"FilterID1", "FilterID2"},

		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
		MaxHits: 12,
		MinHits: 10,
		MinSleep: time.Duration(1 * time.Second),
		Weight: 20.0,
		ActionIDs: []string{"WARN3"},
	}

	if rcv := ThresholdProfileToAPI(thPrf); !reflect.DeepEqual(expected, rcv) {
		t.Errorf("Expecting: %+v,\n received: %+v", utils.ToJSON(expected), utils.ToJSON(rcv))
	}
}

func TestTPFilterAsTPFilter(t *testing.T) {
	tps := []*TpFilter{
		&TpFilter{

@@ -21,8 +21,11 @@ package general_tests

import (
	"net/rpc"
	"os"
	"path"
	"reflect"
	"testing"
	"time"

	"github.com/cgrates/cgrates/config"
	"github.com/cgrates/cgrates/engine"
@@ -42,7 +45,20 @@ var (
		testExpStartEngine,
		testExpRPCConn,
		testExpLoadTPFromFolder,
		testExpAttribute,
		testExpExportToFolder,
		testExpStopCgrEngine, // restart the engine and reset the database
		testExpResetDataDB,
		testExpResetStorDb,
		testExpStartEngine,
		testExpRPCConn,
		testExpLoadTPFromExported,
		testExpVerifyAttributes,
		testExpVerifyFilters,
		testExpVerifyThresholds,
		testExpVerifyResources,
		testExpVerifyStats,
		testExpVerifySuppliers,
		testExpCleanFiles,
		testExpStopCgrEngine,
	}
)
@@ -109,11 +125,10 @@ func testExpLoadTPFromFolder(t *testing.T) {
	}
}

func testExpAttribute(t *testing.T) {
func testExpExportToFolder(t *testing.T) {
	var reply string
	arg := &utils.ArgExportToFolder{
		Path: "/tmp",
		Items: []string{utils.MetaAttributes},
		Path: "/tmp/tp/",
	}
	if err := expRpc.Call(utils.APIerSv1ExportToFolder, arg, &reply); err != nil {
		t.Error(err)
@@ -122,6 +137,279 @@ func testExpAttribute(t *testing.T) {
	}
}

func testExpLoadTPFromExported(t *testing.T) {
	var reply string
	attrs := &utils.AttrLoadTpFromFolder{FolderPath: "/tmp/tp/"}
	if err := expRpc.Call(utils.APIerSv1LoadTariffPlanFromFolder, attrs, &reply); err != nil {
		t.Error(err)
	} else if reply != utils.OK {
		t.Error(reply)
	}
}

func testExpVerifyAttributes(t *testing.T) {
	exp := &engine.AttributeProfile{
		Tenant: "cgrates.org",
		ID: "ATTR_1003_SESSIONAUTH",
		FilterIDs: []string{"*string:~*req.Account:1003"},
		Contexts: []string{utils.MetaSessionS},
		Attributes: []*engine.Attribute{
			{
				Path: utils.MetaReq + utils.NestingSep + "Password",
				FilterIDs: []string{},
				Type: utils.META_CONSTANT,
				Value: config.NewRSRParsersMustCompile("CGRateS.org", true, utils.INFIELD_SEP),
			},
			{
				Path: utils.MetaReq + utils.NestingSep + utils.RequestType,
				FilterIDs: []string{},
				Type: utils.META_CONSTANT,
				Value: config.NewRSRParsersMustCompile("*prepaid", true, utils.INFIELD_SEP),
			},
			{
				Path: utils.MetaReq + utils.NestingSep + "PaypalAccount",
				FilterIDs: []string{},
				Type: utils.META_CONSTANT,
				Value: config.NewRSRParsersMustCompile("cgrates@paypal.com", true, utils.INFIELD_SEP),
			},
			{
				Path: utils.MetaReq + utils.NestingSep + "LCRProfile",
				FilterIDs: []string{},
				Type: utils.META_CONSTANT,
				Value: config.NewRSRParsersMustCompile("premium_cli", true, utils.INFIELD_SEP),
			},
		},
		Weight: 10.0,
	}
	var reply *engine.AttributeProfile
	if err := expRpc.Call(utils.APIerSv1GetAttributeProfile,
		utils.TenantIDWithArgDispatcher{
			TenantID: &utils.TenantID{Tenant: "cgrates.org", ID: "ATTR_1003_SESSIONAUTH"}}, &reply); err != nil {
		t.Fatal(err)
	}
	reply.Compile()
	if !reflect.DeepEqual(exp, reply) {
		t.Errorf("Expecting : %+v, \n received: %+v", utils.ToJSON(exp), utils.ToJSON(reply))
	}
}

func testExpVerifyFilters(t *testing.T) {
	exp := &engine.Filter{
		Tenant: "cgrates.org",
		ID: "FLTR_ACNT_1001_1002",
		Rules: []*engine.FilterRule{
			{
				Element: utils.DynamicDataPrefix + utils.MetaReq + utils.NestingSep + utils.Account,
				Type: utils.MetaString,
				Values: []string{"1001", "1002"},
			},
			{
				Element: utils.DynamicDataPrefix + utils.MetaReq + utils.NestingSep + utils.RunID,
				Type: utils.MetaString,
				Values: []string{utils.MetaDefault},
			},
			{
				Element: utils.DynamicDataPrefix + utils.MetaReq + utils.NestingSep + utils.Destination,
				Type: utils.MetaString,
				Values: []string{"1001", "1002", "1003"},
			},
		},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
	}
	var reply *engine.Filter
	if err := expRpc.Call(utils.APIerSv1GetFilter,
		&utils.TenantID{Tenant: "cgrates.org", ID: "FLTR_ACNT_1001_1002"}, &reply); err != nil {
		t.Error(err)
	} else if !reflect.DeepEqual(exp, reply) {
		t.Errorf("Expecting : %+v,\n received: %+v", utils.ToJSON(exp), utils.ToJSON(reply))
	}

}

func testExpVerifyThresholds(t *testing.T) {
	tPrfl := &engine.ThresholdWithCache{
		ThresholdProfile: &engine.ThresholdProfile{
			Tenant: "cgrates.org",
			ID: "THD_ACNT_1001",
			FilterIDs: []string{"FLTR_ACNT_1001"},
			ActivationInterval: &utils.ActivationInterval{
				ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
			},
			MaxHits: 1,
			MinHits: 1,
			MinSleep: time.Duration(1 * time.Second),
			Blocker: false,
			Weight: 10.0,
			ActionIDs: []string{"ACT_LOG_WARNING"},
			Async: true,
		},
	}
	var reply *engine.ThresholdProfile
	if err := expRpc.Call(utils.APIerSv1GetThresholdProfile,
		&utils.TenantID{Tenant: "cgrates.org", ID: "THD_ACNT_1001"}, &reply); err != nil {
		t.Error(err)
	} else if !reflect.DeepEqual(tPrfl.ThresholdProfile, reply) {
		t.Errorf("Expecting: %+v, \n received: %+v", utils.ToJSON(tPrfl.ThresholdProfile), utils.ToJSON(reply))
	}
}

func testExpVerifyResources(t *testing.T) {
	rPrf := &engine.ResourceProfile{
		Tenant: "cgrates.org",
		ID: "ResGroup1",
		FilterIDs: []string{"FLTR_RES"},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
		UsageTTL: time.Duration(-1),
		Limit: 7,
		Blocker: false,
		Stored: true,
		Weight: 10,
		ThresholdIDs: []string{utils.META_NONE},
	}
	var reply *engine.ResourceProfile
	if err := expRpc.Call(utils.APIerSv1GetResourceProfile,
		&utils.TenantID{Tenant: "cgrates.org", ID: "ResGroup1"}, &reply); err != nil {
		t.Error(err)
	} else if !reflect.DeepEqual(reply, rPrf) {
		t.Errorf("Expecting: %+v, received: %+v", utils.ToJSON(rPrf), utils.ToJSON(reply))
	}
}

func testExpVerifyStats(t *testing.T) {
	sPrf := &engine.StatQueueProfile{
		Tenant: "cgrates.org",
		ID: "Stats2",
		FilterIDs: []string{"FLTR_ACNT_1001_1002"},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
		QueueLength: 100,
		TTL: time.Duration(-1),
		Metrics: []*engine.MetricWithFilters{
			&engine.MetricWithFilters{
				MetricID: utils.MetaTCC,
			},
			&engine.MetricWithFilters{
				MetricID: utils.MetaTCD,
			},
		},
		Blocker: true,
		Stored: false,
		Weight: 30,
		MinItems: 0,
		ThresholdIDs: []string{utils.META_NONE},
	}

	sPrf2 := &engine.StatQueueProfile{
		Tenant: "cgrates.org",
		ID: "Stats2",
		FilterIDs: []string{"FLTR_ACNT_1001_1002"},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2014, 7, 29, 15, 0, 0, 0, time.UTC),
		},
		QueueLength: 100,
		TTL: time.Duration(-1),
		Metrics: []*engine.MetricWithFilters{
			&engine.MetricWithFilters{
				MetricID: utils.MetaTCD,
			},
			&engine.MetricWithFilters{
				MetricID: utils.MetaTCC,
			},
		},
		Blocker: true,
		Stored: false,
		Weight: 30,
		MinItems: 0,
		ThresholdIDs: []string{utils.META_NONE},
	}

	var reply *engine.StatQueueProfile
	if err := expRpc.Call(utils.APIerSv1GetStatQueueProfile,
		&utils.TenantID{Tenant: "cgrates.org", ID: "Stats2"}, &reply); err != nil {
		t.Error(err)
	} else if !reflect.DeepEqual(sPrf, reply) && !reflect.DeepEqual(sPrf2, reply) {
		t.Errorf("Expecting: %+v \n or %+v \n ,\n received: %+v",
			utils.ToJSON(sPrf), utils.ToJSON(sPrf2), utils.ToJSON(reply))
	}
}

func testExpVerifySuppliers(t *testing.T) {
	var reply *engine.SupplierProfile
	splPrf := &engine.SupplierProfile{
		Tenant: "cgrates.org",
		ID: "SPL_ACNT_1002",
		FilterIDs: []string{"FLTR_ACNT_1002"},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2017, 11, 27, 0, 0, 0, 0, time.UTC),
		},
		Sorting: utils.MetaLC,
		SortingParameters: []string{},
		Suppliers: []*engine.Supplier{
			{
				ID: "supplier1",
				RatingPlanIDs: []string{"RP_1002_LOW"},
				Weight: 10,
				Blocker: false,
				SupplierParameters: utils.EmptyString,
			},
			{
				ID: "supplier2",
				RatingPlanIDs: []string{"RP_1002"},
				Weight: 20,
				Blocker: false,
				SupplierParameters: utils.EmptyString,
			},
		},
		Weight: 10,
	}

	splPrf2 := &engine.SupplierProfile{
		Tenant: "cgrates.org",
		ID: "SPL_ACNT_1002",
		FilterIDs: []string{"FLTR_ACNT_1002"},
		ActivationInterval: &utils.ActivationInterval{
			ActivationTime: time.Date(2017, 11, 27, 0, 0, 0, 0, time.UTC),
		},
		Sorting: utils.MetaLC,
		SortingParameters: []string{},
		Suppliers: []*engine.Supplier{
			{
				ID: "supplier2",
				RatingPlanIDs: []string{"RP_1002"},
				Weight: 20,
				Blocker: false,
				SupplierParameters: utils.EmptyString,
			},
			{
				ID: "supplier1",
				RatingPlanIDs: []string{"RP_1002_LOW"},
				Weight: 10,
				Blocker: false,
				SupplierParameters: utils.EmptyString,
			},
		},
		Weight: 10,
	}
	if err := expRpc.Call(utils.APIerSv1GetSupplierProfile,
		&utils.TenantID{Tenant: "cgrates.org", ID: "SPL_ACNT_1002"}, &reply); err != nil {
		t.Error(err)
	} else if !reflect.DeepEqual(splPrf, reply) && !reflect.DeepEqual(splPrf2, reply) {
		t.Errorf("Expecting: %+v \n or %+v \n,\n received: %+v",
			utils.ToJSON(splPrf), utils.ToJSON(splPrf2), utils.ToJSON(reply))
	}
}

func testExpCleanFiles(t *testing.T) {
	if err := os.RemoveAll("/tmp/tp/"); err != nil {
		t.Error(err)
	}
}

func testExpStopCgrEngine(t *testing.T) {
	if err := engine.KillEngine(100); err != nil {
		t.Error(err)

@@ -555,7 +555,27 @@ const (
	CGR_ACD = "cgr_acd"
	FilterIDs = "FilterIDs"
	ActivationIntervalString = "ActivationInterval"
	MaxHits = "MaxHits"
	MinHits = "MinHits"
	ActionIDs = "ActionIDs"
	Async = "Async"
	Sorting = "Sorting"
	SortingParameters = "SortingParameters"
	SupplierAccountIDs = "SupplierAccountIDs"
	SupplierRatingplanIDs = "SupplierRatingplanIDs"
	SupplierStatIDs = "SupplierStatIDs"
	SupplierWeight = "SupplierWeight"
	SupplierParameters = "SupplierParameters"
	SupplierBlocker = "SupplierBlocker"
	SupplierResourceIDs = "SupplierResourceIDs"
	SupplierID = "SupplierID"
	SupplierFilterIDs = "SupplierFilterIDs"
	AttributeFilterIDs = "AttributeFilterIDs"
	QueueLength = "QueueLength"
	TTL = "TTL"
	MinItems = "MinItems"
	MetricIDs = "MetricIDs"
	MetricFilterIDs = "MetricFilterIDs"
	FieldName = "FieldName"
	Path = "Path"
	MetaRound = "*round"
@@ -684,7 +704,7 @@ const (
	MetaDiamreq = "*diamreq"
	MetaCost = "*cost"
	MetaGroup = "*group"
	InternalRPCSet = "InternalRPCSet"
	InternalRPCSet = "InternalRPCSet"
)

// Migrator Action