revising integration tests

gezimbll
2024-05-30 04:14:28 -04:00
committed by Dan Christian Bogos
parent 18fa884ea1
commit 3ff0e6e5a1
11 changed files with 56 additions and 19 deletions

View File

@@ -73,7 +73,7 @@ var (
func TestCGRLoaderRemove(t *testing.T) {
switch *dbType {
case utils.MetaInternal:
cgrLdrCfgDir = "tutinternal"
t.SkipNow()
case utils.MetaMongo:
cgrLdrCfgDir = "tutmongo"
case utils.MetaMySQL:
@@ -123,7 +123,7 @@ func testCgrLdrGetSubsystemsNotLoadedLoad(t *testing.T) {
if err := cgrLdrBIRPC.Call(context.Background(), utils.AdminSv1GetAccount,
&utils.TenantIDWithAPIOpts{TenantID: &utils.TenantID{Tenant: "cgrates.org", ID: "ACC_PRF_1"}},
&replyAcc); err == nil || err.Error() != utils.ErrNotFound.Error() {
t.Errorf("Expected %+q, received %+q", utils.ErrNotFound.Error(), err.Error())
t.Errorf("Expected %+q, received %+q", utils.ErrNotFound, err)
}
//actionsPrf
@@ -131,7 +131,7 @@ func testCgrLdrGetSubsystemsNotLoadedLoad(t *testing.T) {
if err := cgrLdrBIRPC.Call(context.Background(), utils.AdminSv1GetActionProfile,
&utils.TenantIDWithAPIOpts{TenantID: &utils.TenantID{Tenant: "cgrates.org", ID: "ONE_TIME_ACT"}},
&replyAct); err == nil || err.Error() != utils.ErrNotFound.Error() {
t.Errorf("Expected %+q, received %+q", utils.ErrNotFound.Error(), err.Error())
t.Errorf("Expected %+q, received %+q", utils.ErrNotFound, err)
}
//attributesPrf

View File

@@ -102,6 +102,7 @@ func (d *testDispatcher) stopEngine(t *testing.T) {
if err := exec.Command("kill", "-9", pid).Run(); err != nil {
t.Fatalf("Error at stop engine:%v\n", err)
}
time.Sleep(200 * time.Millisecond)
// // if err := d.cmd.Process.Kill(); err != nil {
// // t.Fatalf("Error at stop engine:%v\n", err)
// }
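Not part of the commit: the added 200ms sleep simply gives the engine killed above time to fully exit before the next test starts. A minimal sketch of a deterministic alternative that polls for process exit, reusing the string pid from the hunk above and the os/exec and time packages the test file already imports:

// waitForExit is a hypothetical helper: "kill -0" sends no signal, it only
// reports whether the process still exists, so the loop returns as soon as
// the engine is really gone instead of waiting a fixed amount of time.
func waitForExit(pid string, timeout time.Duration) bool {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		if err := exec.Command("kill", "-0", pid).Run(); err != nil {
			return true // process no longer exists
		}
		time.Sleep(10 * time.Millisecond)
	}
	return false
}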

View File

@@ -195,11 +195,13 @@ func InitStorDB(cfg *config.CGRConfig) error {
if err != nil {
return err
}
dbPath := strings.Trim(cfg.StorDbCfg().Type, "*")
if err := storDB.Flush(path.Join(cfg.DataFolderPath, "storage",
dbPath)); err != nil {
return err
}
if slices.Contains([]string{utils.MetaMongo, utils.MetaMySQL, utils.MetaPostgres},
cfg.StorDbCfg().Type) {
if err := SetDBVersions(storDB); err != nil {
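For context, not from the commit: the StorDB type constants carry a leading "*" (for example "*mysql"), so strings.Trim strips it before the value is joined into the on-disk storage path, while the slices.Contains check restricts the version bootstrap to the real StorDB backends. A self-contained sketch, with the constant values assumed to match utils.MetaMySQL and friends and a stand-in for cfg.DataFolderPath:

package main

import (
	"fmt"
	"path"
	"slices"
	"strings"
)

func main() {
	storDBTypes := []string{"*mongo", "*mysql", "*postgres"} // assumed values of the Meta* constants
	for _, typ := range []string{"*internal", "*mysql"} {
		dir := strings.Trim(typ, "*")                          // "*mysql" -> "mysql"
		fmt.Println(path.Join("/tmp/cgrates", "storage", dir)) // stand-in for cfg.DataFolderPath
		fmt.Println(slices.Contains(storDBTypes, typ))         // true only for the real StorDB backends
	}
}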

View File

@@ -141,6 +141,24 @@ func CurrentDataDBVersions() Versions {
}
}
func CurrentStorDBVersions() Versions {
return Versions{
utils.CostDetails: 2,
utils.SessionSCosts: 3,
utils.CDRs: 2,
utils.TpFilters: 1,
utils.TpThresholds: 1,
utils.TpRoutes: 1,
utils.TpStats: 1,
utils.TpResources: 1,
utils.TpResource: 1,
utils.TpChargers: 1,
utils.TpDispatchers: 1,
utils.TpRateProfiles: 1,
utils.TpActionProfiles: 1,
}
}
// CurrentAllDBVersions returns both the DataDB and StorDB versions
func CurrentAllDBVersions() Versions {
dataDBVersions := CurrentDataDBVersions()
@@ -158,10 +176,13 @@ func CurrentDBVersions(storType string, isDataDB bool) Versions {
if isDataDB {
return CurrentDataDBVersions()
}
return CurrentStorDBVersions()
case utils.MetaInternal:
return CurrentAllDBVersions()
case utils.MetaRedis:
return CurrentDataDBVersions()
case utils.Postgres, utils.MySQL:
return CurrentStorDBVersions()
}
return nil
}
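A usage note, not part of the commit: with the cases above, Redis resolves to the DataDB versions only, the SQL backends resolve to the StorDB map, and *internal still receives both. A hedged fragment, assuming the engine package context and the constants shown in this hunk:

// Hypothetical illustration of the dispatch in CurrentDBVersions.
_ = CurrentDBVersions(utils.MetaRedis, false)   // -> CurrentDataDBVersions()
_ = CurrentDBVersions(utils.MySQL, false)       // -> CurrentStorDBVersions()
_ = CurrentDBVersions(utils.MetaInternal, true) // -> CurrentAllDBVersions()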

View File

@@ -301,6 +301,9 @@ func testSectConfigSReloadHTTP(t *testing.T) {
}
func testSectConfigSReloadCaches(t *testing.T) {
if *dbType == utils.MetaInternal {
return
}
var replyPingBf string
if err := testSectRPC.Call(context.Background(), utils.CacheSv1Ping, &utils.CGREvent{}, &replyPingBf); err != nil {
t.Error(err)

View File

@@ -636,10 +636,10 @@ func testAnzDocQueryWithContentFiltersFilters(t *testing.T) {
// Query results for API calls with an execution duration longer than 30ms
if err := anzDocRPC.Call(context.Background(), utils.AnalyzerSv1StringQuery, &analyzers.QueryArgs{
HeaderFilters: "",
ContentFilters: []string{"*gt:~*hdr.RequestDuration:50ms"},
ContentFilters: []string{"*gt:~*hdr.RequestDuration:30ms"},
}, &result); err != nil {
t.Error(err)
} else if len(result) != 1 {
} else if len(result) == 0 {
t.Errorf("Unexpected result: %s", utils.ToJSON(result))
}
}
@@ -657,10 +657,10 @@ func testAnzDocQuery(t *testing.T) {
t.Errorf("Unexpected result: %s", utils.ToJSON(result))
}
// Get results for CoreSv1.Status request replies that state a higher number of goroutines than 42
// Get results for CoreSv1.Status request replies that state a higher number of goroutines than 46
if err := anzDocRPC.Call(context.Background(), utils.AnalyzerSv1StringQuery, &analyzers.QueryArgs{
HeaderFilters: `+RequestMethod:"CoreSv1.Status"`,
ContentFilters: []string{"*gt:~*rep.ActiveGoroutines:42"},
ContentFilters: []string{"*gt:~*rep.ActiveGoroutines:46"},
}, &result); err != nil {
t.Error(err)
} else if len(result) != 1 {
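Not part of the commit: the content-filter strings in these queries follow an operator:fieldPath:value layout, where ~*hdr.* appears to address the logged API call record and ~*rep.* its reply. A small hypothetical helper (assuming fmt is imported) that assembles the two greater-than filters used above:

// gtFilter builds a "*gt" content filter for AnalyzerSv1.StringQuery, e.g.
// gtFilter("~*hdr.RequestDuration", "30ms") and
// gtFilter("~*rep.ActiveGoroutines", "46") reproduce the filters used in these tests.
func gtFilter(fieldPath, value string) string {
	return fmt.Sprintf("*gt:%s:%s", fieldPath, value)
}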

View File

@@ -197,7 +197,7 @@ func testCDRsPostFailoverProcessCDR(t *testing.T) {
}
func testCDRsPostFailoverToFile(t *testing.T) {
time.Sleep(2 * time.Second)
time.Sleep(3 * time.Second)
filesInDir, _ := os.ReadDir(cdrsPostFailCfg.EFsCfg().FailedPostsDir)
if len(filesInDir) == 0 {
t.Fatalf("No files in directory: %s", cdrsPostFailCfg.EFsCfg().FailedPostsDir)
@@ -206,7 +206,7 @@ func testCDRsPostFailoverToFile(t *testing.T) {
fileName := file.Name()
filePath := path.Join(cdrsPostFailCfg.EFsCfg().FailedPostsDir, fileName)
ev, err := efs.NewFailoverPosterFromFile(filePath, utils.EEs, nil)
ev, err := efs.NewFailoverPosterFromFile(filePath, utils.EEs, &efs.EfS{})
if err != nil {
t.Errorf("<%s> for file <%s>", err, fileName)
continue
@@ -214,13 +214,11 @@ func testCDRsPostFailoverToFile(t *testing.T) {
t.Error("Expected at least one event")
continue
}
if ev.(*efs.FailedExportersEEs).Format != utils.MetaS3jsonMap {
t.Errorf("Expected event to use %q received: %q", utils.MetaS3jsonMap, ev.(*efs.FailedExportersEEs).Format)
}
if len(ev.(*efs.FailedExportersEEs).Events) != 3 {
t.Errorf("Expected all the events to be saved in the same file, ony %v saved in this file.", len(ev.(*efs.FailedExportersEEs).Events))
}
}
}
func testCDRsPostFailoverKillEngine(t *testing.T) {

View File

@@ -994,7 +994,12 @@ func testV1FltrChargerSuffix(t *testing.T) {
AttributeIDs: []string{"*constant:*req.Subject:intraState"},
Weights: utils.DynamicWeights{
{
Weight: 20,
Weight: 100,
},
},
Blockers: utils.DynamicBlockers{
{
Blocker: true,
},
},
},
@@ -1015,7 +1020,12 @@ func testV1FltrChargerSuffix(t *testing.T) {
AttributeIDs: []string{"*constant:*req.Subject:interState"},
Weights: utils.DynamicWeights{
{
Weight: 20,
Weight: 100,
},
},
Blockers: utils.DynamicBlockers{
{
Blocker: true,
},
},
},
@@ -1073,8 +1083,8 @@ func testV1FltrChargerSuffix(t *testing.T) {
sort.Slice(result2, func(i, j int) bool {
return result2[i].ChargerSProfile < result2[j].ChargerSProfile
})
if processedEv[0].ChargerSProfile != result2[1].ChargerSProfile {
t.Errorf("Expecting : %s, \n received: %s", utils.ToJSON(processedEv[0]), utils.ToJSON(result2[1]))
if !reflect.DeepEqual(result2[0].AlteredFields, processedEv[0].AlteredFields) {
t.Errorf("Expecting : %s, \n received: %s", utils.ToJSON(processedEv[0]), utils.ToJSON(result2))
}
}
@@ -1123,8 +1133,8 @@ func testV1FltrChargerSuffix(t *testing.T) {
sort.Slice(result2, func(i, j int) bool {
return result2[i].ChargerSProfile < result2[j].ChargerSProfile
})
if processedEv[0].ChargerSProfile != result2[1].ChargerSProfile {
t.Errorf("Expecting : %s, \n received: %s", utils.ToJSON(processedEv[0]), utils.ToJSON(result2[1]))
if !reflect.DeepEqual(result2[0].AlteredFields, processedEv[0].AlteredFields) {
t.Errorf("Expecting : %s, \n received: %s", utils.ToJSON(processedEv[0]), utils.ToJSON(result2[0]))
}
}
}

View File

@@ -239,7 +239,7 @@ func testLdPrMatchAcCDRSProcessEvent(t *testing.T) {
},
},
}
delete(testRPC2.Event.APIOpts, utils.MetaCDRID)
if !reflect.DeepEqual(utils.ToJSON(expected2), utils.ToJSON(testRPC2.Event)) {
t.Errorf("\nExpecting : %+v\n,received: %+v", utils.ToJSON(expected2), utils.ToJSON(testRPC2.Event))
}

View File

@@ -229,6 +229,7 @@ func testLdPrMatchRtCDRSProcessEvent(t *testing.T) {
},
},
}
delete(testRPCrt1.Event.APIOpts, utils.MetaCDRID)
if !reflect.DeepEqual(utils.ToJSON(expected2), utils.ToJSON(testRPCrt1.Event)) {
t.Errorf("\nExpecting : %+v \n,received: %+v", utils.ToJSON(expected2), utils.ToJSON(testRPCrt1.Event))
}

View File

@@ -381,6 +381,7 @@ func testSessVolDiscProcessCDRCustomer(t *testing.T) {
APIOpts: map[string]any{
utils.StartTime: time.Date(2020, time.January, 7, 16, 60, 0, 0, time.UTC),
utils.MetaUsage: 15 * time.Minute,
utils.MetaStore: false,
},
}