mirror of https://github.com/cgrates/cgrates.git
synced 2026-02-15 13:19:53 +05:00
handling *any destination id
@@ -25,6 +25,7 @@ import (
 	"time"
 
 	"github.com/cgrates/cgrates/history"
+	"github.com/cgrates/cgrates/utils"
 )
 
 var (
@@ -645,6 +646,23 @@ func TestMaxDebitConsumesMinutes(t *testing.T) {
 	}
 }
 
+func TestCDGetCostANY(t *testing.T) {
+	cd1 := &CallDescriptor{
+		Direction:    "*out",
+		TOR:          "0",
+		Tenant:       "vdf",
+		Subject:      "rif",
+		Destination:  utils.ANY,
+		TimeStart:    time.Date(2014, 3, 4, 6, 0, 0, 0, time.UTC),
+		TimeEnd:      time.Date(2014, 3, 4, 6, 0, 5, 0, time.UTC),
+		LoopIndex:    0,
+		CallDuration: 0}
+	cc, err := cd1.GetCost()
+	if err != nil || cc.Cost != 6 {
+		t.Errorf("Error getting *any dest: %+v %v", cc, err)
+	}
+}
+
 /*************** BENCHMARKS ********************/
 func BenchmarkStorageGetting(b *testing.B) {
 	b.StopTimer()
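
For reference, the special destination id exercised by the new test is the catch-all constant from the utils package. Its definition is not part of this diff; it is assumed to be the literal "*any" string, the same token that appears in the CSV fixtures and the expected history output further down:

// Assumed definition, not shown in this commit: the catch-all
// destination identifier referenced as utils.ANY.
package utils

const ANY = "*any"
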
@@ -36,7 +36,8 @@ func TestHistoryRatinPlans(t *testing.T) {
 func TestHistoryDestinations(t *testing.T) {
 	scribe := historyScribe.(*history.MockScribe)
 	buf := scribe.BufMap[history.DESTINATIONS_FN]
-	expected := `[{"Id":"ALL","Prefixes":["49","41","43"]},
+	expected := `[{"Id":"*any","Prefixes":[""]},
+{"Id":"ALL","Prefixes":["49","41","43"]},
 {"Id":"DST_UK_Mobile_BIG5","Prefixes":["447956"]},
 {"Id":"GERMANY","Prefixes":["49"]},
 {"Id":"GERMANY_O2","Prefixes":["41"]},
@@ -44,6 +44,7 @@ PSTN_71,+4971
 PSTN_72,+4972
 PSTN_70,+4970
 DST_UK_Mobile_BIG5,447956
+*any,
 `
 	timings = `
 WORKDAYS_00,*any,*any,*any,1;2;3;4;5,00:00:00
@@ -82,6 +83,7 @@ T2,GERMANY_O2,GBP_70
 T2,GERMANY_PREMIUM,GBP_71
 DR_UK_Mobile_BIG5_PKG,DST_UK_Mobile_BIG5,RT_UK_Mobile_BIG5_PKG
 DR_UK_Mobile_BIG5,DST_UK_Mobile_BIG5,RT_UK_Mobile_BIG5
+DATA_RATE,*any,R4
 `
 	ratingPlans = `
 STANDARD,RT_STANDARD,WORKDAYS_00,10
@@ -94,6 +96,7 @@ DEFAULT,RT_DEFAULT,WORKDAYS_00,10
 EVENING,P1,WORKDAYS_00,10
 EVENING,P2,WORKDAYS_18,10
 EVENING,P2,WEEKENDS,10
+EVENING,DATA_RATE,ALWAYS,10
 TDRT,T1,WORKDAYS_00,10
 TDRT,T2,WORKDAYS_00,10
 G,RT_STANDARD,WORKDAYS_00,10
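
Taken together, the fixture hunks above define a catch-all destination row (*any with an empty prefix column, matching the {"Id":"*any","Prefixes":[""]} entry the history test above now expects), a DATA_RATE destination rate bound to *any, and an EVENING rating-plan line that attaches that rate. A quick, hypothetical snippet showing where the empty prefix comes from when the new destinations row is read (assuming a plain encoding/csv reader, which is only an assumption about how the csvr loader parses these fixtures):

package main

import (
	"encoding/csv"
	"fmt"
	"strings"
)

func main() {
	// The added destinations row: id "*any" followed by an empty prefix column.
	r := csv.NewReader(strings.NewReader("*any,\n"))
	rec, err := r.Read()
	fmt.Printf("%q %v\n", rec, err) // ["*any" ""] <nil> -> a destination with one empty prefix
}
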
@@ -186,7 +189,7 @@ func init() {
 }
 
 func TestLoadDestinations(t *testing.T) {
-	if len(csvr.destinations) != 10 {
+	if len(csvr.destinations) != 11 {
 		t.Error("Failed to load destinations: ", len(csvr.destinations))
 	}
 	for _, d := range csvr.destinations {
@@ -352,7 +355,7 @@ func TestLoadRates(t *testing.T) {
 }
 
 func TestLoadDestinationRates(t *testing.T) {
-	if len(csvr.destinationRates) != 9 {
+	if len(csvr.destinationRates) != 10 {
 		t.Error("Failed to load destinationrates: ", csvr.destinationRates)
 	}
 	drs := csvr.destinationRates["RT_STANDARD"]
@@ -463,9 +466,9 @@ func TestLoadDestinationRates(t *testing.T) {
 	}
 }
 
-func TestLoadDestinationRateTimings(t *testing.T) {
+func TestLoadRatingPlans(t *testing.T) {
 	if len(csvr.ratingPlans) != 9 {
-		t.Error("Failed to load rate timings: ", csvr.ratingPlans)
+		t.Error("Failed to load rating plans: ", len(csvr.ratingPlans))
 	}
 	rplan := csvr.ratingPlans["STANDARD"]
 	expected := &RatingPlan{
@@ -117,20 +117,28 @@ func (rp *RatingProfile) GetRatingPlansForPrefix(cd *CallDescriptor) (err error)
 	prefix := ""
 	destinationId := ""
 	var rps RateIntervalList
-	for _, p := range utils.SplitPrefix(cd.Destination, MIN_PREFIX_MATCH) {
-		if x, err := cache2go.GetCached(DESTINATION_PREFIX + p); err == nil {
-			destIds := x.([]string)
-			for _, dId := range destIds {
-				if _, ok := rpl.DestinationRates[dId]; ok {
-					rps = rpl.RateIntervalList(dId)
-					prefix = p
-					destinationId = dId
-					break
-				}
-			}
-		}
-		if rps != nil {
-			break
-		}
-	}
+	if cd.Destination == utils.ANY {
+		if _, ok := rpl.DestinationRates[utils.ANY]; ok {
+			rps = rpl.RateIntervalList(utils.ANY)
+			prefix = utils.ANY
+			destinationId = utils.ANY
+		}
+	} else {
+		for _, p := range utils.SplitPrefix(cd.Destination, MIN_PREFIX_MATCH) {
+			if x, err := cache2go.GetCached(DESTINATION_PREFIX + p); err == nil {
+				destIds := x.([]string)
+				for _, dId := range destIds {
+					if _, ok := rpl.DestinationRates[dId]; ok {
+						rps = rpl.RateIntervalList(dId)
+						prefix = p
+						destinationId = dId
+						break
+					}
+				}
+			}
+			if rps != nil {
+				break
+			}
+		}
+	}
 	// check if it's the first ri and add a blank one for the initial part not covered
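
The hunk above is the core of the change: a call descriptor whose destination is already the catch-all id is rated directly against the *any entry in the plan's DestinationRates, while any other destination keeps the existing longest-prefix search through the cached prefix-to-destination map. The following self-contained sketch (hypothetical helper and data structures, not code from this commit) illustrates that lookup order:

package main

import "fmt"

// pickDestinationId is a hypothetical stand-in for the branch added above.
// The real code resolves a dialed number to destination ids through a
// prefix cache and then checks the rating plan's DestinationRates; here
// both lookups are plain maps.
func pickDestinationId(destination string, prefixToDestIds map[string][]string, destinationRates map[string]bool) string {
	if destination == "*any" { // catch-all descriptor: skip prefix matching
		if destinationRates["*any"] {
			return "*any"
		}
		return ""
	}
	// Longest-prefix search, loosely mirroring the utils.SplitPrefix loop.
	for p := destination; len(p) > 0; p = p[:len(p)-1] {
		for _, dId := range prefixToDestIds[p] {
			if destinationRates[dId] {
				return dId
			}
		}
	}
	return ""
}

func main() {
	prefixes := map[string][]string{"49": {"GERMANY"}}
	rated := map[string]bool{"GERMANY": true, "*any": true}
	fmt.Println(pickDestinationId("4971234567", prefixes, rated)) // GERMANY
	fmt.Println(pickDestinationId("*any", prefixes, rated))       // *any
}

The real code additionally records the matched prefix and stops once a RateIntervalList is found; the sketch only shows which destination id wins.
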
@@ -164,9 +164,7 @@ func (tss TimeSpans) Compress() {
 		var cIncrs Increments
 		for _, incr := range ts.Increments {
 			if len(cIncrs) == 0 || !cIncrs[len(cIncrs)-1].Equal(incr) {
-				if incr.CompressFactor == 0 {
-					incr.CompressFactor = 1
-				}
+				incr.GetCompressFactor() // side effect: sets CompressFactor to 1 if unset
 				cIncrs = append(cIncrs, incr)
 			} else {
 				cIncrs[len(cIncrs)-1].CompressFactor++
@@ -180,10 +178,7 @@ func (tss TimeSpans) Decompress() {
 	for _, ts := range tss {
 		var incrs Increments
 		for _, cIncr := range ts.Increments {
-			if cIncr.CompressFactor == 0 { // if never compressed
-				cIncr.CompressFactor = 1
-			}
-			for i := 0; i < cIncr.CompressFactor; i++ {
+			for i := 0; i < cIncr.GetCompressFactor(); i++ {
 				incrs = append(incrs, cIncr.Clone())
 			}
 		}
@@ -210,19 +205,30 @@ func (incr *Increment) Equal(other *Increment) bool {
 		((incr.MinuteInfo == nil && other.MinuteInfo == nil) || incr.MinuteInfo.Equal(other.MinuteInfo))
 }
 
+func (incr *Increment) GetCompressFactor() int {
+	if incr.CompressFactor == 0 {
+		incr.CompressFactor = 1
+	}
+	return incr.CompressFactor
+}
+
 type Increments []*Increment
 
 func (incs Increments) GetTotalCost() float64 {
 	cost := 0.0
 	for _, increment := range incs {
-		if increment.CompressFactor == 0 {
-			increment.CompressFactor = 1
-		}
-		cost += (float64(increment.CompressFactor) * increment.Cost)
+		cost += (float64(increment.GetCompressFactor()) * increment.Cost)
 	}
 	return cost
 }
 
+func (incs Increments) Length() (length int) {
+	for _, incr := range incs {
+		length += incr.GetCompressFactor()
+	}
+	return
+}
+
 // Returns the duration of the timespan
 func (ts *TimeSpan) GetDuration() time.Duration {
 	return ts.TimeEnd.Sub(ts.TimeStart)
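
The new GetCompressFactor and Length helpers exist because, once Compress has run, len(ts.Increments) counts stored entries rather than billed increments, which is why the getCost and IsPaid hunks below switch from len() to Length(). A minimal runnable sketch of that distinction (the types are local stand-ins mirroring only the fields used here, not the engine's real Increment):

package main

import "fmt"

// Stand-ins mirroring only the fields and methods this hunk relies on.
type Increment struct {
	Cost           float64
	CompressFactor int
}

func (incr *Increment) GetCompressFactor() int {
	if incr.CompressFactor == 0 { // never compressed: count it once
		incr.CompressFactor = 1
	}
	return incr.CompressFactor
}

type Increments []*Increment

func (incs Increments) Length() (length int) {
	for _, incr := range incs {
		length += incr.GetCompressFactor()
	}
	return
}

func main() {
	// Three identical 0.5-cost increments compressed into a single entry.
	incs := Increments{{Cost: 0.5, CompressFactor: 3}}
	fmt.Println(len(incs))     // 1 stored entry
	fmt.Println(incs.Length()) // 3 logical increments
}
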
@@ -253,7 +259,7 @@ func (ts *TimeSpan) SetRateInterval(i *RateInterval) {
 // It also sets the Cost field of this timespan (used for refund on session
 // manager debit loop where the cost cannot be recalculated)
 func (ts *TimeSpan) getCost() float64 {
-	if len(ts.Increments) == 0 {
+	if ts.Increments.Length() == 0 {
 		if ts.RateInterval == nil {
 			return 0
 		}
@@ -261,7 +267,7 @@ func (ts *TimeSpan) getCost() float64 {
 		ts.Cost = utils.Round(cost, ts.RateInterval.Rating.RoundingDecimals, ts.RateInterval.Rating.RoundingMethod)
 		return ts.Cost
 	} else {
-		return ts.Increments[0].Cost * float64(len(ts.Increments))
+		return ts.Increments[0].Cost * float64(ts.Increments.Length())
 	}
 }
 
@@ -293,7 +299,7 @@ func (ts *TimeSpan) createIncrementsSlice() {
 // returns whether the timespan has all increments marked as paid and if not
 // it also returns the first unpaid increment
 func (ts *TimeSpan) IsPaid() (bool, int) {
-	if len(ts.Increments) == 0 {
+	if ts.Increments.Length() == 0 {
 		return false, 0
 	}
 	for incrementIndex, increment := range ts.Increments {