merge Dan's work

Radu Ioan Fericean
2013-07-30 21:54:27 +03:00
57 changed files with 1452 additions and 688 deletions

View File

@@ -25,11 +25,10 @@ import (
"github.com/cgrates/cgrates/utils"
)
// Creates a new AccountActions profile within a tariff plan
func (self *Apier) SetTPAccountActions(attrs utils.ApiTPAccountActions, reply *string) error {
if missing := utils.MissingStructFields(&attrs,
[]string{"TPid", "AccountActionsId","Tenant","Account","Direction","ActionTimingsId","ActionTriggersId"}); len(missing) != 0 {
[]string{"TPid", "AccountActionsId", "Tenant", "Account", "Direction", "ActionTimingsId", "ActionTriggersId"}); len(missing) != 0 {
return fmt.Errorf("%s:%v", utils.ERR_MANDATORY_IE_MISSING, missing)
}
if exists, err := self.StorDb.ExistsTPAccountActions(attrs.TPid, attrs.AccountActionsId); err != nil {
@@ -38,9 +37,9 @@ func (self *Apier) SetTPAccountActions(attrs utils.ApiTPAccountActions, reply *s
return errors.New(utils.ERR_DUPLICATE)
}
aa := map[string]*engine.AccountAction{
attrs.AccountActionsId: &engine.AccountAction{Tenant: attrs.Tenant, Account: attrs.Account, Direction: attrs.Direction,
ActionTimingsTag: attrs.ActionTimingsId, ActionTriggersTag: attrs.ActionTriggersId},
}
attrs.AccountActionsId: &engine.AccountAction{Tenant: attrs.Tenant, Account: attrs.Account, Direction: attrs.Direction,
ActionTimingsTag: attrs.ActionTimingsId, ActionTriggersTag: attrs.ActionTriggersId},
}
if err := self.StorDb.SetTPAccountActions(attrs.TPid, aa); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
@@ -64,13 +63,13 @@ func (self *Apier) GetTPAccountActions(attrs AttrGetTPAccountActions, reply *uti
} else if len(aa) == 0 {
return errors.New(utils.ERR_NOT_FOUND)
} else {
*reply = utils.ApiTPAccountActions{TPid: attrs.TPid,
AccountActionsId: attrs.AccountActionsId,
Tenant: aa[attrs.AccountActionsId].Tenant,
Account:aa[attrs.AccountActionsId].Account,
Direction: aa[attrs.AccountActionsId].Direction,
ActionTimingsId: aa[attrs.AccountActionsId].ActionTimingsTag,
ActionTriggersId: aa[attrs.AccountActionsId].ActionTriggersTag }
*reply = utils.ApiTPAccountActions{TPid: attrs.TPid,
AccountActionsId: attrs.AccountActionsId,
Tenant: aa[attrs.AccountActionsId].Tenant,
Account: aa[attrs.AccountActionsId].Account,
Direction: aa[attrs.AccountActionsId].Direction,
ActionTimingsId: aa[attrs.AccountActionsId].ActionTimingsTag,
ActionTriggersId: aa[attrs.AccountActionsId].ActionTriggersTag}
}
return nil
}
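
For reference, a minimal client-side sketch of invoking this method. It assumes the engine registers the Apier object under the service name "Apier" on the rater's JSON-RPC listener (127.0.0.1:2012 per the defaults shown later in this commit, with RPCEncoding = json); the sample values are illustrative. The argument fields mirror the required-field list checked by MissingStructFields above.

package main

import (
	"fmt"
	"log"
	"net/rpc/jsonrpc"

	"github.com/cgrates/cgrates/utils"
)

func main() {
	// Dial the rater's JSON-RPC listener (address taken from the default config; an assumption here).
	client, err := jsonrpc.Dial("tcp", "127.0.0.1:2012")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
	// Every field below appears in the MissingStructFields check above; values are illustrative.
	attrs := utils.ApiTPAccountActions{
		TPid:             "TP_SAMPLE",
		AccountActionsId: "AA_1001",
		Tenant:           "cgrates.org",
		Account:          "1001",
		Direction:        "*out",
		ActionTimingsId:  "PREPAID_10",
		ActionTriggersId: "STANDARD_TRIGGERS",
	}
	var reply string
	if err := client.Call("Apier.SetTPAccountActions", attrs, &reply); err != nil {
		log.Fatal(err)
	}
	fmt.Println(reply) // "OK" on success
}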

View File

@@ -21,7 +21,9 @@ package apier
import (
"errors"
"fmt"
"time"
"github.com/cgrates/cgrates/utils"
"github.com/cgrates/cgrates/engine"
)
// Creates a new Actions profile within a tariff plan
@@ -31,8 +33,8 @@ func (self *Apier) SetTPActions(attrs utils.TPActions, reply *string) error {
}
for _, action := range attrs.Actions {
requiredFields := []string{"Identifier", "Weight"}
if action.BalanceId != "" { // Add some inter-dependent parameters - if balanceType then we are not talking about simply calling actions
requiredFields = append(requiredFields, "Direction", "Units", "ExpirationTime")
if action.BalanceType != "" { // Add some inter-dependent parameters - if balanceType then we are not talking about simply calling actions
requiredFields = append(requiredFields, "Direction", "Units", "ExpiryTime")
}
if missing := utils.MissingStructFields(&action, requiredFields); len(missing) != 0 {
return fmt.Errorf("%s:Action:%s:%v", utils.ERR_MANDATORY_IE_MISSING, action.Identifier, missing)
@@ -43,7 +45,22 @@ func (self *Apier) SetTPActions(attrs utils.TPActions, reply *string) error {
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
if err := self.StorDb.SetTPActions(&attrs); err != nil {
acts := make([]*engine.Action, len(attrs.Actions))
for idx, act := range attrs.Actions {
acts[idx] = &engine.Action{
ActionType: act.Identifier,
BalanceId: act.BalanceType,
Direction: act.Direction,
Units: act.Units,
ExpirationDate: time.Unix(act.ExpiryTime,0),
DestinationTag: act.DestinationId,
RateType: act.RateType,
RateValue: act.Rate,
MinutesWeight: act.MinutesWeight,
Weight: act.Weight,
}
}
if err := self.StorDb.SetTPActions(attrs.TPid, map[string][]*engine.Action{attrs.ActionsId: acts}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"
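
One detail worth noting in the conversion above: ExpiryTime is passed to time.Unix(act.ExpiryTime, 0), so it is interpreted as whole seconds since the Unix epoch. A self-contained sketch of that conversion (the literal value is illustrative):

package main

import (
	"fmt"
	"time"
)

func main() {
	var expiryTime int64 = 1375200000          // illustrative Unix-seconds value
	expirationDate := time.Unix(expiryTime, 0) // same call used for engine.Action.ExpirationDate above
	fmt.Println(expirationDate.UTC())          // 2013-07-30 16:00:00 +0000 UTC
}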

View File

@@ -22,6 +22,7 @@ import (
"errors"
"fmt"
"github.com/cgrates/cgrates/utils"
"github.com/cgrates/cgrates/engine"
)
// Creates a new ActionTimings profile within a tariff plan
@@ -40,11 +41,16 @@ func (self *Apier) SetTPActionTimings(attrs utils.ApiTPActionTimings, reply *str
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
ats := make(map[string][]*utils.TPActionTimingsRow, 1) // Only one id will be stored in the map
for _, at := range attrs.ActionTimings {
ats[attrs.ActionTimingsId] = append(ats[attrs.ActionTimingsId], &utils.TPActionTimingsRow{at.ActionsId, at.TimingId, at.Weight})
ats := make([]*engine.ActionTiming, len(attrs.ActionTimings))
for idx, at := range attrs.ActionTimings {
ats[idx] = &engine.ActionTiming{
Tag: attrs.ActionTimingsId,
ActionsTag: at.ActionsId,
TimingsTag: at.TimingId,
Weight: at.Weight,
}
}
if err := self.StorDb.SetTPActionTimings(attrs.TPid, ats); err != nil {
if err := self.StorDb.SetTPActionTimings(attrs.TPid, map[string][]*engine.ActionTiming{attrs.ActionTimingsId: ats}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"

View File

@@ -38,12 +38,12 @@ func (self *Apier) SetTPActionTriggers(attrs utils.ApiTPActionTriggers, reply *s
}
aTriggers := make([]*engine.ActionTrigger, len(attrs.ActionTriggers))
for idx, at := range attrs.ActionTriggers {
requiredFields := []string{"BalanceId", "Direction", "ThresholdType", "ThresholdValue", "ActionsId", "Weight"}
requiredFields := []string{"BalanceType", "Direction", "ThresholdType", "ThresholdValue", "ActionsId", "Weight"}
if missing := utils.MissingStructFields(&at, requiredFields); len(missing) != 0 {
return fmt.Errorf("%s:Balance:%s:%v", utils.ERR_MANDATORY_IE_MISSING, at.BalanceId, missing)
return fmt.Errorf("%s:Balance:%s:%v", utils.ERR_MANDATORY_IE_MISSING, at.BalanceType, missing)
}
at := &engine.ActionTrigger{
BalanceId: at.BalanceId,
BalanceId: at.BalanceType,
Direction: at.Direction,
ThresholdType: at.ThresholdType,
ThresholdValue: at.ThresholdValue,
@@ -82,7 +82,7 @@ func (self *Apier) GetTPActionTriggers(attrs AttrGetTPActionTriggers, reply *uti
aTriggers := make([]utils.ApiActionTrigger, len(ats[attrs.ActionTriggersId]))
for idx, row := range ats[attrs.ActionTriggersId] {
aTriggers[idx] = utils.ApiActionTrigger{
BalanceId: row.BalanceId,
BalanceType: row.BalanceId,
Direction: row.Direction,
ThresholdType: row.ThresholdType,
ThresholdValue: row.ThresholdValue,

View File

@@ -23,6 +23,7 @@ package apier
import (
"errors"
"fmt"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
)
@@ -36,7 +37,11 @@ func (self *Apier) SetTPDestinationRate(attrs utils.TPDestinationRate, reply *st
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
if err := self.StorDb.SetTPDestinationRate(&attrs); err != nil {
drs := make([]*engine.DestinationRate, len(attrs.DestinationRates))
for idx, dr := range attrs.DestinationRates {
drs[idx] = &engine.DestinationRate{attrs.DestinationRateId, dr.DestinationId, dr.RateId, nil}
}
if err := self.StorDb.SetTPDestinationRates(attrs.TPid, map[string][]*engine.DestinationRate{attrs.DestinationRateId: drs}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"

View File

@@ -23,6 +23,7 @@ package apier
import (
"errors"
"fmt"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
)
@@ -36,7 +37,15 @@ func (self *Apier) SetTPDestRateTiming(attrs utils.TPDestRateTiming, reply *stri
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
if err := self.StorDb.SetTPDestRateTiming(&attrs); err != nil {
drts := make([]*engine.DestinationRateTiming, len(attrs.DestRateTimings))
for idx, drt := range attrs.DestRateTimings {
drts[idx] = &engine.DestinationRateTiming{Tag: attrs.DestRateTimingId,
DestinationRatesTag: drt.DestRatesId,
Weight: drt.Weight,
TimingsTag: drt.TimingId,
}
}
if err := self.StorDb.SetTPDestRateTimings(attrs.TPid, map[string][]*engine.DestinationRateTiming{attrs.DestRateTimingId: drts}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"

View File

@@ -23,6 +23,7 @@ package apier
import (
"errors"
"fmt"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
)
@@ -36,7 +37,12 @@ func (self *Apier) SetTPRate(attrs utils.TPRate, reply *string) error {
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
if err := self.StorDb.SetTPRate(&attrs); err != nil {
rts := make([]*engine.Rate, len(attrs.RateSlots))
for idx, rtSlot := range attrs.RateSlots {
rts[idx] = &engine.Rate{attrs.RateId, rtSlot.ConnectFee, rtSlot.Rate, float64(rtSlot.RatedUnits),
float64(rtSlot.RateIncrements), float64(rtSlot.GroupInterval), rtSlot.RoundingMethod, rtSlot.RoundingDecimals, rtSlot.Weight}
}
if err := self.StorDb.SetTPRates(attrs.TPid, map[string][]*engine.Rate{attrs.RateId: rts}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"

View File

@@ -23,37 +23,50 @@ package apier
import (
"errors"
"fmt"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
)
// Creates a new RateProfile within a tariff plan
func (self *Apier) SetTPRateProfile(attrs utils.TPRateProfile, reply *string) error {
if missing := utils.MissingStructFields(&attrs, []string{"TPid", "RateProfileId", "Tenant", "TOR", "Direction", "Subject", "RatingActivations"}); len(missing) != 0 {
// Creates a new RatingProfile within a tariff plan
func (self *Apier) SetTPRatingProfile(attrs utils.TPRatingProfile, reply *string) error {
if missing := utils.MissingStructFields(&attrs, []string{"TPid", "RatingProfileId", "Tenant", "TOR", "Direction", "Subject", "RatingActivations"}); len(missing) != 0 {
return fmt.Errorf("%s:%v", utils.ERR_MANDATORY_IE_MISSING, missing)
}
if exists, err := self.StorDb.ExistsTPRateProfile(attrs.TPid, attrs.RateProfileId); err != nil {
if exists, err := self.StorDb.ExistsTPRatingProfile(attrs.TPid, attrs.RatingProfileId); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
} else if exists {
return errors.New(utils.ERR_DUPLICATE)
}
if err := self.StorDb.SetTPRateProfile(&attrs); err != nil {
rps := make([]*engine.RatingProfile, len(attrs.RatingActivations))
for idx, ra := range attrs.RatingActivations {
rps[idx] = &engine.RatingProfile{Tag: attrs.RatingProfileId,
Tenant: attrs.Tenant,
TOR: attrs.TOR,
Direction: attrs.Direction,
Subject: attrs.Subject,
ActivationTime: ra.ActivationTime,
DestRatesTimingTag: ra.DestRateTimingId,
RatesFallbackSubject: attrs.RatesFallbackSubject,
}
}
if err := self.StorDb.SetTPRatingProfiles(attrs.TPid, map[string][]*engine.RatingProfile{attrs.RatingProfileId: rps}); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
}
*reply = "OK"
return nil
}
type AttrGetTPRateProfile struct {
TPid string // Tariff plan id
RateProfileId string // RateProfile id
type AttrGetTPRatingProfile struct {
TPid string // Tariff plan id
RatingProfileId string // RatingProfile id
}
// Queries specific RateProfile on tariff plan
func (self *Apier) GetTPRateProfile(attrs AttrGetTPRateProfile, reply *utils.TPRateProfile) error {
if missing := utils.MissingStructFields(&attrs, []string{"TPid", "RateProfileId"}); len(missing) != 0 { //Params missing
// Queries specific RatingProfile on tariff plan
func (self *Apier) GetTPRatingProfile(attrs AttrGetTPRatingProfile, reply *utils.TPRatingProfile) error {
if missing := utils.MissingStructFields(&attrs, []string{"TPid", "RatingProfileId"}); len(missing) != 0 { //Params missing
return fmt.Errorf("%s:%v", utils.ERR_MANDATORY_IE_MISSING, missing)
}
if dr, err := self.StorDb.GetTPRateProfile(attrs.TPid, attrs.RateProfileId); err != nil {
if dr, err := self.StorDb.GetTPRatingProfile(attrs.TPid, attrs.RatingProfileId); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
} else if dr == nil {
return errors.New(utils.ERR_NOT_FOUND)
@@ -63,12 +76,12 @@ func (self *Apier) GetTPRateProfile(attrs AttrGetTPRateProfile, reply *utils.TPR
return nil
}
// Queries RateProfile identities on specific tariff plan.
func (self *Apier) GetTPRateProfileIds(attrs utils.AttrTPRateProfileIds, reply *[]string) error {
// Queries RatingProfile identities on specific tariff plan.
func (self *Apier) GetTPRatingProfileIds(attrs utils.AttrTPRatingProfileIds, reply *[]string) error {
if missing := utils.MissingStructFields(&attrs, []string{"TPid"}); len(missing) != 0 { //Params missing
return fmt.Errorf("%s:%v", utils.ERR_MANDATORY_IE_MISSING, missing)
}
if ids, err := self.StorDb.GetTPRateProfileIds(&attrs); err != nil {
if ids, err := self.StorDb.GetTPRatingProfileIds(&attrs); err != nil {
return fmt.Errorf("%s:%s", utils.ERR_SERVER_ERROR, err.Error())
} else if ids == nil {
return errors.New(utils.ERR_NOT_FOUND)

View File

@@ -21,8 +21,8 @@ package cdrs
import (
"fmt"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/mediator"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/mediator"
"io/ioutil"
"net/http"
)
@@ -33,7 +33,7 @@ var (
medi *mediator.Mediator
)
func cdrHandler(w http.ResponseWriter, r *http.Request) {
func fsCdrHandler(w http.ResponseWriter, r *http.Request) {
body, _ := ioutil.ReadAll(r.Body)
if fsCdr, err := new(FSCdr).New(body); err == nil {
storage.SetCdr(fsCdr)
@@ -50,6 +50,23 @@ func cdrHandler(w http.ResponseWriter, r *http.Request) {
}
}
func genCdrHandler(w http.ResponseWriter, r *http.Request) {
body, _ := ioutil.ReadAll(r.Body)
if genCdr, err := new(GenCdr).New(body); err == nil {
storage.SetCdr(genCdr)
if cfg.CDRSMediator == "internal" {
errMedi := medi.MediateDBCDR(genCdr, storage)
if errMedi != nil {
engine.Logger.Err(fmt.Sprintf("Could not run mediation on CDR: %s", errMedi.Error()))
}
} else {
//TODO: use the connection to mediator
}
} else {
engine.Logger.Err(fmt.Sprintf("Could not create CDR entry: %v", err))
}
}
type CDRS struct{}
func New(s engine.DataStorage, m *mediator.Mediator, c *config.CGRConfig) *CDRS {
@@ -61,7 +78,10 @@ func New(s engine.DataStorage, m *mediator.Mediator, c *config.CGRConfig) *CDRS
func (cdrs *CDRS) StartCapturingCDRs() {
if cfg.CDRSfsJSONEnabled {
http.HandleFunc("/freeswitch_json", cdrHandler)
http.HandleFunc("/freeswitch_json", fsCdrHandler)
}
if cfg.CDRSgenJSONEnabled {
http.HandleFunc("/generic_json", genCdrHandler)
}
http.ListenAndServe(cfg.CDRSListen, nil)
}
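
A minimal sketch of what a POST to the new /generic_json handler could look like, based on the GenCdr parser added in cdrs/gencdr.go below: the CDR fields are expected under a top-level "variables" object and all values are strings. The address and the field values here are illustrative.

package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
)

func main() {
	// GenCdr.New (cdrs/gencdr.go) unmarshals the body and reads the map stored
	// under the "variables" key, so the payload is shaped accordingly.
	payload := map[string]interface{}{
		"variables": map[string]string{
			"uuid":        "c0a8e1c2-0001-0002-0003-000000000001", // illustrative
			"subject":     "1001",
			"account":     "1001",
			"destination": "1002",
			"reqtype":     "prepaid",
			"tor":         "0",
			"cstmid":      "cgrates.org",
			"time_answer": "1375200000",
			"time_hangup": "1375200060",
			"duration":    "60",
		},
	}
	body, _ := json.Marshal(payload)
	// CDRSListen defaults to 127.0.0.1:2022 in the config changes of this commit.
	resp, err := http.Post("http://127.0.0.1:2022/generic_json", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
}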

View File

@@ -28,23 +28,23 @@ import (
const (
// FreeSWITCH event property names
CDR_MAP = "variables"
DIRECTION = "direction"
ORIG_ID = "sip_call_id" //- originator_id - match cdrs
SUBJECT = "cgr_subject"
ACCOUNT = "cgr_account"
DESTINATION = "cgr_destination"
REQTYPE = "cgr_reqtype" //prepaid or postpaid
TOR = "cgr_tor"
UUID = "uuid" // -Unique ID for this call leg
CSTMID = "cgr_cstmid"
CALL_DEST_NR = "dialed_extension"
PARK_TIME = "start_epoch"
ANSWER_TIME = "answer_epoch"
HANGUP_TIME = "end_epoch"
DURATION = "billsec"
USERNAME = "user_name"
FS_IP = "sip_local_network_addr"
FS_CDR_MAP = "variables"
FS_DIRECTION = "direction"
FS_ORIG_ID = "sip_call_id" //- originator_id - match cdrs
FS_SUBJECT = "cgr_subject"
FS_ACCOUNT = "cgr_account"
FS_DESTINATION = "cgr_destination"
FS_REQTYPE = "cgr_reqtype" //prepaid or postpaid
FS_TOR = "cgr_tor"
FS_UUID = "uuid" // -Unique ID for this call leg
FS_CSTMID = "cgr_cstmid"
FS_CALL_DEST_NR = "dialed_extension"
FS_PARK_TIME = "start_epoch"
FS_ANSWER_TIME = "answer_epoch"
FS_HANGUP_TIME = "end_epoch"
FS_DURATION = "billsec"
FS_USERNAME = "user_name"
FS_IP = "sip_local_network_addr"
)
type FSCdr map[string]string
@@ -54,7 +54,7 @@ func (fsCdr FSCdr) New(body []byte) (utils.CDR, error) {
var tmp map[string]interface{}
var err error
if err = json.Unmarshal(body, &tmp); err == nil {
if variables, ok := tmp[CDR_MAP]; ok {
if variables, ok := tmp[FS_CDR_MAP]; ok {
if variables, ok := variables.(map[string]interface{}); ok {
for k, v := range variables {
fsCdr[k] = v.(string)
@@ -67,10 +67,10 @@ func (fsCdr FSCdr) New(body []byte) (utils.CDR, error) {
}
func (fsCdr FSCdr) GetCgrId() string {
return utils.FSCgrId(fsCdr[UUID])
return utils.FSCgrId(fsCdr[FS_UUID])
}
func (fsCdr FSCdr) GetAccId() string {
return fsCdr[UUID]
return fsCdr[FS_UUID]
}
func (fsCdr FSCdr) GetCdrHost() string {
return fsCdr[FS_IP]
@@ -80,29 +80,29 @@ func (fsCdr FSCdr) GetDirection() string {
return "OUT"
}
func (fsCdr FSCdr) GetOrigId() string {
return fsCdr[ORIG_ID]
return fsCdr[FS_ORIG_ID]
}
func (fsCdr FSCdr) GetSubject() string {
return utils.FirstNonEmpty(fsCdr[SUBJECT], fsCdr[USERNAME])
return utils.FirstNonEmpty(fsCdr[FS_SUBJECT], fsCdr[FS_USERNAME])
}
func (fsCdr FSCdr) GetAccount() string {
return utils.FirstNonEmpty(fsCdr[ACCOUNT], fsCdr[USERNAME])
return utils.FirstNonEmpty(fsCdr[FS_ACCOUNT], fsCdr[FS_USERNAME])
}
// Charging destination number
func (fsCdr FSCdr) GetDestination() string {
return utils.FirstNonEmpty(fsCdr[DESTINATION], fsCdr[CALL_DEST_NR])
return utils.FirstNonEmpty(fsCdr[FS_DESTINATION], fsCdr[FS_CALL_DEST_NR])
}
func (fsCdr FSCdr) GetTOR() string {
return utils.FirstNonEmpty(fsCdr[TOR], cfg.DefaultTOR)
return utils.FirstNonEmpty(fsCdr[FS_TOR], cfg.DefaultTOR)
}
func (fsCdr FSCdr) GetTenant() string {
return utils.FirstNonEmpty(fsCdr[CSTMID], cfg.DefaultTenant)
return utils.FirstNonEmpty(fsCdr[FS_CSTMID], cfg.DefaultTenant)
}
func (fsCdr FSCdr) GetReqType() string {
return utils.FirstNonEmpty(fsCdr[REQTYPE], cfg.DefaultReqType)
return utils.FirstNonEmpty(fsCdr[FS_REQTYPE], cfg.DefaultReqType)
}
func (fsCdr FSCdr) GetExtraFields() map[string]string {
extraFields := make(map[string]string, len(cfg.CDRSExtraFields))
@@ -115,19 +115,19 @@ func (fsCdr FSCdr) GetFallbackSubj() string {
return cfg.DefaultSubject
}
func (fsCdr FSCdr) GetAnswerTime() (t time.Time, err error) {
st, err := strconv.ParseInt(fsCdr[ANSWER_TIME], 0, 64)
st, err := strconv.ParseInt(fsCdr[FS_ANSWER_TIME], 0, 64)
t = time.Unix(0, st*1000)
return
}
func (fsCdr FSCdr) GetHangupTime() (t time.Time, err error) {
st, err := strconv.ParseInt(fsCdr[HANGUP_TIME], 0, 64)
st, err := strconv.ParseInt(fsCdr[FS_HANGUP_TIME], 0, 64)
t = time.Unix(0, st*1000)
return
}
// Extracts duration as considered by the telecom switch
func (fsCdr FSCdr) GetDuration() int64 {
dur, _ := strconv.ParseInt(fsCdr[DURATION], 0, 64)
dur, _ := strconv.ParseInt(fsCdr[FS_DURATION], 0, 64)
return dur
}
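
A note on the epoch handling kept here (and mirrored in gencdr.go below): time.Unix(0, st*1000) interprets the parsed value as microseconds since the epoch, whereas a plain seconds value would be converted with time.Unix(st, 0). A small sketch of the difference, with illustrative values:

package main

import (
	"fmt"
	"time"
)

func main() {
	var usec int64 = 1375200000000000           // illustrative microsecond epoch value
	fmt.Println(time.Unix(0, usec*1000).UTC())  // 2013-07-30 16:00:00 +0000 UTC
	fmt.Println(time.Unix(1375200000, 0).UTC()) // the same instant from a plain seconds value
}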

cdrs/gencdr.go (new file, +163 lines)
View File

@@ -0,0 +1,163 @@
/*
Rating system designed to be used in VoIP Carriers World
Copyright (C) 2013 ITsysCOM
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package cdrs
import (
"encoding/json"
"errors"
"github.com/cgrates/cgrates/utils"
"strconv"
"time"
)
const (
CDR_MAP = "variables"
DIRECTION = "direction"
ORIG_ID = "id"
SUBJECT = "subject"
ACCOUNT = "account"
DESTINATION = "destination"
REQTYPE = "reqtype" //prepaid or postpaid
TOR = "tor"
UUID = "uuid" // -Unique ID for this call leg
CSTMID = "cstmid"
CALL_DEST_NR = "dialed_extension"
PARK_TIME = "start_epoch"
ANSWER_TIME = "time_answer"
HANGUP_TIME = "time_hangup"
DURATION = "duration"
USERNAME = "user_name"
IP = "sip_local_network_addr"
)
type GenCdr map[string]string
func (genCdr GenCdr) New(body []byte) (utils.CDR, error) {
genCdr = make(map[string]string)
var tmp map[string]interface{}
var err error
if err = json.Unmarshal(body, &tmp); err == nil {
if variables, ok := tmp[CDR_MAP]; ok {
if variables, ok := variables.(map[string]interface{}); ok {
for k, v := range variables {
genCdr[k] = v.(string)
}
}
return genCdr, nil
}
}
return nil, err
}
func (genCdr GenCdr) GetCgrId() string {
return utils.FSCgrId(genCdr[UUID])
}
func (genCdr GenCdr) GetAccId() string {
return genCdr[UUID]
}
func (genCdr GenCdr) GetCdrHost() string {
return genCdr[IP]
}
func (genCdr GenCdr) GetDirection() string {
//TODO: implement direction
return "OUT"
}
func (genCdr GenCdr) GetOrigId() string {
return genCdr[ORIG_ID]
}
func (genCdr GenCdr) GetSubject() string {
return utils.FirstNonEmpty(genCdr[SUBJECT], genCdr[USERNAME])
}
func (genCdr GenCdr) GetAccount() string {
return utils.FirstNonEmpty(genCdr[ACCOUNT], genCdr[USERNAME])
}
// Charging destination number
func (genCdr GenCdr) GetDestination() string {
return utils.FirstNonEmpty(genCdr[DESTINATION], genCdr[CALL_DEST_NR])
}
func (genCdr GenCdr) GetTOR() string {
return utils.FirstNonEmpty(genCdr[TOR], cfg.DefaultTOR)
}
func (genCdr GenCdr) GetTenant() string {
return utils.FirstNonEmpty(genCdr[CSTMID], cfg.DefaultTenant)
}
func (genCdr GenCdr) GetReqType() string {
return utils.FirstNonEmpty(genCdr[REQTYPE], cfg.DefaultReqType)
}
func (genCdr GenCdr) GetExtraFields() map[string]string {
extraFields := make(map[string]string, len(cfg.CDRSExtraFields))
for _, field := range cfg.CDRSExtraFields {
extraFields[field] = genCdr[field]
}
return extraFields
}
func (genCdr GenCdr) GetFallbackSubj() string {
return cfg.DefaultSubject
}
func (genCdr GenCdr) GetAnswerTime() (t time.Time, err error) {
st, err := strconv.ParseInt(genCdr[ANSWER_TIME], 0, 64)
t = time.Unix(0, st*1000)
return
}
func (genCdr GenCdr) GetHangupTime() (t time.Time, err error) {
st, err := strconv.ParseInt(genCdr[HANGUP_TIME], 0, 64)
t = time.Unix(0, st*1000)
return
}
// Extracts duration as considered by the telecom switch
func (genCdr GenCdr) GetDuration() int64 {
dur, _ := strconv.ParseInt(genCdr[DURATION], 0, 64)
return dur
}
func (genCdr GenCdr) Store() (result string, err error) {
result += genCdr.GetCgrId() + "|"
result += genCdr.GetAccId() + "|"
result += genCdr.GetCdrHost() + "|"
result += genCdr.GetDirection() + "|"
result += genCdr.GetOrigId() + "|"
result += genCdr.GetSubject() + "|"
result += genCdr.GetAccount() + "|"
result += genCdr.GetDestination() + "|"
result += genCdr.GetTOR() + "|"
result += genCdr.GetAccId() + "|"
result += genCdr.GetTenant() + "|"
result += genCdr.GetReqType() + "|"
st, err := genCdr.GetAnswerTime()
if err != nil {
return "", err
}
result += strconv.FormatInt(st.UnixNano(), 10) + "|"
et, err := genCdr.GetHangupTime()
if err != nil {
return "", err
}
result += strconv.FormatInt(et.UnixNano(), 10) + "|"
result += strconv.FormatInt(genCdr.GetDuration(), 10) + "|"
result += genCdr.GetFallbackSubj() + "|"
return
}
func (genCdr GenCdr) Restore(input string) error {
return errors.New("Not implemented")
}

View File

@@ -26,8 +26,8 @@ import (
"github.com/cgrates/cgrates/balancer2go"
"github.com/cgrates/cgrates/cdrs"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/mediator"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/mediator"
"github.com/cgrates/cgrates/scheduler"
"github.com/cgrates/cgrates/sessionmanager"
"github.com/cgrates/cgrates/utils"
@@ -252,6 +252,7 @@ func main() {
}
defer loggerDb.Close()
engine.SetStorageLogger(loggerDb)
engine.SetRoundingMethodAndDecimals(cfg.RoundingMethod, cfg.RoundingDecimals)
if cfg.SMDebitInterval > 0 {
if dp, err := time.ParseDuration(fmt.Sprintf("%vs", cfg.SMDebitInterval)); err == nil {

View File

@@ -21,23 +21,22 @@ package main
import (
"flag"
"fmt"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
"github.com/cgrates/cgrates/config"
"log"
"path"
"regexp"
)
var (
//separator = flag.String("separator", ",", "Default field separator")
cgrConfig,_ = config.NewDefaultCGRConfig()
cgrConfig, _ = config.NewDefaultCGRConfig()
data_db_type = flag.String("datadb_type", cgrConfig.DataDBType, "The type of the dataDb database (redis|mongo|postgres|mysql)")
data_db_host = flag.String("datadb_host", cgrConfig.DataDBHost, "The dataDb host to connect to.")
data_db_port = flag.String("datadb_port", cgrConfig.DataDBPort, "The dataDb port to bind to.")
data_db_name = flag.String("datadb_name", cgrConfig.DataDBName, "The name/number of the dataDb to connect to.")
data_db_user = flag.String("datadb_user", cgrConfig.DataDBUser, "The dataDb user to sign in as.")
data_db_pass = flag.String("datadb_passwd", cgrConfig.DataDBPass, "The dataDb user's password.")
data_db_pass = flag.String("datadb_passwd", cgrConfig.DataDBPass, "The dataDb user's password.")
stor_db_type = flag.String("stordb_type", cgrConfig.StorDBType, "The type of the storDb database (redis|mongo|postgres|mysql)")
stor_db_host = flag.String("stordb_host", cgrConfig.StorDBHost, "The storDb host to connect to.")
@@ -46,22 +45,16 @@ var (
stor_db_user = flag.String("stordb_user", cgrConfig.StorDBUser, "The storDb user to sign in as.")
stor_db_pass = flag.String("stordb_passwd", cgrConfig.StorDBPass, "The storDb user's password.")
flush = flag.Bool("flush", false, "Flush the database before importing")
tpid = flag.String("tpid", "", "The tariff plan id from the database")
dataPath = flag.String("path", ".", "The path containing the data files")
version = flag.Bool("version", false, "Prints the application version.")
fromStorDb = flag.Bool("from_stordb", false, "Load the tariff plan from storDb to dataDb")
toStorDb = flag.Bool("to_stordb", false, "Import the tariff plan from files to storDb")
sep rune
flush = flag.Bool("flush", false, "Flush the database before importing")
tpid = flag.String("tpid", "", "The tariff plan id from the database")
dataPath = flag.String("path", ".", "The path containing the data files")
version = flag.Bool("version", false, "Prints the application version.")
verbose = flag.Bool("verbose", false, "Enable detailed verbose logging output")
fromStorDb = flag.Bool("from-stordb", false, "Load the tariff plan from storDb to dataDb")
toStorDb = flag.Bool("to-stordb", false, "Import the tariff plan from files to storDb")
runId = flag.String("runid", "", "Uniquely identify an import/load, postpended to some automatic fields")
)
type validator struct {
fn string
re *regexp.Regexp
message string
}
func main() {
flag.Parse()
if *version {
@@ -72,64 +65,44 @@ func main() {
var dataDb, storDb engine.DataStorage
// Init necessary db connections
if *fromStorDb {
dataDb, errDataDb = engine.ConfigureDatabase(*stor_db_type, *stor_db_host, *stor_db_port, *stor_db_name, *stor_db_user, *stor_db_pass)
storDb, errStorDb = engine.ConfigureDatabase(*data_db_type, *data_db_host, *data_db_port, *data_db_name, *data_db_user, *data_db_pass)
dataDb, errDataDb = engine.ConfigureDatabase(*data_db_type, *data_db_host, *data_db_port, *data_db_name, *data_db_user, *data_db_pass)
storDb, errStorDb = engine.ConfigureDatabase(*stor_db_type, *stor_db_host, *stor_db_port, *stor_db_name, *stor_db_user, *stor_db_pass)
} else if *toStorDb { // Import from csv files to storDb
storDb, errStorDb = engine.ConfigureDatabase(*data_db_type, *data_db_host, *data_db_port, *data_db_name, *data_db_user, *data_db_pass)
storDb, errStorDb = engine.ConfigureDatabase(*stor_db_type, *stor_db_host, *stor_db_port, *stor_db_name, *stor_db_user, *stor_db_pass)
} else { // Default load from csv files to dataDb
dataDb, errDataDb = engine.ConfigureDatabase(*stor_db_type, *stor_db_host, *stor_db_port, *stor_db_name, *stor_db_user, *stor_db_pass)
dataDb, errDataDb = engine.ConfigureDatabase(*data_db_type, *data_db_host, *data_db_port, *data_db_name, *data_db_user, *data_db_pass)
}
defer dataDb.Close()
defer storDb.Close()
for _,err = range []error{errDataDb, errStorDb} {
// Defer databases opened to be closed when we are done
for _, db := range []engine.DataStorage{dataDb, storDb} {
if db != nil {
defer db.Close()
}
}
// Stop on db errors
for _, err = range []error{errDataDb, errStorDb} {
if err != nil {
log.Fatalf("Could not open database connection: %v", err)
}
}
var loader engine.TPLoader
if *fromStorDb {
if *fromStorDb { // Load Tariff Plan from storDb into dataDb
loader = engine.NewDbReader(storDb, dataDb, *tpid)
} else { // Default load from csv files to dataDb
dataFilesValidators := []*validator{
&validator{utils.DESTINATIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\d+.?\d*){1}$`),
"Tag[0-9A-Za-z_],Prefix[0-9]"},
&validator{utils.TIMINGS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\*all\s*,\s*|(?:\d{1,4};?)+\s*,\s*|\s*,\s*){4}(?:\d{2}:\d{2}:\d{2}|\*asap){1}$`),
"Tag[0-9A-Za-z_],Years[0-9;]|*all|<empty>,Months[0-9;]|*all|<empty>,MonthDays[0-9;]|*all|<empty>,WeekDays[0-9;]|*all|<empty>,Time[0-9:]|*asap(00:00:00)"},
&validator{utils.RATES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){2}(?:\d+.?\d*,?){4}$`),
"Tag[0-9A-Za-z_],ConnectFee[0-9.],Price[0-9.],PricedUnits[0-9.],RateIncrement[0-9.]"},
&validator{utils.DESTINATION_RATES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){2}(?:\d+.?\d*,?){4}$`),
"Tag[0-9A-Za-z_],DestinationsTag[0-9A-Za-z_],RateTag[0-9A-Za-z_]"},
&validator{utils.DESTRATE_TIMINGS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:\d+.?\d*){1}$`),
"Tag[0-9A-Za-z_],DestinationRatesTag[0-9A-Za-z_],TimingProfile[0-9A-Za-z_],Weight[0-9.]"},
&validator{utils.RATE_PROFILES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\d+\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\*all\s*,\s*|[\w:\.]+\s*,\s*){1}(?:\w*\s*,\s*){1}(?:\w+\s*,\s*){1}(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z){1}$`),
"Tenant[0-9A-Za-z_],TOR[0-9],Direction OUT|IN,Subject[0-9A-Za-z_:.]|*all,RatesFallbackSubject[0-9A-Za-z_]|<empty>,RatesTimingTag[0-9A-Za-z_],ActivationTime[[0-9T:X]] (2012-01-01T00:00:00Z)"},
&validator{utils.ACTIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\d+\s*,\s*){1}(?:\w+\s*,\s*|\*all\s*,\s*){1}(?:ABSOLUTE\s*,\s*|PERCENT\s*,\s*|\s*,\s*){1}(?:\d*\.?\d*\s*,?\s*){3}$`),
"Tag[0-9A-Za-z_],Action[0-9A-Za-z_],BalanceTag[0-9A-Za-z_],Direction OUT|IN,Units[0-9],DestinationTag[0-9A-Za-z_]|*all,PriceType ABSOLUT|PERCENT,PriceValue[0-9.],MinutesWeight[0-9.],Weight[0-9.]"},
&validator{utils.ACTION_TIMINGS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:\d+\.?\d*){1}`),
"Tag[0-9A-Za-z_],ActionsTag[0-9A-Za-z_],TimingTag[0-9A-Za-z_],Weight[0-9.]"},
&validator{utils.ACTION_TRIGGERS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:MONETARY\s*,\s*|SMS\s*,\s*|MINUTES\s*,\s*|INTERNET\s*,\s*|INTERNET_TIME\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\d+\.?\d*\s*,\s*){1}(?:\w+\s*,\s*|\*all\s*,\s*){1}(?:\w+\s*,\s*){1}(?:\d+\.?\d*){1}$`),
"Tag[0-9A-Za-z_],BalanceTag MONETARY|SMS|MINUTES|INTERNET|INTERNET_TIME,Direction OUT|IN,ThresholdValue[0-9.],DestinationTag[0-9A-Za-z_]|*all,ActionsTag[0-9A-Za-z_],Weight[0-9.]"},
&validator{utils.ACCOUNT_ACTIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:[\w:.]+\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\w+\s*,?\s*){2}$`),
"Tenant[0-9A-Za-z_],Account[0-9A-Za-z_:.],Direction OUT|IN,ActionTimingsTag[0-9A-Za-z_],ActionTriggersTag[0-9A-Za-z_]"},
} else if *toStorDb { // Import files from a directory into storDb
if *tpid == "" {
log.Fatal("TPid required, please define it via *-tpid* command argument.")
}
for _, v := range dataFilesValidators {
err := engine.ValidateCSVData(path.Join(*dataPath, v.fn), v.re)
csvImporter := engine.TPCSVImporter{*tpid, storDb, *dataPath, ',', *verbose, *runId}
if errImport := csvImporter.Run(); errImport != nil {
log.Fatal(errImport)
}
return
} else { // Default load from csv files to dataDb
for fn, v := range engine.FileValidators {
err := engine.ValidateCSVData(path.Join(*dataPath, fn), v.Rule)
if err != nil {
log.Fatal(err, "\n\t", v.message)
log.Fatal(err, "\n\t", v.Message)
}
}
//sep = []rune(*separator)[0]
loader = engine.NewFileCSVReader(dataDb, ',', utils.DESTINATIONS_CSV, utils.TIMINGS_CSV, utils.RATES_CSV, utils.DESTINATION_RATES_CSV, utils.DESTRATE_TIMINGS_CSV, utils.RATE_PROFILES_CSV, utils.ACTIONS_CSV, utils.ACTION_TIMINGS_CSV, utils.ACTION_TRIGGERS_CSV, utils.ACCOUNT_ACTIONS_CSV)
}
@@ -175,7 +148,7 @@ func main() {
}
// write maps to database
if err := loader.WriteToDatabase(*flush, true); err != nil {
if err := loader.WriteToDatabase(*flush, *verbose); err != nil {
log.Fatal("Could not write to database: ", err)
}
}

View File

@@ -26,80 +26,78 @@ import (
)
const (
DISABLED = "disabled"
INTERNAL = "internal"
JSON = "json"
GOB = "gob"
POSTGRES = "postgres"
MONGO = "mongo"
REDIS = "redis"
SAME = "same"
FS = "freeswitch"
PREPAID = "prepaid"
POSTPAID = "postpaid"
PSEUDOPREPAID = "pseudoprepaid"
RATED = "rated"
DISABLED = "disabled"
INTERNAL = "internal"
JSON = "json"
GOB = "gob"
POSTGRES = "postgres"
MONGO = "mongo"
REDIS = "redis"
SAME = "same"
FS = "freeswitch"
)
// Holds system configuration, defaults are overwritten with values from config file if found
type CGRConfig struct {
DataDBType string
DataDBHost string // The host to connect to. Values that start with / are for UNIX domain sockets.
DataDBPort string // The port to bind to.
DataDBName string // The name of the database to connect to.
DataDBUser string // The user to sign in as.
DataDBPass string // The user's password.
DataDBType string
DataDBHost string // The host to connect to. Values that start with / are for UNIX domain sockets.
DataDBPort string // The port to bind to.
DataDBName string // The name of the database to connect to.
DataDBUser string // The user to sign in as.
DataDBPass string // The user's password.
StorDBType string // Should reflect the database type used to store logs
StorDBHost string // The host to connect to. Values that start with / are for UNIX domain sockets.
StorDBPort string // The port to bind to.
StorDBName string // The name of the database to connect to.
StorDBUser string // The user to sign in as.
StorDBPass string // The user's password.
RPCEncoding string // RPC encoding used on APIs: <gob|json>.
DefaultReqType string // Use this request type if not defined on top
DefaultTOR string // set default type of record
DefaultTenant string // set default tenant
DefaultSubject string // set default rating subject, useful in case of fallback
RaterEnabled bool // start standalone server (no balancer)
RaterBalancer string // balancer address host:port
RaterListen string // listening address host:port
RaterRoundingMethod string // Rounding method for the end price: <up|middle|down>
RaterRoundingDecimals int // Number of decimals to round end prices at
BalancerEnabled bool
BalancerListen string // Json RPC server address
SchedulerEnabled bool
CDRSListen string // CDRS's listening interface: <x.y.z.y:1234>.
CDRSfsJSONEnabled bool // Enable the handler for FreeSWITCH JSON CDRs: <enabled|disabled>.
CDRSMediator string // Address where to reach the Mediator. Empty for disabling mediation. <""|internal>
CDRSExtraFields []string //Extra fields to store in CDRs
SMEnabled bool
SMSwitchType string
SMRater string // address where to access rater. Can be internal, direct rater address or the address of a balancer
SMRaterReconnects int // Number of reconnect attempts to rater
SMDebitInterval int // the period to be debited in advance during a call (in seconds)
MediatorEnabled bool // Starts Mediator service: <true|false>.
MediatorListen string // Mediator's listening interface: <internal>.
MediatorRater string // Address where to reach the Rater: <internal|x.y.z.y:1234>
MediatorRaterReconnects int // Number of reconnects to rater before giving up.
MediatorCDRType string // CDR type <freeswitch_http_json|freeswitch_file_csv>.
MediatorAccIdField string // Name of field identifying accounting id used during mediation. Use index number in case of .csv cdrs.
MediatorSubjectFields []string // Name of subject fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorReqTypeFields []string // Name of request type fields to be used during mediation. Use index number in case of .csv cdrs.
MediatorDirectionFields []string // Name of direction fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTenantFields []string // Name of tenant fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTORFields []string // Name of tor fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorAccountFields []string // Name of account fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorDestFields []string // Name of destination fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTimeAnswerFields []string // Name of time_start fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorDurationFields []string // Name of duration fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorCDRInDir string // Absolute path towards the directory where the CDRs are kept (file stored CDRs).
MediatorCDROutDir string // Absolute path towards the directory where processed CDRs will be exported (file stored CDRs).
FreeswitchServer string // freeswitch address host:port
FreeswitchPass string // FS socket password
FreeswitchReconnects int // number of times to attempt reconnect after connect fails
RPCEncoding string // RPC encoding used on APIs: <gob|json>.
DefaultReqType string // Use this request type if not defined on top
DefaultTOR string // set default type of record
DefaultTenant string // set default tenant
DefaultSubject string // set default rating subject, useful in case of fallback
RoundingMethod string // Rounding method for the end price: <*up|*middle|*down>
RoundingDecimals int // Number of decimals to round end prices at
RaterEnabled bool // start standalone server (no balancer)
RaterBalancer string // balancer address host:port
RaterListen string // listening address host:port
BalancerEnabled bool
BalancerListen string // Json RPC server address
SchedulerEnabled bool
CDRSListen string // CDRS's listening interface: <x.y.z.y:1234>.
CDRSfsJSONEnabled bool // Enable the handler for FreeSWITCH JSON CDRs: <enabled|disabled>.
CDRSgenJSONEnabled bool // Enable the handler for Generic JSON CDRs: <enabled|disabled>.
CDRSMediator string // Address where to reach the Mediator. Empty for disabling mediation. <""|internal>
CDRSExtraFields []string //Extra fields to store in CDRs
SMEnabled bool
SMSwitchType string
SMRater string // address where to access rater. Can be internal, direct rater address or the address of a balancer
SMRaterReconnects int // Number of reconnect attempts to rater
SMDebitInterval int // the period to be debited in advance during a call (in seconds)
MediatorEnabled bool // Starts Mediator service: <true|false>.
MediatorListen string // Mediator's listening interface: <internal>.
MediatorRater string // Address where to reach the Rater: <internal|x.y.z.y:1234>
MediatorRaterReconnects int // Number of reconnects to rater before giving up.
MediatorCDRType string // CDR type <freeswitch_http_json|freeswitch_file_csv>.
MediatorAccIdField string // Name of field identifying accounting id used during mediation. Use index number in case of .csv cdrs.
MediatorSubjectFields []string // Name of subject fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorReqTypeFields []string // Name of request type fields to be used during mediation. Use index number in case of .csv cdrs.
MediatorDirectionFields []string // Name of direction fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTenantFields []string // Name of tenant fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTORFields []string // Name of tor fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorAccountFields []string // Name of account fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorDestFields []string // Name of destination fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorTimeAnswerFields []string // Name of time_start fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorDurationFields []string // Name of duration fields to be used during mediation. Use index numbers in case of .csv cdrs.
MediatorCDRInDir string // Absolute path towards the directory where the CDRs are kept (file stored CDRs).
MediatorCDROutDir string // Absolute path towards the directory where processed CDRs will be exported (file stored CDRs).
FreeswitchServer string // freeswitch address host:port
FreeswitchPass string // FS socket password
FreeswitchReconnects int // number of times to attempt reconnect after connect fails
}
func ( self *CGRConfig ) setDefaults() error {
func (self *CGRConfig) setDefaults() error {
self.DataDBType = REDIS
self.DataDBHost = "127.0.0.1"
self.DataDBPort = "6379"
@@ -113,20 +111,21 @@ func ( self *CGRConfig ) setDefaults() error {
self.StorDBUser = "cgrates"
self.StorDBPass = "CGRateS.org"
self.RPCEncoding = JSON
self.DefaultReqType = "rated"
self.DefaultReqType = utils.RATED
self.DefaultTOR = "0"
self.DefaultTenant = "0"
self.DefaultSubject = "0"
self.RoundingMethod = utils.ROUNDING_MIDDLE
self.RoundingDecimals = 4
self.RaterEnabled = false
self.RaterBalancer = DISABLED
self.RaterListen = "127.0.0.1:2012"
self.RaterRoundingMethod = utils.ROUNDING_MIDDLE
self.RaterRoundingDecimals = 4
self.BalancerEnabled = false
self.BalancerListen = "127.0.0.1:2013"
self.SchedulerEnabled = false
self.CDRSListen = "127.0.0.1:2022"
self.CDRSfsJSONEnabled = false
self.CDRSgenJSONEnabled = false
self.CDRSMediator = INTERNAL
self.CDRSExtraFields = []string{}
self.MediatorEnabled = false
@@ -237,6 +236,12 @@ func loadConfig(c *conf.ConfigFile) (*CGRConfig, error) {
if hasOpt = c.HasOption("global", "default_subject"); hasOpt {
cfg.DefaultSubject, _ = c.GetString("global", "default_subject")
}
if hasOpt = c.HasOption("global", "rounding_method"); hasOpt {
cfg.RoundingMethod, _ = c.GetString("global", "rounding_method")
}
if hasOpt = c.HasOption("global", "rounding_decimals"); hasOpt {
cfg.RoundingDecimals, _ = c.GetInt("global", "rounding_decimals")
}
if hasOpt = c.HasOption("rater", "enabled"); hasOpt {
cfg.RaterEnabled, _ = c.GetBool("rater", "enabled")
}
@@ -246,12 +251,6 @@ func loadConfig(c *conf.ConfigFile) (*CGRConfig, error) {
if hasOpt = c.HasOption("rater", "listen"); hasOpt {
cfg.RaterListen, _ = c.GetString("rater", "listen")
}
if hasOpt = c.HasOption("rater", "rounding_method"); hasOpt {
cfg.RaterRoundingMethod, _ = c.GetString("rater", "rounding_method")
}
if hasOpt = c.HasOption("rater", "rounding_decimals"); hasOpt {
cfg.RaterRoundingDecimals, _ = c.GetInt("rater", "rounding_decimals")
}
if hasOpt = c.HasOption("balancer", "enabled"); hasOpt {
cfg.BalancerEnabled, _ = c.GetBool("balancer", "enabled")
}
@@ -267,11 +266,14 @@ func loadConfig(c *conf.ConfigFile) (*CGRConfig, error) {
if hasOpt = c.HasOption("cdrs", "freeswitch_json_enabled"); hasOpt {
cfg.CDRSfsJSONEnabled, _ = c.GetBool("cdrs", "freeswitch_json_enabled")
}
if hasOpt = c.HasOption("cdrs", "generic_json_enabled"); hasOpt {
cfg.CDRSgenJSONEnabled, _ = c.GetBool("cdrs", "generic_json_enabled")
}
if hasOpt = c.HasOption("cdrs", "mediator"); hasOpt {
cfg.CDRSMediator, _ = c.GetString("cdrs", "mediator")
}
if hasOpt = c.HasOption("cdrs", "extra_fields"); hasOpt {
if cfg.CDRSExtraFields, errParse = ConfigSlice( c, "cdrs", "extra_fields"); errParse!=nil {
if cfg.CDRSExtraFields, errParse = ConfigSlice(c, "cdrs", "extra_fields"); errParse != nil {
return nil, errParse
}
}
@@ -294,47 +296,47 @@ func loadConfig(c *conf.ConfigFile) (*CGRConfig, error) {
cfg.MediatorAccIdField, _ = c.GetString("mediator", "accid_field")
}
if hasOpt = c.HasOption("mediator", "subject_fields"); hasOpt {
if cfg.MediatorSubjectFields, errParse = ConfigSlice( c, "mediator", "subject_fields"); errParse!=nil {
if cfg.MediatorSubjectFields, errParse = ConfigSlice(c, "mediator", "subject_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "reqtype_fields"); hasOpt {
if cfg.MediatorReqTypeFields, errParse = ConfigSlice( c, "mediator", "reqtype_fields"); errParse!=nil {
if cfg.MediatorReqTypeFields, errParse = ConfigSlice(c, "mediator", "reqtype_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "direction_fields"); hasOpt {
if cfg.MediatorDirectionFields, errParse = ConfigSlice( c, "mediator", "direction_fields"); errParse!=nil {
if cfg.MediatorDirectionFields, errParse = ConfigSlice(c, "mediator", "direction_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "tenant_fields"); hasOpt {
if cfg.MediatorTenantFields, errParse = ConfigSlice( c, "mediator", "tenant_fields"); errParse!=nil {
if cfg.MediatorTenantFields, errParse = ConfigSlice(c, "mediator", "tenant_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "tor_fields"); hasOpt {
if cfg.MediatorTORFields, errParse = ConfigSlice( c, "mediator", "tor_fields"); errParse!=nil {
if cfg.MediatorTORFields, errParse = ConfigSlice(c, "mediator", "tor_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "account_fields"); hasOpt {
if cfg.MediatorAccountFields, errParse = ConfigSlice( c, "mediator", "account_fields"); errParse!=nil {
if cfg.MediatorAccountFields, errParse = ConfigSlice(c, "mediator", "account_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "destination_fields"); hasOpt {
if cfg.MediatorDestFields, errParse = ConfigSlice( c, "mediator", "destination_fields"); errParse!=nil {
if cfg.MediatorDestFields, errParse = ConfigSlice(c, "mediator", "destination_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "time_answer_fields"); hasOpt {
if cfg.MediatorTimeAnswerFields, errParse = ConfigSlice( c, "mediator", "time_answer_fields"); errParse!=nil {
if cfg.MediatorTimeAnswerFields, errParse = ConfigSlice(c, "mediator", "time_answer_fields"); errParse != nil {
return nil, errParse
}
}
if hasOpt = c.HasOption("mediator", "duration_fields"); hasOpt {
if cfg.MediatorDurationFields, errParse = ConfigSlice( c, "mediator", "duration_fields"); errParse!=nil {
if cfg.MediatorDurationFields, errParse = ConfigSlice(c, "mediator", "duration_fields"); errParse != nil {
return nil, errParse
}
}

View File

@@ -20,53 +20,54 @@ package config
import (
"fmt"
"testing"
"reflect"
"github.com/cgrates/cgrates/utils"
"reflect"
"testing"
)
// Make sure defaults did not change by mistake
func TestDefaults(t *testing.T) {
cfg := &CGRConfig{}
errSet := cfg.setDefaults()
if errSet != nil {
if errSet != nil {
t.Log(fmt.Sprintf("Coud not set defaults: %s!", errSet.Error()))
t.FailNow()
}
eCfg := &CGRConfig{}
eCfg.DataDBType = REDIS
eCfg.DataDBHost = "127.0.0.1"
eCfg.DataDBPort = "6379"
eCfg.DataDBName = "10"
eCfg.DataDBUser = ""
eCfg.DataDBPass = ""
eCfg.DataDBType = REDIS
eCfg.DataDBHost = "127.0.0.1"
eCfg.DataDBPort = "6379"
eCfg.DataDBName = "10"
eCfg.DataDBUser = ""
eCfg.DataDBPass = ""
eCfg.StorDBType = utils.MYSQL
eCfg.StorDBHost = "localhost"
eCfg.StorDBPort = "3306"
eCfg.StorDBName = "cgrates"
eCfg.StorDBUser = "cgrates"
eCfg.StorDBPass = "CGRateS.org"
eCfg.StorDBHost = "localhost"
eCfg.StorDBPort = "3306"
eCfg.StorDBName = "cgrates"
eCfg.StorDBUser = "cgrates"
eCfg.StorDBPass = "CGRateS.org"
eCfg.RPCEncoding = JSON
eCfg.DefaultReqType = RATED
eCfg.DefaultTOR = "0"
eCfg.DefaultTenant = "0"
eCfg.DefaultSubject = "0"
eCfg.RaterEnabled = false
eCfg.RaterBalancer = DISABLED
eCfg.RaterListen = "127.0.0.1:2012"
eCfg.RaterRoundingMethod = utils.ROUNDING_MIDDLE
eCfg.RaterRoundingDecimals = 4
eCfg.BalancerEnabled = false
eCfg.BalancerListen = "127.0.0.1:2013"
eCfg.SchedulerEnabled = false
eCfg.CDRSListen = "127.0.0.1:2022"
eCfg.CDRSfsJSONEnabled = false
eCfg.DefaultReqType = utils.RATED
eCfg.DefaultTOR = "0"
eCfg.DefaultTenant = "0"
eCfg.DefaultSubject = "0"
eCfg.RoundingMethod = utils.ROUNDING_MIDDLE
eCfg.RoundingDecimals = 4
eCfg.RaterEnabled = false
eCfg.RaterBalancer = DISABLED
eCfg.RaterListen = "127.0.0.1:2012"
eCfg.BalancerEnabled = false
eCfg.BalancerListen = "127.0.0.1:2013"
eCfg.SchedulerEnabled = false
eCfg.CDRSListen = "127.0.0.1:2022"
eCfg.CDRSfsJSONEnabled = false
eCfg.CDRSgenJSONEnabled = false
eCfg.CDRSMediator = INTERNAL
eCfg.CDRSExtraFields = []string{}
eCfg.MediatorEnabled = false
eCfg.MediatorListen = "127.0.0.1:2032"
eCfg.MediatorRater = "127.0.0.1:2012"
eCfg.MediatorRaterReconnects = 3
eCfg.MediatorEnabled = false
eCfg.MediatorListen = "127.0.0.1:2032"
eCfg.MediatorRater = "127.0.0.1:2012"
eCfg.MediatorRaterReconnects = 3
eCfg.MediatorCDRType = "freeswitch_http_json"
eCfg.MediatorAccIdField = "accid"
eCfg.MediatorSubjectFields = []string{"subject"}
@@ -78,17 +79,17 @@ func TestDefaults(t *testing.T) {
eCfg.MediatorDestFields = []string{"destination"}
eCfg.MediatorTimeAnswerFields = []string{"time_answer"}
eCfg.MediatorDurationFields = []string{"duration"}
eCfg.MediatorCDRInDir = "/var/log/freeswitch/cdr-csv"
eCfg.MediatorCDRInDir = "/var/log/freeswitch/cdr-csv"
eCfg.MediatorCDROutDir = "/var/log/cgrates/cdr/out/freeswitch/csv"
eCfg.SMEnabled = false
eCfg.SMSwitchType = FS
eCfg.SMRater = "127.0.0.1:2012"
eCfg.SMRaterReconnects = 3
eCfg.SMDebitInterval = 10
eCfg.FreeswitchServer = "127.0.0.1:8021"
eCfg.FreeswitchPass = "ClueCon"
eCfg.FreeswitchReconnects = 5
if !reflect.DeepEqual(cfg ,eCfg ){
eCfg.SMEnabled = false
eCfg.SMSwitchType = FS
eCfg.SMRater = "127.0.0.1:2012"
eCfg.SMRaterReconnects = 3
eCfg.SMDebitInterval = 10
eCfg.FreeswitchServer = "127.0.0.1:8021"
eCfg.FreeswitchPass = "ClueCon"
eCfg.FreeswitchReconnects = 5
if !reflect.DeepEqual(cfg, eCfg) {
t.Log(eCfg)
t.Log(cfg)
t.Error("Defaults different than expected!")
@@ -99,22 +100,22 @@ func TestDefaults(t *testing.T) {
func TestDefaultsSanity(t *testing.T) {
cfg := &CGRConfig{}
errSet := cfg.setDefaults()
if errSet != nil {
if errSet != nil {
t.Log(fmt.Sprintf("Coud not set defaults: %s!", errSet.Error()))
t.FailNow()
}
if (cfg.RaterListen != INTERNAL &&
(cfg.RaterListen == cfg.BalancerListen ||
if (cfg.RaterListen != INTERNAL &&
(cfg.RaterListen == cfg.BalancerListen ||
cfg.RaterListen == cfg.CDRSListen ||
cfg.RaterListen == cfg.MediatorListen )) ||
cfg.RaterListen == cfg.MediatorListen)) ||
(cfg.BalancerListen != INTERNAL && (cfg.BalancerListen == cfg.CDRSListen ||
cfg.BalancerListen == cfg.MediatorListen ))||
cfg.BalancerListen == cfg.MediatorListen)) ||
(cfg.CDRSListen != INTERNAL && cfg.CDRSListen == cfg.MediatorListen) {
t.Error("Listen defaults on the same port!")
t.Error("Listen defaults on the same port!")
}
}
// Load config from file and make sure we have all set
// Load config from file and make sure we have all set
func TestConfigFromFile(t *testing.T) {
cfgPth := "test_data.txt"
cfg, err := NewCGRConfig(&cfgPth)
@@ -141,16 +142,17 @@ func TestConfigFromFile(t *testing.T) {
eCfg.DefaultTOR = "test"
eCfg.DefaultTenant = "test"
eCfg.DefaultSubject = "test"
eCfg.RoundingMethod = "test"
eCfg.RoundingDecimals = 99
eCfg.RaterEnabled = true
eCfg.RaterBalancer = "test"
eCfg.RaterListen = "test"
eCfg.RaterRoundingMethod = "test"
eCfg.RaterRoundingDecimals = 99
eCfg.BalancerEnabled = true
eCfg.BalancerListen = "test"
eCfg.SchedulerEnabled = true
eCfg.CDRSListen = "test"
eCfg.CDRSfsJSONEnabled = true
eCfg.CDRSgenJSONEnabled = true
eCfg.CDRSMediator = "test"
eCfg.CDRSExtraFields = []string{"test"}
eCfg.MediatorEnabled = true
@@ -178,9 +180,9 @@ func TestConfigFromFile(t *testing.T) {
eCfg.FreeswitchServer = "test"
eCfg.FreeswitchPass = "test"
eCfg.FreeswitchReconnects = 99
if !reflect.DeepEqual(cfg ,eCfg ){
if !reflect.DeepEqual(cfg, eCfg) {
t.Log(eCfg)
t.Log(cfg)
t.Error("Loading of configuration from file failed!")
}
}
}

View File

@@ -24,21 +24,20 @@ import (
"strings"
)
// Adds support for slice values in config
func ConfigSlice( c *conf.ConfigFile, section, valName string ) ([]string, error) {
func ConfigSlice(c *conf.ConfigFile, section, valName string) ([]string, error) {
sliceStr, errGet := c.GetString(section, valName)
if errGet != nil {
return nil, errGet
}
cfgValStrs := strings.Split(sliceStr, ",") // If the need arises, we can make the separator configurable
if len(cfgValStrs)==1 && cfgValStrs[0]=="" { // Prevents returning iterable with empty value
cfgValStrs := strings.Split(sliceStr, ",") // If the need arises, we can make the separator configurable
if len(cfgValStrs) == 1 && cfgValStrs[0] == "" { // Prevents returning iterable with empty value
return []string{}, nil
}
for _,elm := range cfgValStrs {
for _, elm := range cfgValStrs {
if elm == "" { //One empty element is presented when splitting empty string
return nil, errors.New("Empty values in config slice")
}
}
return cfgValStrs, nil
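
The reformatted ConfigSlice keeps its original behaviour: a comma-separated option is split into a slice, a fully empty option yields an empty slice, and an empty element in the middle is rejected. The strings.Split results that motivate those checks (inputs are illustrative):

package main

import (
	"fmt"
	"strings"
)

func main() {
	fmt.Printf("%q\n", strings.Split("subject,account", ","))  // ["subject" "account"] -> returned as-is
	fmt.Printf("%q\n", strings.Split("", ","))                 // [""] -> ConfigSlice returns an empty slice instead
	fmt.Printf("%q\n", strings.Split("subject,,account", ",")) // ["subject" "" "account"] -> rejected with an error
}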

View File

@@ -19,6 +19,8 @@ default_reqtype = test # Default request type to consider when missing from r
default_tor = test # Default Type of Record to consider when missing from requests.
default_tenant = test # Default Tenant to consider when missing from requests.
default_subject = test # Default rating Subject to consider when missing from requests.
rounding_method = test # Rounding method for floats/costs: <up|middle|down>
rounding_decimals = 99 # Number of decimals to round floats/costs at
[balancer]
@@ -29,8 +31,6 @@ listen = test # Balancer listen interface: <disabled|x.y.z.y:1234>.
enabled = true # Enable Rater service: <true|false>.
balancer = test # Register to Balancer as worker: <enabled|disabled>.
listen = test # Rater's listening interface: <internal|x.y.z.y:1234>.
rounding_method = test # Rounding method for the end price: <up|middle|down>
rounding_decimals = 99 # Number of decimals to round prices at
[scheduler]
enabled = true # Starts Scheduler service: <true|false>.
@@ -38,6 +38,7 @@ rounding_decimals = 99 # Number of decimals to round prices at
[cdrs]
listen=test # CDRS's listening interface: <x.y.z.y:1234>.
freeswitch_json_enabled=true # Enable the handler for FreeSWITCH JSON CDRs: <true|false>.
generic_json_enabled=true # Enable the handler for generic JSON CDRs: <true|false>.
mediator = test # Address where to reach the Mediator. Empty for disabling mediation. <""|internal>
extra_fields = test # Extra fields to store in CDRs

View File

@@ -22,6 +22,8 @@
# default_tor = 0 # Default Type of Record to consider when missing from requests.
# default_tenant = 0 # Default Tenant to consider when missing from requests.
# default_subject = 0 # Default rating Subject to consider when missing from requests.
# rounding_method = *middle # Rounding method for floats/costs: <*up|*middle|*down>
# rounding_decimals = 4 # Number of decimals to round float/costs at
[balancer]
@@ -32,8 +34,6 @@
# enabled = false # Enable Rater service: <true|false>.
# balancer = disabled # Register to Balancer as worker: <enabled|disabled>.
# listen = 127.0.0.1:2012 # Rater's listening interface: <internal|x.y.z.y:1234>.
# rounding_method = middle # Rounding method for the end price: <up|middle|down>
# rounding_decimals = 4 # Number of decimals to round end prices at
[scheduler]
# enabled = false # Starts Scheduler service: <true|false>.

View File

@@ -1,4 +1,4 @@
#!/bin/sh
echo Running CGRateS service
exec /usr/bin/cgr-rater -config /etc/cgrates/cgrates.cfg
echo Running CGRateS engine
exec /usr/bin/cgr-engine -config /etc/cgrates/cgrates.cfg

View File

@@ -1,6 +1,7 @@
## CGRateS official APT repository.
# Place this source file into your /etc/apt/sources.list.d/ folder and execute the commands below
# wget -O - http://apt.itsyscom.com/repos/apt/conf/cgrates.gpg.key|apt-key add -
# apt-get update && apt-get install
# apt-get update && apt-get install cgrates
deb http://apt.itsyscom.com/repos/apt/debian wheezy main
deb http://apt.itsyscom.com/repos/apt/debian squeeze main

View File

@@ -1,3 +1,3 @@
Tag,Years,Months,MonthDays,WeekDays,Time
#Tag,Years,Months,MonthDays,WeekDays,Time
ALWAYS,*all,*all,*all,*all,00:00:00
ONE_TIME_RUN,,,,,*asap

View File

@@ -43,11 +43,12 @@ CREATE TABLE `tp_rates` (
`rate` decimal(5,4) NOT NULL,
`rated_units` int(11) NOT NULL,
`rate_increments` int(11) NOT NULL,
`group_interval` int(11) NOT NULL,
`rounding_method` varchar(255) NOT NULL,
`rounding_decimals` tinyint(4) NOT NULL,
`weight` decimal(5,2) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `tpid_tag_rate_weight` (`tpid`,`tag`,`weight`),
UNIQUE KEY `unique_tprate` (`tpid`,`tag`,`group_interval`),
KEY `tpid` (`tpid`),
KEY `tpid_tag` (`tpid`,`tag`)
);
@@ -89,7 +90,7 @@ CREATE TABLE `tp_destrate_timings` (
-- Table structure for table `tp_rate_profiles`
--
CREATE TABLE `tp_rate_profiles` (
CREATE TABLE `tp_rating_profiles` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`tpid` char(40) NOT NULL,
`tag` varchar(24) NOT NULL,
@@ -114,18 +115,23 @@ CREATE TABLE `tp_actions` (
`tpid` char(40) NOT NULL,
`tag` varchar(24) NOT NULL,
`action` varchar(24) NOT NULL,
`balance_tag` varchar(24) NOT NULL,
`balance_type` varchar(24) NOT NULL,
`direction` varchar(8) NOT NULL,
`units` DECIMAL(5,2) NOT NULL,
`expiration_time` varchar(24) NOT NULL,
`units` DECIMAL(8,4) NOT NULL,
`expiry_time` int(16) NOT NULL,
`destination_tag` varchar(24) NOT NULL,
`rate_type` varchar(8) NOT NULL,
`rate` DECIMAL(5,4) NOT NULL,
`rate` DECIMAL(8,4) NOT NULL,
`minutes_weight` DECIMAL(5,2) NOT NULL,
`weight` DECIMAL(5,2) NOT NULL,
PRIMARY KEY (`id`),
KEY `tpid` (`tpid`),
UNIQUE KEY `unique_action` (`tpid`,`tag`,`action`,`balance_tag`,`direction`,`expiration_time`,`destination_tag`,`rate_type`,`minutes_weight`,`weight`)
UNIQUE KEY `unique_action` (`tpid`,`tag`,`action`,`balance_type`,`direction`,`expiry_time`,`destination_tag`,`rate_type`,`minutes_weight`,`weight`)
);
--
@@ -152,16 +158,16 @@ CREATE TABLE `tp_action_triggers` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`tpid` char(40) NOT NULL,
`tag` varchar(24) NOT NULL,
`balance_tag` varchar(24) NOT NULL,
`balance_type` varchar(24) NOT NULL,
`direction` varchar(8) NOT NULL,
`threshold_type` char(11) NOT NULL,
`threshold_value` DECIMAL(5,4) NOT NULL,
`threshold_type` char(12) NOT NULL,
`threshold_value` DECIMAL(8,4) NOT NULL,
`destination_tag` varchar(24) NOT NULL,
`actions_tag` varchar(24) NOT NULL,
`weight` DECIMAL(5,2) NOT NULL,
PRIMARY KEY (`id`),
KEY `tpid` (`tpid`),
UNIQUE KEY `unique_trigger_definition` (`tpid`,`tag`,`balance_tag`,`direction`,`threshold_type`,`threshold_value`,`destination_tag`,`actions_tag`)
UNIQUE KEY `unique_trigger_definition` (`tpid`,`tag`,`balance_type`,`direction`,`threshold_type`,`threshold_value`,`destination_tag`,`actions_tag`)
);
--

View File

@@ -0,0 +1,6 @@
#Tenant,Account,Direction,ActionTimingsTag,ActionTriggersTag
cgrates.org,1001,*out,PREPAID_10,STANDARD_TRIGGERS
cgrates.org,1002,*out,PREPAID_10,STANDARD_TRIGGERS
cgrates.org,1003,*out,PREPAID_10,STANDARD_TRIGGERS
cgrates.org,1004,*out,PREPAID_10,STANDARD_TRIGGERS
cgrates.org,1005,*out,PREPAID_10,STANDARD_TRIGGERS

View File

@@ -0,0 +1,2 @@
#Tag,ActionsTag,TimingTag,Weight
PREPAID_10,PREPAID_10,ASAP,10

View File

@@ -0,0 +1,4 @@
#Tag,BalanceTag,Direction,ThresholdType,ThresholdValue,DestinationTag,ActionsTag,Weight
STANDARD_TRIGGERS,*monetary,*out,*min_balance,2,,LOG_BALANCE,10
STANDARD_TRIGGERS,*monetary,*out,*max_balance,20,,LOG_BALANCE,10
STANDARD_TRIGGERS,*monetary,*out,*max_counter,15,FS_USERS,LOG_BALANCE,10

View File

@@ -0,0 +1,2 @@
#ActionsTag,Action,BalanceType,Direction,Units,ExpirationDate,DestinationTag,RateType,RateValue,MinutesWeight,Weight
PREPAID_10,*topup_reset,*monetary,*out,10,*unlimited,*any,,,,10

View File

@@ -0,0 +1,6 @@
#Tag,DestinationRatesTag,TimingTag,Weight
RETAIL1,DR_RETAIL_PEAK,PEAK,10
RETAIL1,DR_RETAIL_OFFPEAK,OFFPEAK_MORNING,10
RETAIL1,DR_RETAIL_OFFPEAK,OFFPEAK_EVENING,10
RETAIL1,DR_RETAIL_OFFPEAK,WEEKEND,10
RETAIL1,DR_FREESWITCH_USERS,ALWAYS,10

View File

@@ -0,0 +1,6 @@
#Tag,DestinationsTag,RatesTag
DR_RETAIL_PEAK,GERMANY,LANDLINE_PEAK
DR_RETAIL_PEAK,GERMANY_MOBILE,MOBILE_PEAK
DR_RETAIL_OFFPEAK,GERMANY,LANDLINE_OFFPEAK
DR_RETAIL_OFFPEAK,GERMANY_MOBILE,MOBILE_OFFPEAK
DR_FREESWITCH_USERS,FS_USERS,RT_FS_USERS

View File

@@ -0,0 +1,6 @@
#Tag,Prefix
GERMANY,+49
GERMANY_MOBILE,+4915
GERMANY_MOBILE,+4916
GERMANY_MOBILE,+4917
FS_USERS,10

View File

@@ -0,0 +1,15 @@
CGRateS - FSGermanyPrep1
==========================
Scenario:
---------
* Create the necessary timings (always, peak, offpeak, asap).
* Configure 3 different destinations: GERMANY, GERMANY_MOBILE and FS_USERS.
* Calls to landline and mobile numbers in Germany will be charged time based (structured into peak and offpeak profiles). Landline calls during peak times are charged using two rate slots: the first minute is charged as a whole at one rate, and the following minutes are charged per second at another rate (a rough worked example follows this list).
* Calls to FreeSWITCH users will be free and time independent.
* This rating profile will be valid for any rating subject.
* Create 5 prepaid accounts (equivalent of 5 FreeSWITCH default test users - 1001, 1002, 1003, 1004, 1005).
* Add to each of the accounts a monetary balance of 10 units.
* For each balance created, attach 3 triggers to control the balance: log on balance=2, log on balance=20, log on 15 mins talked towards FS_USERS destination.
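
As a rough sanity check of the peak landline rate slots in this tariff plan (assuming Rate buys RatedUnits seconds, RateIncrements is the billing step and GroupInterval is the second from which a slot starts to apply), a 90 second peak call to a German landline would cost about 0.02 (connect fee) + 0.02 (first 60 s billed as one whole minute) + 30 x 0.01 (remaining 30 s at the per-second slot) = 0.34 monetary units, before rounding.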

View File

@@ -0,0 +1,7 @@
#Tag,ConnectFee,Rate,RatedUnits,RateIncrements,GroupInterval,RoundingMethod,RoundingDecimals,Weight
LANDLINE_PEAK,0.02,0.02,60,60,0,*up,4,10
LANDLINE_PEAK,0.02,0.01,1,1,60,*up,4,10
MOBILE_PEAK,0.02,0.14,60,60,0,*up,4,10
LANDLINE_OFFPEAK,1,0,60,60,0,*up,4,10
MOBILE_OFFPEAK,0.02,0.1,60,60,0,*up,4,10
RT_FS_USERS,0,0,60,60,0,*up,0,10

View File

@@ -0,0 +1,2 @@
#Tenant,TOR,Direction,Subject,ActivationTime,DestinationRateTimingTag,RatesFallbackSubject
cgrates.org,call,*out,*any,2012-01-01T00:00:00Z,RETAIL1,

View File

@@ -0,0 +1,7 @@
#Tag,Years,Months,MonthDays,WeekDays,Time
ALWAYS,*any,*any,*any,*any,00:00:00
ASAP,*any,*any,*any,*any,*asap
OFFPEAK_MORNING,*any,*any,*any,1;2;3;4;5,00:00:00
PEAK,*any,*any,*any,1;2;3;4;5,08:00:00
OFFPEAK_EVENING,*any,*any,*any,1;2;3;4;5,20:00:00
WEEKEND,*any,*any,*any,6;7,00:00:00

View File

@@ -16,16 +16,16 @@ Creates a new Actions profile within a tariff plan.
type Action struct {
Identifier string // Identifier mapped in the code
BalanceId string // Type of balance the action will operate on
BalanceType string // Type of balance the action will operate on
Direction string // Balance direction
Units float64 // Number of units to add/deduct
ExpirationTime int64 // Time when the units will expire
ExpiryTime int64 // Time when the units will expire
DestinationId string // Destination profile id
RateType string // Type of price <ABSOLUTE|PERCENT>
RateType string // Type of rate <*absolute|*percent>
Rate float64 // Price value
MinutesWeight float64 // Minutes weight
Weight float64 // Action's weight
}
}
Mandatory parameters: ``[]string{"TPid", "ActionsId", "Actions", "Identifier", "Weight"}``
@@ -39,14 +39,14 @@ Creates a new Actions profile within a tariff plan.
{
"Actions": [
{
"BalanceId": "MONEY",
"BalanceType": "*monetary",
"DestinationId": "CGRATES_NET",
"Direction": "OUT",
"ExpirationTime": 1374082259,
"Direction": "*out",
"ExpiryTime": 1374082259,
"Identifier": "TOPUP_RESET",
"MinutesWeight": 10,
"Rate": 0.12,
"RateType": "ABSOLUTE",
"RateType": "*absolute",
"Units": 10,
"Weight": 10
}
@@ -129,12 +129,12 @@ Queries specific Actions profile on tariff plan.
type Action struct {
Identifier string // Identifier mapped in the code
BalanceId string // Type of balance the action will operate on
BalanceType string // Type of balance the action will operate on
Direction string // Balance direction
Units float64 // Number of units to add/deduct
ExpirationTime int64 // Time when the units will expire
ExpiryTime int64 // Time when the units will expire
DestinationId string // Destination profile id
RateType string // Type of price <ABSOLUTE|PERCENT>
RateType string // Type of price <*absolute|*percent>
Rate float64 // Price value
MinutesWeight float64 // Minutes weight
Weight float64 // Action's weight
@@ -149,14 +149,14 @@ Queries specific Actions profile on tariff plan.
"result": {
"Actions": [
{
"BalanceId": "MONEY",
"BalanceType": "*monetary",
"DestinationId": "CGRATES_NET",
"Direction": "OUT",
"ExpirationTime": 1374082259,
"Direction": "*out",
"ExpiryTime": 1374082259,
"Identifier": "TOPUP_RESET",
"MinutesWeight": 10,
"Rate": 0.12,
"RateType": "ABSOLUTE",
"RateType": "*absolute",
"Units": 10,
"Weight": 10
}

View File

@@ -16,7 +16,7 @@ Creates a new ActionTriggers profile within a tariff plan.
}
type ApiActionTrigger struct {
BalanceId string // Id of the balance this trigger monitors
BalanceType string // Id of the balance this trigger monitors
Direction string // Traffic direction
ThresholdType string // This threshold type
ThresholdValue float64 // Threshold
@@ -25,7 +25,7 @@ Creates a new ActionTriggers profile within a tariff plan.
Weight float64 // weight
}
Mandatory parameters: ``[]string{"TPid", "ActionTriggersId","BalanceId", "Direction", "ThresholdType", "ThresholdValue", "ActionsId", "Weight"}``
Mandatory parameters: ``[]string{"TPid", "ActionTriggersId","BalanceType", "Direction", "ThresholdType", "ThresholdValue", "ActionsId", "Weight"}``
*JSON sample*:
::
@@ -38,7 +38,7 @@ Creates a new ActionTriggers profile within a tariff plan.
"ActionTriggers": [
{
"ActionsId": "ACTION_1",
"BalanceId": "MONETARY",
"BalanceType": "MONETARY",
"DestinationId": "",
"Direction": "OUT",
"ThresholdType": "MIN_BALANCE",
@@ -124,7 +124,7 @@ Queries specific ActionTriggers profile on tariff plan.
}
type ApiActionTrigger struct {
BalanceId string // Id of the balance this trigger monitors
BalanceType string // Id of the balance this trigger monitors
Direction string // Traffic direction
ThresholdType string // This threshold type
ThresholdValue float64 // Threshold
@@ -143,9 +143,9 @@ Queries specific ActionTriggers profile on tariff plan.
"ActionTriggers": [
{
"ActionsId": "ACTION_1",
"BalanceId": "MONETARY",
"BalanceType": "*monetary",
"DestinationId": "",
"Direction": "OUT",
"Direction": "*out",
"ThresholdType": "MIN_BALANCE",
"ThresholdValue": 5,
"Weight": 10

View File

@@ -15,13 +15,14 @@ Creates a new rate within a tariff plan.
}
type RateSlot struct {
ConnectFee float64 // ConnectFee applied once the call is answered
Rate float64 // Rate applied
RatedUnits int // Number of billing units this rate applies to
RateIncrements int // This rate will apply in increments of duration
RoundingMethod string // Use this method to round the cost
RoundingDecimals int // Round the cost number of decimals
Weight float64 // Rate's priority when dealing with grouped rates
ConnectFee float64 // ConnectFee applied once the call is answered
Rate float64 // Rate applied
RatedUnits int // Number of billing units this rate applies to
RateIncrements int // This rate will apply in increments of duration
GroupInterval int // Group position
RoundingMethod string // Use this method to round the cost
RoundingDecimals int // Round the cost number of decimals
Weight float64 // Rate's priority when dealing with grouped rates
}
Mandatory parameters: ``[]string{"TPid", "RateId", "ConnectFee", "RateSlots"}``
@@ -39,9 +40,10 @@ Creates a new rate within a tariff plan.
{
"ConnectFee": 0.2,
"Rate": 2,
"RateIncrements": 1,
"RateIncrements": 60,
"RatedUnits": 1,
"RoundingDecimals": 2,
"RoundingDecimals": 2,
"GroupInterval": 0,
"RoundingMethod": "*up",
"Weight": 10.0
},
@@ -50,7 +52,8 @@ Creates a new rate within a tariff plan.
"Rate": 2.1,
"RateIncrements": 1,
"RatedUnits": 1,
"RoundingDecimals": 2,
"RoundingDecimals": 2,
"GroupInterval": 60,
"RoundingMethod": "*up",
"Weight": 20.0
}
@@ -131,13 +134,14 @@ Queries specific rate on tariff plan.
}
type RateSlot struct {
ConnectFee float64 // ConnectFee applied once the call is answered
Rate float64 // Rate applied
RatedUnits int // Number of billing units this rate applies to
RateIncrements int // This rate will apply in increments of duration
RoundingMethod string // Use this method to round the cost
RoundingDecimals int // Round the cost number of decimals
Weight float64 // Rate's priority when dealing with grouped rates
ConnectFee float64 // ConnectFee applied once the call is answered
Rate float64 // Rate applied
RatedUnits int // Number of billing units this rate applies to
RateIncrements int // This rate will apply in increments of duration
GroupInterval int // Group position
RoundingMethod string // Use this method to round the cost
RoundingDecimals int // Round the cost number of decimals
Weight float64 // Rate's priority when dealing with grouped rates
}
*JSON sample*:
@@ -152,9 +156,10 @@ Queries specific rate on tariff plan.
{
"ConnectFee": 0.2,
"Rate": 2,
"RateIncrements": 1,
"RateIncrements": 60,
"RatedUnits": 1,
"RoundingDecimals": 2,
"RoundingDecimals": 2,
"GroupInterval": 0,
"RoundingMethod": "*up",
"Weight": 10
},
@@ -163,7 +168,8 @@ Queries specific rate on tariff plan.
"Rate": 2.1,
"RateIncrements": 1,
"RatedUnits": 1,
"RoundingDecimals": 2,
"RoundingDecimals": 2,
"GroupInterval": 60,
"RoundingMethod": "*up",
"Weight": 20
}

View File

@@ -1,16 +1,16 @@
Apier.SetTPRateProfile
++++++++++++++++++++++
Apier.SetTPRatingProfile
++++++++++++++++++++++++
Creates a new RateProfile within a tariff plan.
Creates a new RatingProfile within a tariff plan.
**Request**:
Data:
::
type TPRateProfile struct {
type TPRatingProfile struct {
TPid string // Tariff plan id
RateProfileId string // RateProfile id
RatingProfileId string // RatingProfile id
Tenant string // Tenant's Id
TOR string // TypeOfRecord
Direction string // Traffic direction, OUT is the only one supported for now
@@ -24,18 +24,18 @@ Creates a new RateProfile within a tariff plan.
DestRateTimingId string // Id of DestRateTiming profile
}
Mandatory parameters: ``[]string{"TPid", "RateProfileId", "Tenant", "TOR", "Direction", "Subject", "RatingActivations"}``
Mandatory parameters: ``[]string{"TPid", "RatingProfileId", "Tenant", "TOR", "Direction", "Subject", "RatingActivations"}``
*JSON sample*:
::
{
"id": 3,
"method": "Apier.SetTPRateProfile",
"method": "Apier.SetTPRatingProfile",
"params": [
{
"Direction": "OUT",
"RateProfileId": "SAMPLE_RP_2",
"RatingProfileId": "SAMPLE_RP_2",
"RatingActivations": [
{
"ActivationTime": 1373609003,
@@ -79,35 +79,35 @@ Creates a new RateProfile within a tariff plan.
``SERVER_ERROR`` - Server error occurred.
``DUPLICATE`` - The specified combination of TPid/RateProfileId already exists in StorDb.
``DUPLICATE`` - The specified combination of TPid/RatingProfileId already exists in StorDb.
Apier.GetTPRateProfile
++++++++++++++++++++++
Apier.GetTPRatingProfile
++++++++++++++++++++++++
Queries specific RateProfile on tariff plan.
Queries specific RatingProfile on tariff plan.
**Request**:
Data:
::
type AttrGetTPRateProfile struct {
type AttrGetTPRatingProfile struct {
TPid string // Tariff plan id
RateProfileId string // RateProfile id
RatingProfileId string // RatingProfile id
}
Mandatory parameters: ``[]string{"TPid", "RateProfileId"}``
Mandatory parameters: ``[]string{"TPid", "RatingProfileId"}``
*JSON sample*:
::
{
"id": 0,
"method": "Apier.GetTPRateProfile",
"method": "Apier.GetTPRatingProfile",
"params": [
{
"RateProfileId": "SAMPLE_RP_2",
"RatingProfileId": "SAMPLE_RP_2",
"TPid": "SAMPLE_TP"
}
]
@@ -118,9 +118,9 @@ Queries specific RateProfile on tariff plan.
Data:
::
type TPRateProfile struct {
type TPRatingProfile struct {
TPid string // Tariff plan id
RateProfileId string // RateProfile id
RatingProfileId string // RatingProfile id
Tenant string // Tenant's Id
TOR string // TypeOfRecord
Direction string // Traffic direction, OUT is the only one supported for now
@@ -142,7 +142,7 @@ Queries specific RateProfile on tariff plan.
"id": 0,
"result": {
"Direction": "OUT",
"RateProfileId": "SAMPLE_RP_2",
"RatingProfileId": "SAMPLE_RP_2",
"RatesFallbackSubject": "",
"RatingActivations": [
{
@@ -167,20 +167,20 @@ Queries specific RateProfile on tariff plan.
``SERVER_ERROR`` - Server error occurred.
``NOT_FOUND`` - Requested RateProfile profile not found.
``NOT_FOUND`` - Requested RatingProfile profile not found.
Apier.GetTPRateProfileIds
+++++++++++++++++++++++++
Apier.GetTPRatingProfileIds
+++++++++++++++++++++++++++
Queries specific RateProfile on tariff plan. Attribute parameters used as extra filters.
Queries specific RatingProfile on tariff plan. Attribute parameters used as extra filters.
**Request**:
Data:
::
type AttrTPRateProfileIds struct {
type AttrTPRatingProfileIds struct {
TPid string // Tariff plan id
Tenant string // Tenant's Id
TOR string // TypeOfRecord
@@ -195,7 +195,7 @@ Queries specific RateProfile on tariff plan. Attribute parameters used as extra
{
"id": 0,
"method": "Apier.GetTPRateProfileIds",
"method": "Apier.GetTPRatingProfileIds",
"params": [
{
"Subject": "dan",

View File

@@ -169,13 +169,13 @@ DestinationRateTimings
api_tpdestratetimings
RateProfiles
~~~~~~~~~~~~
RatingProfiles
~~~~~~~~~~~~~~
.. toctree::
:maxdepth: 2
api_tprateprofiles
api_tpratingprofiles
Actions
~~~~~~~

View File

@@ -28,15 +28,17 @@ import (
Structure to be filled for each tariff plan with the bonus value for received calls minutes.
*/
type Action struct {
Id string
ActionType string
BalanceId string
Direction string
ExpirationString string
ExpirationDate time.Time
Units float64
Weight float64
MinuteBucket *MinuteBucket
Id string
ActionType string
BalanceId string
Direction string
ExpirationString string
ExpirationDate time.Time
Units float64
Weight float64
MinuteBucket *MinuteBucket
DestinationTag, RateType string // From here for import/load purposes only
RateValue, MinutesWeight float64
}
const (
@@ -117,7 +119,7 @@ func topupResetAction(ub *UserBalance, a *Action) (err error) {
if a.BalanceId == MINUTES {
ub.MinuteBuckets = make([]*MinuteBucket, 0)
} else {
ub.BalanceMap[a.BalanceId+a.Direction] = BalanceChain{&Balance{Value: 0}}
ub.BalanceMap[a.BalanceId+a.Direction] = BalanceChain{&Balance{Value: 0}} // ToDo: can ub be empty here?
}
genericMakeNegative(a)
genericDebit(ub, a)

View File

@@ -41,7 +41,7 @@ type ActionTiming struct {
ActionsId string
actions Actions
stCache time.Time // cached time of the next start
actionsTag, timingsTag string // used only for loading
ActionsTag, TimingsTag string // used only for loading
}
type ActionTimings []*ActionTiming

View File

@@ -24,7 +24,6 @@ import (
"github.com/cgrates/cgrates/cache2go"
"github.com/cgrates/cgrates/utils"
"log/syslog"
"math"
"strings"
"time"
)
@@ -51,27 +50,12 @@ var (
storageGetter, _ = NewMapStorage()
//storageGetter, _ = NewMongoStorage(db_server, "27017", "cgrates_test", "", "")
//storageGetter, _ = NewRedisStorage(db_server+":6379", 11, "")
storageLogger = storageGetter
debitPeriod = 10 * time.Second
storageLogger = storageGetter
debitPeriod = 10 * time.Second
roundingMethod = "*middle"
roundingDecimals = 4
)
/*
Utility function for rounding a float to a certain number of decimals (not present in math).
*/
func round(val float64, prec int) float64 {
var rounder float64
intermed := val * math.Pow(10, float64(prec))
if val >= 0.5 {
rounder = math.Ceil(intermed)
} else {
rounder = math.Floor(intermed)
}
return rounder / math.Pow(10, float64(prec))
}
/*
The input stucture that contains call information.
*/
@@ -102,9 +86,7 @@ func (cd *CallDescriptor) GetUserBalanceKey() string {
return fmt.Sprintf("%s:%s:%s", cd.Direction, cd.Tenant, subj)
}
/*
Gets and caches the user balance information.
*/
// Gets and caches the user balance information.
func (cd *CallDescriptor) getUserBalance() (ub *UserBalance, err error) {
if cd.userBalance == nil {
cd.userBalance, err = storageGetter.GetUserBalance(cd.GetUserBalanceKey())
@@ -112,13 +94,17 @@ func (cd *CallDescriptor) getUserBalance() (ub *UserBalance, err error) {
return cd.userBalance, err
}
/*
Exported method to set the storage getter.
*/
// Exported method to set the storage getter.
func SetDataStorage(sg DataStorage) {
storageGetter = sg
}
// Sets the global rounding method and decimal precision for GetCost method
func SetRoundingMethodAndDecimals(rm string, rd int) {
roundingMethod = rm
roundingDecimals = rd
}
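
To make the three method names concrete, below is a small self-contained sketch of a rounding helper with *up/*middle/*down semantics; it only illustrates the behaviour selected through SetRoundingMethodAndDecimals and is not claimed to be the actual utils.Round implementation.

package main

import (
    "fmt"
    "math"
)

// roundTo rounds val to the given number of decimals using one of the
// *up, *down or *middle methods (illustrative helper, not utils.Round).
func roundTo(val float64, decimals int, method string) float64 {
    scale := math.Pow(10, float64(decimals))
    v := val * scale
    switch method {
    case "*up":
        v = math.Ceil(v)
    case "*down":
        v = math.Floor(v)
    default: // "*middle": round half up for positive values
        v = math.Floor(v + 0.5)
    }
    return v / scale
}

func main() {
    fmt.Println(roundTo(2.345678, 4, "*up"))    // 2.3457
    fmt.Println(roundTo(2.34567, 4, "*middle")) // 2.3457
    fmt.Println(roundTo(2.34561, 4, "*down"))   // 2.3456
}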
/*
Sets the database for logging (can be the same as the storage getter or a different db)
*/
@@ -285,7 +271,7 @@ func (cd *CallDescriptor) GetCost() (*CallCost, error) {
}
cost += ts.getCost(cd)
}
cost = utils.Round(cost, roundingDecimals, roundingMethod)
cc := &CallCost{
Direction: cd.Direction,
TOR: cd.TOR,

View File

@@ -33,7 +33,7 @@ import (
type CSVReader struct {
sep rune
storage DataStorage
readerFunc func(string, rune) (*csv.Reader, *os.File, error)
readerFunc func(string, rune, int) (*csv.Reader, *os.File, error)
actions map[string][]*Action
actionsTimings map[string][]*ActionTiming
actionsTriggers map[string][]*ActionTrigger
@@ -74,20 +74,24 @@ func NewStringCSVReader(storage DataStorage, sep rune, destinationsFn, timingsFn
return c
}
func openFileCSVReader(fn string, comma rune) (csvReader *csv.Reader, fp *os.File, err error) {
func openFileCSVReader(fn string, comma rune, nrFields int) (csvReader *csv.Reader, fp *os.File, err error) {
fp, err = os.Open(fn)
if err != nil {
return
}
csvReader = csv.NewReader(fp)
csvReader.Comma = comma
csvReader.Comment = utils.COMMENT_CHAR
csvReader.FieldsPerRecord = nrFields
csvReader.TrailingComma = true
return
}
func openStringCSVReader(data string, comma rune) (csvReader *csv.Reader, fp *os.File, err error) {
func openStringCSVReader(data string, comma rune, nrFields int) (csvReader *csv.Reader, fp *os.File, err error) {
csvReader = csv.NewReader(strings.NewReader(data))
csvReader.Comma = comma
csvReader.Comment = utils.COMMENT_CHAR
csvReader.FieldsPerRecord = nrFields
csvReader.TrailingComma = true
return
}
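
For context, the nrFields argument introduced in these readers leans on encoding/csv's FieldsPerRecord: when set to a positive value, Read returns an error for any record whose field count differs. A minimal standalone sketch with made-up rows:

package main

import (
    "encoding/csv"
    "fmt"
    "strings"
)

func main() {
    in := "#Tag,Prefix\nGERMANY,+49\nBROKEN_ROW\n"
    r := csv.NewReader(strings.NewReader(in))
    r.Comma = ','
    r.Comment = '#'       // comment lines are skipped, as in the readers above
    r.FieldsPerRecord = 2 // every record must have exactly two fields
    for {
        rec, err := r.Read()
        if err != nil {
            fmt.Println("stopped:", err) // the one-field row triggers a field count error
            break
        }
        fmt.Println(rec) // [GERMANY +49]
    }
}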
@@ -164,7 +168,7 @@ func (csvr *CSVReader) WriteToDatabase(flush, verbose bool) (err error) {
}
func (csvr *CSVReader) LoadDestinations() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.destinationsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.destinationsFn, csvr.sep, utils.DESTINATIONS_NRCOLS)
if err != nil {
log.Print("Could not load destinations file: ", err)
// allow writing of the other values
@@ -174,12 +178,7 @@ func (csvr *CSVReader) LoadDestinations() (err error) {
defer fp.Close()
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
var dest *Destination
for _, d := range csvr.destinations {
if d.Id == tag {
@@ -197,7 +196,7 @@ func (csvr *CSVReader) LoadDestinations() (err error) {
}
func (csvr *CSVReader) LoadTimings() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.timingsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.timingsFn, csvr.sep, utils.TIMINGS_NRCOLS)
if err != nil {
log.Print("Could not load timings file: ", err)
// allow writing of the other values
@@ -208,18 +207,13 @@ func (csvr *CSVReader) LoadTimings() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
csvr.timings[tag] = NewTiming(record...)
}
return
}
func (csvr *CSVReader) LoadRates() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.ratesFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.ratesFn, csvr.sep, utils.RATES_NRCOLS)
if err != nil {
log.Print("Could not load rates file: ", err)
// allow writing of the other values
@@ -230,10 +224,6 @@ func (csvr *CSVReader) LoadRates() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
var r *Rate
r, err = NewRate(record[0], record[1], record[2], record[3], record[4], record[5], record[6], record[7], record[8])
if err != nil {
@@ -245,7 +235,7 @@ func (csvr *CSVReader) LoadRates() (err error) {
}
func (csvr *CSVReader) LoadDestinationRates() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.destinationratesFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.destinationratesFn, csvr.sep, utils.DESTINATION_RATES_NRCOLS)
if err != nil {
log.Print("Could not load rates file: ", err)
// allow writing of the other values
@@ -256,14 +246,11 @@ func (csvr *CSVReader) LoadDestinationRates() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
r, exists := csvr.rates[record[2]]
if !exists {
return errors.New(fmt.Sprintf("Could not get rating for tag %v", record[2]))
return errors.New(fmt.Sprintf("Could not get rates for tag %v", record[2]))
}
//ToDo: Not checking presence of destinations?
dr := &DestinationRate{
Tag: tag,
DestinationsTag: record[1],
@@ -276,7 +263,7 @@ func (csvr *CSVReader) LoadDestinationRates() (err error) {
}
func (csvr *CSVReader) LoadDestinationRateTimings() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.destinationratetimingsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.destinationratetimingsFn, csvr.sep, utils.DESTRATE_TIMINGS_NRCOLS)
if err != nil {
log.Print("Could not load rate timings file: ", err)
// allow writing of the other values
@@ -287,11 +274,6 @@ func (csvr *CSVReader) LoadDestinationRateTimings() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
t, exists := csvr.timings[record[2]]
if !exists {
return errors.New(fmt.Sprintf("Could not get timing for tag %v", record[2]))
@@ -313,7 +295,7 @@ func (csvr *CSVReader) LoadDestinationRateTimings() (err error) {
}
func (csvr *CSVReader) LoadRatingProfiles() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.ratingprofilesFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.ratingprofilesFn, csvr.sep, utils.RATE_PROFILES_NRCOLS)
if err != nil {
log.Print("Could not load rating profiles file: ", err)
// allow writing of the other values
@@ -323,18 +305,10 @@ func (csvr *CSVReader) LoadRatingProfiles() (err error) {
defer fp.Close()
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tenant" {
// skip header line
continue
}
if len(record) != 7 {
return errors.New(fmt.Sprintf("Malformed rating profile: %v", record))
}
tenant, tor, direction, subject, fallbacksubject := record[0], record[1], record[2], record[3], record[4]
at, err := time.Parse(time.RFC3339, record[6])
tenant, tor, direction, subject, fallbacksubject := record[0], record[1], record[2], record[3], record[6]
at, err := time.Parse(time.RFC3339, record[4])
if err != nil {
return errors.New(fmt.Sprintf("Cannot parse activation time from %v", record[6]))
return errors.New(fmt.Sprintf("Cannot parse activation time from %v", record[4]))
}
key := fmt.Sprintf("%s:%s:%s:%s", direction, tenant, tor, subject)
rp, ok := csvr.ratingProfiles[key]
@@ -360,7 +334,7 @@ func (csvr *CSVReader) LoadRatingProfiles() (err error) {
}
func (csvr *CSVReader) LoadActions() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.actionsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.actionsFn, csvr.sep, utils.ACTIONS_NRCOLS)
if err != nil {
log.Print("Could not load action triggers file: ", err)
// allow writing of the other values
@@ -371,10 +345,6 @@ func (csvr *CSVReader) LoadActions() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
units, err := strconv.ParseFloat(record[4], 64)
if err != nil {
return errors.New(fmt.Sprintf("Could not parse action units: %v", err))
@@ -431,7 +401,7 @@ func (csvr *CSVReader) LoadActions() (err error) {
}
func (csvr *CSVReader) LoadActionTimings() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.actiontimingsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.actiontimingsFn, csvr.sep, utils.ACTION_TIMINGS_NRCOLS)
if err != nil {
log.Print("Could not load action triggers file: ", err)
// allow writing of the other values
@@ -442,10 +412,6 @@ func (csvr *CSVReader) LoadActionTimings() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
_, exists := csvr.actions[record[1]]
if !exists {
return errors.New(fmt.Sprintf("ActionTiming: Could not load the action for tag: %v", record[1]))
@@ -476,7 +442,7 @@ func (csvr *CSVReader) LoadActionTimings() (err error) {
}
func (csvr *CSVReader) LoadActionTriggers() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.actiontriggersFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.actiontriggersFn, csvr.sep, utils.ACTION_TRIGGERS_NRCOLS)
if err != nil {
log.Print("Could not load action triggers file: ", err)
// allow writing of the other values
@@ -487,10 +453,6 @@ func (csvr *CSVReader) LoadActionTriggers() (err error) {
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
tag := record[0]
if tag == "Tag" {
// skip header line
continue
}
value, err := strconv.ParseFloat(record[4], 64)
if err != nil {
return errors.New(fmt.Sprintf("Could not parse action trigger value: %v", err))
@@ -515,7 +477,7 @@ func (csvr *CSVReader) LoadActionTriggers() (err error) {
}
func (csvr *CSVReader) LoadAccountActions() (err error) {
csvReader, fp, err := csvr.readerFunc(csvr.accountactionsFn, csvr.sep)
csvReader, fp, err := csvr.readerFunc(csvr.accountactionsFn, csvr.sep, utils.ACCOUNT_ACTIONS_NRCOLS)
if err != nil {
log.Print("Could not load account actions file: ", err)
// allow writing of the other values
@@ -525,9 +487,6 @@ func (csvr *CSVReader) LoadAccountActions() (err error) {
defer fp.Close()
}
for record, err := csvReader.Read(); err == nil; record, err = csvReader.Read() {
if record[0] == "Tenant" {
continue
}
tag := fmt.Sprintf("%s:%s:%s", record[2], record[0], record[1])
aTriggers, exists := csvr.actionsTriggers[record[4]]
if record[4] != "" && !exists {

View File

@@ -25,7 +25,7 @@ import (
var (
destinations = `
Tag,Prefix
#Tag,Prefix
GERMANY,49
GERMANY_O2,41
GERMANY_PREMIUM,43
@@ -72,27 +72,27 @@ EVENING,P2,WORKDAYS_18,10
EVENING,P2,WEEKENDS,10
`
ratingProfiles = `
CUSTOMER_1,0,*out,rif:from:tm,danb,PREMIUM,2012-01-01T00:00:00Z
CUSTOMER_1,0,*out,rif:from:tm,danb,STANDARD,2012-02-28T00:00:00Z
CUSTOMER_2,0,*out,danb:87.139.12.167,danb,STANDARD,2012-01-01T00:00:00Z
CUSTOMER_1,0,*out,danb,,PREMIUM,2012-01-01T00:00:00Z
vdf,0,*out,rif,,EVENING,2012-01-01T00:00:00Z
vdf,0,*out,rif,,EVENING,2012-02-28T00:00:00Z
vdf,0,*out,minu,,EVENING,2012-01-01T00:00:00Z
vdf,0,*out,*any,,EVENING,2012-02-28T00:00:00Z
vdf,0,*out,one,,STANDARD,2012-02-28T00:00:00Z
vdf,0,*out,inf,inf,STANDARD,2012-02-28T00:00:00Z
vdf,0,*out,fall,one|rif,PREMIUM,2012-02-28T00:00:00Z
CUSTOMER_1,0,*out,rif:from:tm,2012-01-01T00:00:00Z,PREMIUM,danb
CUSTOMER_1,0,*out,rif:from:tm,2012-02-28T00:00:00Z,STANDARD,danb
CUSTOMER_2,0,*out,danb:87.139.12.167,2012-01-01T00:00:00Z,STANDARD,danb
CUSTOMER_1,0,*out,danb,2012-01-01T00:00:00Z,PREMIUM,
vdf,0,*out,rif,2012-01-01T00:00:00Z,EVENING,
vdf,0,*out,rif,2012-02-28T00:00:00Z,EVENING,
vdf,0,*out,minu,2012-01-01T00:00:00Z,EVENING,
vdf,0,*out,*any,2012-02-28T00:00:00Z,EVENING,
vdf,0,*out,one,2012-02-28T00:00:00Z,STANDARD,
vdf,0,*out,inf,2012-02-28T00:00:00Z,STANDARD,inf
vdf,0,*out,fall,2012-02-28T00:00:00Z,PREMIUM,rif
`
actions = `
MINI,TOPUP,MINUTES,*out,100,1374239002,NAT,*absolute,0,10,10
MINI,TOPUP,MINUTES,*out,100,2013-07-19T13:03:22Z,NAT,*absolute,0,10,10
`
actionTimings = `
MORE_MINUTES,MINI,ONE_TIME_RUN,10
`
actionTriggers = `
STANDARD_TRIGGER,MINUTES,*out,COUNTER,10,GERMANY_O2,SOME_1,10
STANDARD_TRIGGER,MINUTES,*out,BALANCE,200,GERMANY,SOME_2,10
STANDARD_TRIGGER,MINUTES,*out,*min_counter,10,GERMANY_O2,SOME_1,10
STANDARD_TRIGGER,MINUTES,*out,*max_balance,200,GERMANY,SOME_2,10
`
accountActions = `
vdf,minitsboy,*out,MORE_MINUTES,STANDARD_TRIGGER
@@ -144,9 +144,6 @@ func TestLoadDestinationRateTimings(t *testing.T) {
if len(csvr.activationPeriods) != 4 {
t.Error("Failed to load rate timings: ", csvr.activationPeriods)
}
//for _, ap := range csvr.activationPeriods {
//log.Print(ap.Intervals[0].Prices[1])
//}
}
func TestLoadRatingProfiles(t *testing.T) {

View File

@@ -184,11 +184,11 @@ func (dbr *DbReader) LoadRatingProfiles() error {
return err
}
for _, rp := range rpfs {
at := time.Unix(rp.activationTime, 0)
at := time.Unix(rp.ActivationTime, 0)
for _, d := range dbr.destinations {
ap, exists := dbr.activationPeriods[rp.destRatesTimingTag]
ap, exists := dbr.activationPeriods[rp.DestRatesTimingTag]
if !exists {
return errors.New(fmt.Sprintf("Could not load rating timing for tag: %v", rp.destRatesTimingTag))
return errors.New(fmt.Sprintf("Could not load rating timing for tag: %v", rp.DestRatesTimingTag))
}
newAP := &ActivationPeriod{ActivationTime: at}
//copy(newAP.Intervals, ap.Intervals)
@@ -211,12 +211,12 @@ func (dbr *DbReader) LoadRatingProfileByTag(tag string) error {
}
for _, ratingProfile := range rpm {
resultRatingProfile.FallbackKey = ratingProfile.FallbackKey // it will be the last fallback key
at := time.Unix(ratingProfile.activationTime, 0)
drtm, err := dbr.storDb.GetTpDestinationRateTimings(dbr.tpid, ratingProfile.destRatesTimingTag)
at := time.Unix(ratingProfile.ActivationTime, 0)
drtm, err := dbr.storDb.GetTpDestinationRateTimings(dbr.tpid, ratingProfile.DestRatesTimingTag)
if err != nil {
return err
} else if len(drtm) == 0 {
return fmt.Errorf("No DestRateTimings profile with id: %s", ratingProfile.destRatesTimingTag)
return fmt.Errorf("No DestRateTimings profile with id: %s", ratingProfile.DestRatesTimingTag)
}
for _, destrateTiming := range drtm {
tm, err := dbr.storDb.GetTpTimings(dbr.tpid, destrateTiming.TimingsTag)
@@ -249,7 +249,7 @@ func (dbr *DbReader) LoadRatingProfileByTag(tag string) error {
return err
}
for _, destination := range dm {
ap := activationPeriods[ratingProfile.destRatesTimingTag]
ap := activationPeriods[ratingProfile.DestRatesTimingTag]
newAP := &ActivationPeriod{ActivationTime: at}
newAP.Intervals = append(newAP.Intervals, ap.Intervals...)
resultRatingProfile.AddActivationPeriodIfNotPresent(destination.Id, newAP)

View File

@@ -25,8 +25,10 @@ import (
"github.com/cgrates/cgrates/utils"
"log"
"os"
"path"
"regexp"
"strconv"
"strings"
)
type TPLoader interface {
@@ -202,41 +204,89 @@ func ValidateCSVData(fn string, re *regexp.Regexp) (err error) {
return
}
type TPCSVRowValidator struct {
FileName string // File name
Rule *regexp.Regexp // Regexp rule
ErrMessage string // Error message
type FileLineRegexValidator struct {
FieldsPerRecord int // Number of fields in one record, useful for crosschecks
Rule *regexp.Regexp // Regexp rule
Message string // Pass this message as helper
}
var TPCSVRowValidators = []*TPCSVRowValidator{
&TPCSVRowValidator{utils.DESTINATIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\d+.?\d*){1}$`),
"Tag[0-9A-Za-z_],Prefix[0-9]"},
&TPCSVRowValidator{utils.TIMINGS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\*all\s*,\s*|(?:\d{1,4};?)+\s*,\s*|\s*,\s*){4}(?:\d{2}:\d{2}:\d{2}|\*asap){1}$`),
"Tag[0-9A-Za-z_],Years[0-9;]|*all|<empty>,Months[0-9;]|*all|<empty>,MonthDays[0-9;]|*all|<empty>,WeekDays[0-9;]|*all|<empty>,Time[0-9:]|*asap(00:00:00)"},
&TPCSVRowValidator{utils.RATES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){2}(?:\d+.?\d*,?){4}$`),
"Tag[0-9A-Za-z_],ConnectFee[0-9.],Price[0-9.],PricedUnits[0-9.],RateIncrement[0-9.]"},
&TPCSVRowValidator{utils.DESTINATION_RATES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){2}(?:\d+.?\d*,?){4}$`),
"Tag[0-9A-Za-z_],DestinationsTag[0-9A-Za-z_],RateTag[0-9A-Za-z_]"},
&TPCSVRowValidator{utils.DESTRATE_TIMINGS_CSV,
var FileValidators = map[string]*FileLineRegexValidator{
utils.DESTINATIONS_CSV: &FileLineRegexValidator{utils.DESTINATIONS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\+?\d+.?\d*){1}$`),
"Tag([0-9A-Za-z_]),Prefix([0-9])"},
utils.TIMINGS_CSV: &FileLineRegexValidator{utils.TIMINGS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\*any\s*,\s*|(?:\d{1,4};?)+\s*,\s*|\s*,\s*){4}(?:\d{2}:\d{2}:\d{2}|\*asap){1}$`),
"Tag([0-9A-Za-z_]),Years([0-9;]|*all|<empty>),Months([0-9;]|*all|<empty>),MonthDays([0-9;]|*all|<empty>),WeekDays([0-9;]|*all|<empty>),Time([0-9:]|*asap)"},
utils.RATES_CSV: &FileLineRegexValidator{utils.RATES_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\d+\.?\d*,){5}(?:\*\w+,){1}(?:\d+\.?\d*,?){2}$`),
"Tag([0-9A-Za-z_]),ConnectFee([0-9.]),Rate([0-9.]),RatedUnits([0-9.]),RateIncrement([0-9.])"},
utils.DESTINATION_RATES_CSV: &FileLineRegexValidator{utils.DESTINATION_RATES_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,?\s*){3}$`),
"Tag([0-9A-Za-z_]),DestinationsTag([0-9A-Za-z_]),RateTag([0-9A-Za-z_])"},
utils.DESTRATE_TIMINGS_CSV: &FileLineRegexValidator{utils.DESTRATE_TIMINGS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:\d+.?\d*){1}$`),
"Tag[0-9A-Za-z_],DestinationRatesTag[0-9A-Za-z_],TimingProfile[0-9A-Za-z_],Weight[0-9.]"},
&TPCSVRowValidator{utils.RATE_PROFILES_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\d+\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\*all\s*,\s*|[\w:\.]+\s*,\s*){1}(?:\w*\s*,\s*){1}(?:\w+\s*,\s*){1}(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z){1}$`),
"Tenant[0-9A-Za-z_],TOR[0-9],Direction OUT|IN,Subject[0-9A-Za-z_:.]|*all,RatesFallbackSubject[0-9A-Za-z_]|<empty>,RatesTimingTag[0-9A-Za-z_],ActivationTime[[0-9T:X]] (2012-01-01T00:00:00Z)"},
&TPCSVRowValidator{utils.ACTIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\d+\s*,\s*){1}(?:\w+\s*,\s*|\*all\s*,\s*){1}(?:ABSOLUTE\s*,\s*|PERCENT\s*,\s*|\s*,\s*){1}(?:\d*\.?\d*\s*,?\s*){3}$`),
"Tag[0-9A-Za-z_],Action[0-9A-Za-z_],BalanceTag[0-9A-Za-z_],Direction OUT|IN,Units[0-9],DestinationTag[0-9A-Za-z_]|*all,PriceType ABSOLUT|PERCENT,PriceValue[0-9.],MinutesWeight[0-9.],Weight[0-9.]"},
&TPCSVRowValidator{utils.ACTION_TIMINGS_CSV,
"Tag([0-9A-Za-z_]),DestinationRatesTag([0-9A-Za-z_]),TimingProfile([0-9A-Za-z_]),Weight([0-9.])"},
utils.RATE_PROFILES_CSV: &FileLineRegexValidator{utils.RATE_PROFILES_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){2}(?:\*out\s*,\s*){1}(?:\*any\s*,\s*|\w+\s*,\s*){1}(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z){1}(?:\w*\s*,?\s*){2}$`),
"Tenant([0-9A-Za-z_]),TOR([0-9A-Za-z_]),Direction(*out),Subject([0-9A-Za-z_]|*all),RatesFallbackSubject([0-9A-Za-z_]|<empty>),RatesTimingTag([0-9A-Za-z_]),ActivationTime([0-9T:X])"},
utils.ACTIONS_CSV: &FileLineRegexValidator{utils.ACTIONS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*),(?:\*\w+\s*),(?:\*\w+\s*),(?:\*out\s*),(?:\d+\s*),(?:\*\w+\s*|\+\d+[smh]\s*|\d+\s*),(?:\*any|\w+\s*),(?:\*\w+\s*)?,(?:\d+\.?\d*\s*)?,(?:\d+\.?\d*\s*)?,(?:\d+\.?\d*\s*)$`),
"Tag([0-9A-Za-z_]),Action([0-9A-Za-z_]),BalanceType([*a-z_]),Direction(*out),Units([0-9]),ExpiryTime(*[a-z_]|+[0-9][smh]|[0-9])DestinationTag([0-9A-Za-z_]|*all),RateType(*[a-z_]),RateValue([0-9.]),MinutesWeight([0-9.]),Weight([0-9.])"},
utils.ACTION_TIMINGS_CSV: &FileLineRegexValidator{utils.ACTION_TIMINGS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){3}(?:\d+\.?\d*){1}`),
"Tag[0-9A-Za-z_],ActionsTag[0-9A-Za-z_],TimingTag[0-9A-Za-z_],Weight[0-9.]"},
&TPCSVRowValidator{utils.ACTION_TRIGGERS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:MONETARY\s*,\s*|SMS\s*,\s*|MINUTES\s*,\s*|INTERNET\s*,\s*|INTERNET_TIME\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\d+\.?\d*\s*,\s*){1}(?:\w+\s*,\s*|\*all\s*,\s*){1}(?:\w+\s*,\s*){1}(?:\d+\.?\d*){1}$`),
"Tag[0-9A-Za-z_],BalanceTag MONETARY|SMS|MINUTES|INTERNET|INTERNET_TIME,Direction OUT|IN,ThresholdValue[0-9.],DestinationTag[0-9A-Za-z_]|*all,ActionsTag[0-9A-Za-z_],Weight[0-9.]"},
&TPCSVRowValidator{utils.ACCOUNT_ACTIONS_CSV,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:[\w:.]+\s*,\s*){1}(?:OUT\s*,\s*|IN\s*,\s*){1}(?:\w+\s*,?\s*){2}$`),
"Tenant[0-9A-Za-z_],Account[0-9A-Za-z_:.],Direction OUT|IN,ActionTimingsTag[0-9A-Za-z_],ActionTriggersTag[0-9A-Za-z_]"},
"Tag([0-9A-Za-z_]),ActionsTag([0-9A-Za-z_]),TimingTag([0-9A-Za-z_]),Weight([0-9.])"},
utils.ACTION_TRIGGERS_CSV: &FileLineRegexValidator{utils.ACTION_TRIGGERS_NRCOLS,
regexp.MustCompile(`(?:\w+),(?:\*\w+),(?:\*out),(?:\*\w+),(?:\d+\.?\d*),(?:\w+|\*any)?,(?:\w+),(?:\d+\.?\d*)$`),
"Tag([0-9A-Za-z_]),BalanceType(*[a-z_]),Direction(*out),ThresholdType(*[a-z_]),ThresholdValue([0-9]+),DestinationTag([0-9A-Za-z_]|*all),ActionsTag([0-9A-Za-z_]),Weight([0-9]+)"},
utils.ACCOUNT_ACTIONS_CSV: &FileLineRegexValidator{utils.ACCOUNT_ACTIONS_NRCOLS,
regexp.MustCompile(`(?:\w+\s*,\s*){1}(?:\w+\s*,\s*){1}(?:\*out\s*,\s*){1}(?:\w+\s*,?\s*){2}$`),
"Tenant([0-9A-Za-z_]),Account([0-9A-Za-z_.]),Direction(*out),ActionTimingsTag([0-9A-Za-z_]),ActionTriggersTag([0-9A-Za-z_])"},
}
func NewTPCSVFileParser(dirPath, fileName string) (*TPCSVFileParser, error) {
validator, hasValidator := FileValidators[fileName]
if !hasValidator {
return nil, fmt.Errorf("No validator found for file <%s>", fileName)
}
// Open the file here
fin, err := os.Open(path.Join(dirPath, fileName))
if err != nil {
return nil, err
}
//defer fin.Close()
reader := bufio.NewReader(fin)
return &TPCSVFileParser{validator, reader}, nil
}
// Opens the connection to a file and returns the parsed lines one by one when ParseNextLine() is called
type TPCSVFileParser struct {
validator *FileLineRegexValidator // Row validator
reader *bufio.Reader // Reader to the file we are interested in
}
func (self *TPCSVFileParser) ParseNextLine() ([]string, error) {
line, truncated, err := self.reader.ReadLine()
if err != nil {
return nil, err
} else if truncated {
return nil, errors.New("Line too long.")
}
// skip commented lines
if strings.HasPrefix(string(line), string(utils.COMMENT_CHAR)) {
return nil, errors.New("Line starts with comment character.")
}
// Validate here string line
if !self.validator.Rule.Match(line) {
return nil, fmt.Errorf("Invalid line, <%s>", self.validator.Message)
}
// Open csv reader directly on string line
csvReader, _, err := openStringCSVReader(string(line), ',', self.validator.FieldsPerRecord)
if err != nil {
return nil, err
}
record, err := csvReader.Read() // if no errors, record should be good to go having right format and length
if err != nil {
return nil, err
}
return record, nil
}
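
A hedged usage sketch of this parser; the helper name, package clause, logging and directory handling are illustrative only and not part of this commit:

package engine // assumed: same package as TPCSVFileParser above

import (
    "io"
    "log"

    "github.com/cgrates/cgrates/utils"
)

func loadDestinationsSketch(dirPath string) {
    parser, err := NewTPCSVFileParser(dirPath, utils.DESTINATIONS_CSV)
    if err != nil {
        log.Print("cannot open destinations file: ", err)
        return
    }
    for {
        record, err := parser.ParseNextLine()
        if err == io.EOF { // bufio reports io.EOF once the file is exhausted
            break
        } else if err != nil {
            continue // commented and invalid lines surface as errors and are skipped here
        }
        // record holds exactly DESTINATIONS_NRCOLS fields, e.g. [GERMANY +49]
        log.Printf("destination tag=%s prefix=%s", record[0], record[1])
    }
}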

View File

@@ -29,11 +29,11 @@ const (
)
type RatingProfile struct {
Id string
FallbackKey string
DestinationMap map[string][]*ActivationPeriod
tag, destRatesTimingTag string // used only for loading
activationTime int64
Id string
FallbackKey string // FallbackKey is used as complete combination of Tenant:TOR:Direction:Subject
DestinationMap map[string][]*ActivationPeriod
Tag, Tenant, TOR, Direction, Subject, DestRatesTimingTag, RatesFallbackSubject string // used only for loading
ActivationTime int64
}
// Adds an activation period that applyes to current rating profile if not already present.

View File

@@ -68,27 +68,27 @@ type DataStorage interface {
GetTPDestination(string, string) (*Destination, error)
GetTPDestinationIds(string) ([]string, error)
ExistsTPRate(string, string) (bool, error)
SetTPRate(*utils.TPRate) error
SetTPRates(string, map[string][]*Rate) error
GetTPRate(string, string) (*utils.TPRate, error)
GetTPRateIds(string) ([]string, error)
ExistsTPDestinationRate(string, string) (bool, error)
SetTPDestinationRate(*utils.TPDestinationRate) error
SetTPDestinationRates(string, map[string][]*DestinationRate) error
GetTPDestinationRate(string, string) (*utils.TPDestinationRate, error)
GetTPDestinationRateIds(string) ([]string, error)
ExistsTPDestRateTiming(string, string) (bool, error)
SetTPDestRateTiming(*utils.TPDestRateTiming) error
SetTPDestRateTimings(string, map[string][]*DestinationRateTiming) error
GetTPDestRateTiming(string, string) (*utils.TPDestRateTiming, error)
GetTPDestRateTimingIds(string) ([]string, error)
ExistsTPRateProfile(string, string) (bool, error)
SetTPRateProfile(*utils.TPRateProfile) error
GetTPRateProfile(string, string) (*utils.TPRateProfile, error)
GetTPRateProfileIds(*utils.AttrTPRateProfileIds) ([]string, error)
ExistsTPRatingProfile(string, string) (bool, error)
SetTPRatingProfiles(string, map[string][]*RatingProfile) error
GetTPRatingProfile(string, string) (*utils.TPRatingProfile, error)
GetTPRatingProfileIds(*utils.AttrTPRatingProfileIds) ([]string, error)
ExistsTPActions(string, string) (bool, error)
SetTPActions(*utils.TPActions) error
SetTPActions(string, map[string][]*Action) error
GetTPActions(string, string) (*utils.TPActions, error)
GetTPActionIds(string) ([]string, error)
ExistsTPActionTimings(string, string) (bool, error)
SetTPActionTimings(string, map[string][]*utils.TPActionTimingsRow) error
SetTPActionTimings(string, map[string][]*ActionTiming) error
GetTPActionTimings(string, string) (map[string][]*utils.TPActionTimingsRow, error)
GetTPActionTimingIds(string) ([]string, error)
ExistsTPActionTriggers(string, string) (bool, error)

View File

@@ -113,7 +113,7 @@ func (ms *MapStorage) ExistsTPRate(tpid, rtId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPRate(rt *utils.TPRate) error {
func (ms *MapStorage) SetTPRates(tpid string, rts map[string][]*Rate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -129,7 +129,7 @@ func (ms *MapStorage) ExistsTPDestinationRate(tpid, drId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPDestinationRate(dr *utils.TPDestinationRate) error {
func (ms *MapStorage) SetTPDestinationRates(tpid string, drs map[string][]*DestinationRate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -145,7 +145,7 @@ func (ms *MapStorage) ExistsTPDestRateTiming(tpid, drtId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPDestRateTiming(drt *utils.TPDestRateTiming) error {
func (ms *MapStorage) SetTPDestRateTimings(tpid string, drts map[string][]*DestinationRateTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -157,19 +157,19 @@ func (ms *MapStorage) GetTPDestRateTimingIds(tpid string) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) ExistsTPRateProfile(tpid, rpId string) (bool, error) {
func (ms *MapStorage) ExistsTPRatingProfile(tpid, rpId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPRateProfile(rp *utils.TPRateProfile) error {
func (ms *MapStorage) SetTPRatingProfiles(tpid string, rps map[string][]*RatingProfile) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) GetTPRateProfile(tpid, rpId string) (*utils.TPRateProfile, error) {
func (ms *MapStorage) GetTPRatingProfile(tpid, rpId string) (*utils.TPRatingProfile, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) GetTPRateProfileIds(filters *utils.AttrTPRateProfileIds) ([]string, error) {
func (ms *MapStorage) GetTPRatingProfileIds(filters *utils.AttrTPRatingProfileIds) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -177,7 +177,7 @@ func (ms *MapStorage) ExistsTPActions(tpid, aId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPActions(ap *utils.TPActions) error {
func (ms *MapStorage) SetTPActions(tpid string, acts map[string][]*Action) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -193,7 +193,7 @@ func (ms *MapStorage) ExistsTPActionTimings(tpid, atId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MapStorage) SetTPActionTimings(tpid string, ats map[string][]*utils.TPActionTimingsRow) error {
func (ms *MapStorage) SetTPActionTimings(tpid string, ats map[string][]*ActionTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}

View File

@@ -188,7 +188,7 @@ func (ms *MongoStorage) ExistsTPRate(tpid, rtId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPRate(rt *utils.TPRate) error {
func (ms *MongoStorage) SetTPRates(tpid string, rts map[string][]*Rate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -204,7 +204,7 @@ func (ms *MongoStorage) ExistsTPDestinationRate(tpid, drId string) (bool, error)
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPDestinationRate(dr *utils.TPDestinationRate) error {
func (ms *MongoStorage) SetTPDestinationRates(tpid string, drs map[string][]*DestinationRate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -220,7 +220,7 @@ func (ms *MongoStorage) ExistsTPDestRateTiming(tpid, drtId string) (bool, error)
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPDestRateTiming(drt *utils.TPDestRateTiming) error {
func (ms *MongoStorage) SetTPDestRateTimings(tpid string, drts map[string][]*DestinationRateTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -232,19 +232,19 @@ func (ms *MongoStorage) GetTPDestRateTimingIds(tpid string) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) ExistsTPRateProfile(tpid, rpId string) (bool, error) {
func (ms *MongoStorage) ExistsTPRatingProfile(tpid, rpId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPRateProfile(rp *utils.TPRateProfile) error {
func (ms *MongoStorage) SetTPRatingProfiles(tpid string, rps map[string][]*RatingProfile) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) GetTPRateProfile(tpid, rpId string) (*utils.TPRateProfile, error) {
func (ms *MongoStorage) GetTPRatingProfile(tpid, rpId string) (*utils.TPRatingProfile, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) GetTPRateProfileIds(filters *utils.AttrTPRateProfileIds) ([]string, error) {
func (ms *MongoStorage) GetTPRatingProfileIds(filters *utils.AttrTPRatingProfileIds) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -252,7 +252,7 @@ func (ms *MongoStorage) ExistsTPActions(tpid, aId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPActions(ap *utils.TPActions) error {
func (ms *MongoStorage) SetTPActions(tpid string, acts map[string][]*Action) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -268,7 +268,7 @@ func (ms *MongoStorage) ExistsTPActionTimings(tpid, atId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (ms *MongoStorage) SetTPActionTimings(tpid string, ats map[string][]*utils.TPActionTimingsRow) error {
func (ms *MongoStorage) SetTPActionTimings(tpid string, ats map[string][]*ActionTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}

View File

@@ -143,7 +143,7 @@ func (rs *RedisStorage) ExistsTPRate(tpid, rtId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPRate(rt *utils.TPRate) error {
func (rs *RedisStorage) SetTPRates(tpid string, rts map[string][]*Rate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -159,7 +159,7 @@ func (rs *RedisStorage) ExistsTPDestinationRate(tpid, drId string) (bool, error)
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPDestinationRate(dr *utils.TPDestinationRate) error {
func (rs *RedisStorage) SetTPDestinationRates(tpid string, drs map[string][]*DestinationRate) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -175,7 +175,7 @@ func (rs *RedisStorage) ExistsTPDestRateTiming(tpid, drtId string) (bool, error)
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPDestRateTiming(drt *utils.TPDestRateTiming) error {
func (rs *RedisStorage) SetTPDestRateTimings(tpid string, drts map[string][]*DestinationRateTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -187,19 +187,19 @@ func (rs *RedisStorage) GetTPDestRateTimingIds(tpid string) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) ExistsTPRateProfile(tpid, rpId string) (bool, error) {
func (rs *RedisStorage) ExistsTPRatingProfile(tpid, rpId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPRateProfile(rp *utils.TPRateProfile) error {
func (rs *RedisStorage) SetTPRatingProfiles(tpid string, rps map[string][]*RatingProfile) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) GetTPRateProfile(tpid, rpId string) (*utils.TPRateProfile, error) {
func (rs *RedisStorage) GetTPRatingProfile(tpid, rpId string) (*utils.TPRatingProfile, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) GetTPRateProfileIds(filters *utils.AttrTPRateProfileIds) ([]string, error) {
func (rs *RedisStorage) GetTPRatingProfileIds(filters *utils.AttrTPRatingProfileIds) ([]string, error) {
return nil, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -207,7 +207,7 @@ func (rs *RedisStorage) ExistsTPActions(tpid, aId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPActions(ap *utils.TPActions) error {
func (rs *RedisStorage) SetTPActions(tpid string, acts map[string][]*Action) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}
@@ -223,7 +223,7 @@ func (rs *RedisStorage) ExistsTPActionTimings(tpid, atId string) (bool, error) {
return false, errors.New(utils.ERR_NOT_IMPLEMENTED)
}
func (rs *RedisStorage) SetTPActionTimings(tpid string, ats map[string][]*utils.TPActionTimingsRow) error {
func (rs *RedisStorage) SetTPActionTimings(tpid string, ats map[string][]*ActionTiming) error {
return errors.New(utils.ERR_NOT_IMPLEMENTED)
}

View File

@@ -208,19 +208,31 @@ func (self *SQLStorage) ExistsTPRate(tpid, rtId string) (bool, error) {
return exists, nil
}
func (self *SQLStorage) SetTPRate(rt *utils.TPRate) error {
for _, rtSlot := range rt.RateSlots {
if _, err := self.Db.Exec(fmt.Sprintf("INSERT INTO %s (tpid, tag, connect_fee, rate, rated_units, rate_increments, rounding_method, rounding_decimals, weight) VALUES ('%s', '%s', %f, %f, %d, %d,'%s', %d, %f)",
utils.TBL_TP_RATES, rt.TPid, rt.RateId, rtSlot.ConnectFee, rtSlot.Rate, rtSlot.RatedUnits, rtSlot.RateIncrements,
rtSlot.RoundingMethod, rtSlot.RoundingDecimals, rtSlot.Weight)); err != nil {
return err
func (self *SQLStorage) SetTPRates(tpid string, rts map[string][]*Rate) error {
if len(rts) == 0 {
return nil //Nothing to set
}
qry := fmt.Sprintf("INSERT INTO %s (tpid, tag, connect_fee, rate, rated_units, rate_increments, group_interval, rounding_method, rounding_decimals, weight) VALUES ", utils.TBL_TP_RATES)
i := 0
for rtId, rtRows := range rts {
for _, rt := range rtRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s', '%s', %f, %f, %d, %d,%d,'%s', %d, %f)",
tpid, rtId, rt.ConnectFee, rt.Price, int(rt.PricedUnits), int(rt.RateIncrements), int(rt.GroupInterval),
rt.RoundingMethod, rt.RoundingDecimals, rt.Weight)
i++
}
}
if _, err := self.Db.Exec(qry); err != nil {
return err
}
return nil
}
func (self *SQLStorage) GetTPRate(tpid, rtId string) (*utils.TPRate, error) {
rows, err := self.Db.Query(fmt.Sprintf("SELECT connect_fee, rate, rated_units, rate_increments, rounding_method, rounding_decimals, weight FROM %s WHERE tpid='%s' AND tag='%s'", utils.TBL_TP_RATES, tpid, rtId))
rows, err := self.Db.Query(fmt.Sprintf("SELECT connect_fee, rate, rated_units, rate_increments, group_interval, rounding_method, rounding_decimals, weight FROM %s WHERE tpid='%s' AND tag='%s'", utils.TBL_TP_RATES, tpid, rtId))
if err != nil {
return nil, err
}
@@ -230,13 +242,14 @@ func (self *SQLStorage) GetTPRate(tpid, rtId string) (*utils.TPRate, error) {
for rows.Next() {
i++ //Keep here a reference so we know we got at least one result
var connectFee, rate, weight float64
var ratedUnits, rateIncrements, roundingDecimals int
var ratedUnits, rateIncrements, roundingDecimals, groupInterval int
var roundingMethod string
err = rows.Scan(&connectFee, &rate, &ratedUnits, &rateIncrements, &roundingMethod, &roundingDecimals, &weight)
err = rows.Scan(&connectFee, &rate, &ratedUnits, &rateIncrements, &groupInterval, &roundingMethod, &roundingDecimals, &weight)
if err != nil {
return nil, err
}
rt.RateSlots = append(rt.RateSlots, utils.RateSlot{connectFee, rate, ratedUnits, rateIncrements, roundingMethod, roundingDecimals, weight})
rt.RateSlots = append(rt.RateSlots, utils.RateSlot{connectFee, rate, ratedUnits, rateIncrements, groupInterval,
roundingMethod, roundingDecimals, weight})
}
if i == 0 {
return nil, nil
@@ -276,17 +289,21 @@ func (self *SQLStorage) ExistsTPDestinationRate(tpid, drId string) (bool, error)
return exists, nil
}
func (self *SQLStorage) SetTPDestinationRate(dr *utils.TPDestinationRate) error {
if len(dr.DestinationRates) == 0 {
func (self *SQLStorage) SetTPDestinationRates(tpid string, drs map[string][]*DestinationRate) error {
if len(drs) == 0 {
return nil //Nothing to set
}
// Using multiple values in a single query to spare some network round trips
qry := fmt.Sprintf("INSERT INTO %s (tpid, tag, destinations_tag, rates_tag) VALUES ", utils.TBL_TP_DESTINATION_RATES)
for idx, drPair := range dr.DestinationRates {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
i := 0
for drId, drRows := range drs {
for _, dr := range drRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s','%s','%s','%s')",
tpid, drId, dr.DestinationsTag, dr.RateTag)
i++
}
qry += fmt.Sprintf("('%s','%s','%s','%s')", dr.TPid, dr.DestinationRateId, drPair.DestinationId, drPair.RateId)
}
if _, err := self.Db.Exec(qry); err != nil {
return err
@@ -349,17 +366,21 @@ func (self *SQLStorage) ExistsTPDestRateTiming(tpid, drtId string) (bool, error)
return exists, nil
}
func (self *SQLStorage) SetTPDestRateTiming(drt *utils.TPDestRateTiming) error {
if len(drt.DestRateTimings) == 0 {
func (self *SQLStorage) SetTPDestRateTimings(tpid string, drts map[string][]*DestinationRateTiming) error {
if len(drts) == 0 {
return nil //Nothing to set
}
// Using multiple values in a single query to spare some network round trips
qry := fmt.Sprintf("INSERT INTO %s (tpid, tag, destrates_tag, timing_tag, weight) VALUES ", utils.TBL_TP_DESTRATE_TIMINGS)
for idx, drtPair := range drt.DestRateTimings {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
i := 0
for drtId, drtRows := range drts {
for _, drt := range drtRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s','%s','%s','%s',%f)",
tpid, drtId, drt.DestinationRatesTag, drt.TimingsTag, drt.Weight)
i++
}
qry += fmt.Sprintf("('%s','%s','%s','%s',%f)", drt.TPid, drt.DestRateTimingId, drtPair.DestRatesId, drtPair.TimingId, drtPair.Weight)
}
if _, err := self.Db.Exec(qry); err != nil {
return err
@@ -414,7 +435,7 @@ func (self *SQLStorage) GetTPDestRateTimingIds(tpid string) ([]string, error) {
return ids, nil
}
func (self *SQLStorage) ExistsTPRateProfile(tpid, rpId string) (bool, error) {
func (self *SQLStorage) ExistsTPRatingProfile(tpid, rpId string) (bool, error) {
var exists bool
err := self.Db.QueryRow(fmt.Sprintf("SELECT EXISTS (SELECT 1 FROM %s WHERE tpid='%s' AND tag='%s')", utils.TBL_TP_RATE_PROFILES, tpid, rpId)).Scan(&exists)
if err != nil {
@@ -423,20 +444,23 @@ func (self *SQLStorage) ExistsTPRateProfile(tpid, rpId string) (bool, error) {
return exists, nil
}
func (self *SQLStorage) SetTPRateProfile(rp *utils.TPRateProfile) error {
var qry string
if len(rp.RatingActivations) == 0 { // Possibility to only set fallback rate subject
qry = fmt.Sprintf("INSERT INTO %s (tpid,tag,tenant,tor,direction,subject,activation_time,destrates_timing_tag,rates_fallback_subject) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', 0,'','%s')",
utils.TBL_TP_RATE_PROFILES, rp.TPid, rp.RateProfileId, rp.Tenant, rp.TOR, rp.Direction, rp.Subject, rp.RatesFallbackSubject)
} else {
qry = fmt.Sprintf("INSERT INTO %s (tpid,tag,tenant,tor,direction,subject,activation_time,destrates_timing_tag,rates_fallback_subject) VALUES ", utils.TBL_TP_RATE_PROFILES)
// Using multiple values in query to spare some network processing time
for idx, rpa := range rp.RatingActivations {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
func (self *SQLStorage) SetTPRatingProfiles(tpid string, rps map[string][]*RatingProfile) error {
if len(rps) == 0 {
return nil //Nothing to set
}
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,tenant,tor,direction,subject,activation_time,destrates_timing_tag,rates_fallback_subject) VALUES ",
utils.TBL_TP_RATE_PROFILES)
i := 0
for rpId, rp := range rps {
for _, rpa := range rp {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s', '%s', '%s', '%s', '%s', '%s', %d,'%s','%s')", rp.TPid, rp.RateProfileId, rp.Tenant, rp.TOR, rp.Direction, rp.Subject, rpa.ActivationTime, rpa.DestRateTimingId, rp.RatesFallbackSubject)
qry += fmt.Sprintf("('%s', '%s', '%s', '%s', '%s', '%s', %d,'%s','%s')", tpid, rpId, rpa.Tenant, rpa.TOR, rpa.Direction,
rpa.Subject, rpa.ActivationTime, rpa.DestRatesTimingTag, rpa.RatesFallbackSubject)
i++
}
}
if _, err := self.Db.Exec(qry); err != nil {
return err
@@ -444,13 +468,13 @@ func (self *SQLStorage) SetTPRateProfile(rp *utils.TPRateProfile) error {
return nil
}
func (self *SQLStorage) GetTPRateProfile(tpid, rpId string) (*utils.TPRateProfile, error) {
func (self *SQLStorage) GetTPRatingProfile(tpid, rpId string) (*utils.TPRatingProfile, error) {
rows, err := self.Db.Query(fmt.Sprintf("SELECT tenant,tor,direction,subject,activation_time,destrates_timing_tag,rates_fallback_subject FROM %s WHERE tpid='%s' AND tag='%s'", utils.TBL_TP_RATE_PROFILES, tpid, rpId))
if err != nil {
return nil, err
}
defer rows.Close()
rp := &utils.TPRateProfile{TPid: tpid, RateProfileId: rpId}
rp := &utils.TPRatingProfile{TPid: tpid, RatingProfileId: rpId}
i := 0
for rows.Next() {
i++ //Keep here a reference so we know we got at least one result
@@ -475,7 +499,7 @@ func (self *SQLStorage) GetTPRateProfile(tpid, rpId string) (*utils.TPRateProfil
return rp, nil
}
func (self *SQLStorage) GetTPRateProfileIds(filters *utils.AttrTPRateProfileIds) ([]string, error) {
func (self *SQLStorage) GetTPRatingProfileIds(filters *utils.AttrTPRatingProfileIds) ([]string, error) {
qry := fmt.Sprintf("SELECT DISTINCT tag FROM %s where tpid='%s'", utils.TBL_TP_RATE_PROFILES, filters.TPid)
if filters.Tenant != "" {
qry += fmt.Sprintf(" AND tenant='%s'", filters.Tenant)
@@ -520,19 +544,26 @@ func (self *SQLStorage) ExistsTPActions(tpid, actsId string) (bool, error) {
return exists, nil
}
func (self *SQLStorage) SetTPActions(acts *utils.TPActions) error {
if len(acts.Actions) == 0 {
func (self *SQLStorage) SetTPActions(tpid string, acts map[string][]*Action) error {
if len(acts) == 0 {
return nil //Nothing to set
}
// Using multiple values in a single query to spare some network round trips
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,action,balance_tag,direction,units,expiration_time,destination_tag,rate_type,rate, minutes_weight,weight) VALUES ", utils.TBL_TP_ACTIONS)
for idx, act := range acts.Actions {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,action,balance_type,direction,units,expiry_time,destination_tag,rate_type,rate, minutes_weight,weight) VALUES ", utils.TBL_TP_ACTIONS)
i := 0
for actId, actRows := range acts {
for _, act := range actRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
var expTime int64
if !act.ExpirationDate.IsZero() {
expTime = act.ExpirationDate.Unix()
}
qry += fmt.Sprintf("('%s','%s','%s','%s','%s',%f,%d,'%s','%s',%f,%f,%f)",
tpid, actId, act.ActionType, act.BalanceId, act.Direction, act.Units, expTime,
act.DestinationTag, act.RateType, act.RateValue, act.MinutesWeight, act.Weight)
i++
}
qry += fmt.Sprintf("('%s','%s','%s','%s','%s',%f,%d,'%s','%s',%f,%f,%f)",
acts.TPid, acts.ActionsId, act.Identifier, act.BalanceId, act.Direction, act.Units, act.ExpirationTime,
act.DestinationId, act.RateType, act.Rate, act.MinutesWeight, act.Weight)
}
if _, err := self.Db.Exec(qry); err != nil {
return err
@@ -541,7 +572,7 @@ func (self *SQLStorage) SetTPActions(acts *utils.TPActions) error {
}
func (self *SQLStorage) GetTPActions(tpid, actsId string) (*utils.TPActions, error) {
rows, err := self.Db.Query(fmt.Sprintf("SELECT action,balance_tag,direction,units,expiration_time,destination_tag,rate_type,rate, minutes_weight,weight FROM %s WHERE tpid='%s' AND tag='%s'", utils.TBL_TP_ACTIONS, tpid, actsId))
rows, err := self.Db.Query(fmt.Sprintf("SELECT action,balance_type,direction,units,expiry_time,destination_tag,rate_type,rate, minutes_weight,weight FROM %s WHERE tpid='%s' AND tag='%s'", utils.TBL_TP_ACTIONS, tpid, actsId))
if err != nil {
return nil, err
}
@@ -595,18 +626,20 @@ func (self *SQLStorage) ExistsTPActionTimings(tpid, atId string) (bool, error) {
}
// Sets actionTimings in sqlDB. Input is expected in the form map[actionTimingId][]rows, e.g. a full .csv file content
func (self *SQLStorage) SetTPActionTimings(tpid string, ats map[string][]*utils.TPActionTimingsRow) error {
func (self *SQLStorage) SetTPActionTimings(tpid string, ats map[string][]*ActionTiming) error {
if len(ats) == 0 {
return nil //Nothing to set
}
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,actions_tag,timing_tag,weight) VALUES ", utils.TBL_TP_ACTION_TIMINGS)
i := 0
for atId, atRows := range ats {
for idx, atsRow := range atRows {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
for _, at := range atRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s','%s','%s','%s',%f)",
tpid, atId, atsRow.ActionsId, atsRow.TimingId, atsRow.Weight)
tpid, atId, at.ActionsTag, at.TimingsTag, at.Weight)
i++
}
}
if _, err := self.Db.Exec(qry); err != nil {
@@ -674,16 +707,18 @@ func (self *SQLStorage) SetTPActionTriggers(tpid string, ats map[string][]*Actio
if len(ats) == 0 {
return nil //Nothing to set
}
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,balance_tag,direction,threshold_type,threshold_value,destination_tag,actions_tag,weight) VALUES ",
qry := fmt.Sprintf("INSERT INTO %s (tpid,tag,balance_type,direction,threshold_type,threshold_value,destination_tag,actions_tag,weight) VALUES ",
utils.TBL_TP_ACTION_TRIGGERS)
i := 0
for atId, atRows := range ats {
for idx, atsRow := range atRows {
if idx != 0 { //Consecutive values after the first will be prefixed with "," as separator
for _, atsRow := range atRows {
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s','%s','%s','%s','%s', %f, '%s','%s',%f)",
tpid, atId, atsRow.BalanceId, atsRow.Direction, atsRow.ThresholdType,
atsRow.ThresholdValue, atsRow.DestinationId, atsRow.ActionsId, atsRow.Weight)
i++
}
}
if _, err := self.Db.Exec(qry); err != nil {
@@ -731,12 +766,12 @@ func (self *SQLStorage) SetTPAccountActions(tpid string, aa map[string]*AccountA
utils.TBL_TP_ACCOUNT_ACTIONS)
i := 0
for aaId, aActs := range aa {
i++
if i != 1 { //Consecutive values after the first will be prefixed with "," as separator
if i != 0 { //Consecutive values after the first will be prefixed with "," as separator
qry += ","
}
qry += fmt.Sprintf("('%s','%s','%s','%s','%s','%s','%s')",
tpid, aaId, aActs.Tenant, aActs.Account, aActs.Direction, aActs.ActionTimingsTag, aActs.ActionTriggersTag)
i++
}
if _, err := self.Db.Exec(qry); err != nil {
return err
@@ -1048,12 +1083,12 @@ func (self *SQLStorage) GetTpRatingProfiles(tpid, tag string) (map[string]*Ratin
}
key := fmt.Sprintf("%s:%s:%s:%s", direction, tenant, tor, subject)
rp, ok := rpfs[key]
if !ok || rp.tag != tag {
rp = &RatingProfile{Id: key, tag: tag}
if !ok || rp.Tag != tag {
rp = &RatingProfile{Id: key, Tag: tag}
rpfs[key] = rp
}
rp.destRatesTimingTag = destrates_timing_tag
rp.activationTime = activation_time
rp.DestRatesTimingTag = destrates_timing_tag
rp.ActivationTime = activation_time
if fallback_subject != "" {
rp.FallbackKey = fmt.Sprintf("%s:%s:%s:%s", direction, tenant, tor, fallback_subject)
}
@@ -1074,15 +1109,15 @@ func (self *SQLStorage) GetTpActions(tpid, tag string) (map[string][]*Action, er
for rows.Next() {
var id int
var units, rate, minutes_weight, weight float64
var tpid, tag, action, balance_tag, direction, destinations_tag, rate_type, expirationDate string
if err := rows.Scan(&id, &tpid, &tag, &action, &balance_tag, &direction, &units, &expirationDate, &destinations_tag, &rate_type, &rate, &minutes_weight, &weight); err != nil {
var tpid, tag, action, balance_type, direction, destinations_tag, rate_type, expirationDate string
if err := rows.Scan(&id, &tpid, &tag, &action, &balance_type, &direction, &units, &expirationDate, &destinations_tag, &rate_type, &rate, &minutes_weight, &weight); err != nil {
return nil, err
}
var a *Action
if balance_tag != MINUTES {
if balance_type != MINUTES {
a = &Action{
ActionType: action,
BalanceId: balance_tag,
BalanceId: balance_type,
Direction: direction,
Units: units,
ExpirationString: expirationDate,
@@ -1092,7 +1127,7 @@ func (self *SQLStorage) GetTpActions(tpid, tag string) (map[string][]*Action, er
a = &Action{
Id: utils.GenUUID(),
ActionType: action,
BalanceId: balance_tag,
BalanceId: balance_type,
Direction: direction,
Weight: weight,
ExpirationString: expirationDate,
@@ -1143,7 +1178,7 @@ func (self *SQLStorage) GetTpActionTimings(tpid, tag string) (ats map[string][]*
func (self *SQLStorage) GetTpActionTriggers(tpid, tag string) (map[string][]*ActionTrigger, error) {
ats := make(map[string][]*ActionTrigger)
q := fmt.Sprintf("SELECT tpid,tag,balance_tag,direction,threshold_type,threshold_value,destination_tag,actions_tag,weight FROM %s WHERE tpid='%s'",
q := fmt.Sprintf("SELECT tpid,tag,balance_type,direction,threshold_type,threshold_value,destination_tag,actions_tag,weight FROM %s WHERE tpid='%s'",
utils.TBL_TP_ACTION_TRIGGERS, tpid)
if tag != "" {
q += fmt.Sprintf(" AND tag='%s'", tag)

View File

@@ -19,9 +19,9 @@ along with this program. If not, see <http://www.gnu.org/licenses/>
package engine
import (
"strconv"
"errors"
"github.com/cgrates/cgrates/utils"
"strconv"
)
// Various helpers to deal with database
@@ -53,4 +53,3 @@ func ConfigureDatabase(db_type, host, port, name, user, pass string) (db DataSto
}
return db, nil
}

View File

@@ -18,13 +18,445 @@ along with this program. If not, see <http://www.gnu.org/licenses/>
package engine
// Import tariff plan from csv into storDb
import (
"github.com/cgrates/cgrates/utils"
"io"
"io/ioutil"
"log"
"strconv"
"time"
)
type TPImporterCSV struct {
sep rune
storDb DataStorage
// Import tariff plan from csv into storDb
type TPCSVImporter struct {
TPid string // Load data on this tpid
StorDb DataStorage // StorDb connection handle
DirPath string // Directory path to import from
Sep rune // Separator in the csv file
Verbose bool // If true, print detailed information about ignored lines instead of silently discarding them
ImportId string // Use this to differentiate between imports (e.g. when autogenerating fields like RatingProfileId)
}
func (self *TPImporterCSV) ProcessFolder (fPath string) (err error) {
// Maps each csv file to the handler that should process it. Defined with method expressions since tests on Go 1.0.3 were failing on Travis.
// Change the values to plain func(string) error method values as soon as Travis updates.
var fileHandlers = map[string]func(*TPCSVImporter, string) error{
utils.TIMINGS_CSV: (*TPCSVImporter).importTimings,
utils.DESTINATIONS_CSV: (*TPCSVImporter).importDestinations,
utils.RATES_CSV: (*TPCSVImporter).importRates,
utils.DESTINATION_RATES_CSV: (*TPCSVImporter).importDestinationRates,
utils.DESTRATE_TIMINGS_CSV: (*TPCSVImporter).importDestRateTimings,
utils.RATE_PROFILES_CSV: (*TPCSVImporter).importRatingProfiles,
utils.ACTIONS_CSV: (*TPCSVImporter).importActions,
utils.ACTION_TIMINGS_CSV: (*TPCSVImporter).importActionTimings,
utils.ACTION_TRIGGERS_CSV: (*TPCSVImporter).importActionTriggers,
utils.ACCOUNT_ACTIONS_CSV: (*TPCSVImporter).importAccountActions,
}
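// Bound-method variant of the table above, usable once the Go 1.0.3 constraint
// noted in the comment is gone. Illustrative sketch only; the boundFileHandlers
// name is hypothetical and not part of this file.
func (self *TPCSVImporter) boundFileHandlers() map[string]func(string) error {
	return map[string]func(string) error{
		utils.TIMINGS_CSV:      self.importTimings,
		utils.DESTINATIONS_CSV: self.importDestinations,
		utils.RATES_CSV:        self.importRates,
		// ...the remaining csv files map to their importers exactly as above.
	}
}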
func (self *TPCSVImporter) Run() error {
	files, err := ioutil.ReadDir(self.DirPath)
	if err != nil {
		return err
	}
for _, f := range files {
fHandler, hasName := fileHandlers[f.Name()]
if !hasName {
continue
}
fHandler(self, f.Name())
}
return nil
}
// Handler importing timings from file, saved row by row to storDb
func (self *TPCSVImporter) importTimings(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
tm := NewTiming(record...)
if err := self.StorDb.SetTPTiming(self.TPid, tm); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importDestinations(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
dst := &Destination{record[0], []string{record[1]}}
if err := self.StorDb.SetTPDestination(self.TPid, dst); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importRates(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
rt, err := NewRate(record[0], record[1], record[2], record[3], record[4], record[5], record[6], record[7], record[8])
if err != nil {
return err
}
if err := self.StorDb.SetTPRates(self.TPid, map[string][]*Rate{record[0]: []*Rate{rt}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importDestinationRates(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
dr := &DestinationRate{record[0], record[1], record[2], nil}
if err := self.StorDb.SetTPDestinationRates(self.TPid,
map[string][]*DestinationRate{dr.Tag: []*DestinationRate{dr}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importDestRateTimings(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
weight, err := strconv.ParseFloat(record[3], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
drt := &DestinationRateTiming{Tag: record[0],
DestinationRatesTag: record[1],
Weight: weight,
TimingsTag: record[2],
}
if err := self.StorDb.SetTPDestRateTimings(self.TPid, map[string][]*DestinationRateTiming{drt.Tag: []*DestinationRateTiming{drt}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importRatingProfiles(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
tenant, tor, direction, subject, destRatesTimingTag, fallbacksubject := record[0], record[1], record[2], record[3], record[5], record[6]
at, err := time.Parse(time.RFC3339, record[4])
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
}
rpTag := "TPCSV" //Autogenerate rating profile id
if self.ImportId != "" {
rpTag += "_" + self.ImportId
}
rp := &RatingProfile{Tag: rpTag,
Tenant: tenant,
TOR: tor,
Direction: direction,
Subject: subject,
ActivationTime: at.Unix(),
DestRatesTimingTag: destRatesTimingTag,
RatesFallbackSubject: fallbacksubject,
}
if err := self.StorDb.SetTPRatingProfiles(self.TPid, map[string][]*RatingProfile{rpTag: []*RatingProfile{rp}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importActions(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
actId, actionType, balanceType, direction, destTag, rateType := record[0], record[1], record[2], record[3], record[6], record[7]
units, err := strconv.ParseFloat(record[4], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
var expiryTime time.Time // The zero time value means the units never expire
if record[5] != "*unlimited" { // ToDo: expand here for other meta tags or another way of specifying the expiry time
expiryTime, err = time.Parse(time.RFC3339, record[5])
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
}
rateValue, _ := strconv.ParseFloat(record[8], 64) // Ignore the parse error here since an empty string also errors; rateType tells us whether a value was expected
minutesWeight, _ := strconv.ParseFloat(record[9], 64)
weight, err := strconv.ParseFloat(record[10], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
act := &Action{
ActionType: actionType,
BalanceId: balanceType,
Direction: direction,
Units: units,
ExpirationDate: expiryTime,
DestinationTag: destTag,
RateType: rateType,
RateValue: rateValue,
MinutesWeight: minutesWeight,
Weight: weight,
}
if err := self.StorDb.SetTPActions(self.TPid, map[string][]*Action{actId: []*Action{act}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importActionTimings(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
tag, actionsTag, timingTag := record[0], record[1], record[2]
weight, err := strconv.ParseFloat(record[3], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
at := &ActionTiming{
Tag: tag,
ActionsTag: actionsTag,
TimingsTag: timingTag,
Weight: weight,
}
if err := self.StorDb.SetTPActionTimings(self.TPid, map[string][]*ActionTiming{tag: []*ActionTiming{at}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importActionTriggers(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
tag, balanceType, direction, thresholdType, destinationTag, actionsTag := record[0], record[1], record[2], record[3], record[5], record[6]
threshold, err := strconv.ParseFloat(record[4], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
weight, err := strconv.ParseFloat(record[7], 64)
if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
at := &ActionTrigger{
BalanceId: balanceType,
Direction: direction,
ThresholdType: thresholdType,
ThresholdValue: threshold,
DestinationId: destinationTag,
Weight: weight,
ActionsId: actionsTag,
}
if err := self.StorDb.SetTPActionTriggers(self.TPid, map[string][]*ActionTrigger{tag: []*ActionTrigger{at}}); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
func (self *TPCSVImporter) importAccountActions(fn string) error {
log.Printf("Processing file: <%s> ", fn)
fParser, err := NewTPCSVFileParser(self.DirPath, fn)
if err != nil {
return err
}
lineNr := 0
for {
lineNr++
record, err := fParser.ParseNextLine()
if err == io.EOF { // Reached end of file
break
} else if err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, warning: <%s> ", lineNr, err.Error())
}
continue
}
tenant, account, direction, actionTimingsTag, actionTriggersTag := record[0], record[1], record[2], record[3], record[4]
tag := "TPCSV" //Autogenerate account actions profile id
if self.ImportId != "" {
tag += "_" + self.ImportId
}
aa := map[string]*AccountAction{
tag: &AccountAction{Tenant: tenant, Account: account, Direction: direction,
ActionTimingsTag: actionTimingsTag, ActionTriggersTag: actionTriggersTag},
}
if err := self.StorDb.SetTPAccountActions(self.TPid, aa); err != nil {
if self.Verbose {
log.Printf("Ignoring line %d, storDb operational error: <%s> ", lineNr, err.Error())
}
}
}
return nil
}
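// Minimal usage sketch for the importer above; the tariff plan id, directory path
// and helper name are illustrative assumptions, storDb can be any DataStorage handle.
func runSampleTPImport(storDb DataStorage) error {
	imp := &TPCSVImporter{
		TPid:     "TP_SAMPLE",    // hypothetical tariff plan id
		StorDb:   storDb,         // already configured storDb connection
		DirPath:  "/tmp/tp_csvs", // hypothetical folder holding the csv files above
		Sep:      ',',
		Verbose:  true,
		ImportId: "sample", // suffix used when autogenerating profile ids
	}
	return imp.Run()
}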

View File

@@ -31,6 +31,7 @@ type RateSlot struct {
Rate float64 // Rate applied
RatedUnits int // Number of billing units this rate applies to
RateIncrements int // This rate will apply in increments of duration
GroupInterval int // Group position
RoundingMethod string // Use this method to round the cost
RoundingDecimals int // Round the cost number of decimals
Weight float64 // Rate's priority when dealing with grouped rates
@@ -59,9 +60,9 @@ type DestRateTiming struct {
Weight float64 // Binding priority taken into consideration when more DestinationRates are active on a time slot
}
type TPRateProfile struct {
type TPRatingProfile struct {
TPid string // Tariff plan id
RateProfileId string // RateProfile id
RatingProfileId string // RatingProfile id
Tenant string // Tenant's Id
TOR string // TypeOfRecord
Direction string // Traffic direction, OUT is the only one supported for now
@@ -75,7 +76,7 @@ type RatingActivation struct {
DestRateTimingId string // Id of DestRateTiming profile
}
type AttrTPRateProfileIds struct {
type AttrTPRatingProfileIds struct {
TPid string // Tariff plan id
Tenant string // Tenant's Id
TOR string // TypeOfRecord
@@ -90,16 +91,16 @@ type TPActions struct {
}
type Action struct {
Identifier string // Identifier mapped in the code
BalanceId string // Type of balance the action will operate on
Direction string // Balance direction
Units float64 // Number of units to add/deduct
ExpirationTime string // Time when the units will expire
DestinationId string // Destination profile id
RateType string // Type of price <ABSOLUTE|PERCENT>
Rate float64 // Price value
MinutesWeight float64 // Minutes weight
Weight float64 // Action's weight
Identifier string // Identifier mapped in the code
BalanceType string // Type of balance the action will operate on
Direction string // Balance direction
Units float64 // Number of units to add/deduct
ExpiryTime string // Time when the units will expire
DestinationId string // Destination profile id
RateType string // Type of rate <*absolute|*percent>
Rate float64 // Price value
MinutesWeight float64 // Minutes weight
Weight float64 // Action's weight
}
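// Illustrative value using the renamed fields above; every value here is a
// hypothetical example, not taken from a real tariff plan.
var sampleAction = Action{
	Identifier:  "TOPUP",    // action identifier mapped in the code
	BalanceType: "MONETARY", // balance the action operates on
	Direction:   "OUT",
	Units:       10,
	ExpiryTime:  "*unlimited", // the added units never expire
	RateType:    "*absolute",
	Rate:        0.1,
	Weight:      10,
}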
type ApiTPActionTimings struct {
@@ -122,7 +123,7 @@ type ApiTPActionTriggers struct {
}
type ApiActionTrigger struct {
BalanceId string // Id of the balance this trigger monitors
BalanceType string // Type of balance this trigger monitors
Direction string // Traffic direction
ThresholdType string // This threshold type
ThresholdValue float64 // Threshold

View File

@@ -1,11 +1,11 @@
package utils
const (
VERSION = "0.9.1rc3"
POSTGRES = "postgres"
MYSQL = "mysql"
MONGO = "mongo"
REDIS = "redis"
VERSION = "0.9.1rc3"
POSTGRES = "postgres"
MYSQL = "mysql"
MONGO = "mongo"
REDIS = "redis"
LOCALHOST = "127.0.0.1"
FSCDR_FILE_CSV = "freeswitch_file_csv"
FSCDR_HTTP_JSON = "freeswitch_http_json"
@@ -24,22 +24,33 @@ const (
TBL_TP_RATES = "tp_rates"
TBL_TP_DESTINATION_RATES = "tp_destination_rates"
TBL_TP_DESTRATE_TIMINGS = "tp_destrate_timings"
TBL_TP_RATE_PROFILES = "tp_rate_profiles"
TBL_TP_RATE_PROFILES = "tp_rating_profiles"
TBL_TP_ACTIONS = "tp_actions"
TBL_TP_ACTION_TIMINGS = "tp_action_timings"
TBL_TP_ACTION_TRIGGERS = "tp_action_triggers"
TBL_TP_ACCOUNT_ACTIONS = "tp_account_actions"
TIMINGS_CSV = "Timings.csv"
DESTINATIONS_CSV = "Destinations.csv"
RATES_CSV = "Rates.csv"
DESTINATION_RATES_CSV = "DestinationRates.csv"
DESTRATE_TIMINGS_CSV = "DestinationRateTimings.csv"
RATE_PROFILES_CSV = "RatingProfiles.csv"
ACTIONS_CSV = "Actions.csv"
ACTION_TIMINGS_CSV = "ActionTimings.csv"
ACTION_TRIGGERS_CSV = "ActionTriggers.csv"
ACCOUNT_ACTIONS_CSV = "AccountActions.csv"
ROUNDING_UP = "up"
ROUNDING_MIDDLE = "middle"
ROUNDING_DOWN = "down"
TIMINGS_CSV = "Timings.csv"
DESTINATIONS_CSV = "Destinations.csv"
RATES_CSV = "Rates.csv"
DESTINATION_RATES_CSV = "DestinationRates.csv"
DESTRATE_TIMINGS_CSV = "DestinationRateTimings.csv"
RATE_PROFILES_CSV = "RatingProfiles.csv"
ACTIONS_CSV = "Actions.csv"
ACTION_TIMINGS_CSV = "ActionTimings.csv"
ACTION_TRIGGERS_CSV = "ActionTriggers.csv"
ACCOUNT_ACTIONS_CSV = "AccountActions.csv"
TIMINGS_NRCOLS = 6
DESTINATIONS_NRCOLS = 2
RATES_NRCOLS = 9
DESTINATION_RATES_NRCOLS = 3
DESTRATE_TIMINGS_NRCOLS = 4
RATE_PROFILES_NRCOLS = 7
ACTIONS_NRCOLS = 11
ACTION_TIMINGS_NRCOLS = 4
ACTION_TRIGGERS_NRCOLS = 8
ACCOUNT_ACTIONS_NRCOLS = 5
ROUNDING_UP = "*up"
ROUNDING_MIDDLE = "*middle"
ROUNDING_DOWN = "*down"
COMMENT_CHAR = '#'
)
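// One way the *_NRCOLS constants above might be used: a small guard that a parsed
// csv record has the expected number of columns. The helper name and error text
// are illustrative, and fmt is assumed to be imported in this file.
func ValidNrCols(record []string, expectedNrCols int) error {
	if len(record) != expectedNrCols {
		return fmt.Errorf("expected %d columns, got %d", expectedNrCols, len(record))
	}
	return nil
}
// e.g. ValidNrCols(rateRecord, RATES_NRCOLS)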

View File

@@ -78,11 +78,11 @@ func Round(x float64, prec int, method string) float64 {
_, frac := math.Modf(intermed)
switch method {
case "*up":
case ROUNDING_UP:
rounder = math.Ceil(intermed)
case "*down":
case ROUNDING_DOWN:
rounder = math.Floor(intermed)
case "*middle":
case ROUNDING_MIDDLE:
if frac >= 0.5 {
rounder = math.Ceil(intermed)
} else {

View File

@@ -41,7 +41,7 @@ func TestUUID(t *testing.T) {
}
func TestRoundUp(t *testing.T) {
result := Round(12.52, 0, "*middle")
result := Round(12.52, 0, ROUNDING_UP)
expected := 13.0
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -49,7 +49,7 @@ func TestRoundUp(t *testing.T) {
}
func TestRoundUpMiddle(t *testing.T) {
result := Round(12.5, 0, "*middle")
result := Round(12.5, 0, ROUNDING_UP)
expected := 13.0
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -57,7 +57,7 @@ func TestRoundUpMiddle(t *testing.T) {
}
func TestRoundDown(t *testing.T) {
result := Round(12.49, 0, "*middle")
result := Round(12.49, 0, ROUNDING_MIDDLE)
expected := 12.0
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -65,7 +65,7 @@ func TestRoundDown(t *testing.T) {
}
func TestRoundPrec(t *testing.T) {
result := Round(12.49, 1, "*middle")
result := Round(12.49, 1, ROUNDING_UP)
expected := 12.5
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -73,7 +73,7 @@ func TestRoundPrec(t *testing.T) {
}
func TestRoundPrecNothing(t *testing.T) {
result := Round(12.49, 2, "*middle")
result := Round(12.49, 2, ROUNDING_MIDDLE)
expected := 12.49
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -89,7 +89,7 @@ func TestRoundPrecNoTouch(t *testing.T) {
}
func TestRoundByMethodUp1(t *testing.T) {
result := Round(12.49, 1, "*up")
result := Round(12.49, 1, ROUNDING_UP)
expected := 12.5
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -97,7 +97,7 @@ func TestRoundByMethodUp1(t *testing.T) {
}
func TestRoundByMethodUp2(t *testing.T) {
result := Round(12.21, 1, "*up")
result := Round(12.21, 1, ROUNDING_UP)
expected := 12.3
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
@@ -105,15 +105,15 @@ func TestRoundByMethodUp2(t *testing.T) {
}
func TestRoundByMethodDown1(t *testing.T) {
result := Round(12.49, 1, "*down")
result := Round(12.49, 1, ROUNDING_DOWN)
expected := 12.4
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)
t.Errorf("Error rounding down: sould be %v was %v", expected, result)
}
}
func TestRoundByMethodDown2(t *testing.T) {
result := Round(12.21, 1, "*down")
result := Round(12.21, 1, ROUNDING_DOWN)
expected := 12.2
if result != expected {
t.Errorf("Error rounding up: sould be %v was %v", expected, result)