diff --git a/cmd/cgr-loader/cgr-loader.go b/cmd/cgr-loader/cgr-loader.go
index 1d8770b56..7cfb3c2d5 100755
--- a/cmd/cgr-loader/cgr-loader.go
+++ b/cmd/cgr-loader/cgr-loader.go
@@ -19,6 +19,7 @@ along with this program. If not, see
package main
import (
+ "errors"
"flag"
"fmt"
"log"
@@ -32,10 +33,8 @@ import (
)
var (
- err error
- dm *engine.DataManager
- storDb engine.LoadStorage
- loader engine.LoadReader
+ dataDB engine.DataDB
+ storDB engine.LoadStorage
cgrLoaderFlags = flag.NewFlagSet("cgr-loader", flag.ContinueOnError)
dfltCfg = config.CgrConfig()
@@ -106,8 +105,9 @@ var (
func loadConfig() (ldrCfg *config.CGRConfig) {
ldrCfg = config.CgrConfig()
if *cfgPath != "" {
+ var err error
if ldrCfg, err = config.NewCGRConfigFromPath(*cfgPath); err != nil {
- log.Fatalf("Error loading config file %s", err.Error())
+ log.Fatalf("Error loading config file %s", err)
}
config.SetCgrConfig(ldrCfg)
}
@@ -226,41 +226,80 @@ func loadConfig() (ldrCfg *config.CGRConfig) {
if *disableReverse != dfltCfg.LoaderCgrCfg().DisableReverse {
ldrCfg.LoaderCgrCfg().DisableReverse = *disableReverse
}
+
+ if *cachingArg != utils.EmptyString {
+ ldrCfg.GeneralCfg().DefaultCaching = *cachingArg
+ }
+ return
+}
+
+func importData(cfg *config.CGRConfig) (err error) {
+ if cfg.LoaderCgrCfg().TpID == "" {
+ return errors.New("TPid required")
+ }
+ if *flushStorDB {
+ if err = storDB.RemTpData("", cfg.LoaderCgrCfg().TpID, map[string]string{}); err != nil {
+ return err
+ }
+ }
+ csvImporter := engine.TPCSVImporter{
+ TPid: cfg.LoaderCgrCfg().TpID,
+ StorDb: storDB,
+ DirPath: *dataPath,
+ Sep: cfg.LoaderCgrCfg().FieldSeparator,
+ Verbose: *verbose,
+ ImportId: *importID,
+ }
+ return csvImporter.Run()
+}
+
+func getLoader(cfg *config.CGRConfig) (loader engine.LoadReader, err error) {
+ if *fromStorDB { // Load Tariff Plan from storDb into dataDb
+ loader = storDB
+ return
+ }
+	if gprefix := utils.MetaGoogleAPI + utils.CONCATENATED_KEY_SEP; strings.HasPrefix(*dataPath, gprefix) { // Load Tariff Plan from Google Sheets into dataDb
+ return engine.NewGoogleCSVStorage(cfg.LoaderCgrCfg().FieldSeparator, strings.TrimPrefix(*dataPath, gprefix), *cfgPath)
+ }
+ if !utils.IsURL(*dataPath) {
+ loader = engine.NewFileCSVStorage(cfg.LoaderCgrCfg().FieldSeparator, *dataPath)
+ return
+ }
+ loader = engine.NewURLCSVStorage(cfg.LoaderCgrCfg().FieldSeparator, *dataPath)
return
}
func main() {
- if err := cgrLoaderFlags.Parse(os.Args[1:]); err != nil {
- return
+ var err error
+ if err = cgrLoaderFlags.Parse(os.Args[1:]); err != nil {
+ log.Fatal(err)
}
if *version {
- if rcv, err := utils.GetCGRVersion(); err != nil {
- fmt.Println(err)
- } else {
- fmt.Println(rcv)
+ var version string
+ if version, err = utils.GetCGRVersion(); err != nil {
+ log.Fatal(err)
}
+ fmt.Println(version)
return
}
ldrCfg := loadConfig()
// we initialize connManager here with nil for InternalChannels
- cM := engine.NewConnManager(ldrCfg, nil)
+ engine.NewConnManager(ldrCfg, nil)
if !*toStorDB {
- d, err := engine.NewDataDBConn(ldrCfg.DataDbCfg().DataDbType,
+ if dataDB, err = engine.NewDataDBConn(ldrCfg.DataDbCfg().DataDbType,
ldrCfg.DataDbCfg().DataDbHost, ldrCfg.DataDbCfg().DataDbPort,
ldrCfg.DataDbCfg().DataDbName, ldrCfg.DataDbCfg().DataDbUser,
ldrCfg.DataDbCfg().DataDbPass, ldrCfg.GeneralCfg().DBDataEncoding,
- ldrCfg.DataDbCfg().DataDbSentinelName, ldrCfg.DataDbCfg().Items)
- if err != nil {
+ ldrCfg.DataDbCfg().DataDbSentinelName, ldrCfg.DataDbCfg().Items); err != nil {
log.Fatalf("Coud not open dataDB connection: %s", err.Error())
}
- dm = engine.NewDataManager(d, config.CgrConfig().CacheCfg(), cM)
- defer dm.DataDB().Close()
+ defer dataDB.Close()
}
if *fromStorDB || *toStorDB {
- if storDb, err = engine.NewStorDBConn(ldrCfg.StorDbCfg().Type,
+ if storDB, err = engine.NewStorDBConn(ldrCfg.StorDbCfg().Type,
ldrCfg.StorDbCfg().Host, ldrCfg.StorDbCfg().Port,
ldrCfg.StorDbCfg().Name, ldrCfg.StorDbCfg().User,
ldrCfg.StorDbCfg().Password, ldrCfg.GeneralCfg().DBDataEncoding, ldrCfg.StorDbCfg().SSLMode,
@@ -269,46 +308,24 @@ func main() {
ldrCfg.StorDbCfg().PrefixIndexedFields, ldrCfg.StorDbCfg().Items); err != nil {
log.Fatalf("Coud not open storDB connection: %s", err.Error())
}
- defer storDb.Close()
+ defer storDB.Close()
}
if !*dryRun && *toStorDB { // Import files from a directory into storDb
- if ldrCfg.LoaderCgrCfg().TpID == "" {
- log.Fatal("TPid required.")
- }
- if *flushStorDB {
- if err = storDb.RemTpData("", ldrCfg.LoaderCgrCfg().TpID, map[string]string{}); err != nil {
- log.Fatal(err)
- }
- }
- csvImporter := engine.TPCSVImporter{
- TPid: ldrCfg.LoaderCgrCfg().TpID,
- StorDb: storDb,
- DirPath: *dataPath,
- Sep: ldrCfg.LoaderCgrCfg().FieldSeparator,
- Verbose: *verbose,
- ImportId: *importID,
- }
- if errImport := csvImporter.Run(); errImport != nil {
- log.Fatal(errImport)
+ if err = importData(ldrCfg); err != nil {
+ log.Fatal(err)
}
return
}
-
- if *fromStorDB { // Load Tariff Plan from storDb into dataDb
- loader = storDb
- } else if gprefix := utils.MetaGoogleAPI + utils.CONCATENATED_KEY_SEP; strings.HasPrefix(*dataPath, gprefix) { // Default load from csv files to dataDb
- loader, err = engine.NewGoogleCSVStorage(ldrCfg.LoaderCgrCfg().FieldSeparator, strings.TrimPrefix(*dataPath, gprefix), *cfgPath)
- if err != nil {
- log.Fatal(err)
- }
- } else {
- loader = engine.NewFileCSVStorage(ldrCfg.LoaderCgrCfg().FieldSeparator, *dataPath)
+ var loader engine.LoadReader
+ if loader, err = getLoader(ldrCfg); err != nil {
+ log.Fatal(err)
}
-
- tpReader, err := engine.NewTpReader(dm.DataDB(), loader, ldrCfg.LoaderCgrCfg().TpID,
- ldrCfg.GeneralCfg().DefaultTimezone, ldrCfg.LoaderCgrCfg().CachesConns, ldrCfg.LoaderCgrCfg().SchedulerConns)
- if err != nil {
+ var tpReader *engine.TpReader
+ if tpReader, err = engine.NewTpReader(dataDB, loader,
+ ldrCfg.LoaderCgrCfg().TpID, ldrCfg.GeneralCfg().DefaultTimezone,
+ ldrCfg.LoaderCgrCfg().CachesConns,
+ ldrCfg.LoaderCgrCfg().SchedulerConns); err != nil {
log.Fatal(err)
}
if err = tpReader.LoadAll(); err != nil {
@@ -320,29 +337,25 @@ func main() {
}
if *remove {
- if err := tpReader.RemoveFromDatabase(*verbose, *disableReverse); err != nil {
+ if err = tpReader.RemoveFromDatabase(*verbose, *disableReverse); err != nil {
log.Fatal("Could not delete from database: ", err)
}
return
}
// write maps to database
- if err := tpReader.WriteToDatabase(*verbose, *disableReverse); err != nil {
+ if err = tpReader.WriteToDatabase(*verbose, *disableReverse); err != nil {
log.Fatal("Could not write to database: ", err)
}
- caching := config.CgrConfig().GeneralCfg().DefaultCaching
- if cachingArg != nil && *cachingArg != utils.EmptyString {
- caching = *cachingArg
- }
// reload cache
- if err := tpReader.ReloadCache(caching, *verbose, &utils.ArgDispatcher{
+ if err = tpReader.ReloadCache(ldrCfg.GeneralCfg().DefaultCaching, *verbose, &utils.ArgDispatcher{
APIKey: apiKey,
RouteID: routeID,
}); err != nil {
log.Fatal("Could not reload cache: ", err)
}
if len(ldrCfg.LoaderCgrCfg().SchedulerConns) != 0 {
- if err := tpReader.ReloadScheduler(*verbose); err != nil {
+ if err = tpReader.ReloadScheduler(*verbose); err != nil {
log.Fatal("Could not reload scheduler: ", err)
}
}
diff --git a/config/config.go b/config/config.go
index 002f74eb3..d7194cd10 100755
--- a/config/config.go
+++ b/config/config.go
@@ -1237,7 +1237,7 @@ func (*CGRConfig) loadConfigFromReader(rdr io.Reader, loadFuncs []func(jsnCfg *C
// Reads all .json files out of a folder/subfolders and loads them up in lexical order
func (cfg *CGRConfig) loadConfigFromPath(path string, loadFuncs []func(jsnCfg *CgrJsonCfg) error) (err error) {
- if isUrl(path) {
+ if utils.IsURL(path) {
return cfg.loadConfigFromHTTP(path, loadFuncs) // prefix protocol
}
var fi os.FileInfo
diff --git a/config/libconfig.go b/config/libconfig.go
deleted file mode 100644
index 1a61675c0..000000000
--- a/config/libconfig.go
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
-Copyright (C) ITsysCOM GmbH
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program. If not, see
-*/
-
-package config
-
-import "strings"
-
-func isUrl(path string) bool {
- return strings.HasPrefix(path, "https://") ||
- strings.HasPrefix(path, "http://")
-}
diff --git a/config/libconfig_test.go b/config/libconfig_test.go
deleted file mode 100644
index 1d41a437b..000000000
--- a/config/libconfig_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
-Copyright (C) ITsysCOM GmbH
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program. If not, see
-*/
-package config
-
-import "testing"
-
-func TestIsURL(t *testing.T) {
- urls := map[string]bool{
- "/etc/usr/": false,
- "https://github.com/cgrates/cgrates/": true,
- "http://github.com/cgrates/cgrates/i": true,
- }
- for url, expected := range urls {
- if rply := isUrl(url); rply != expected {
- t.Errorf("For: %q ,expected %v received: %v", url, expected, rply)
- }
- }
-}
diff --git a/engine/storage_csv.go b/engine/storage_csv.go
index 1592efbdb..3427ca8d0 100644
--- a/engine/storage_csv.go
+++ b/engine/storage_csv.go
@@ -26,11 +26,13 @@ import (
"io/ioutil"
"log"
"net/http"
+ "net/url"
"os"
"path"
"path/filepath"
"strings"
+ "github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/utils"
"golang.org/x/net/context"
"golang.org/x/oauth2"
@@ -38,6 +40,7 @@ import (
"google.golang.org/api/sheets/v4"
)
+// CSVStorage is the basic csv storage
type CSVStorage struct {
sep rune
generator func() csvReaderCloser
@@ -64,6 +67,7 @@ type CSVStorage struct {
dispatcherHostsFn []string
}
+// NewCSVStorage creates a CSV storage that takes the data from the paths specified
func NewCSVStorage(sep rune,
destinationsFn, timingsFn, ratesFn, destinationratesFn,
destinationratetimingsFn, ratingprofilesFn, sharedgroupsFn,
@@ -97,55 +101,57 @@ func NewCSVStorage(sep rune,
}
}
+// NewFileCSVStorage returns a csv storage that uses all files from the folder
func NewFileCSVStorage(sep rune, dataPath string) *CSVStorage {
allFoldersPath, err := getAllFolders(dataPath)
if err != nil {
log.Fatal(err)
}
- destinations_paths := appendName(allFoldersPath, utils.DestinationsCsv)
- timings_paths := appendName(allFoldersPath, utils.TimingsCsv)
- rates_paths := appendName(allFoldersPath, utils.RatesCsv)
- destination_rates_paths := appendName(allFoldersPath, utils.DestinationRatesCsv)
- rating_plans_paths := appendName(allFoldersPath, utils.RatingPlansCsv)
- rating_profiles_paths := appendName(allFoldersPath, utils.RatingProfilesCsv)
- shared_groups_paths := appendName(allFoldersPath, utils.SharedGroupsCsv)
- actions_paths := appendName(allFoldersPath, utils.ActionsCsv)
- action_plans_paths := appendName(allFoldersPath, utils.ActionPlansCsv)
- action_triggers_paths := appendName(allFoldersPath, utils.ActionTriggersCsv)
- account_actions_paths := appendName(allFoldersPath, utils.AccountActionsCsv)
- resources_paths := appendName(allFoldersPath, utils.ResourcesCsv)
- stats_paths := appendName(allFoldersPath, utils.StatsCsv)
- thresholds_paths := appendName(allFoldersPath, utils.ThresholdsCsv)
- filters_paths := appendName(allFoldersPath, utils.FiltersCsv)
- suppliers_paths := appendName(allFoldersPath, utils.SuppliersCsv)
- attributes_paths := appendName(allFoldersPath, utils.AttributesCsv)
- chargers_paths := appendName(allFoldersPath, utils.ChargersCsv)
- dispatcherprofiles_paths := appendName(allFoldersPath, utils.DispatcherProfilesCsv)
- dispatcherhosts_paths := appendName(allFoldersPath, utils.DispatcherHostsCsv)
+ destinationsPaths := appendName(allFoldersPath, utils.DestinationsCsv)
+ timingsPaths := appendName(allFoldersPath, utils.TimingsCsv)
+ ratesPaths := appendName(allFoldersPath, utils.RatesCsv)
+ destinationRatesPaths := appendName(allFoldersPath, utils.DestinationRatesCsv)
+ ratingPlansPaths := appendName(allFoldersPath, utils.RatingPlansCsv)
+ ratingProfilesPaths := appendName(allFoldersPath, utils.RatingProfilesCsv)
+ sharedGroupsPaths := appendName(allFoldersPath, utils.SharedGroupsCsv)
+ actionsPaths := appendName(allFoldersPath, utils.ActionsCsv)
+ actionPlansPaths := appendName(allFoldersPath, utils.ActionPlansCsv)
+ actionTriggersPaths := appendName(allFoldersPath, utils.ActionTriggersCsv)
+ accountActionsPaths := appendName(allFoldersPath, utils.AccountActionsCsv)
+ resourcesPaths := appendName(allFoldersPath, utils.ResourcesCsv)
+ statsPaths := appendName(allFoldersPath, utils.StatsCsv)
+ thresholdsPaths := appendName(allFoldersPath, utils.ThresholdsCsv)
+ filtersPaths := appendName(allFoldersPath, utils.FiltersCsv)
+ suppliersPaths := appendName(allFoldersPath, utils.SuppliersCsv)
+ attributesPaths := appendName(allFoldersPath, utils.AttributesCsv)
+ chargersPaths := appendName(allFoldersPath, utils.ChargersCsv)
+ dispatcherprofilesPaths := appendName(allFoldersPath, utils.DispatcherProfilesCsv)
+ dispatcherhostsPaths := appendName(allFoldersPath, utils.DispatcherHostsCsv)
return NewCSVStorage(sep,
- destinations_paths,
- timings_paths,
- rates_paths,
- destination_rates_paths,
- rating_plans_paths,
- rating_profiles_paths,
- shared_groups_paths,
- actions_paths,
- action_plans_paths,
- action_triggers_paths,
- account_actions_paths,
- resources_paths,
- stats_paths,
- thresholds_paths,
- filters_paths,
- suppliers_paths,
- attributes_paths,
- chargers_paths,
- dispatcherprofiles_paths,
- dispatcherhosts_paths,
+ destinationsPaths,
+ timingsPaths,
+ ratesPaths,
+ destinationRatesPaths,
+ ratingPlansPaths,
+ ratingProfilesPaths,
+ sharedGroupsPaths,
+ actionsPaths,
+ actionPlansPaths,
+ actionTriggersPaths,
+ accountActionsPaths,
+ resourcesPaths,
+ statsPaths,
+ thresholdsPaths,
+ filtersPaths,
+ suppliersPaths,
+ attributesPaths,
+ chargersPaths,
+ dispatcherprofilesPaths,
+ dispatcherhostsPaths,
)
}
+// NewStringCSVStorage creates a csv storage from strings
func NewStringCSVStorage(sep rune,
destinationsFn, timingsFn, ratesFn, destinationratesFn,
destinationratetimingsFn, ratingprofilesFn, sharedgroupsFn,
@@ -165,12 +171,13 @@ func NewStringCSVStorage(sep rune,
return c
}
-func NewGoogleCSVStorage(sep rune, spreadsheetId, cfgPath string) (*CSVStorage, error) {
+// NewGoogleCSVStorage creates a csv storage from google sheets
+func NewGoogleCSVStorage(sep rune, spreadsheetID, cfgPath string) (*CSVStorage, error) {
sht, err := newSheet(cfgPath)
if err != nil {
return nil, err
}
- sheetNames, err := getSpreatsheetTabs(spreadsheetId, sht)
+ sheetNames, err := getSpreatsheetTabs(spreadsheetID, sht)
if err != nil {
return nil, err
}
@@ -203,13 +210,141 @@ func NewGoogleCSVStorage(sep rune, spreadsheetId, cfgPath string) (*CSVStorage,
getIfExist(utils.DispatcherHosts))
c.generator = func() csvReaderCloser {
return &csvGoogle{
- spreadsheetId: spreadsheetId,
+ spreadsheetID: spreadsheetID,
srv: sht,
}
}
return c, nil
}
+// NewURLCSVStorage returns a CSVStorage that can parse URLs
+func NewURLCSVStorage(sep rune, dataPath string) *CSVStorage {
+ var destinationsPaths []string
+ var timingsPaths []string
+ var ratesPaths []string
+ var destinationRatesPaths []string
+ var ratingPlansPaths []string
+ var ratingProfilesPaths []string
+ var sharedGroupsPaths []string
+ var actionsPaths []string
+ var actionPlansPaths []string
+ var actionTriggersPaths []string
+ var accountActionsPaths []string
+ var resourcesPaths []string
+ var statsPaths []string
+ var thresholdsPaths []string
+ var filtersPaths []string
+ var suppliersPaths []string
+ var attributesPaths []string
+ var chargersPaths []string
+ var dispatcherprofilesPaths []string
+ var dispatcherhostsPaths []string
+
+ for _, baseURL := range strings.Split(dataPath, utils.INFIELD_SEP) {
+ if !strings.HasSuffix(baseURL, utils.CSVSuffix) {
+ destinationsPaths = append(destinationsPaths, joinURL(baseURL, utils.DestinationsCsv))
+ timingsPaths = append(timingsPaths, joinURL(baseURL, utils.TimingsCsv))
+ ratesPaths = append(ratesPaths, joinURL(baseURL, utils.RatesCsv))
+ destinationRatesPaths = append(destinationRatesPaths, joinURL(baseURL, utils.DestinationRatesCsv))
+ ratingPlansPaths = append(ratingPlansPaths, joinURL(baseURL, utils.RatingPlansCsv))
+ ratingProfilesPaths = append(ratingProfilesPaths, joinURL(baseURL, utils.RatingProfilesCsv))
+ sharedGroupsPaths = append(sharedGroupsPaths, joinURL(baseURL, utils.SharedGroupsCsv))
+ actionsPaths = append(actionsPaths, joinURL(baseURL, utils.ActionsCsv))
+ actionPlansPaths = append(actionPlansPaths, joinURL(baseURL, utils.ActionPlansCsv))
+ actionTriggersPaths = append(actionTriggersPaths, joinURL(baseURL, utils.ActionTriggersCsv))
+ accountActionsPaths = append(accountActionsPaths, joinURL(baseURL, utils.AccountActionsCsv))
+ resourcesPaths = append(resourcesPaths, joinURL(baseURL, utils.ResourcesCsv))
+ statsPaths = append(statsPaths, joinURL(baseURL, utils.StatsCsv))
+ thresholdsPaths = append(thresholdsPaths, joinURL(baseURL, utils.ThresholdsCsv))
+ filtersPaths = append(filtersPaths, joinURL(baseURL, utils.FiltersCsv))
+ suppliersPaths = append(suppliersPaths, joinURL(baseURL, utils.SuppliersCsv))
+ attributesPaths = append(attributesPaths, joinURL(baseURL, utils.AttributesCsv))
+ chargersPaths = append(chargersPaths, joinURL(baseURL, utils.ChargersCsv))
+ dispatcherprofilesPaths = append(dispatcherprofilesPaths, joinURL(baseURL, utils.DispatcherProfilesCsv))
+ dispatcherhostsPaths = append(dispatcherhostsPaths, joinURL(baseURL, utils.DispatcherHostsCsv))
+ continue
+ }
+ switch {
+ case strings.HasSuffix(baseURL, utils.DestinationsCsv):
+ destinationsPaths = append(destinationsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.TimingsCsv):
+ timingsPaths = append(timingsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.RatesCsv):
+ ratesPaths = append(ratesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.DestinationRatesCsv):
+ destinationRatesPaths = append(destinationRatesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.RatingPlansCsv):
+ ratingPlansPaths = append(ratingPlansPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.RatingProfilesCsv):
+ ratingProfilesPaths = append(ratingProfilesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.SharedGroupsCsv):
+ sharedGroupsPaths = append(sharedGroupsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ActionsCsv):
+ actionsPaths = append(actionsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ActionPlansCsv):
+ actionPlansPaths = append(actionPlansPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ActionTriggersCsv):
+ actionTriggersPaths = append(actionTriggersPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.AccountActionsCsv):
+ accountActionsPaths = append(accountActionsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ResourcesCsv):
+ resourcesPaths = append(resourcesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.StatsCsv):
+ statsPaths = append(statsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ThresholdsCsv):
+ thresholdsPaths = append(thresholdsPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.FiltersCsv):
+ filtersPaths = append(filtersPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.SuppliersCsv):
+ suppliersPaths = append(suppliersPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.AttributesCsv):
+ attributesPaths = append(attributesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.ChargersCsv):
+ chargersPaths = append(chargersPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.DispatcherProfilesCsv):
+ dispatcherprofilesPaths = append(dispatcherprofilesPaths, baseURL)
+ case strings.HasSuffix(baseURL, utils.DispatcherHostsCsv):
+ dispatcherhostsPaths = append(dispatcherhostsPaths, baseURL)
+ }
+ }
+
+ c := NewCSVStorage(sep,
+ destinationsPaths,
+ timingsPaths,
+ ratesPaths,
+ destinationRatesPaths,
+ ratingPlansPaths,
+ ratingProfilesPaths,
+ sharedGroupsPaths,
+ actionsPaths,
+ actionPlansPaths,
+ actionTriggersPaths,
+ accountActionsPaths,
+ resourcesPaths,
+ statsPaths,
+ thresholdsPaths,
+ filtersPaths,
+ suppliersPaths,
+ attributesPaths,
+ chargersPaths,
+ dispatcherprofilesPaths,
+ dispatcherhostsPaths,
+ )
+ c.generator = func() csvReaderCloser {
+ return &csvURL{}
+ }
+ return c
+}
+
+func joinURL(baseURL, fn string) string {
+ u, err := url.Parse(baseURL)
+ if err != nil {
+ return baseURL + fn
+ }
+ u.Path = path.Join(u.Path, fn)
+ return u.String()
+}
+
func getAllFolders(inPath string) (paths []string, err error) {
err = filepath.Walk(inPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
@@ -225,8 +360,8 @@ func getAllFolders(inPath string) (paths []string, err error) {
func appendName(paths []string, fileName string) (out []string) {
out = make([]string, len(paths))
- for i, path_ := range paths {
- out[i] = path.Join(path_, fileName)
+ for i, basePath := range paths {
+ out[i] = path.Join(basePath, fileName)
}
return
}
@@ -665,9 +800,9 @@ func newSheet(configPath string) (sht *sheets.Service, err error) { //*google_ap
return
}
-func getSpreatsheetTabs(spreadsheetId string, srv *sheets.Service) (sheetsName map[string]struct{}, err error) {
+func getSpreatsheetTabs(spreadsheetID string, srv *sheets.Service) (sheetsName map[string]struct{}, err error) {
sheetsName = make(map[string]struct{})
- sht, err := srv.Spreadsheets.Get(spreadsheetId).Do()
+ sht, err := srv.Spreadsheets.Get(spreadsheetID).Do()
if err != nil {
err = fmt.Errorf("Unable get the information about spreadsheet because: %v", err)
return
@@ -679,7 +814,7 @@ func getSpreatsheetTabs(spreadsheetId string, srv *sheets.Service) (sheetsName m
}
type csvGoogle struct {
- spreadsheetId string
+ spreadsheetID string
srv *sheets.Service
response *sheets.ValueRange
indx int
@@ -687,7 +822,7 @@ type csvGoogle struct {
}
func (c *csvGoogle) Open(data string, sep rune, nrFields int) (err error) {
- c.response, err = c.srv.Spreadsheets.Values.Get(c.spreadsheetId, data).Do()
+ c.response, err = c.srv.Spreadsheets.Values.Get(c.spreadsheetID, data).Do()
if err != nil {
return
}
@@ -732,3 +867,43 @@ func (c *csvGoogle) Read() (record []string, err error) {
func (c *csvGoogle) Close() { // no need for close
}
+
+type csvURL struct {
+ csvReader *csv.Reader
+ page io.ReadCloser
+}
+
+func (c *csvURL) Open(fn string, sep rune, nrFields int) (err error) {
+ if _, err = url.ParseRequestURI(fn); err != nil {
+ return
+ }
+ var myClient = &http.Client{
+ Timeout: config.CgrConfig().GeneralCfg().ReplyTimeout,
+ }
+ var req *http.Response
+ req, err = myClient.Get(fn)
+ if err != nil {
+ return utils.ErrPathNotReachable(fn)
+ }
+ if req.StatusCode != http.StatusOK {
+ return utils.ErrNotFound
+ }
+ c.page = req.Body
+
+ c.csvReader = csv.NewReader(c.page)
+ c.csvReader.Comma = sep
+ c.csvReader.Comment = utils.COMMENT_CHAR
+ c.csvReader.FieldsPerRecord = nrFields
+ c.csvReader.TrailingComma = true
+ return
+}
+
+func (c *csvURL) Read() (record []string, err error) {
+ return c.csvReader.Read()
+}
+
+func (c *csvURL) Close() {
+ if c.page != nil {
+ c.page.Close()
+ }
+}
diff --git a/utils/coreutils.go b/utils/coreutils.go
index a032fb065..37aab924f 100644
--- a/utils/coreutils.go
+++ b/utils/coreutils.go
@@ -942,3 +942,9 @@ type LoadIDsWithArgDispatcher struct {
TenantArg
*ArgDispatcher
}
+
+// IsURL reports whether the path is a URL
+func IsURL(path string) bool {
+ return strings.HasPrefix(path, "https://") ||
+ strings.HasPrefix(path, "http://")
+}
diff --git a/utils/coreutils_test.go b/utils/coreutils_test.go
index ef4010570..82176a3cc 100644
--- a/utils/coreutils_test.go
+++ b/utils/coreutils_test.go
@@ -1349,3 +1349,16 @@ func TestGetPathIndex(t *testing.T) {
t.Errorf("Expecting: nil, received: %+v", *index)
}
}
+
+func TestIsURL(t *testing.T) {
+ urls := map[string]bool{
+ "/etc/usr/": false,
+ "https://github.com/cgrates/cgrates/": true,
+ "http://github.com/cgrates/cgrates/i": true,
+ }
+ for url, expected := range urls {
+ if rply := IsURL(url); rply != expected {
+ t.Errorf("For: %q ,expected %v received: %v", url, expected, rply)
+ }
+ }
+}