package config

import (
	"encoding/json"
	"os"
	"strings"

	"github.com/rs/zerolog/log"

	"gitlab.com/tensorsecurity-rd/waf-console/internal/config"
)

// Default database connection settings, used when the config file does not
// provide a db_config section.
const (
	DB_USER     = "ivan"
	DB_PASSWORD = "Mysql-ha@123"
	DB_HOST     = "localhost"
	DB_PORT     = "3306"
	DB_NAME     = "waf"
)

type Config struct {
	DBConfig            *DBConfig                   `json:"db_config"`
	RegionConfigs       []RegionConfig              `json:"region_configs"`
	Debug               bool                        `json:"debug"`
	GatewayUrl          string                      `json:"gateway_url"`
	SSOUrl              string                      `json:"sso_url"`
	ElasticsearchConfig *config.ElasticsearchConfig `json:"elasticsearch_config"`
	KafkaConfig         *config.KafkaConfig         `json:"kafka_config"`
}

type DBConfig struct {
	User     string `json:"user"`
	Password string `json:"password"`
	Host     string `json:"host"`
	Port     string `json:"port"`
	Database string `json:"database"`
}

type RegionConfig struct {
	RegionCode     string `json:"region_code"`
	ApiServer      string `json:"api_server"`
	WafApiServer   string `json:"waf_api_server"`
	CAData         string `json:"ca_data"`
	Token          string `json:"token"`
	ClientCertData string `json:"client_cert_data"`
	ClientKeyData  string `json:"client_key_data"`
	Insecure       bool   `json:"insecure"`
}

// Superseded by the definitions in internal/config; kept for reference.
// type ElasticsearchConfig struct {
// 	Url      string `json:"url"`
// 	Username string `json:"username"`
// 	Password string `json:"password"`
// 	Sniff    bool   `json:"sniff"`
// }

// type KafkaConfig struct {
// 	Brokers    []string `json:"brokers"`
// 	Topic      string   `json:"topic"`
// 	Group      string   `json:"group"`
// 	AuthMethod string   `json:"auth_method"`
// 	Username   string   `json:"username"`
// 	Password   string   `json:"password"`
// 	ScramAlgo  string   `json:"scram_algo"`
// }

// LoadConfig reads the JSON config file (config/config.json by default, or the
// path given in CONFIG_FILE), applies environment-variable overrides, and
// returns the parsed configuration. It returns nil if the file cannot be read
// or parsed.
func LoadConfig() *Config {
	configFile := "config/config.json"
	if envFile := os.Getenv("CONFIG_FILE"); envFile != "" {
		configFile = envFile
	}

	data, err := os.ReadFile(configFile)
	if err != nil {
		log.Err(err).Msgf("Failed to read config file: %s", configFile)
		return nil
	}

	var cfg Config
	if err := json.Unmarshal(data, &cfg); err != nil {
		log.Err(err).Msg("Failed to parse config file")
		return nil
	}

	// If cfg.DBConfig is nil, fall back to the default values before applying
	// the password override, so RDB_PASSWORD always takes effect and the
	// assignment below cannot dereference a nil pointer.
	if cfg.DBConfig == nil {
		cfg.DBConfig = &DBConfig{
			User:     DB_USER,
			Password: DB_PASSWORD,
			Host:     DB_HOST,
			Port:     DB_PORT,
			Database: DB_NAME,
		}
	}
	if password := os.Getenv("RDB_PASSWORD"); password != "" {
		cfg.DBConfig.Password = password
	}

	// Elasticsearch overrides; skipped when the section is absent so the
	// assignments below cannot dereference a nil pointer.
	if cfg.ElasticsearchConfig != nil {
		if esURL := os.Getenv("ELASTIC_URL"); esURL != "" {
			cfg.ElasticsearchConfig.Url = esURL
		}
		if esUsername := os.Getenv("ELASTIC_USERNAME"); esUsername != "" {
			cfg.ElasticsearchConfig.Username = esUsername
		}
		if esPassword := os.Getenv("ELASTIC_PASSWORD"); esPassword != "" {
			cfg.ElasticsearchConfig.Password = esPassword
		}
		if sniff := os.Getenv("ELASTIC_SNIFF"); sniff != "" {
			cfg.ElasticsearchConfig.Sniff = (sniff == "true")
		}
	}

	// Kafka overrides; same nil guard as above.
	if cfg.KafkaConfig != nil {
		if kafkaBrokers := os.Getenv("KAFKA_BROKERS"); kafkaBrokers != "" {
			cfg.KafkaConfig.Brokers = strings.Split(kafkaBrokers, ",")
		}
		if kafkaTopic := os.Getenv("KAFKA_TOPIC"); kafkaTopic != "" {
			cfg.KafkaConfig.Topic = kafkaTopic
		}
		if kafkaGroup := os.Getenv("KAFKA_GROUP"); kafkaGroup != "" {
			cfg.KafkaConfig.Group = kafkaGroup
		}
		if kafkaAuthMethod := os.Getenv("KAFKA_AUTH_METHOD"); kafkaAuthMethod != "" {
			cfg.KafkaConfig.AuthMethod = kafkaAuthMethod
		}
		if kafkaUsername := os.Getenv("KAFKA_USERNAME"); kafkaUsername != "" {
			cfg.KafkaConfig.Username = kafkaUsername
		}
		if kafkaPassword := os.Getenv("KAFKA_PASSWORD"); kafkaPassword != "" {
			cfg.KafkaConfig.Password = kafkaPassword
		}
		if kafkaScramAlgo := os.Getenv("KAFKA_SCRAM_ALGO"); kafkaScramAlgo != "" {
			cfg.KafkaConfig.ScramAlgo = kafkaScramAlgo
		}
	}

	return &cfg
}
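
// Example config.json, sketched from the struct tags above. The
// elasticsearch_config and kafka_config keys assume the internal/config types
// mirror the commented-out definitions; all values are placeholders, not
// shipped defaults.
//
// {
//   "debug": false,
//   "gateway_url": "http://gateway.local",
//   "sso_url": "http://sso.local",
//   "db_config": {
//     "user": "ivan",
//     "password": "change-me",
//     "host": "localhost",
//     "port": "3306",
//     "database": "waf"
//   },
//   "region_configs": [
//     {
//       "region_code": "region-1",
//       "api_server": "https://k8s.example:6443",
//       "waf_api_server": "https://waf.example:8443",
//       "ca_data": "...",
//       "token": "...",
//       "client_cert_data": "...",
//       "client_key_data": "...",
//       "insecure": false
//     }
//   ],
//   "elasticsearch_config": {
//     "url": "http://elasticsearch:9200",
//     "username": "elastic",
//     "password": "change-me",
//     "sniff": false
//   },
//   "kafka_config": {
//     "brokers": ["kafka-0:9092", "kafka-1:9092"],
//     "topic": "waf-events",
//     "group": "waf-console",
//     "auth_method": "scram",
//     "username": "kafka-user",
//     "password": "change-me",
//     "scram_algo": "sha512"
//   }
// }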