#include "model_config.hpp"

#include <fstream>
#include <memory>
#include <stdexcept>
#include <string>
#include <vector>

namespace infinilm::config {
ModelConfig::ModelConfig(const std::string &path) {
    std::ifstream file(path);
    if (file.is_open()) {
        file >> config_json;
        file.close();
    } else {
        throw std::runtime_error("Could not open config file: " + path);
    }
    // "quantization_config" is optional; when it is absent or null, keep the
    // default-constructed QuantConfig (assumed to mean no quantization).
    if (config_json.contains("quantization_config") && !config_json["quantization_config"].is_null()) {
        this->quant_config = QuantConfig(config_json["quantization_config"]);
    }
}

infinicore::quantization::QuantScheme
ModelConfig::get_quant_scheme() const {
    return quant_config.get_quant_scheme();
}

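// Illustrative shape of the "rope_scaling" entry handled below. The field
// names mirror the checks in get_rope_scaling(); the values are placeholder
// examples, not taken from any particular model's config.json:
//
//   "rope_scaling": {
//     "type": "longrope",
//     "short_factor": [1.0, 1.0, 1.0, 1.0],
//     "long_factor": [4.0, 4.0, 4.0, 4.0],
//     "original_max_position_embeddings": 4096,
//     "factor": 2.0
//   }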
std::shared_ptr<infinicore::nn::RoPE::ScalingConfig>
ModelConfig::get_rope_scaling() const {
    if (!config_json.contains("rope_scaling") || config_json["rope_scaling"].is_null()) {
        return nullptr;
    }

    const auto &rope_scaling = config_json["rope_scaling"];
    if (!rope_scaling.is_object()) {
        throw std::runtime_error("rope_scaling must be an object");
    }

    if (!rope_scaling.contains("type")) {
        throw std::runtime_error("rope_scaling must contain 'type' field");
    }

    std::string type_str = rope_scaling["type"].get<std::string>();
    if (type_str == "longrope") {
        // Required fields for LongRopeConfig
        if (!rope_scaling.contains("short_factor") || !rope_scaling.contains("long_factor") || !rope_scaling.contains("original_max_position_embeddings")) {
            throw std::runtime_error(
                "LongRopeConfig requires 'short_factor', 'long_factor', and 'original_max_position_embeddings'");
        }

        auto short_factor = rope_scaling["short_factor"].get<std::vector<float>>();
        auto long_factor = rope_scaling["long_factor"].get<std::vector<float>>();
        size_t original_max_position_embeddings = rope_scaling["original_max_position_embeddings"].get<size_t>();

        float factor = 1.0f;
        if (rope_scaling.contains("factor")) {
            factor = rope_scaling["factor"].get<float>();
        }

        return std::make_shared<infinicore::nn::RoPE::LongRopeConfig>(
            std::move(short_factor),
            std::move(long_factor),
            original_max_position_embeddings,
            factor);
    } else if (type_str == "default" || type_str == "none") {
        // Default scaling, no scaling applied
        return nullptr;
    } else {
        throw std::runtime_error("Unsupported rope_scaling type: " + type_str);
    }
}

infinicore::DataType
ModelConfig::get_dtype() const {
    try {
        std::string dtype_str = this->get<std::string>("torch_dtype");
        if (dtype_str == "float32") {
            return infinicore::DataType::F32;
        } else if (dtype_str == "float16") {
            return infinicore::DataType::F16;
        } else if (dtype_str == "bfloat16") {
            return infinicore::DataType::BF16;
        } else if (dtype_str == "int8") {
            return infinicore::DataType::I8;
        } else {
            throw std::runtime_error("Unsupported dtype string: " + dtype_str);
        }
    } catch (const std::exception &e) {
        throw std::runtime_error("Error getting dtype from config: " + std::string(e.what()));
    }
}
} // namespace infinilm::config
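
// Minimal usage sketch (assumes the declarations in model_config.hpp match the
// definitions above; the config path is illustrative):
//
//   infinilm::config::ModelConfig config("/path/to/model/config.json");
//   infinicore::DataType dtype = config.get_dtype();   // parsed from "torch_dtype"
//   auto rope_scaling = config.get_rope_scaling();     // nullptr when no scaling applies
//   auto quant_scheme = config.get_quant_scheme();     // scheme from "quantization_config"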