diff --git a/src/cpp/src/continuous_batching_pipeline.cpp b/src/cpp/src/continuous_batching_pipeline.cpp
index a66a88cad4..a2f639bf8c 100644
--- a/src/cpp/src/continuous_batching_pipeline.cpp
+++ b/src/cpp/src/continuous_batching_pipeline.cpp
@@ -84,7 +84,7 @@ class ContinuousBatchingPipeline::Impl {
 
         // The model can be compiled for GPU as well
         std::shared_ptr<ov::Model> model = core.read_model(models_path + "/openvino_model.xml");
 
-        DeviceConfig device_config(core, scheduler_config, device);
+        DeviceConfig device_config(core, scheduler_config, device, plugin_config);
 
         apply_paged_attention_transformations(model, device_config);
diff --git a/src/cpp/src/device_config.hpp b/src/cpp/src/device_config.hpp
index f2ed5d424b..cb653ac514 100644
--- a/src/cpp/src/device_config.hpp
+++ b/src/cpp/src/device_config.hpp
@@ -20,7 +20,7 @@ class DeviceConfig {
     std::string m_device;
 
 public:
-    DeviceConfig(ov::Core& core, const SchedulerConfig& scheduling_config, const std::string& device) {
+    DeviceConfig(ov::Core& core, const SchedulerConfig& scheduling_config, const std::string& device, const ov::AnyMap& plugin_config = {}) {
         m_device = device;
 
         // keep information about blocsk
@@ -29,6 +29,20 @@ class DeviceConfig {
         if (m_device == "CPU") {
            auto inference_precision = core.get_property(device, ov::hint::inference_precision);
            m_kv_cache_type = inference_precision == ov::element::bf16 ? ov::element::bf16 : ov::element::f16;
+           // if the user sets the inference precision hint, the KV cache type should follow it
+           if (plugin_config.find(ov::hint::inference_precision.name()) != plugin_config.end()) {
+               const auto precision = plugin_config.at(ov::hint::inference_precision.name()).as<ov::element::Type>();
+               if (precision == ov::element::f32) {
+                   m_kv_cache_type = ov::element::f32;
+               } else if (precision == ov::element::f16) {
+                   m_kv_cache_type = ov::element::f16;
+               } else if (precision == ov::element::bf16) {
+                   m_kv_cache_type = ov::element::bf16;
+               } else {
+                   // fall back to the default f32
+                   m_kv_cache_type = ov::element::f32;
+               }
+           }
        } else if (m_device == "GPU") {
            OPENVINO_ASSERT("GPU is not currently supported. Please, remove this assert and fill configuration");
        } else {
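
For reference, a standalone sketch of the KV cache precision selection introduced above. It uses only the public OpenVINO runtime API (ov::AnyMap, ov::hint::inference_precision, ov::element); the pipeline wiring is omitted, and the program is illustrative only, not part of the patch:

#include <iostream>
#include <openvino/openvino.hpp>

int main() {
    // Plugin configuration as a user of ContinuousBatchingPipeline might pass it
    // (the exact public constructor overload is not shown in this patch).
    ov::AnyMap plugin_config = {
        {ov::hint::inference_precision.name(), ov::element::f32}
    };

    // Default CPU KV cache type when no hint is given (bf16 is used only when the
    // CPU plugin reports bf16 inference precision, mirroring DeviceConfig).
    ov::element::Type kv_cache_type = ov::element::f16;

    auto it = plugin_config.find(ov::hint::inference_precision.name());
    if (it != plugin_config.end()) {
        const auto precision = it->second.as<ov::element::Type>();
        // f16 and bf16 are honoured as-is; everything else (including f32) maps to f32.
        kv_cache_type = (precision == ov::element::f16 || precision == ov::element::bf16)
                            ? precision
                            : ov::element::f32;
    }

    std::cout << "KV cache precision: " << kv_cache_type << std::endl;
    return 0;
}

With this change, a CPU user can force an f32 KV cache by passing {ov::hint::inference_precision.name(), ov::element::f32} in plugin_config; precisions other than f16/bf16/f32 fall back to f32.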