@@ -53,6 +53,94 @@ const std::string EMBEDDINGS_SESSION_SIDE_PACKET_TAG = "EMBEDDINGS_NODE_RESOURCE
 using InputDataType = ovms::HttpPayload;
 using OutputDataType = std::string;

+static void dumpTensorToFile(const ov::Tensor& tensor, const std::string& tensor_name, const std::string& filename) {
+    // if log level is trace
+    if (!embeddings_calculator_logger->should_log(spdlog::level::trace)) {
+        return;
+    }
+
+    std::ofstream output_file(filename);
+    if (!output_file.is_open()) {
+        SPDLOG_LOGGER_ERROR(embeddings_calculator_logger, "Failed to open file {} for writing tensor data", filename);
+        return;
+    }
+
+    const auto& shape = tensor.get_shape();
+
+    // Write tensor name and shape information
+    output_file << "Tensor '" << tensor_name << "' shape: [";
+    for (size_t i = 0; i < shape.size(); ++i) {
+        output_file << shape[i];
+        if (i < shape.size() - 1)
+            output_file << ", ";
+    }
+    output_file << "]\n";
+    output_file << "Tensor data:\n";
+
+    if (shape.size() == 2) {
+        // Handle 2D tensors
+        for (size_t i = 0; i < shape[0]; ++i) {
+            for (size_t j = 0; j < shape[1]; ++j) {
+                if (tensor.get_element_type() == ov::element::i32) {
+                    output_file << reinterpret_cast<const int32_t*>(tensor.data())[i * shape[1] + j] << "\t";
+                } else if (tensor.get_element_type() == ov::element::i64) {
+                    output_file << reinterpret_cast<const int64_t*>(tensor.data())[i * shape[1] + j] << "\t";
+                } else if (tensor.get_element_type() == ov::element::f32) {
+                    output_file << reinterpret_cast<const float*>(tensor.data())[i * shape[1] + j] << "\t";
+                } else {
+                    output_file << "unsupported_type\t";
+                }
+            }
+            output_file << "\n";
+        }
+    } else {
+        output_file << "Tensor shape not supported for dumping (dimensions: " << shape.size() << ")\n";
+    }
+}
+
+static void dumpTensorToTrace(const ov::Tensor& tensor, const std::string& tensor_name) {
+    // if log level is trace
+    if (!embeddings_calculator_logger->should_log(spdlog::level::trace)) {
+        return;
+    }
+
+    const auto& shape = tensor.get_shape();
+    std::ostringstream oss;
+
+    // Build shape string
+    oss << "Tensor '" << tensor_name << "' shape: [";
+    for (size_t i = 0; i < shape.size(); ++i) {
+        oss << shape[i];
+        if (i < shape.size() - 1)
+            oss << ", ";
+    }
+    oss << "]\nTensor data:\n";
+
+    if (shape.size() == 2) {
+        // Handle 2D tensors
+        for (size_t i = 0; i < shape[0]; ++i) {
+            for (size_t j = 0; j < shape[1]; ++j) {
+                if (tensor.get_element_type() == ov::element::i32) {
+                    oss << reinterpret_cast<const int32_t*>(tensor.data())[i * shape[1] + j];
+                } else if (tensor.get_element_type() == ov::element::i64) {
+                    oss << reinterpret_cast<const int64_t*>(tensor.data())[i * shape[1] + j];
+                } else if (tensor.get_element_type() == ov::element::f32) {
+                    oss << reinterpret_cast<const float*>(tensor.data())[i * shape[1] + j];
+                } else {
+                    oss << "unsupported_type";
+                }
+                if (j < shape[1] - 1)
+                    oss << " ";
+            }
+            oss << "\n";
+        }
+    } else {
+        oss << "Tensor shape not supported for tracing (dimensions: " << shape.size() << ")\n";
+    }
+
+    SPDLOG_LOGGER_TRACE(embeddings_calculator_logger, "{}", oss.str());
+}
+
 class EmbeddingsCalculatorOV : public CalculatorBase {
     static const std::string INPUT_TAG_NAME;
     static const std::string OUTPUT_TAG_NAME;
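
For reference, a minimal standalone sketch of the file-dump idea added above: it fills a small 2x3 i64 ov::Tensor and writes it as a tab-separated grid with std::ofstream. The tensor contents and the output path "tensor_dump.txt" are illustrative only, not part of the patch.

#include <openvino/runtime/tensor.hpp>
#include <fstream>
#include <numeric>

int main() {
    // Small example tensor filled with 0..5 (assumed data, for illustration).
    ov::Tensor tensor(ov::element::i64, ov::Shape{2, 3});
    std::iota(tensor.data<int64_t>(), tensor.data<int64_t>() + tensor.get_size(), 0);

    // Write the tensor row by row, tab-separated, like dumpTensorToFile does for 2D tensors.
    std::ofstream out("tensor_dump.txt");
    const auto& shape = tensor.get_shape();
    for (size_t i = 0; i < shape[0]; ++i) {
        for (size_t j = 0; j < shape[1]; ++j)
            out << tensor.data<int64_t>()[i * shape[1] + j] << '\t';
        out << '\n';
    }
    return 0;
}
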
@@ -127,6 +215,14 @@ class EmbeddingsCalculatorOV : public CalculatorBase {
         try {
             auto input = handler.getInput();
             if (auto strings = std::get_if<std::vector<std::string>>(&input)) {
+                if (embeddings_calculator_logger->should_log(spdlog::level::trace)) {
+                    std::ostringstream oss;
+                    oss << "Received " << strings->size() << " strings:\n";
+                    for (const auto& str : *strings) {
+                        oss << "[" << str << "]\n";
+                    }
+                    SPDLOG_INFO("{}", oss.str());
+                }
                 received_batch_size = strings->size();
                 ov::AnyMap params = {};
                 if (cc->Options<EmbeddingsCalculatorOVOptions>().truncate()) {
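
The block added in this hunk builds the full per-request message only when trace logging is enabled. A self-contained sketch of that guard pattern, using spdlog's default logger in place of embeddings_calculator_logger (an assumption made so the snippet compiles on its own):

#include <spdlog/spdlog.h>
#include <sstream>
#include <string>
#include <vector>

int main() {
    spdlog::set_level(spdlog::level::trace);  // without this, the guarded block below is skipped

    std::vector<std::string> strings{"first prompt", "second prompt"};  // stand-in request payload
    if (spdlog::default_logger()->should_log(spdlog::level::trace)) {
        // String building only happens once the level check has passed.
        std::ostringstream oss;
        oss << "Received " << strings.size() << " strings:\n";
        for (const auto& str : strings)
            oss << "[" << str << "]\n";
        spdlog::trace("{}", oss.str());
    }
    return 0;
}
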
@@ -207,9 +303,17 @@ class EmbeddingsCalculatorOV : public CalculatorBase {
             auto executingStreamIdGuard = std::make_unique<ExecutingStreamIdGuard>(embeddings_session->getInferRequestsQueue(), unused);
             ov::InferRequest& inferRequest = executingStreamIdGuard->getInferRequest();
             inferRequest.set_tensor(EMBEDDINGS_MODEL_INPUT_IDS_NAME, tokens.input_ids);
+            dumpTensorToTrace(tokens.input_ids, EMBEDDINGS_MODEL_INPUT_IDS_NAME);
+            dumpTensorToFile(tokens.input_ids, EMBEDDINGS_MODEL_INPUT_IDS_NAME, "input_ids_tensor.txt");
+
             inferRequest.set_tensor(EMBEDDINGS_MODEL_ATTENTION_MASK_NAME, tokens.attention_mask);
+            dumpTensorToTrace(tokens.attention_mask, EMBEDDINGS_MODEL_ATTENTION_MASK_NAME);
+            dumpTensorToFile(tokens.attention_mask, EMBEDDINGS_MODEL_ATTENTION_MASK_NAME, "attention_mask_tensor.txt");
+
             if (embeddings_session->getNumberOfModelInputs() == 3) {
                 inferRequest.set_tensor(EMBEDDINGS_MODEL_TOKEN_TYPE_IDS_NAME, typeIds);
+                dumpTensorToTrace(typeIds, EMBEDDINGS_MODEL_TOKEN_TYPE_IDS_NAME);
+                dumpTensorToFile(typeIds, EMBEDDINGS_MODEL_TOKEN_TYPE_IDS_NAME, "token_type_ids_tensor.txt");
             }
             inferRequest.start_async();
             inferRequest.wait();
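
For context, a minimal sketch of the bare set_tensor / start_async / wait flow instrumented in this hunk, assuming a hypothetical embedding model at "model.xml" with i64 inputs named "input_ids" and "attention_mask"; this is not the server's code path, just the underlying OpenVINO calls:

#include <openvino/openvino.hpp>
#include <algorithm>

int main() {
    ov::Core core;
    // Compile the (assumed) model for CPU and create an inference request.
    ov::CompiledModel compiled = core.compile_model("model.xml", "CPU");
    ov::InferRequest request = compiled.create_infer_request();

    // Dummy token batch of shape [1, 4]; real values would come from the tokenizer.
    ov::Tensor input_ids(ov::element::i64, ov::Shape{1, 4});
    ov::Tensor attention_mask(ov::element::i64, ov::Shape{1, 4});
    std::fill_n(input_ids.data<int64_t>(), input_ids.get_size(), 1);
    std::fill_n(attention_mask.data<int64_t>(), attention_mask.get_size(), 1);

    // Bind inputs, run asynchronously, and wait for completion.
    request.set_tensor("input_ids", input_ids);
    request.set_tensor("attention_mask", attention_mask);
    request.start_async();
    request.wait();

    ov::Tensor embeddings = request.get_output_tensor();  // model output to be post-processed
    return 0;
}
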