
Commit f73e666

bugfix: handle failure in OpenRAO when it returns a failure status

1 parent: 77340a7

File tree: 4 files changed, +66 -27 lines changed

integrations/rmq.py

Lines changed: 6 additions & 4 deletions
@@ -2,6 +2,7 @@
 import time
 import pika
 import config
+import traceback
 from typing import List
 from concurrent.futures import ThreadPoolExecutor
 from loguru import logger
@@ -375,9 +376,9 @@ def _process_messages(self, basic_deliver, properties, body):
             properties.content_type = content_type
             logger.info(f"Message converted")
         except Exception as error:
-            logger.error(f"Message conversion failed: {error}", exc_info=True)
+            logger.error(f"Message conversion failed: {error}\n{traceback.format_exc()}")
             ack = False
-            self._channel.basic_reject(basic_deliver.delivery_tag, requeue=True)
+            self._channel.basic_reject(basic_deliver.delivery_tag, requeue=False)
             # self.connection.close()
             # self.stop()
@@ -388,9 +389,10 @@ def _process_messages(self, basic_deliver, properties, body):
             logger.info(f"Handling message with handler: {message_handler.__class__.__name__}")
             body, properties = message_handler.handle(body, properties=properties, channel=self._channel)
         except Exception as error:
-            logger.error(f"Message handling failed: {error}", exc_info=True)
+            logger.error(f"Message handling failed: {error}\n{traceback.format_exc()}")
+            logger.exception("Message handling failed, see traceback in document")
             ack = False
-            self._channel.basic_reject(basic_deliver.delivery_tag, requeue=True)
+            self._channel.basic_reject(basic_deliver.delivery_tag, requeue=False)
             # self.connection.close()
             # self.stop()
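For context, a minimal sketch of the error path these hunks introduce: the traceback is formatted explicitly and the message is rejected with requeue=False, so a poison message is not redelivered in an endless loop. The callback shape and the process() stand-in below are illustrative assumptions, not the repository's actual code.

import traceback

from loguru import logger


def process(body: bytes) -> None:
    # Stand-in for the real converter/handler call; raises to show the error path
    raise ValueError("conversion failed")


def on_message(channel, basic_deliver, properties, body):
    # Hypothetical pika-style callback mirroring the new behaviour
    try:
        process(body)
    except Exception as error:
        # Log the full traceback explicitly instead of passing exc_info
        logger.error(f"Message handling failed: {error}\n{traceback.format_exc()}")
        # requeue=False drops the message (or routes it to a dead-letter
        # exchange, if one is configured) instead of redelivering it forever
        channel.basic_reject(basic_deliver.delivery_tag, requeue=False)
        return
    channel.basic_ack(basic_deliver.delivery_tag)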

rao/handlers.py

Lines changed: 6 additions & 0 deletions
@@ -262,6 +262,12 @@ def handle(self, message: bytes, properties: object, **kwargs):

             # Serialize results to json
             results = optimizer.results.to_json()
+            if results['computationStatus'] == 'failure':
+                logger.error(f"Optimizer failed computation: {results}")
+                logger.error(f"Enable pypowsybl logs for more information")
+                continue
+
+            # Check if there are any optimized remedial actions
             if not results['networkActionResults'] and not results['rangeActionResults']:
                 logger.warning(f"No possible actions proposed by optimizer")
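The guard above checks the computation status in the serialized OpenRAO result before doing any downstream work, and the `continue` skips the failed run. Purely as an illustration, a self-contained sketch of the same guard applied to a list of result dictionaries (the function name and the list argument are assumptions; the keys come from the diff):

from loguru import logger


def filter_successful(results_list: list[dict]) -> list[dict]:
    # Keep only RAO results whose computation finished successfully
    successful = []
    for results in results_list:  # each item: optimizer.results.to_json()
        if results['computationStatus'] == 'failure':
            logger.error(f"Optimizer failed computation: {results}")
            logger.error("Enable pypowsybl logs for more information")
            continue
        if not results['networkActionResults'] and not results['rangeActionResults']:
            logger.warning("No possible actions proposed by optimizer")
        successful.append(results)
    return successful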

remedial_action_schedules/config.properties

Lines changed: 1 addition & 1 deletion
@@ -3,5 +3,5 @@ RMQ_QUEUE_IN = object-storage.schedules.csa

 [HANDLER]
 CONVERTER_KEY_MODE = local
-ELASTIC_SCHEDULES_INDEX = rao-results
+ELASTIC_SCHEDULES_INDEX = csa-remedial-action-schedules
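The renamed index is read from config.properties at start-up. A minimal sketch using the standard library configparser, assuming the project's config module loads the file roughly this way (the actual loader may differ):

from configparser import ConfigParser

parser = ConfigParser()
parser.read("remedial_action_schedules/config.properties")

# Section and key as they appear in the file above
ELASTIC_SCHEDULES_INDEX = parser.get("HANDLER", "ELASTIC_SCHEDULES_INDEX")
print(ELASTIC_SCHEDULES_INDEX)  # -> csa-remedial-action-schedules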

remedial_action_schedules/handlers.py

Lines changed: 53 additions & 22 deletions
@@ -1,5 +1,6 @@
 from pika import BasicProperties
 import uuid
+import json
 from datetime import datetime
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Set, Tuple, Union
@@ -326,13 +327,16 @@ def handle(self, message: bytes, properties: BasicProperties, **kwargs):
         # JSON normalize and transform to DataFrame
         df = self.normalize_cim_payload(data)

+        # TODO need to get CO and RA from object storage and merge
+
         # Convert to dictionary
         data_to_send = df.to_dict(orient='records')

-        response = self.elastic_service.send_to_elastic(
+        response = self.elastic_service.send_to_elastic_bulk(
             index=ELASTIC_METADATA_INDEX,
-            json_message=data_to_send,
-            id=metadata_object.get('identifier', None)
+            json_message_list=data_to_send,
+            id_from_metadata=True,
+            id_metadata_list=["@id"],
         )

         logger.info(f"Message sending to Elastic successful: {response}")
@@ -341,23 +345,50 @@ def handle(self, message: bytes, properties: BasicProperties, **kwargs):


 if __name__ == "__main__":
-    rdf_xml = r"C:\Users\martynas.karobcikas\Downloads\ras-example.xml"
-    # rdf_xml = r"C:\Users\martynas.karobcikas\Documents\Python projects\RAO\test-data\TC1_assessed_elements.xml"
-    # rdf_xml = r"C:\Users\martynas.karobcikas\Documents\Python projects\RAO\test-data\TC1_contingencies.xml"
-    # rdf_xml = r"C:\Users\martynas.karobcikas\Documents\Python projects\RAO\test-data\TC1_remedial_actions.xml"
-    g = Graph()
-    g.parse(rdf_xml, format="xml")  # your RDF/XML file
-
-    result = convert_cim_rdf_to_json(rdf_xml, root_class=["RemedialActionSchedule"], key_mode='local')
-    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["RemedialActionSchedule"], key_mode='qualified')
-    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["GridStateAlterationRemedialAction"], key_mode='local')
-    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["OrdinaryContingency", "ExceptionalContingency"], key_mode='local')
-
-    import json
-    print(json.dumps(result, indent=2))
-
-    with open("test.json", "w") as f:
-        json.dump(result, f, ensure_ascii=False, indent=4)
+    # rdf_xml = r"C:\Users\martynas.karobcickas\Downloads\ras-example.xml"
+    # rdf_xml = r"C:\Users\martynas.karobcickas\Documents\Python projects\RAO\test-data\TC1_assessed_elements.xml"
+    # rdf_xml = r"C:\Users\martynas.karobcickas\Documents\Python projects\RAO\test-data\TC1_contingencies.xml"
+    # rdf_xml = r"C:\Users\martynas.karobcickas\Documents\Python projects\RAO\test-data\TC1_remedial_actions.xml"
+    # g = Graph()
+    # g.parse(rdf_xml, format="xml")  # Put your RDF/XML file
+
+    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["RemedialActionSchedule"], key_mode="local")
+    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["RemedialActionSchedule"], key_mode="qualified")
+    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["GridStateAlterationRemedialAction"], key_mode="local")
+    # result = convert_cim_rdf_to_json(rdf_xml, root_class=["OrdinaryContingency", "ExceptionalContingency"], key_mode="local")
+
+    # print json
+    # print(json.dumps(result, indent=2))
+
+    # with open("test.json", "w") as f:
+    #     json.dump(result, f, ensure_ascii=False, indent=4)
+    # df = RemedialActionScheduleToElasticHandler.normalize_cim_payload(result)
+    # print(df.head())
+
+    # Define RMQ test message
+    headers = {
+        "baCorrelationID": f"{uuid.uuid4()}",
+        "baseMessageID": f"{uuid.uuid4()}",
+        "businessType": "CSA-INPUT",
+        "messageID": f"{uuid.uuid4()}",
+        "sendTimestamp": datetime.utcnow().isoformat(),
+        "sender": "TSOX",
+        "senderApplication": "APPX",
+        "service": "INPUT-DATA",
+    }

-    df = RemedialActionScheduleToElasticHandler.normalize_cim_payload(result)
-    print(df.head())
+    properties = BasicProperties(
+        content_type="application/octet-stream",
+        delivery_mode=2,
+        priority=4,
+        message_id=f"{uuid.uuid4()}",
+        timestamp=147728025,
+        headers=headers,
+    )
+
+    with open(r"C:\Users\martynas.karobcickas\Downloads\ras-example.xml", "rb") as file:
+        file_bytes = file.read()
+
+    # Create instance
+    service = RemedialActionScheduleToElasticHandler()
+    result = service.handle(message=file_bytes, properties=properties)
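send_to_elastic_bulk and its id_from_metadata/id_metadata_list parameters are project-internal, and their implementation is not part of this commit. Purely to illustrate the idea, a sketch with the official Elasticsearch Python client's bulk helper, deriving each document _id from the record's "@id" field so repeated sends overwrite rather than duplicate documents:

from typing import Dict, List, Tuple

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk


def send_records_bulk(client: Elasticsearch, index: str, records: List[Dict]) -> Tuple[int, list]:
    # Build one index action per record, reusing the CIM '@id' as the document id
    actions = (
        {
            "_index": index,
            "_id": record.get("@id"),
            "_source": record,
        }
        for record in records
    )
    # bulk() returns (number of successful actions, list of errors)
    return bulk(client, actions)

The rewritten __main__ block calls the handler directly with a synthetic BasicProperties object. To exercise the full pipeline instead, the same payload could be published to the consumer's input queue (RMQ_QUEUE_IN = object-storage.schedules.csa in the config above). A sketch with pika's BlockingConnection; the broker host and the use of the default exchange are assumptions:

import pika

# Hypothetical local broker; in reality host/credentials come from the project's config
connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()

# Publish to the consumer's input queue via the default exchange
channel.basic_publish(
    exchange="",
    routing_key="object-storage.schedules.csa",
    body=file_bytes,          # bytes read from the RDF/XML example file above
    properties=properties,    # the BasicProperties built in the __main__ block
)
connection.close()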
