# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 Contributors to OpenLEADR
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime, timedelta, timezone
from dataclasses import is_dataclass, asdict
from collections import OrderedDict
from openleadr import enums, objects
import asyncio
import re
import ssl
import hashlib
import uuid
import logging
import functools

logger = logging.getLogger('openleadr')

DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
DATETIME_FORMAT_NO_MICROSECONDS = "%Y-%m-%dT%H:%M:%SZ"


def generate_id(*args, **kwargs):
    """
    Generate a string that can be used as an identifier in OpenADR messages.
    """
    return str(uuid.uuid4())


def flatten_xml(message):
    """
    Flatten the entire XML structure.
    """
    lines = [line.strip() for line in message.split("\n") if line.strip() != ""]
    # Collapse runs of whitespace inside each line before joining.
    lines = [re.sub(r'\s\s+', ' ', line) for line in lines]
    return "".join(lines)


def normalize_dict(ordered_dict):
    """
    Main conversion function for the output of xmltodict to the OpenLEADR
    representation of OpenADR contents.

    :param ordered_dict dict: The OrderedDict, dict or dataclass that you wish to convert.
    """
    if is_dataclass(ordered_dict):
        ordered_dict = asdict(ordered_dict)

    def normalize_key(key):
        if key.startswith('oadr'):
            key = key[4:]
        elif key.startswith('ei'):
            key = key[2:]
        # Don't normalize the measurement descriptions
        if key in enums._MEASUREMENT_NAMESPACES:
            return key
        key = re.sub(r'([a-z])([A-Z])', r'\1_\2', key)
        if '-' in key:
            key = key.replace('-', '_')
        return key.lower()

    d = {}
    for key, value in ordered_dict.items():
        # Interpret values from the dict
        if key.startswith("@"):
            continue
        key = normalize_key(key)

        if isinstance(value, (OrderedDict, dict)):
            d[key] = normalize_dict(value)
        elif isinstance(value, list):
            d[key] = []
            for item in value:
                if isinstance(item, (OrderedDict, dict)):
                    dict_item = normalize_dict(item)
                    d[key].append(dict_item)
                else:
                    d[key].append(item)
        elif key in ("duration", "startafter", "max_period", "min_period"):
            d[key] = parse_duration(value)
        elif ("date_time" in key or key == "dtstart") and isinstance(value, str):
            d[key] = parse_datetime(value)
        elif value in ('true', 'false'):
            d[key] = parse_boolean(value)
        elif isinstance(value, str):
            if re.match(r'^-?\d+$', value):
                d[key] = int(value)
            elif re.match(r'^-?[\d.]+$', value):
                d[key] = float(value)
            else:
                d[key] = value
        else:
            d[key] = value

        # Do our best to make the dictionary structure as pythonic as possible
        if key.startswith("x_ei_"):
            d[key[5:]] = d.pop(key)
            key = key[5:]

        # Group all targets as a list of dicts under the key "target"
        if key == 'target':
            targets = d.pop(key)
            new_targets = []
            if targets:
                for ikey in targets:
                    if isinstance(targets[ikey], list):
                        new_targets.extend([{ikey: value} for value in targets[ikey]])
                    else:
                        new_targets.append({ikey: targets[ikey]})
            d[key + "s"] = new_targets
            key = key + "s"

            # Also add a targets_by_type element to this dict
            # to access the targets in a more convenient way.
            d['targets_by_type'] = group_targets_by_type(new_targets)

        # Group all reports as a list of dicts under the key "pending_reports"
        if key == "pending_reports":
            if isinstance(d[key], dict) and 'report_request_id' in d[key] \
                    and isinstance(d[key]['report_request_id'], list):
                d['pending_reports'] = [{'request_id': rrid}
                                        for rrid in d['pending_reports']['report_request_id']]

        # Group all events as a list of dicts under the key "events"
        elif key == "event" and isinstance(d[key], list):
            events = d.pop("event")
            new_events = []
            for event in events:
                new_event = event['event']
                new_event['response_required'] = event['response_required']
                new_events.append(new_event)
            d["events"] = new_events

        # If there's only one event, also put it into a list
        elif key == "event" and isinstance(d[key], dict) and "event" in d[key]:
            oadr_event = d.pop('event')
            ei_event = oadr_event['event']
            ei_event['response_required'] = oadr_event['response_required']
            d['events'] = [ei_event]

        elif key in ("request_event", "created_event") and isinstance(d[key], dict):
            d = d[key]

        # Pluralize some lists
        elif key in ('report_request', 'report', 'specifier_payload'):
            if isinstance(d[key], list):
                d[key + 's'] = d.pop(key)
            else:
                d[key + 's'] = [d.pop(key)]

        elif key in ('report_description', 'event_signal'):
            descriptions = d.pop(key)
            if not isinstance(descriptions, list):
                descriptions = [descriptions]
            for description in descriptions:
                # We want to make the identification of the measurement universal
                for measurement in enums._MEASUREMENT_NAMESPACES:
                    if measurement in description:
                        name, item = measurement, description.pop(measurement)
                        break
                else:
                    break
                item['description'] = item.pop('item_description', None)
                item['unit'] = item.pop('item_units', None)
                if 'si_scale_code' in item:
                    item['scale'] = item.pop('si_scale_code')
                if 'pulse_factor' in item:
                    item['pulse_factor'] = item.pop('pulse_factor')
                description['measurement'] = {'name': name,
                                              **item}
            d[key + 's'] = descriptions

        # Promote the contents of the Qualified Event ID
        elif key == "qualified_event_id" and isinstance(d['qualified_event_id'], dict):
            qeid = d.pop('qualified_event_id')
            d['event_id'] = qeid['event_id']
            d['modification_number'] = qeid['modification_number']

        # Durations are encapsulated in their own object, remove this nesting
        elif isinstance(d[key], dict) and "duration" in d[key] and len(d[key]) == 1:
            d[key] = d[key]["duration"]

        # In general, remove all double nesting
        elif isinstance(d[key], dict) and key in d[key] and len(d[key]) == 1:
            d[key] = d[key][key]

        # In general, remove the double nesting of lists of items
        elif isinstance(d[key], dict) and key[:-1] in d[key] and len(d[key]) == 1:
            if isinstance(d[key][key[:-1]], list):
                d[key] = d[key][key[:-1]]
            else:
                d[key] = [d[key][key[:-1]]]

        # Payload values are wrapped in an object according to their type. We don't need that.
        elif key in ("signal_payload", "current_value"):
            value = d[key]
            if isinstance(d[key], dict):
                if 'payload_float' in d[key] and 'value' in d[key]['payload_float'] \
                        and d[key]['payload_float']['value'] is not None:
                    d[key] = float(d[key]['payload_float']['value'])
                elif 'payload_int' in d[key] and 'value' in d[key]['payload_int'] \
                        and d[key]['payload_int']['value'] is not None:
                    d[key] = int(d[key]['payload_int']['value'])

        # Report payloads contain an r_id and a type-wrapped payload_float
        elif key == 'report_payload':
            if 'payload_float' in d[key] and 'value' in d[key]['payload_float']:
                v = d[key].pop('payload_float')
                d[key]['value'] = float(v['value'])
            elif 'payload_int' in d[key] and 'value' in d[key]['payload_int']:
                v = d[key].pop('payload_int')
                d[key]['value'] = int(v['value'])

        # All values other than 'false' must be interpreted as True for testEvent (rule 006)
        elif key == 'test_event' and not isinstance(d[key], bool):
            d[key] = True

        # Promote the 'text' item
        elif isinstance(d[key], dict) and "text" in d[key] and len(d[key]) == 1:
            if key == 'uid':
                d[key] = int(d[key]["text"])
            else:
                d[key] = d[key]["text"]

        # Promote a 'date-time' item
        elif isinstance(d[key], dict) and "date_time" in d[key] and len(d[key]) == 1:
            d[key] = d[key]["date_time"]

        # Promote 'properties' item, discard the unused 'components' item
        elif isinstance(d[key], dict) and "properties" in d[key] and len(d[key]) <= 2:
            d[key] = d[key]["properties"]

        # Remove all empty dicts
        elif isinstance(d[key], dict) and len(d[key]) == 0:
            d.pop(key)
    return d
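
# Example (illustrative, not part of the library): a minimal xmltodict-style payload.
# Keys lose their 'oadr'/'ei' prefixes, become snake_case, and numeric strings become ints:
#   normalize_dict({'oadrResponse': {'eiResponseCode': '200'}})
#   # -> {'response': {'response_code': 200}}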


def parse_datetime(value):
    """
    Parse an ISO8601 datetime into a datetime.datetime object.
    """
    matches = re.match(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.?(\d{1,6})?\d*Z', value)
    if matches:
        year, month, day, hour, minute, second = (int(g) for g in matches.groups()[:-1])
        micro = matches.groups()[-1]
        if micro is None:
            micro = 0
        else:
            micro = int(micro + "0" * (6 - len(micro)))
        return datetime(year, month, day, hour, minute, second, micro, tzinfo=timezone.utc)
    else:
        logger.warning(f"parse_datetime: {value} did not match format")
        return value
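
# Example (illustrative):
#   parse_datetime("2021-01-31T12:00:00.123456Z")
#   # -> datetime(2021, 1, 31, 12, 0, 0, 123456, tzinfo=timezone.utc)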


def parse_duration(value):
    """
    Parse a RFC5545 duration.
    """
    if isinstance(value, timedelta):
        return value
    regex = r'(\+|\-)?P(?:(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)D)?T?(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?)|(?:(\d+)W)'
    matches = re.match(regex, value)
    if not matches:
        raise ValueError(f"The duration '{value}' did not match the requested format")
    years, months, days, hours, minutes, seconds, weeks = (int(g) if g else 0 for g in matches.groups()[1:])
    if years != 0:
        logger.warning("Received a duration that specifies years, which is not a determinate duration. "
                       "It will be interpreted as 1 year = 365 days.")
        days = days + 365 * years
    if months != 0:
        logger.warning("Received a duration that specifies months, which is not a determinate duration. "
                       "It will be interpreted as 1 month = 30 days.")
        days = days + 30 * months
    duration = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds)
    if matches.groups()[0] == "-":
        duration = -1 * duration
    return duration
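
# Example (illustrative):
#   parse_duration("PT1H30M")   # -> timedelta(hours=1, minutes=30)
#   parse_duration("-P1DT12H")  # -> -timedelta(days=1, hours=12)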


def parse_boolean(value):
    if value == 'true':
        return True
    else:
        return False


def datetimeformat(value, format=DATETIME_FORMAT):
    """
    Format a given datetime as a UTC RFC 3339 (ISO 8601) string.
    """
    if not isinstance(value, datetime):
        return value
    return value.astimezone(timezone.utc).strftime(format)


def timedeltaformat(value):
    """
    Format a timedelta to a RFC5545 Duration.
    """
    if not isinstance(value, timedelta):
        return value
    days = value.days
    hours, seconds = divmod(value.seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    formatted = "P"
    if days:
        formatted += f"{days}D"
    if hours or minutes or seconds:
        formatted += "T"
    if hours:
        formatted += f"{hours}H"
    if minutes:
        formatted += f"{minutes}M"
    if seconds:
        formatted += f"{seconds}S"
    return formatted
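
# Example (illustrative): the two formatters above complement the parsers:
#   datetimeformat(datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc))
#   # -> '2021-01-01T12:00:00.000000Z'
#   timedeltaformat(timedelta(hours=1, minutes=30))
#   # -> 'PT1H30M'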


def booleanformat(value):
    """
    Format a boolean value
    """
    if isinstance(value, bool):
        if value is True:
            return "true"
        elif value is False:
            return "false"
    elif value in ("true", "false"):
        return value
    else:
        raise ValueError(f"A boolean value must be provided, not {value}.")


def ensure_bytes(obj):
    """
    Converts a utf-8 str object to bytes.
    """
    if obj is None:
        return obj
    if isinstance(obj, bytes):
        return obj
    if isinstance(obj, str):
        return bytes(obj, 'utf-8')
    else:
        raise TypeError("Must be bytes or str")


def ensure_str(obj):
    """
    Converts bytes to a utf-8 string.
    """
    if obj is None:
        return None
    if isinstance(obj, str):
        return obj
    if isinstance(obj, bytes):
        return obj.decode('utf-8')
    else:
        raise TypeError("Must be bytes or str")


def certificate_fingerprint_from_der(der_bytes):
    hash = hashlib.sha256(der_bytes).digest().hex()
    return ":".join([hash[i-2:i].upper() for i in range(-20, 0, 2)])


def certificate_fingerprint(certificate_str):
    """
    Calculate the fingerprint for the given certificate, as defined by OpenADR.
    """
    der_bytes = ssl.PEM_cert_to_DER_cert(ensure_str(certificate_str))
    return certificate_fingerprint_from_der(der_bytes)
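
# Example (illustrative): given a PEM-encoded certificate string `pem_cert`,
# certificate_fingerprint(pem_cert) returns a colon-separated, upper-case hex
# fingerprint (e.g. 'AB:CD:...') derived from the SHA-256 hash of the DER bytes.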


def extract_pem_cert(tree):
    """
    Extract a given X509 certificate inside an XML tree and return the standard
    form of a PEM-encoded certificate.

    :param tree lxml.etree: The tree that contains the X509 element. This is
                            usually the KeyInfo element from the XMLDsig Signature
                            part of the message.
    """
    cert = tree.find('.//{http://www.w3.org/2000/09/xmldsig#}X509Certificate').text
    return "-----BEGIN CERTIFICATE-----\n" + cert + "-----END CERTIFICATE-----\n"


def find_by(dict_or_list, key, value, *args):
    """
    Find a dict inside a dict or list by key, value properties.
    """
    search_params = [(key, value)]
    if args:
        search_params += [(args[i], args[i+1]) for i in range(0, len(args), 2)]
    if isinstance(dict_or_list, dict):
        dict_or_list = dict_or_list.values()
    for item in dict_or_list:
        if not isinstance(item, dict):
            _item = item.__dict__
        else:
            _item = item
        for key, value in search_params:
            if isinstance(value, tuple):
                if key not in _item or _item[key] not in value:
                    break
            else:
                if key not in _item or _item[key] != value:
                    break
        else:
            return item
    else:
        return None
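
# Example (illustrative):
#   find_by([{'name': 'a', 'id': 1}, {'name': 'b', 'id': 2}], 'name', 'b')
#   # -> {'name': 'b', 'id': 2}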


def group_by(list_, key, pop_key=False):
    """
    Return a dict that groups the items of a list by the given (possibly dotted) key path.
    """
    grouped = {}
    key_path = key.split(".")
    for item in list_:
        value = item
        for key in key_path:
            value = value.get(key)
        if value not in grouped:
            grouped[value] = []
        grouped[value].append(item)
    return grouped
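
# Example (illustrative): group report values by their r_id.
#   group_by([{'r_id': 'x', 'v': 1}, {'r_id': 'y', 'v': 2}, {'r_id': 'x', 'v': 3}], 'r_id')
#   # -> {'x': [{'r_id': 'x', 'v': 1}, {'r_id': 'x', 'v': 3}], 'y': [{'r_id': 'y', 'v': 2}]}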


def cron_config(interval, randomize_seconds=False):
    """
    Returns a dict with cron settings for the given interval
    """
    if interval < timedelta(minutes=1):
        second = f"*/{interval.seconds}"
        minute = "*"
        hour = "*"
    elif interval < timedelta(hours=1):
        second = "0"
        minute = f"*/{int(interval.total_seconds()/60)}"
        hour = "*"
    elif interval < timedelta(hours=24):
        second = "0"
        minute = "0"
        hour = f"*/{int(interval.total_seconds()/3600)}"
    else:
        second = "0"
        minute = "0"
        hour = "0"
    cron_config = {"second": second, "minute": minute, "hour": hour}
    if randomize_seconds:
        jitter = min(int(interval.total_seconds() / 10), 300)
        cron_config['jitter'] = jitter
    return cron_config
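
# Example (illustrative):
#   cron_config(timedelta(minutes=5))
#   # -> {'second': '0', 'minute': '*/5', 'hour': '*'}
#   cron_config(timedelta(minutes=5), randomize_seconds=True)
#   # -> {'second': '0', 'minute': '*/5', 'hour': '*', 'jitter': 30}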


def get_cert_fingerprint_from_request(request):
    ssl_object = request.transport.get_extra_info('ssl_object')
    if ssl_object:
        der_bytes = ssl_object.getpeercert(binary_form=True)
        if der_bytes:
            return certificate_fingerprint_from_der(der_bytes)


def group_targets_by_type(list_of_targets):
    targets_by_type = {}
    for target in list_of_targets:
        for key, value in target.items():
            if value is None:
                continue
            if key not in targets_by_type:
                targets_by_type[key] = []
            targets_by_type[key].append(value)
    return targets_by_type


def ungroup_targets_by_type(targets_by_type):
    ungrouped_targets = []
    for target_type, targets in targets_by_type.items():
        if isinstance(targets, list):
            for target in targets:
                ungrouped_targets.append({target_type: target})
        elif isinstance(targets, str):
            ungrouped_targets.append({target_type: targets})
    return ungrouped_targets
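
# Example (illustrative): the two helpers above are inverses of each other.
#   group_targets_by_type([{'ven_id': 'ven1'}, {'ven_id': 'ven2'}, {'group_id': 'group1'}])
#   # -> {'ven_id': ['ven1', 'ven2'], 'group_id': ['group1']}
#   ungroup_targets_by_type({'ven_id': ['ven1', 'ven2'], 'group_id': ['group1']})
#   # -> [{'ven_id': 'ven1'}, {'ven_id': 'ven2'}, {'group_id': 'group1'}]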


def validate_report_measurement_dict(measurement):
    from openleadr.enums import _ACCEPTABLE_UNITS, _MEASUREMENT_DESCRIPTIONS

    if 'name' not in measurement \
            or 'description' not in measurement \
            or 'unit' not in measurement:
        raise ValueError("The measurement dict must contain the following keys: "
                         "'name', 'description', 'unit'. Please correct this.")

    name = measurement['name']
    description = measurement['description']
    unit = measurement['unit']

    # Validate the item name and description match
    if name in _MEASUREMENT_DESCRIPTIONS:
        required_description = _MEASUREMENT_DESCRIPTIONS[name]
        if description != required_description:
            if description.lower() == required_description.lower():
                logger.warning(f"The description for the measurement with name '{name}' "
                               f"was not in the correct case; you provided '{description}' but "
                               f"it should be '{required_description}'. "
                               "This was automatically corrected.")
                measurement['description'] = required_description
            else:
                raise ValueError(f"The measurement's description '{description}' "
                                 f"did not match the expected description for this type "
                                 f"('{required_description}'). Please correct this, or use "
                                 "'customUnit' as the name.")
        if unit not in _ACCEPTABLE_UNITS[name]:
            raise ValueError(f"The unit '{unit}' is not acceptable for measurement '{name}'. Allowed "
                             f"units are: '" + "', '".join(_ACCEPTABLE_UNITS[name]) + "'.")
    else:
        if name != 'customUnit':
            logger.warning(f"You provided a measurement with an unknown name {name}. "
                           "This was corrected to 'customUnit'. Please correct this in your "
                           "report definition.")
            measurement['name'] = 'customUnit'

    if 'power' in name:
        if 'power_attributes' in measurement:
            power_attributes = measurement['power_attributes']
            if 'voltage' not in power_attributes \
                    or 'ac' not in power_attributes \
                    or 'hertz' not in power_attributes:
                raise ValueError("The power_attributes of the measurement must contain the "
                                 "following keys: 'voltage' (int), 'ac' (bool), 'hertz' (int).")
        else:
            raise ValueError("A 'power' related measurement must contain a "
                             "'power_attributes' section that contains the following "
                             "keys: 'voltage' (int), 'ac' (boolean), 'hertz' (int)")


def get_active_period_from_intervals(intervals, as_dict=True):
    if is_dataclass(intervals[0]):
        intervals = [asdict(i) for i in intervals]
    period_start = min([i['dtstart'] for i in intervals])
    period_duration = max([i['dtstart'] + i['duration'] - period_start for i in intervals])
    if as_dict:
        return {'dtstart': period_start,
                'duration': period_duration}
    else:
        from openleadr.objects import ActivePeriod
        return ActivePeriod(dtstart=period_start, duration=period_duration)
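
# Example (illustrative): two consecutive 10-minute intervals collapse into a single
# 20-minute active period that starts at the earliest dtstart.
#   get_active_period_from_intervals(
#       [{'dtstart': datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc), 'duration': timedelta(minutes=10)},
#        {'dtstart': datetime(2021, 1, 1, 12, 10, tzinfo=timezone.utc), 'duration': timedelta(minutes=10)}])
#   # -> {'dtstart': datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc), 'duration': timedelta(minutes=20)}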


def determine_event_status(active_period):
    now = datetime.now(timezone.utc)
    active_period_start = getmember(active_period, 'dtstart')
    if active_period_start.tzinfo is None:
        active_period_start = active_period_start.astimezone(timezone.utc)
        setmember(active_period, 'dtstart', active_period_start)
    active_period_end = active_period_start + getmember(active_period, 'duration')
    if now >= active_period_end:
        return 'completed'
    if now >= active_period_start:
        return 'active'
    if getmember(active_period, 'ramp_up_period') is not None:
        ramp_up_start = active_period_start - getmember(active_period, 'ramp_up_period')
        if now >= ramp_up_start:
            return 'near'
    return 'far'


async def delayed_call(func, delay):
    try:
        if isinstance(delay, timedelta):
            delay = delay.total_seconds()
        await asyncio.sleep(delay)
        if asyncio.iscoroutinefunction(func):
            await func()
        elif asyncio.iscoroutine(func):
            await func
        else:
            func()
    except asyncio.CancelledError:
        pass


def hasmember(obj, member):
    """
    Check if a dict or dataclass has the given member
    """
    if is_dataclass(obj):
        if hasattr(obj, member):
            return True
    else:
        if member in obj:
            return True
    return False


def getmember(obj, member, missing='_RAISE_'):
    """
    Get a member from a dict or dataclass
    """
    if is_dataclass(obj):
        if not missing == '_RAISE_' and not hasattr(obj, member):
            return missing
        else:
            return getattr(obj, member)
    else:
        if missing == '_RAISE_':
            return obj[member]
        else:
            return obj.get(member, missing)


def setmember(obj, member, value):
    """
    Set a member of a dict or dataclass
    """
    if is_dataclass(obj):
        setattr(obj, member, value)
    else:
        obj[member] = value
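
# Example (illustrative): these helpers give dicts and dataclasses a uniform interface.
#   getmember({'event_id': 'event001'}, 'event_id')              # -> 'event001'
#   getmember({'event_id': 'event001'}, 'priority', missing=0)   # -> 0
#   hasmember({'event_id': 'event001'}, 'priority')              # -> False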


def get_next_event_from_deque(deque):
    unused_elements = []
    event = None
    for i in range(len(deque)):
        msg = deque.popleft()
        if isinstance(msg, objects.Event) or (isinstance(msg, dict) and 'event_descriptor' in msg):
            event = msg
            break
        else:
            unused_elements.append(msg)
    deque.extend(unused_elements)
    return event
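
# Example (illustrative): messages that are not events are skipped and re-appended
# at the right-hand end of the deque.
#   q = collections.deque([{'report_request_id': 'rr1'}, {'event_descriptor': {'event_id': 'e1'}}])
#   get_next_event_from_deque(q)   # -> {'event_descriptor': {'event_id': 'e1'}}
#   # q now contains only the report message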


def validate_report_request_tuples(list_of_report_requests, full_mode=False):
    if len(list_of_report_requests) == 0:
        return
    for report_requests in list_of_report_requests:
        if report_requests is None:
            continue
        for i, rrq in enumerate(report_requests):
            if rrq is None:
                continue
            # Check if it is a tuple
            elif not isinstance(rrq, tuple):
                report_requests[i] = None
                if full_mode:
                    logger.error("Your on_register_report handler did not return a list of tuples. "
                                 f"The first item from the list was '{rrq}' ({rrq.__class__.__name__}).")
                else:
                    logger.error("Your on_register_report handler did not return a tuple. "
                                 f"It returned '{rrq}'. Please see the documentation for the correct format.")

            # Check if it has the correct length
            elif not len(rrq) in (3, 4):
                report_requests[i] = None
                if full_mode:
                    logger.error("Your on_register_report handler returned tuples of the wrong length. "
                                 f"It should be 3 or 4. It returned: '{rrq}'.")
                else:
                    logger.error("Your on_register_report handler returned a tuple of the wrong length. "
                                 f"It should be 2 or 3. It returned: '{rrq[1:]}'.")

            # Check if the first element is callable
            elif not callable(rrq[1]):
                report_requests[i] = None
                if full_mode:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a list of (r_id, callback, sampling_interval) or "
                                 "(r_id, callback, sampling_interval, reporting_interval) tuples, where "
                                 "the r_id is a string, callback is a callable function or coroutine, and "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq}'. The second element was not callable.")
                else:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a (callback, sampling_interval) or "
                                 "(callback, sampling_interval, reporting_interval) tuple, where "
                                 "the callback is a callable function or coroutine, and "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq[1:]}'. The first element was not callable.")

            # Check if the second element is a timedelta
            elif not isinstance(rrq[2], timedelta):
                report_requests[i] = None
                if full_mode:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a list of (r_id, callback, sampling_interval) or "
                                 "(r_id, callback, sampling_interval, reporting_interval) tuples, where "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq}'. The third element was not of type timedelta.")
                else:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a (callback, sampling_interval) or "
                                 "(callback, sampling_interval, reporting_interval) tuple, where "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq[1:]}'. The second element was not of type timedelta.")

            # Check if the third element is a timedelta (if it exists)
            elif len(rrq) == 4 and not isinstance(rrq[3], timedelta):
                report_requests[i] = None
                if full_mode:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a list of (r_id, callback, sampling_interval) or "
                                 "(r_id, callback, sampling_interval, reporting_interval) tuples, where "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq}'. The fourth element was not of type timedelta.")
                else:
                    logger.error(f"Your on_register_report handler did not return the correct tuple. "
                                 "It should return a (callback, sampling_interval) or "
                                 "(callback, sampling_interval, reporting_interval) tuple, where "
                                 "sampling_interval and reporting_interval are of type datetime.timedelta. "
                                 f"It returned: '{rrq[1:]}'. The third element was not of type timedelta.")


def order_events(events, limit=None, offset=None):
    """
    Order the events according to the OpenADR rules:
    - active events before inactive events
    - high priority before low priority
    - earlier before later
    """
    def event_priority(event):
        # The default and lowest priority is 0, which we should interpret as a high value.
        priority = getmember(getmember(event, 'event_descriptor'), 'priority', float('inf'))
        if priority == 0:
            priority = float('inf')
        return priority

    if events is None:
        return None
    if isinstance(events, objects.Event):
        events = [events]
    elif isinstance(events, dict):
        events = [events]

    # Update the event statuses
    for event in events:
        event_status = determine_event_status(getmember(event, 'active_period'))
        setmember(getmember(event, 'event_descriptor'), 'event_status', event_status)

    # Short circuit if we only have one event:
    if len(events) == 1:
        return events

    # Get all the active events first
    active_events = [event for event in events
                     if getmember(getmember(event, 'event_descriptor'), 'event_status') == 'active']
    other_events = [event for event in events
                    if getmember(getmember(event, 'event_descriptor'), 'event_status') != 'active']

    # Sort the active events by priority
    active_events.sort(key=lambda e: event_priority(e))

    # Sort the active events by start date
    active_events.sort(key=lambda e: getmember(getmember(e, 'active_period'), 'dtstart'))

    # Sort the non-active events by their start date
    other_events.sort(key=lambda e: getmember(getmember(e, 'active_period'), 'dtstart'))

    ordered_events = active_events + other_events
    if limit and offset:
        return ordered_events[offset:offset + limit]
    return ordered_events