From e6bf4215ddc496c050bb22759e71be1a623c69f6 Mon Sep 17 00:00:00 2001
From: System User <operateur@rcm1.rcm>
Date: Fri, 17 Sep 2021 11:36:38 +0200
Subject: [PATCH] [ArchiveExtractor] Compatibility with MODE_EVT
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* The API now returns MODE_EVT for attributes archived on event
* On this return value, we react by forcing an estimated sampling rate of 0.1 Hz for the chunk computation
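
For reference, a minimal sketch of the resulting chunk estimation for a
MODE_EVT attribute (samplingPeriod, est_N, dateStart, dateStop and Nmax are
the names used in ArchiveExtractor.py; the dates below are illustrative only):

    from datetime import datetime

    # MODE_EVT: no fixed sampling period, so assume a 0.1 Hz data rate
    samplingPeriod = 10  # seconds between points
    dateStart = datetime(2021, 9, 17, 8, 0, 0)   # example extraction window
    dateStop  = datetime(2021, 9, 17, 12, 0, 0)
    # Estimate how many points the requested window would contain
    est_N = (dateStop - dateStart).total_seconds() / samplingPeriod  # -> 1440.0
    # If est_N exceeds Nmax, the extraction is split into chunks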
---
 ArchiveExtractor.py | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/ArchiveExtractor.py b/ArchiveExtractor.py
index b703354..0299d2a 100755
--- a/ArchiveExtractor.py
+++ b/ArchiveExtractor.py
@@ -127,20 +127,24 @@ def query_ADB_BetweenDates(attr,
         raise ValueError("Attribute '%s' is not archived in DB %s"%(attr, extractor))
 
     # Get its sampling period in seconds
-    try:
-        samplingPeriod = int(ADB.GetArchivingMode(attr)[1])*10**-3
+    req = ADB.GetArchivingMode(attr)
+    logger.debug("GetArchivingMode: "+str(req))
+
+    if req[0] == "MODE_P":
+        samplingPeriod = int(req[1])*10**-3
         logger.debug("Attribute is sampled every %g seconds"%samplingPeriod)
 
-        # Evaluate the number of points
-        est_N = (dateStop-dateStart).total_seconds()/samplingPeriod
-        logger.debug("Which leads to %d points to extract."%est_N)
+    elif req[0] == "MODE_EVT":
+        logger.warning("Attribute is archived on event. Chunks of data are sized using an estimated data rate of 0.1 Hz")
+        samplingPeriod = 10
+
+    else:
+        raise NotImplementedError("Archive mode not implemented")
 
-    except ValueError:
-        logger.warning("Attribute has no sampling period. Maybe it's archived on event.")
-        logger.warning("Please note that this script does not cut acces into chunks for this type of attributes.")
 
-        # Temporary bypass estimation
-        est_N = 1
+    # Evaluate the number of points
+    est_N = (dateStop-dateStart).total_seconds()/samplingPeriod
+    logger.debug("Which leads to %d points to extract."%est_N)
 
     # If data chunk is too much, we need to cut it
     if est_N > Nmax:
-- 
GitLab