Commit 8cc72d90 authored by Operateur

feat: Add test and use it to debug

parent 44c13d1c
import logging
import datetime
import traceback
import pandas as pd
import numpy as np
import tango
import pandas as pd
import traceback
import ArchiveExtractor as ae
import ArchiveExtractor.Amenities as aea
import ArchiveExtractor.Core as aec
##########################################################################
### Install logger for the module ###
##########################################################################
logger = logging.getLogger(__name__)
#logger.setLevel(getattr(logging, logger.upper()))
logger = logging.getLogger("ArchiveExtractor")
if not logger.hasHandlers():
# No handlers, create one
sh = logging.StreamHandler()
sh.setLevel(logger.level)
sh.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
sh.setFormatter(logging.Formatter("{name}-{levelname:8}: {message}", style='{'))
logger.addHandler(sh)
##########################################################################
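For reference, an application embedding this module can adjust the same named logger through the standard logging API; the two lines below are illustrative and not part of the commit:

import logging
# Same logger name as configured above; raise verbosity for a debugging session
logging.getLogger("ArchiveExtractor").setLevel(logging.DEBUG)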
@@ -41,11 +42,9 @@ def init(
loglevel: string
loglevel to pass to logging.Logger
"""
global _extractors
global _AttrTables
_extractors = (None, None)
_AttrTables = (None, None)
ae._Extractors = (None, None)
ae._AttrTables = (None, None)
try:
logger.setLevel(getattr(logging, loglevel.upper()))
@@ -54,17 +53,17 @@
logger.debug("Instanciating extractors device proxy...")
_extractors = (tango.DeviceProxy(HdbExtractorPath), tango.DeviceProxy(TdbExtractorPath))
logger.debug("{} and {} instanciated.".format(*_extractors))
ae._Extractors = (tango.DeviceProxy(HdbExtractorPath), tango.DeviceProxy(TdbExtractorPath))
logger.debug("{} and {} instanciated.".format(*ae._Extractors))
logger.debug("Configuring extractors device proxy...")
for e in _extractors:
for e in ae._Extractors:
# set timeout to 3 sec
e.set_timeout_millis(3000)
logger.debug("Filling attributes lookup tables...")
_AttrTables = tuple(e.getattnameall() for e in _extractors)
logger.debug("HDB: {} TDB: {} attributes counted".format(len(_AttrTables[0]), len(_AttrTables[1])))
ae._AttrTables = tuple(e.getattnameall() for e in ae._Extractors)
logger.debug("HDB: {} TDB: {} attributes counted".format(len(ae._AttrTables[0]), len(ae._AttrTables[1])))
##########################################################################
### Module access functions ###
@@ -210,12 +209,11 @@ def findattr(pattern, db="H"):
if db not in ("H", "T"):
raise AttributeError("Attribute db should be 'H' or 'T'")
global _AttrTables
keywords=pattern.lower().split('*')
# Select DB
attr_table = _AttrTables[{'H':0, 'T':1}[db]]
attr_table = ae._AttrTables[{'H':0, 'T':1}[db]]
matches = [attr for attr in attr_table if all(k in attr.lower() for k in keywords)]
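To make the wildcard behaviour above concrete: the pattern is split on '*' and an attribute matches when every resulting keyword appears in its lowercased name. A self-contained illustration (the attribute name is hypothetical):

keywords = "*dg*dcct*current*".lower().split('*')     # ['', 'dg', 'dcct', 'current', '']
candidate = "ans/dg/dcct-ctrl/current"                # made-up archived attribute name
print(all(k in candidate.lower() for k in keywords))  # True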
@@ -248,7 +246,7 @@ def infoattr(attribute, db='H'):
info = dict()
for func in ("GetAttDefinitionData", "GetAttPropertiesData"):
R=getattr(_extractors[{'H':0, 'T':1}[db]], func)(attribute)
R=getattr(ae._Extractors[{'H':0, 'T':1}[db]], func)(attribute)
if R is not None:
for i in R:
_s=i.split("::")
@@ -257,8 +255,3 @@ def infoattr(attribute, db='H'):
logger.warning("Function %s on extractor returned None"%func)
return info
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## Initialize on import
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
init()
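The infoattr() hunks above build the returned dict by splitting each extractor reply item on "::"; Core.py later reads keys such as "max_dim_x" from that dict. A sketch of the parsing on one illustrative reply item (the item text is made up):

reply_item = "max_dim_x::128"        # hypothetical item from GetAttDefinitionData
key, value = reply_item.split("::")
info = {key: value}                  # -> {'max_dim_x': '128'}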
import logging
import datetime
import numpy as np
import ArchiveExtractor as ae
# Get the module logger
logger = logging.getLogger("ArchiveExtractor")
##########################################################################
### Commodity private variables ###
@@ -33,8 +39,7 @@ def _check_initialized():
-------
success : boolean
"""
global _extractors
if None in _extractors:
if None in ae._Extractors:
logger.error("Module {0} is not initialied. You should run {0}.init().".format(__name__))
return False
return True
@@ -120,12 +125,11 @@ def _check_attribute(attribute, db):
db: str
Which database to look in, 'H' or 'T'.
"""
global _extractors
logger.debug("Check that %s is archived."%attribute)
if not _extractors[{'H':0, 'T':1}[db]].IsArchived(attribute):
logger.error("Attribute '%s' is not archived in DB %s"%(attribute, _extractors[{'H':0, 'T':1}[db]]))
raise ValueError("Attribute '%s' is not archived in DB %s"%(attribute, _extractors[{'H':0, 'T':1}[db]]))
if not ae._Extractors[{'H':0, 'T':1}[db]].IsArchived(attribute):
logger.error("Attribute '%s' is not archived in DB %s"%(attribute, ae._Extractors[{'H':0, 'T':1}[db]]))
raise ValueError("Attribute '%s' is not archived in DB %s"%(attribute, ae._Extractors[{'H':0, 'T':1}[db]]))
##----------------------------------------------------------------------##
def _chunkerize(attribute, dateStart, dateStop, db, Nmax=100000):
@@ -154,11 +158,11 @@ def _chunkerize(attribute, dateStart, dateStop, db, Nmax=100000):
List of datetime giving the limit of each chunks.
For N chunks, there is N+1 elements in cdates, as the start and end boundaries are included.
"""
info=infoattr(attribute, db=db)
info=ae.infoattr(attribute, db=db)
logger.debug("Attribute information \n%s"%info)
# Get the number of points
N=_extractors[{'H':0, 'T':1}[db]].GetAttDataBetweenDatesCount([
N=ae._Extractors[{'H':0, 'T':1}[db]].GetAttDataBetweenDatesCount([
attribute,
dateStart.strftime(_DBDFMT2),
dateStop.strftime(_DBDFMT2)
......
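The _chunkerize docstring above promises N+1 boundary datetimes for N chunks, with the start and end dates included. A minimal, self-contained sketch of that contract with made-up numbers, independent of the collapsed function body:

import datetime
dateStart = datetime.datetime(2023, 12, 13, 0, 0, 0)
dateStop = datetime.datetime(2023, 12, 13, 3, 0, 0)
N, Nmax = 250000, 100000                    # hypothetical point count and chunk cap
n_chunks = -(-N // Nmax)                    # ceil(N / Nmax) -> 3 chunks
step = (dateStop - dateStart) / n_chunks
cdates = [dateStart + i * step for i in range(n_chunks)] + [dateStop]
print(len(cdates) == n_chunks + 1)          # True: N chunks -> N+1 boundaries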
import logging
import datetime
import numpy as np
import pandas as pd
import ArchiveExtractor as ae
import ArchiveExtractor.Amenities as aea
# Get the module logger
logger = logging.getLogger("ArchiveExtractor")
##########################################################################
### Module core functions ###
@@ -14,7 +23,7 @@ def _extract_attribute(attribute, method, date1, date2, db):
aea._check_attribute(attribute, db)
# Get info about the attribute
info=infoattr(attribute, db=db)
info=ae.infoattr(attribute, db=db)
logger.debug("Attribute information \n%s"%info)
# Detect spectrum
@@ -48,7 +57,7 @@ def _extract_scalar(attribute, method, date1, date2, db, dtype):
# =====================
if method == "nearest":
cmdreturn = aea._cmd_with_retry(_extractors[{'H':0, 'T':1}[db]], "GetNearestValue", [
cmdreturn = aea._cmd_with_retry(ae._Extractors[{'H':0, 'T':1}[db]], "GetNearestValue", [
attribute,
date1.strftime(aea._DBDFMT),
])
@@ -79,7 +88,7 @@ def _extract_scalar(attribute, method, date1, date2, db, dtype):
# For each date chunk
for i_d in range(len(cdates)-1):
cmdreturn = aea._cmd_with_retry(_extractors[{'H':0, 'T':1}[db]], "ExtractBetweenDates", [
cmdreturn = aea._cmd_with_retry(ae._Extractors[{'H':0, 'T':1}[db]], "ExtractBetweenDates", [
attribute,
cdates[i_d].strftime(aea._DBDFMT),
cdates[i_d+1].strftime(aea._DBDFMT)
@@ -120,7 +129,7 @@ def _extract_scalar(attribute, method, date1, date2, db, dtype):
def _extract_vector(attribute, method, date1, date2, db):
# Get info about the attribute
info=infoattr(attribute, db=db)
info=ae.infoattr(attribute, db=db)
# =====================
if method == "nearest":
@@ -132,7 +141,7 @@ def _extract_vector(attribute, method, date1, date2, db):
dt=datetime.timedelta(seconds=10)
while cnt<1:
logger.debug("Seeking points in {} to {}".format(date1-dt,date1+dt))
cnt=_extractors[{'H':0, 'T':1}[db]].GetAttDataBetweenDatesCount([
cnt=ae._Extractors[{'H':0, 'T':1}[db]].GetAttDataBetweenDatesCount([
attribute,
(date1-dt).strftime(aea._DBDFMT2),
(date1+dt).strftime(aea._DBDFMT2)
@@ -142,7 +151,7 @@ def _extract_vector(attribute, method, date1, date2, db):
# For vector, we have to use the GetAttxxx commands
cmdreturn = aea._cmd_with_retry(_extractors[{'H':0, 'T':1}[db]], "GetAttDataBetweenDates", [
cmdreturn = aea._cmd_with_retry(ae._Extractors[{'H':0, 'T':1}[db]], "GetAttDataBetweenDates", [
attribute,
(date1-dt).strftime(aea._DBDFMT),
(date1+dt).strftime(aea._DBDFMT),
@@ -158,7 +167,7 @@ def _extract_vector(attribute, method, date1, date2, db):
# Read the history
logger.debug("Retrieve history of %d values. Dynamic attribute named %s."%(N, name))
attrHist = _extractors[{'H':0, 'T':1}[db]].attribute_history(name, N)
attrHist = ae._Extractors[{'H':0, 'T':1}[db]].attribute_history(name, N)
# Transform to datetime - value arrays
_value = np.empty((N, int(info["max_dim_x"])), dtype=float)
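A note on the 'nearest' branch of _extract_vector shown a few hunks above: the ±10 s window around date1 is widened until GetAttDataBetweenDatesCount reports at least one point. The widening step itself sits in a collapsed region, so the doubling below is an assumption, and the counter is a stand-in:

import datetime

def count_points(t0, t1):
    # Stand-in for the extractor's GetAttDataBetweenDatesCount: pretend the
    # nearest archived point sits 35 s after the requested date.
    archived = datetime.datetime(2023, 12, 13, 0, 30, 35)
    return 1 if t0 <= archived <= t1 else 0

date1 = datetime.datetime(2023, 12, 13, 0, 30, 0)
dt = datetime.timedelta(seconds=10)
cnt = 0
while cnt < 1:
    cnt = count_points(date1 - dt, date1 + dt)
    if cnt < 1:
        dt *= 2     # assumed growth strategy; the real increment is not shown here
print(dt)           # 0:00:40 once the window finally contains a point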
@@ -295,7 +304,7 @@ def ExtrBetweenDates_MinMaxMean(
cdates[i_d+1].strftime(_DBDFMT2))
)
_val =getattr(_extractors[{'H':0, 'T':1}[db]], "GetAttData%sBetweenDates"%func)([
_val =getattr(ae._Extractors[{'H':0, 'T':1}[db]], "GetAttData%sBetweenDates"%func)([
attribute,
cdates[i_d].strftime(_DBDFMT2),
cdates[i_d+1].strftime(_DBDFMT2)
......
@@ -10,11 +10,18 @@ __all__ = ["Access", ]
### Module private variables ###
##########################################################################
# Tuple of extractor for HDB and TDB
global _extractors
_extractors = (None, None)
_Extractors = (None, None)
# Tuple for attribute tables
global _AttrTables
_AttrTables = (None, None)
##########################################################################
### Functions in Access are entry points ###
##########################################################################
from .Access import *
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## Initialize on import
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
init()
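Since __init__.py now runs init() at import time and re-exports the Access entry points, the minimal consumer code, essentially what the new test below condenses to, is:

import ArchiveExtractor as ae          # init() has already run on import
attrs = ae.findattr("*dcct*current*")  # search the HDB attribute table
# value = ae.extract(attrs[0], "1h")   # commented out: needs a reachable extractor device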
import logging
logger = logging.getLogger("tester")
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter("{name}-{levelname:8}: {message}", style='{'))
logger.addHandler(sh)
logger.setLevel(logging.DEBUG)
# Put root folder in path
import sys, os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
# test import
logger.info("Testing import")
import ArchiveExtractor
# it should have been automatically initialized, check that extractors are set
logger.info("Testing auto init")
logger.debug(ArchiveExtractor._Extractors)
if None in ArchiveExtractor._Extractors:
raise RuntimeError("ArchiveExtractor does not seems properly initialized")
# Test init()
logger.info("Testing init")
ArchiveExtractor.init(loglevel='debug')
###############################################################################
# Test findattr
logger.info("Testing findattr()")
attrs = ArchiveExtractor.findattr("*dg*dcct*current*")
logger.debug(attrs)
if len(attrs) < 1:
raise RuntimeError("Failed to get attributes with findattr")
attrs = ArchiveExtractor.findattr("*dg*dcct*current*", db='T')
logger.debug(attrs)
if len(attrs) < 1:
raise RuntimeError("Failed to get attributes with findattr")
###############################################################################
# Test infoattr
logger.info("Testing infoattr()")
info = ArchiveExtractor.infoattr(attrs[0], db='T')
logger.debug(info)
###############################################################################
# Test extractions
attr = ArchiveExtractor.findattr("ans/dg*dcct*current*")[0]
logger.info("Testing extract() ; scalar, nearest, timedelta")
val = ArchiveExtractor.extract(attr, "0h")
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Testing extract() ; scalar, between, precise date and timedelta")
val = ArchiveExtractor.extract(attr, "1h", "2023-12-13-00:30", method='between')
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Testing extract() ; scalar, nearest, specific date")
# Test several formats
for fmt in [
"2023-08",
"2024-01-10",
"2024-01-10-12:00",
]:
logger.debug(fmt)
val = ArchiveExtractor.extract(attr, fmt)
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Testing extract() ; dict, nearest, specific date")
val = ArchiveExtractor.extract({"attr":attr, "attr2":attr}, "2023-06")
logger.debug(val)
logger.info("Testing extract() ; list, nearest, specific date")
val = ArchiveExtractor.extract(ArchiveExtractor.findattr("dg*dcct*current"), "2023-06")
logger.debug(val)
logger.info("Testing extract() ; scalar, between, timedelta")
val = ArchiveExtractor.extract(attr, "3h", method='between')
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Testing extract() ; scalar, between, precise date")
val = ArchiveExtractor.extract(attr, "2023-12-13-00:30", "2023-12-13-01:30", method='between')
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Testing extract() ; spectrum, nearest, precise date")
val = ArchiveExtractor.extract('ANS/DG/BPM-MANAGER/zRefOrbit', "2023-12-13-00:30")
logger.debug(val)
if val is None:
raise RuntimeError("Could not perform extraction")
logger.info("Test success !")