diff --git a/old/AppChecker.py b/old/AppChecker.py
deleted file mode 100644
index e393b513d7a5888eba41309409cc2dcac2c05277..0000000000000000000000000000000000000000
--- a/old/AppChecker.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-Generic Application checker class
-
-Describes a set of functions used to check an application
-"""
-
-import PyTango as tango
-import logging
-
-class AppChecker:
-
-    def __init__(self, appname):
-        """
-        Constructor.
-
-        Parameters
-        ==========
-        appname: string
-            Name of the application. Useful for logs.
-
-        Returns
-        =======
-        AppChecker object
-        """
-
-        # Memorize name
-        self.appname = appname
-
-        # Get a logger with the app name
-        self.logger = logging.getLogger(self.appname)
-
-        # We will use this level
-        log_level = logging.DEBUG
-
-        # Create a stream handler
-        s_handler = logging.StreamHandler()
-        s_handler.setFormatter(logging.Formatter("%(levelname)s\t[%(funcName)s]\t%(message)s"))
-        s_handler.setLevel(level=log_level)
-
-        # Attach the stream handler
-        self.logger.addHandler(s_handler)
-
-        # Set level of logger
-        self.logger.setLevel(level=log_level)
-
-
-    ##################################
-    ## CHECK DEVICES STATUS
-    ##################################
-    def check_status(self, device_list):
-        """
-        Check the DS state of all devices used by this equipment.
-
-        Handles ConnectionFailed exceptions, i.e. when the device is not started.
-
-        Parameters
-        ==========
-        device_list: array of string
-            List of Tango paths to devices. The status of all these devices will be checked.
-
-        Returns
-        =======
-        tuple (status, devlist)
-            status: boolean
-                False if a problem was encountered
-            devlist: array of string
-                List of devices with problem
-        """
-        self.logger.info("Checking state of tango devices...")
-
-        # Function returns
-        status=True
-        devlist=[]
-
-        # ----------------------------------------------------------------------------------------
-        # For all devices in the list, try to connect and check that the state is RUNNING, ON or STANDBY.
-        for device in device_list:
-            prox = tango.DeviceProxy(device)
-
-            try:
-                state = prox.state()
-            except tango.ConnectionFailed:
-                self.logger.warning("Device %s is probably not started, connection failed."%device)
-                status=False
-                devlist.append(device)
-                continue
-
-            if state not in [tango.DevState.RUNNING,
-                             tango.DevState.ON,
-                             tango.DevState.STANDBY,
-                            ]:
-                self.logger.warning("Device %s is in state %s"%(device, state))
-                status=False
-                devlist.append(device)
-            else:
-                self.logger.info("Device %s is in state %s"%(device, state))
-
-        # ----------------------------------------------------------------------------------------
-
-        return (status, devlist)
-
-
-    ##################################
-    ## CHECK CONFIGURATION
-    ##################################
-    def check_configuration(self, context):
-        """
-        Check the context (dict of attributes and properties on devices)
-
-        Parameters
-        ==========
-        context: dict
-            Dict of devices with their attributes, properties and expected values
-
-        Returns
-        =======
-        tuple (status, failcontext)
-            status: boolean
-                False if a problem was encountered
-            failcontext: dict
-                Part of the context that mismatches, with the actual read values
-
-        """
-
-        # Function returns
-        status=True
-        failcontext=dict()
-
-
-        # ----------------------------------------------------------------------------------------
-        # Loop on every device of the context
-        for devicepath in context.keys():
-
-            # Get a proxy to device
-            prox = tango.DeviceProxy(devicepath)
-
-            # -----------------------------------------------------
-            # Check write attributes
-            wattr = context[devicepath]["wattributes"]
-            failattr = dict()
-            rattr = prox.read_attributes(list(wattr.keys()))
-            for attr in rattr:
-                value = attr.value
-                if value != wattr[attr.name]:
-                    self.logger.warning("Attribute %s/%s value mismatch. Read %s, expect %s."%(
-                        devicepath,
-                        attr.name,
-                        value,
-                        wattr[attr.name]))
-                    status=False
-                    failattr[attr.name]=value
-                else:
-                    self.logger.info("Attribute %s/%s is correctly set to value %s."%(
-                        devicepath,
-                        attr.name,
-                        value))
-
-            # -----------------------------------------------------
-            # Check properties
-            props = context[devicepath]["properties"]
-            failprop = dict()
-
-            rprops = prox.get_property(list(props.keys()))
-            for k in props.keys():
-                value = list(rprops[k])
-                if props[k] != value:
-                    self.logger.warning("Property %s of device %s value mismatch.\nRead %s\nExpect %s"%(
-                        k,
-                        devicepath,
-                        value,
-                        props[k]))
-                    status=False
-                    failprop[k]=value
-                else:
-                    self.logger.info("Property %s of device %s is correctly set to %s"%(
-                        k,
-                        devicepath,
-                        value))
-
-            # -----------------------------------------------------
-            # Append to the failed context if there is any mismatch
-            if len(failattr.keys()) + len(failprop.keys())>0:
-                failcontext[devicepath]= {
-                    "properties":failprop,
-                    "wattributes":failattr,
-                    }
-
-        # ----------------------------------------------------------------------------------------
-        return (status, failcontext)
-
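-# Hedged usage sketch (illustration only; the device paths, attribute and
-# property names below are hypothetical placeholders):
-#
-#   checker = AppChecker("MyApp")
-#
-#   # Check that the devices are started and in a healthy state
-#   ok, bad_devices = checker.check_status(["dom/fam/member-1", "dom/fam/member-2"])
-#
-#   # Check a saved context (same structure as produced by ContextSaver)
-#   context = {"dom/fam/member-1": {"wattributes": {"gain": 1.0},
-#                                   "properties": {"Mode": ["auto"]}}}
-#   ok, mismatched = checker.check_configuration(context)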
diff --git a/old/ContextSaver.py b/old/ContextSaver.py
deleted file mode 100644
index ca245848bb42ddcf5e3ff08aece9fc1617fe1afa..0000000000000000000000000000000000000000
--- a/old/ContextSaver.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/Local/pyroot/PyTangoRoot/bin/python
-"""
-Python module for scanning and saving configuration of devices.
-
-Includes a Command Line Interface.
-Can be imported as is to use its functions in a user script.
-"""
-import PyTango as tango
-import logging
-import argparse
-import json
-import numpy
-
-__version__ = "1.0.0"
-
-##---------------------------------------------------------------------------##
-def get_wattr(proxy):
-    """
-    Get all writable attributes from a device and save their values in a python dict.
-
-    Parameters
-    ----------
-    proxy : PyTango.DeviceProxy
-    Proxy to the device.
-
-    Returns
-    -------
-    config : dict
-    Dictionary; keys are attribute names, values their present values.
-    """
-    logger.debug("Scanning write attribute of device %s"%proxy.name())
-    config = dict()
-
-    # Get all attributes configuration
-    cattr = proxy.get_attribute_config(proxy.get_attribute_list())
-    wattr=[]
-    for attr in cattr:
-        logger.debug("Analyse attribute '%s' "%(
-                attr.name))
-
-        if attr.writable in [
-                    tango.AttrWriteType.WRITE,
-                    tango.AttrWriteType.READ_WRITE,
-                    tango.AttrWriteType.READ_WITH_WRITE]:
-
-            # attr is writable, saving it into the list
-            wattr.append(attr.name)
-
-            logger.debug("Detect writtable attribute '%s'"%(
-                attr.name))
-
-    # Read all writable attributes
-    rattr = proxy.read_attributes(wattr)
-    for attr in rattr:
-        v= attr.value
-        if type(v) is numpy.ndarray:
-            v=v.tolist()
-
-        logger.debug("Read writtable attribute '%s' = %s"%(
-            attr.name,
-            v))
-
-        config[attr.name]=v
-
-    return config
-
-
-##---------------------------------------------------------------------------##
-def get_properties(proxy):
-    """
-    Get all properties from a device and save them in a python dict.
-
-    Parameters
-    ----------
-    proxy : PyTango.DeviceProxy
-    Proxy to the device.
-
-    Returns
-    -------
-    config : dict
-    Dictionary; keys are property names, values their present values.
-    """
-    logger.debug("Scanning properties of device %s"%proxy.name())
-
-    pl = proxy.get_property_list('*')
-
-    _props = proxy.get_property(pl)
-
-    props=dict()
-
-    # Here we simply convert tango arrays to python lists.
-    # For properties, each prop is an array; one element of
-    # the array is one line of the property.
-    # Maybe we should join them into one single string.
-    for k in _props.keys():
-        if k[:2] !="__":
-            # Change type
-            props[k] = list(_props[k])
-            logger.debug("Detect property %s = %s"%(
-                k, props[k]))
-
-
-    return props
-
-
-##########################################################################
-""" Command Line Interface """
-if __name__ == "__main__":
-
-    # Name the logger after the filename
-    logger = logging.getLogger("ContextSaver")
-
-    #######################################################
-    # Install argument parser
-
-    parser = argparse.ArgumentParser(description="Copy attributes and properties to a JSON structure.\n"+
-            "Version %s"%__version__)
-
-    parser.add_argument("--fileout", type=str,
-            help="Save the JSON structure to the specified file. Implicit set no output to stdout."),
-
-    parser.add_argument('--log', type=str, default="INFO",
-            help="Log level. Default: %(default)s.")
-
-    parser.add_argument('-v', action="store_true",
-            help="Print in stdout the context. Default is on if no fileout option specified.")
-
-    parser.add_argument('--filemode', action="store_true",
-            help="Set devices to filemode."+
-            " Instead of specifying devices, put a path to a file containing a list of devices."+
-            " The file contains one device path per line.")
-
-    parser.add_argument('devices', type=str, nargs='+',
-                        help="List of devices to inspect. Full tango path.")
-
-    args = parser.parse_args()
-
-
-    #######################################################
-    # Configure logger
-
-    # Add a stream handler
-    s_handler = logging.StreamHandler()
-    s_handler.setFormatter(logging.Formatter("%(levelname)s\t[%(funcName)s] \t%(message)s"))
-
-    # Set level according to command line attribute
-    s_handler.setLevel(level=getattr(logging, args.log.upper()))
-    logger.setLevel(level=getattr(logging, args.log.upper()))
-    logger.addHandler(s_handler)
-
-    logger.debug("Parsed arguments: %s"%args)
-
-    logger.info("Context Saver %s"%__version__)
-
-    #######################################################
-    # Filemode or not
-    if args.filemode:
-        logger.info("Filemode, openning file %s"%args.devices[0])
-        # Read the file. Each line is a device
-        with open(args.devices[0], "r") as fp:
-            devices = fp.readlines()
-
-        logger.debug("Read lines : %s"%devices)
-
-        # Strip end-of-line characters
-        devices = [d.rstrip() for d in devices]
-
-    else:
-        devices = args.devices
-
-    #######################################################
-    # Prepare array for result
-    results = dict()
-
-    #######################################################
-    # Scan all devices
-
-    for dev in devices:
-        logger.info("Scanning device %s..."%dev)
-
-        # Declare proxy
-        prx = tango.DeviceProxy(dev)
-
-        # Retrieve write attributes
-        wattr = get_wattr(prx)
-
-        # Retrieve properties
-        props = get_properties(prx)
-
-        # Build json dict
-        jdict = {
-                "wattributes":wattr,
-                "properties":props,
-                }
-
-        # Add to results
-        results[dev] = jdict
-
-    logger.info("Done")
-
-    if args.fileout is None:
-        print(json.dumps(results, indent=4))
-    else:
-        with open(args.fileout, "w") as fp:
-            json.dump(results, fp, indent=4)
-
-        # Additional dump to stdout
-        if args.v:
-            print(json.dumps(results, indent=4))
-
-
-else:
-    # Name the logger after the module name
-    logger = logging.getLogger(__name__)
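-
-# Hedged usage sketch (device paths and file names below are placeholders):
-#
-# As a library:
-#   prx = tango.DeviceProxy("dom/fam/member-1")
-#   context = {"wattributes": get_wattr(prx), "properties": get_properties(prx)}
-#
-# As a command line tool, saving two devices to a JSON file:
-#   python ContextSaver.py --fileout context.json dom/fam/member-1 dom/fam/member-2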
diff --git a/old/OpUtils.py b/old/OpUtils.py
deleted file mode 100644
index 571e948983de770590b296baece18bafabb28885..0000000000000000000000000000000000000000
--- a/old/OpUtils.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Module OpUtils
-# Useful functions for operation
-
-
-
-###############################################################################
-# FUNCTIONS ON DEVICE PROXY
-###############################################################################
-
-def searchattr(prx, attr, ignorecase=True):
-    """
-    Search in a tango device proxy for an attribute name.
-
-    PARAMETERS
-    ----------
-    prx: tango.DeviceProxy
-        Proxy to the device.
-
-    attr: string
-        String to search for.
-
-    ignorecase: bool
-        Case insensitive.
-    """
-    if ignorecase:
-        return [a for a in prx.get_attribute_list() if attr.lower() in a.lower()]
-    else:
-        return [a for a in prx.get_attribute_list() if attr in a]
-
-def captureattr(prx):
-    """
-    Capture all attributes in the device, return a python dict.
-
-    PARAMETERS
-    ----------
-    prx: tango.DeviceProxy
-        Proxy to the device.
-
-    RETURN
-    ------
-    attrs: dict
-        Keys are attribute names (lowercase), values are the read values.
-    """
-
-    attrs=dict()
-    attrlist=prx.get_attribute_list()
-
-    R= prx.read_attributes(attrlist, wait=True)
-    for r in R:
-        attrs[r.name.lower()]=r.value
-
-    return attrs
-
-
-
-###############################################################################
-# FUNCTIONS ON TANGO DATABASE
-###############################################################################
-
-###############################################################################
-# FUNCTIONS ON ARCHIVER
-###############################################################################
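-
-# Hedged usage sketch (the device path and attribute name are placeholders):
-#   import tango
-#   prx = tango.DeviceProxy("dom/fam/member-1")
-#   matches = searchattr(prx, "current")   # attribute names containing "current"
-#   snapshot = captureattr(prx)            # {"state": ..., "current": ..., ...}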
diff --git a/old/SoleilTools.py b/old/SoleilTools.py
deleted file mode 100644
index bd1f7e18eb3ecc8866fb41bca2f7a857eff0d555..0000000000000000000000000000000000000000
--- a/old/SoleilTools.py
+++ /dev/null
@@ -1,405 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Tools for Soleil Synchrotron
-
-@author: broucquart
-"""
-
-import numpy as np
-import logging
-import datetime
-import matplotlib.colors as mcol
-import pickle
-import matplotlib
-
-logger=logging.getLogger(__name__)
-
-###############################################################################
-# VECTORIZED DATE FUNCTIONS
-###############################################################################
-ArrayTimeStampToDatetime = np.vectorize(datetime.datetime.fromtimestamp)
-ArrayDatetimeToTimeStamp = np.vectorize(datetime.datetime.timestamp)
-ArrayStrpToDateTime = np.vectorize(lambda x : datetime.datetime.strptime(x, "%Y/%m/%d %H:%M:%S.%f"))
-
-
-###############################################################################
-# DATA IMPORTATION
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def load_filer_trend(filename, delimiter='\t'):
-    """
-    Load data from a file generated by atkfilertrend.
-
-    Parameters
-    ----------
-    filename : string
-        Path to the file to load.
-    delimiter : string, optional
-        Column delimiter used in the file. The default is a tab character.
-
-    Returns
-    -------
-    ddata : dict
-        Dictionary of data. Key is the attribute tango path, data is the numpy 
-        array of data.
-        The special key "Time" hold the timestamps.
-
-    """
-    
-    # Load the file data
-    logger.info("Load file %s"%filename)
-    data = np.genfromtxt(filename, skip_header=1, skip_footer=1, delimiter=delimiter).transpose()
-    logger.debug("data shape : %s"%str(data.shape))
-    
-    # Read the first line and parse attribute names
-    with open(filename, 'r') as fp:
-        head = fp.readline()
-    
-    # Split head
-    logger.debug("read head : %s"%head)    
-    head = head.split(delimiter)
-    logger.debug("parsed head : %s"%str(head))
-    
-    # Create the dictionary
-    # Convert milliseconds to seconds
-    # Convert timestamps to datetime
-    ddata = {"Time":ArrayTimeStampToDatetime(data[0]/1000)}
-    
-    # Attach data to key in dict.
-    for i in range(1, len(head)-1):
-        ddata[head[i]] = data[i]
-    
-    return ddata
-
-##---------------------------------------------------------------------------##
-def load_mambo_file(filename):
-    """
-    Load data from a file extracted from Mambo.
-
-    Parameters
-    ----------
-    filename : string
-        Filepath.
-
-    Returns
-    -------
-    ddata : dict
-        Dictionary of data. Key is the attribute tango path, data is a tuple of
-        two numpy arrays. First array is datetime values, second is attribute
-        value.
-
-    """
-    # Load the file data as string
-    logger.info("Load file %s"%filename)
-    data = np.genfromtxt(filename, delimiter='\t', skip_header=1, dtype=str).transpose()
-    logger.debug("data shape : %s"%str(data.shape))
-
-    # Read the first line and parse attribute names
-    with open(filename, 'r') as fp:
-        head = fp.readline()
-    
-    # Split head, remove last char (newline)
-    logger.debug("read head : %s"%head)    
-    head = head[:-1].split('\t')
-    logger.debug("parsed head : %s"%str(head))
-
-    # Convert string to datetime
-    tdata = ArrayStrpToDateTime(data[0])
-    
-    ddata = dict()
-    # Find correct values for each dataset (ignore "*")
-    # Add to dictionary, key is the attribute tango path, value is a tuple of
-    # time array and value array
-    for n in range(1, len(data)):
-        good=np.where(data[n]!="*")[0]
-        ddata[head[n]] = (tdata[good], data[n][good].astype(float))
-
-    return ddata
-
-###############################################################################
-# SIGNAL PROCESSING
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def MM(datax, datay, N, DEC=1):
-    """
-    Moving mean along x, with an averaging window of N points.
-
-    Parameters
-    ----------
-    datax : numpy.ndarray
-        X axis, will only be cut at edge to match the length of mean Y.
-        Set to "None" if no X-axis
-    datay : numpy.ndarray
-        Y axis, will be averaged.
-    N : int
-        Averaging window length in points.
-    DEC : int, optional
-        Decimation factor applied to the output. The default is 1.
-
-    Returns
-    -------
-    Tuple of numpy.ndarray
-        (X axis, Y axis) averaged data.
-
-    """
-    if datax is None:
-        return (np.arange(N//2, len(datay)-N//2+1)[::DEC],
-            np.convolve(datay, np.ones(N)/N, mode='valid')[::DEC])
-    
-    return (np.asarray(datax[N//2:-N//2+1])[::DEC],
-            np.convolve(datay, np.ones(N)/N, mode='valid')[::DEC])
-
-
-##---------------------------------------------------------------------------##
-def meanstdmaxmin(x, y, N):
-    """
-    Compute mean, max, min and +/- std over blocks of N points on the Y axis.
-    Return arrays of length len(x)//N points.
-
-    Parameters
-    ----------
-    x : numpy.ndarray
-        X vector, i.e. sampling times.
-    y : numpy.ndarray
-        Y vector, i.e. values.
-    N : int
-        Number of points to average.
-
-    Returns
-    -------
-    xmean : numpy.ndarray
-        New x vector.
-    ymean : numpy.ndarray
-        Means of Y.
-    ystd : numpy.ndarray
-        Std of Y.
-    ymax : numpy.ndarray
-        Maxes of Y.
-    ymin : numpy.ndarray
-        Mins of Y.
-
-    """
-    # If x vector is datetime, convert to timestamps
-    if type(x[0]) is datetime.datetime:
-        xIsDatetime=True
-        x = ArrayDatetimeToTimeStamp(x)
-    else:
-        xIsDatetime=False
-
-    # Quick verification on the X data vector jitter.
-    period = np.mean(x[1:]-x[:-1])
-    jitter = np.std(x[1:]-x[:-1])
-    if jitter > 0.01*period:
-        logger.warning("On X data vector : sampling jitter is over 1%% of the period. (j=%.3g, p=%.3g)"%(jitter, period))
-    
-    # Get number of block of N points
-    _L=len(y)//N
-        
-    
-    # Reshape the arrays.
-    # Drop last points that do not fill a block of N points.
-    _x=np.reshape(x[:_L*N], (_L, N))
-    _y=np.reshape(y[:_L*N], (_L, N))
-
-    # Compute the new x vector.
-    # Use the mean to compute the new abscissa positions
-    xmean = np.mean(_x, axis=1)
-    
-    if xIsDatetime:
-        xmean = ArrayTimeStampToDatetime(xmean)
-    
-    # Compute parameters
-    ymean = np.mean(_y, axis=1)
-    ystd = np.std(_y, axis=1)
-    ymin = np.min(_y, axis=1)
-    ymax = np.max(_y, axis=1)
-   
-    return (xmean, ymean, ystd, ymax, ymin)
-    
-###############################################################################
-## PLOTTING
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def plot_meanstdmaxmin(ax, datax, datay, N,
-                       c=None, label=None):
-    """
-    Plot on an axis the representation as mean, +/- std and min/max.
-
-    Parameters
-    ----------
-    ax : matplotlib.axes.Axes
-        Axis on which to plot.
-    datax : numpy.ndarray
-        X axis.
-    datay : numpy.ndarray
-        Y axis.
-    N : int
-        Approximate number of points to keep after averaging.
-    c : color, optional
-        Color. The default is None.
-    label : str, optional
-        Label. The default is None.
-
-    Returns
-    -------
-    lines : list
-        List of the matplotlib artists created (mean line, max/min lines, std area).
-
-    """
-    
-    # For the first plot, consider the whole data range.
-    # Compute the averaging ratio; minimum ratio is 1.
-    ratio = max(len(datax)//N, 1)
-
-    # Compute new data
-    xmean, ymean, ystd, ymax, ymin = meanstdmaxmin(datax, datay, ratio)
-    
-    lines=[]
-    # First, plot the mean with the given attributes
-    lines.append(ax.plot(xmean, ymean, color=c, label=label)[0])
-    
-    # Retrieve the color, useful if c was None
-    c=lines[0].get_color()
-    
-    # Add max, min and std area
-    lines.append(ax.plot(xmean, ymax, linestyle='-', color=mcol.to_rgba(c, 0.5))[0])
-    lines.append(ax.plot(xmean, ymin, linestyle='-', color=mcol.to_rgba(c, 0.5))[0])
-    lines.append(ax.fill_between(xmean, ymean-ystd, ymean+ystd, color=mcol.to_rgba(c, 0.1)))
-    
-    return lines
-
-##---------------------------------------------------------------------------##
-def plot_MM(ax, datax, datay, N, DEC=1,
-            c=None, label=None):
-    """
-    Plot a signal with its moving mean. The raw signal is plotted with transparency.
-
-    Parameters
-    ----------
-    ax : matplotlib.axes.Axes
-        Axis on which to plot.
-    datax : numpy.ndarray, None
-        X axis data.
-    datay : numpy.ndarray
-        Y axis data.
-    N : int
-        Averaging window length in points.
-    c : color, optional
-        Line color. The default is None.
-    label : str, optional
-        Line label. The default is None.
-
-    Returns
-    -------
-    lines : list
-        List of the matplotlib lines created (moving mean, raw signal).
-
-    """
-    # To collect lines
-    lines=[]
-    
-    # Plot mobile mean
-    _l=ax.plot(*MM(datax, datay, N, DEC), c=c, label=label)[0]
-    lines.append(_l)
-    
-    # Retrieve the color, useful if c was None
-    c=lines[0].get_color()
-    
-    # Plot entire signal    
-    if datax is None:
-        # Case no xaxis data
-        _l=ax.plot(datay, c=mcol.to_rgba(c, 0.4))[0]
-    else:
-        _l=ax.plot(datax, datay, c=mcol.to_rgba(c, 0.4))[0]
-        
-    return lines
-
-###############################################################################
-## PLOT MANIPULATION
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def get_current_ax_zoom(ax):
-    """
-    Get the current ax zoom setup and print the python command to set it exactly.
-
-    Parameters
-    ----------
-    ax : numpy.ndarray
-        Array of axes.
-
-    Raises
-    ------
-    NotImplementedError
-        When the type is not implemented. It is time to implement it!
-
-    Returns
-    -------
-    None.
-
-    """
-    if type(ax) is np.ndarray:
-        for i in range(len(ax)):
-            print("ax[%d].set_xlim"%i+str(ax[i].get_xlim()))
-            print("ax[%d].set_ylim"%i+str(ax[i].get_ylim()))
-        return
-    
-    raise NotImplementedError("Type is %s"%type(ax))
-
-###############################################################################
-## DATE PROCESSING
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def get_time_region(t, startDate, endDate):
-    """
-    Return a range of indices selecting the samples between the start and stop dates.
-
-    Parameters
-    ----------
-    t : numpy.ndarray
-        An array of datetime objects.
-    startDate : datetime.datetime
-        Start date.
-    endDate : datetime.datetime
-        Stop date.
-
-    Returns
-    -------
-    zone : numpy.ndarray
-        A numpy arange between both indices.
-
-    """
-    iT1 = np.searchsorted(t, startDate)
-    iT2 = np.searchsorted(t, endDate)
-    zone = np.arange(iT1, iT2)
-    if len(zone)==0:
-        logging.warning("Time zone is empty.")
-    return zone
-
-###############################################################################
-# DATA EXPORTATION
-###############################################################################
-
-##---------------------------------------------------------------------------##
-def export_mpl(fig, filename):
-    """
-    Export figure to .mpl file.
-
-    Parameters
-    ----------
-    fig : matplotlib.figure.Figure
-        Figure to export.
-    filename : str
-        Filename, without extension.
-
-    Returns
-    -------
-    None.
-
-    """
-    if not isinstance(fig, matplotlib.figure.Figure):
-        raise TypeError("Parameter fig should be a matplotlib figure (type matplotlib.figure.Figure).")
-    with open(filename+".mpl", 'wb') as fp:
-        pickle.dump(fig, fp)
\ No newline at end of file
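-
-# Hedged usage sketch (the file name is a placeholder):
-#   import matplotlib.pyplot as plt
-#   ddata = load_mambo_file("extraction.txt")
-#   fig, ax = plt.subplots()
-#   for attr, (t, v) in ddata.items():
-#       plot_MM(ax, t, v, N=100, label=attr)
-#   export_mpl(fig, "myfigure")   # writes myfigure.mpl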
diff --git a/old/cli_archiveextractor.py b/old/cli_archiveextractor.py
deleted file mode 100755
index 9a5e7f0101680ee217cdbafd84b61aac1aaba7c8..0000000000000000000000000000000000000000
--- a/old/cli_archiveextractor.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/Local/pyroot/PyTangoRoot/bin/python
-"""
-Command Line Interface to use the ArchiveExtractor module
-"""
-import argparse
-import core.ArchiveExtractor as AE
-import logging
-import datetime
-import numpy as np
-import PyTango as tango
-
-# Name the logger after the filename
-logger = logging.getLogger("ArchiveExtractor")
-
-# Default stop date
-dateStop = datetime.datetime.now()
-
-# Default start date
-dateStart = datetime.datetime.now()-datetime.timedelta(days=1)
-
-#######################################################
-# Install argument parser
-
-parser = argparse.ArgumentParser(description="Extract attributes from the extractor devices.\nVersion %s"%AE.__version__)
-
-parser.add_argument("--from", type=AE.ArchiveExtractor.dateparse, dest="dateStart",
-    help="Start date for extraction, format '1990-12-13-22:33:45'. "+
-    "It is possible to be less precise and drop, seconds, minutes, hours or even day."+
-    " Default is one day ago",
-    default=dateStart)
-
-parser.add_argument("--to", type=AE.ArchiveExtractor.dateparse, dest="dateStop",
-    help="Stop date for extraction, format '1990-12-13-22:33:45'. It is possible to be less precise and drop, seconds, minutes, hours or even day."+
-    " Default is now.",
-    default=dateStop)
-
-parser.add_argument("--DB", choices=["H", "T", "L"],
-    default="T", help="Database to extract from. HDB (H) or TDB (T), default: %(default)s")
-
-parser.add_argument("--DBN", type=int, default=2,
-        help="Extractor device number, default: %(default)s")
-
-parser.add_argument("--fileout", type=str, default="extracted_%s.npy"%datetime.datetime.now().strftime("%Y%m%d_%H%M%S"),
-        help="filename of the extraction destination. Default: %(default)s"),
-
-parser.add_argument('--log', type=str, default="INFO",
-        help="Log level. Default: %(default)s.")
-
-
-parser.add_argument('--filemode', action="store_true",
-        help="Set attribute to filemode."+
-        " Instead of specifying attributes, put a path to a file containing a list of attributes."+
-        " The file contains one attribute per line.")
-
-parser.add_argument('attributes', type=str, nargs='+',
-                    help="List of attributes to extract. Full tango path.")
-
-args = parser.parse_args()
-
-
-#######################################################
-# Configure logger
-
-# Add a stream handler
-s_handler = logging.StreamHandler()
-s_handler.setFormatter(logging.Formatter("%(levelname)s\t[%(funcName)s] \t%(message)s"))
-
-# Set level according to command line attribute
-s_handler.setLevel(level=getattr(logging, args.log.upper()))
-logger.setLevel(level=getattr(logging, args.log.upper()))
-logger.addHandler(s_handler)
-
-logger.debug("Parsed arguments: %s"%args)
-
-logger.info("Archive Extractor %s"%AE.__version__)
-
-#######################################################
-# Filemode or not
-if args.filemode:
-    logger.info("Filemode, openning file %s"%args.attributes[0])
-    # Read the file. Each line is an attribute
-    with open(args.attributes[0], "r") as fp:
-        attributes = fp.readlines()
-
-    logger.debug("Read lines : %s"%attributes)
-
-    # Strip end-of-line characters
-    attributes = [a.rstrip() for a in attributes]
-
-else:
-    attributes = args.attributes
-
-#######################################################
-# Instantiate extractor
-if args.DB == "L":
-    AE  =  AE.ArchiveExtractor(extractorPath="archiving/extractor/%d"%(args.DBN), logger=logger)
-else:
-    AE  =  AE.ArchiveExtractor(args.DB, args.DBN, logger=logger)
-
-#######################################################
-# Prepare dictionary for results
-results = dict()
-
-#######################################################
-# Extract from database
-for attr in attributes:
-    logger.info("Extracting attribute %s..."%attr)
-    logger.info("Extract from %s to %s."%(args.dateStart, args.dateStop))
-
-    for attempt in range(3):
-        try:
-            datevalue = AE.betweenDates(attr, args.dateStart, args.dateStop)
-
-            # Add to result dictionary
-            results[attr] = datevalue
-
-        except ValueError as e:
-            # Bad request, no point in retrying
-            logger.debug("ErrorMsg: %s"%e)
-            logger.warning("Failed to extract %s. Skipping..."%attr)
-            break
-        except (tango.CommunicationFailed, tango.DevFailed) as e:
-            # Communication problem, retry
-            logger.debug("ErrorMsg: %s"%e)
-            logger.warning("Failed to extract %s. Retry..."%attr)
-        else:
-            # Success, no need to retry
-            break
-
-    else:
-        logger.error("The extractor device might have crashed.\n"
-                "You should check with Jive and probably restart with Astor.\n")
-
-    # Save all at each step
-    np.save(args.fileout, results)
-
-logger.info("Extraction done, saved in file %s"%args.fileout)
-
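-# Hedged usage sketch (attribute path and file names are placeholders):
-#   Extract one attribute from HDB over the default range (last day) into a .npy file:
-#     python cli_archiveextractor.py --DB H --fileout out.npy dom/fam/member-1/current
-#   Reload the saved dictionary:
-#     results = numpy.load("out.npy", allow_pickle=True).item()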
diff --git a/old/env_tango.py b/old/env_tango.py
deleted file mode 100644
index 3e5ef009be4c28784de7139ae9150c1496700c87..0000000000000000000000000000000000000000
--- a/old/env_tango.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import tango
-import re
-
-# =============================================================================
-# The following attributes will be populated when first needed.
-# =============================================================================
-# List of attribute tango paths in TDB
-TDBLIST = []
-
-# List of attribute tango paths in HDB
-HDBLIST = []
-
-# List of tango devices
-DEVLIST = []
-
-# Archived attribute lists per database ("H", "T"), filled on first use by searcharch
-ARCHLIST = None
-
-###############################################################################
-## USEFUL FUNCTIONS
-###############################################################################
-def searchattr(prx, attr, ignorecase=True):
-    """
-    Search in a tango device proxy for an attribute name.
-
-    PARAMETERS
-    ----------
-    prx: tango.DeviceProxy
-        Proxy to the device.
-
-    attr: string
-        String to search for.
-
-    ignorecase: bool
-        Case insensitive.
-    """
-    if ignorecase:
-        return [a for a in prx.get_attribute_list() if attr.lower() in a.lower()]
-    else:
-        return [a for a in prx.get_attribute_list() if attr in a]
-
-def searcharch(attr, db, ignorecase=True):
-    """
-    Search for an attribute in the archiver list.
-
-    PARAMETERS
-    ----------
-    attr: string
-        String to search for.
-
-    db: string
-        Which database, "H", "T"
-
-    ignorecase: bool
-        Case insensitive.
-
-    RETURNS
-    -------
-    list of string
-        Matching archived attribute names.
-    """
-    global ARCHLIST
-    if ARCHLIST is None:
-        # Populate ARCHLIST for both databases
-        ARCHLIST=dict()
-        for _db in "HT":
-            ARCHLIST[_db] = tango.DeviceProxy("archiving/{}dbextractor/1".format(_db)).getcurrentarchivedatt()
-
-    if ignorecase:
-        return [a for a in ARCHLIST[db] if attr.lower() in a.lower()]
-    else:
-        return [a for a in ARCHLIST[db] if attr in a]
-
-
-###############################################################################
-## POPULATE ATTRIBUTE FUNCTIONS
-###############################################################################
-
-# =============================================================================
-def populate_tdblist():
-    """
-    Fill the global list of TDB archived attributes.
-    """
-    global TDBLIST
-    TDBLIST= tango.DeviceProxy("archiving/tdbextractor/1").getcurrentarchivedatt()
-
-# =============================================================================
-def populate_hdblist():
-    """
-    Fill the global list of HDB archived attributes.
-    """
-    global HDBLIST
-    HDBLIST= tango.DeviceProxy("archiving/hdbextractor/1").getcurrentarchivedatt()
-
-# =============================================================================
-def populate_devlist():
-    """
-    Fill the global list of devices.
-    """
-
-    DB = tango.Database()
-
-    for domain in DB.get_device_domain("*"):
-        for family in DB.get_device_family(domain+"/*"):
-            for member in DB.get_device_member(domain+"/"+family+"/*"):
-                DEVLIST.append(domain+"/"+family+"/"+member)
-
-# =============================================================================
-def populate_devlist2():
-    """
-    Fill the global list of devices.
-    """
-
-    DB = tango.Database()
-
-    DEVLIST = [dev for dev in DB.get_device_exported("*")]
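-
-# Hedged usage sketch (illustration only):
-#   populate_devlist()
-#   print(len(DEVLIST), "devices in the Tango database")
-#   populate_hdblist()
-#   hits = [a for a in HDBLIST if "current" in a.lower()]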
diff --git a/old/pyprompt.ipy b/old/pyprompt.ipy
deleted file mode 100644
index 15158ec78fce1e2a5235fea3d785179145cb1432..0000000000000000000000000000000000000000
--- a/old/pyprompt.ipy
+++ /dev/null
@@ -1,17 +0,0 @@
-# IPython prompt initialization for operation
-
-# Turn automagic on
-# This gives the possibility to use magic commands without '%',
-# e.g. cd, pwd, ...
-%automagic on
-
-
-# Import useful packages
-import tango
-import numpy as np
-import OpUtils as OU
-print("Imported: tango, np, OU")
-
-# Create a database object
-DB = tango.Database()
-print("Object DB created: tango database")
diff --git a/old/source_env b/old/source_env
deleted file mode 100644
index bf982991d1a910c56e94d1339a6a370712bcf8e8..0000000000000000000000000000000000000000
--- a/old/source_env
+++ /dev/null
@@ -1,5 +0,0 @@
-# Source the python3 env
-source /usr/Local/pyroot/pytango3rc
-
-# Add the current directory to pythonpath
-export PYTHONPATH=${PYTHONPATH}:`pwd`