Commit 4e8ddc5d authored by Manuela Kuhn

Merge branch 'release-2.2.0'

parents 0f9ae2d8 f919be03
......@@ -14,6 +14,8 @@ logfileName = dataManager.log
# File size before rollover in B (linux only)
logfileSize = 10485760 ; #10 MB
# Name with which the service should be running
procname = zeromq-data-transfer
#########################################
#### SignalHandler Configuration ####
......@@ -35,7 +37,8 @@ requestFwPort = 50002
# ZMQ port to distribute control signals
# (needed if running on Windows)
controlPort = 50005
controlPubPort = 50005
controlSubPort = 50006
#########################################
#### EventDetector Configuration ####
......@@ -48,7 +51,7 @@ eventDetectorType = InotifyxDetector
#eventDetectorType = HttpDetector
# Subdirectories to be monitored and to store data to
# (needed if eventDetector is InotifyxDetector or WatchdogDetector or dataFetchter is getFromFile)
# (needed if eventDetector is InotifyxDetector or WatchdogDetector or dataFetcher is getFromFile)
fixSubdirs = ["commissioning", "current", "local"]
# Directory to be monitored for changes
......@@ -70,6 +73,11 @@ monitoredFormats = [".tif", ".cbf"]
# (needed if eventDetector is InotifyxDetector or HttpDetector)
historySize = 0
# Flag describing if a clean up thread should be activated
# which regularly checks if some files were missed
# (needed if eventDetector is InotifyxDetector)
useCleanUp = True
# Time (in seconds) since last modification after which a file will be seen as closed
# (needed if eventDetector is WatchdogDetector)
timeTillClosed = 2
......
#########################################
#### Logging Configuration ####
#########################################
# Path where the logfile will be created
logfilePath = D:\zeromq-data-transfer\logs
# Filename used for logging
logfileName = dataManager.log
# File size before rollover in B (linux only)
logfileSize = 10485760 ; #10 MB
#########################################
#### SignalHandler Configuration ####
#########################################
# Port number to receive signals from
comPort = 50000
# List of hosts allowed to connect
whitelist = ["localhost", "zitpcx19282", "zitpcx22614", "zitpcx17858"]
# ZMQ port to get new requests
requestPort = 50001
# ZMQ port to forward requests
# (needed if running on Windows)
requestFwPort = 50002
# ZMQ port to distribute control signals
# (needed if running on Windows)
controlPubPort = 50005
controlSubPort = 50006
#########################################
#### EventDetector Configuration ####
#########################################
# Type of event detector to use (options are: InotifyxDetector, WatchdogDetector, ZmqDetector, HttpGetDetector)
#eventDetectorType = InotifyxDetector
eventDetectorType = WatchdogDetector
#eventDetectorType = ZmqDetector
#eventDetectorType = HttpDetector
# Subdirectories to be monitored and to store data to
# (needed if eventDetector is InotifyxDetector or WatchdogDetector or dataFetcher is getFromFile)
fixSubdirs = ["commissioning", "current", "local"]
# Directory to be monitored for changes
# Inside this directory only the subdirectories "commissioning", "current" and "local" are monitored
# (needed if eventDetector is InotifyxDetector or WatchdogDetector)
monitoredDir = D:\zeromq-data-transfer\data\source
# Target to move the files into
localTarget = D:\zeromq-data-transfer\data\target
# Type of event detector to use (options are: inotifyx, watchdog, zmq)
#eventDetectorType = inotifyx
eventDetectorType = watchdog
#eventDetectorType = zmq
# Event type of files to be monitored (options are: IN_CLOSE_WRITE, IN_MOVED_TO, ...)
# (needed if eventDetector is InotifyxDetector or WatchdogDetector)
monitoredEventType = IN_CLOSE_WRITE
# Subdirectories of watchDir to be monitored
monitoredSubdirs = ["commissioning", "current", "local"]
# The formats to be monitored; files in another format will be neglected
# (needed if eventDetector is InotifyxDetector or WatchdogDetector)
monitoredFormats = [".tif", ".cbf"]
# Number of events stored to look for doubles
# (needed if eventDetector is InotifyxDetector or HttpDetector)
historySize = 0
# Time (in seconds) since last modification after which a file will be seen as closed
# (needed if eventDetector is WatchdogDetector)
timeTillClosed = 2
# List of hosts allowed to connect
whitelist = ["localhost", "zitpcx19282", "zitpcx22614", "zitpcx17858"]
# ZMQ port to get events from
# (needed if eventDetectorType is ZmqDetector)
eventPort = 50003
# Tango device proxy for the detector
# (needed if eventDetectorType is HttpDetector)
#detectorDevice = "haspp10lab:10000/p10/eigerdectris/lab.01"
detectorDevice = haspp06:10000/p06/eigerdectris/exp.01
# Tango device proxy for the filewriter
# (needed if eventDetectorType is HttpDetector)
#filewriterDevice = "haspp10lab:10000/p10/eigerfilewriter/lab.01"
filewriterDevice = haspp06:10000/p06/eigerfilewriter/exp.01
#########################################
#### DataFetcher Configuration ####
#########################################
# Module with methods specifying how to get the data (options are "getFromFile", "getFromZmq", "getFromHttp")
dataFetcherType = getFromFile
#dataFetcherType = getFromZmq
#dataFetcherType = getFromHttp
# If "getFromZmq" is specified as dataFetcherType it needs a port to listen to
# (needed if eventDetectorType is ZmqDetector)
dataFetcherPort = 50010
# Number of parallel data streams
# if this number is modified, the port numbers also have to be adjusted
numberOfStreams = 1
# Enable ZMQ pipe into storage system (if set to False: the file is moved into the localTarget)
# Enable ZMQ pipe into storage system (uses the fixedStreamHost and fixedStreamPort)
useDataStream = True
# Fixed host to send the data to with highest priority
fixedStreamHost = zitpcx19282
# Fixed Port to send the data to with highest priority
fixedStreamPort = 50100
# Port number to receive signals from
comPort = 50000
# ZMQ port to get new requests
requestPort = 50001
# ZMQ port to forward requests
requestFwPort = 50002
# ZMQ port to get events from (only needed if eventDetectorType is zmq)
eventPort = 50003
# ZMQ-router port which coordinates the load-balancing to the worker-processes
routerPort = 50004
# ZMQ-pull-socket port which deletes/moves given files
cleanerPort = 50005
# Chunk size of file parts sent via zmq
chunkSize = 10485760 ; # = 1024*1024*10
#chunkSize = 1073741824 ; # = 1024*1024*1024
# Path where the logfile will be created
logfilePath = D:\zeromq-data-transfer\logs
# ZMQ-router port which coordinates the load-balancing to the worker-processes
# (needed if running on Windows)
routerPort = 50004
# Filename used for logging
logfileName = dataManager.log
# Target to move the files into
localTarget = D:\zeromq-data-transfer\data\target
# File size before rollover in B (linux only)
logfileSize = 10485760 ; #10 MB
# Flag describing if the data should be stored in localTarget
# (needed if dataFetcherType is getFromFile or getFromHttp)
storeData = False
# Flag describing if the files should be removed from the source
# (needed if dataFetcherType is getFromHttp)
removeData = True
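
These sender and receiver .conf files have no [section] headers; the scripts in this diff read them by faking one (see config.readfp(helperScript.FakeSecHead(open(configFile))) further down). A minimal, self-contained sketch of that pattern, assuming Python 2 as used elsewhere in this diff; the FakeSecHead class here is an illustrative stand-in, not the project's helper:

# Illustrative only: read a section-less .conf by prepending a fake [asection] header.
import ConfigParser

class FakeSecHead(object):
    def __init__(self, fp):
        self.fp = fp
        self.faked = False
    def readline(self):
        if not self.faked:
            self.faked = True
            return "[asection]\n"
        return self.fp.readline()

config = ConfigParser.RawConfigParser()
config.readfp(FakeSecHead(open("conf/dataManager.conf")))   # example path
print config.get("asection", "useDataStream")               # -> "True"
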
#########################################
#### Logging Configuration ####
#########################################
# Path where logfile will be created
logfilePath = /space/projects/zeromq-data-transfer/logs
# Filename used for logging
logfileName = dataReceiver.log
# File size before rollover in B (linux only)
logfileSize = 104857600 ; #100 MB
#########################################
#### DataReceiver Configuration ####
#########################################
#
# List of hosts allowed to receive data from
#whitelist = a3p02.1-hosts
whitelist = ["localhost", "zitpcx19282.desy.de", "zitpcx22614", "lsdma-lab04"]
# Where incoming data will be stored to
targetDir = /space/projects/zeromq-data-transfer/data/target
targetDir = /space/projects/zeromq-data-transfer/data/zmq_target
# Local IP to connect dataStream to
dataStreamIp = 131.169.185.121 ;# zitpcx19282.desy.de
# TCP port of data pipe
dataStreamPort = 50100
# Path where logfile will be created
logfilePath = /space/projects/zeromq-data-transfer/logs
# Filename used for logging
logfileName = dataReceiver.log
# File size before rollover in B (linux only)
logfileSize = 104857600 ; #100 MB
# Where incoming data will be stored to
targetDir = /rd_liveviewer
#targetDir = /home/kuhnm/Arbeit/zeromq-data-transfer/data/zmq_target
#targetDir = /space/projects/zeromq-data-transfer/data/zmq_target
# TCP port of data pipe
dataStreamPort = 50100
# IP to communicate with the liveViewer
liveViewerComIp = 0.0.0.0
# TCP port to communicate with the live viewer
liveViewerComPort = 50021
# List of hosts allowed to connect to the receiver
liveViewerWhiteList = ["localhost", "haspp11user02", "haspp11user03", "haspp11user04"]
#liveViewerWhiteList = ["localhost", "zitpcx19282", "zitpcx22614"]
# Port to exchange data and signals between receiver and LiveViewCommunicator
lvCommunicatorPort = 50020
# IP of dataStream-socket to send signals back to the sender
signalIp = haspp11eval01.desy.de
#signalIp = zitpcx19282.desy.de
#signalIp = zitpcx22614.desy.de
# Time to wait for the sender to give a confirmation of the signal
senderResponseTimeout = 1000
# Path where logfile will be created
logfilePath = /home/p11user/live-viewer/logs
#logfilePath = /home/p11user/zeromq-data-transfer/logs
#logfilePath = /home/kuhnm/Arbeit/zeromq-data-transfer/logs
#logfilePath = /space/projects/zeromq-data-transfer/logs
# Filename used for logging
logfileName = zmq_receiver_LiveViewer.log
# Size of the ring buffer for the live viewer
maxRingBufferSize = 20
#maxRingBufferSize = 2
# Size of the queue for the live viewer
maxQueueSize = 1000
#maxQueueSize = 2
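
maxRingBufferSize caps how many of the most recent images the live viewer keeps in memory. A deque with maxlen behaves like such a ring buffer; this is an illustrative sketch only, not the project's LiveViewCommunicator implementation:

from collections import deque

ring = deque(maxlen=20)           # maxRingBufferSize = 20
for filename in ["img_001.cbf", "img_002.cbf", "img_003.cbf"]:
    ring.append(filename)         # once full, the oldest entry is dropped automatically
latest = ring[-1]                 # newest image to hand to the live viewer
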
Zeromq Data Transfer 2.2.0
- Fixed stopping: The service is shut down if one process dies
- Enabled whitelist for data receiver
- Added tests to check status of fixed data receiver
- Now runs under the process name zeromq-data-receiver
- Added init script
- Fixed clean up after shut down
- Enabled combination of data receiver whitelist with ldapsearch
- Added option to enable a clean up thread which checks the directory for missed files
- Version check does not consider bugfixes anymore
Zeromq Data Transfer 2.1.4
- Fixed copied file removal (Part 2)
Zeromq Data Transfer 2.1.3
- Fixed copied file removal (Part 1)
Zeromq Data Transfer 2.1.2
- Fixed too high processor usage
- Fixed suffix check in treewalk after creation of directory
Zeromq Data Transfer 2.1.1
- Fixed error handling with incorrect whitelists
- Fixed version checking
- Added file create time to metadata
Zeromq Data Transfer 2.1.0
- Added file removal safeguard
- Enabled use of IPC internal communication for Linux nodes
- Added exception definitions for dataTransferAPI
- Misc bug fixing
Zeromq Data Transfer 2.0.0
- Added functionality to get Data via HTTP Get
- Redesigned architecture
Zeromq Data Transfer 1.0.0
- Initial implementation
[Unit]
Description=ZMQ data transfer
Wants=network-online.target
After=network-online.target
[Service]
TimeoutStartSec=0
WorkingDirectory=/space/projects/zeromq-data-transfer
ExecStart=/space/projects/zeromq-data-transfer/src/sender/DataManager.py --verbose
[Install]
WantedBy=multi-user.target
......@@ -29,8 +29,7 @@ stop()
{
if [ -f $Pidfile ] ; then
echo "Stopping $INSTANZ"
# pkill python
kill -15 $Pid
kill $Pid
rm $Pidfile
else
echo "Cannot stop $INSTANZ - no Pidfile found!"
......
......@@ -6,6 +6,9 @@ import argparse
import logging
import os
import ConfigParser
import json
import subprocess
import re
BASE_PATH = os.path.dirname ( os.path.dirname ( os.path.dirname ( os.path.realpath ( __file__ ) )))
......@@ -37,6 +40,22 @@ def argumentParsing():
logfileName = config.get('asection', 'logfileName')
logfileSize = config.get('asection', 'logfileSize')
try:
whitelist = json.loads(config.get('asection', 'whitelist'))
except ValueError:
ldap_cn = config.get('asection', 'whitelist')
p = subprocess.Popen("ldapsearch -x -H ldap://it-ldap-slave.desy.de:1389 cn=" + ldap_cn + " -LLL", shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
lines = p.stdout.readlines()
matchHost = re.compile(r'nisNetgroupTriple: [(]([\w|\S|.]+),.*,[)]', re.M|re.I)
whitelist = []
for line in lines:
if matchHost.match(line):
if matchHost.match(line).group(1) not in whitelist:
whitelist.append(matchHost.match(line).group(1))
targetDir = config.get('asection', 'targetDir')
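
For context, the new fallback above treats a non-JSON whitelist value as an LDAP netgroup name and collects hostnames from the nisNetgroupTriple entries returned by ldapsearch. A small sketch of that matching step against made-up output lines (the DN and hostnames are examples):

import re

lines = [
    "dn: cn=a3p02.1-hosts,ou=netgroup,o=DESY,c=DE\n",
    "nisNetgroupTriple: (haspp11user02.desy.de,-,)\n",
    "nisNetgroupTriple: (haspp11user03.desy.de,-,)\n",
]

matchHost = re.compile(r'nisNetgroupTriple: [(]([\w|\S|.]+),.*,[)]', re.M | re.I)
whitelist = []
for line in lines:
    m = matchHost.match(line)
    if m and m.group(1) not in whitelist:
        whitelist.append(m.group(1))

print whitelist   # ['haspp11user02.desy.de', 'haspp11user03.desy.de']
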
......@@ -45,30 +64,33 @@ def argumentParsing():
parser = argparse.ArgumentParser()
parser.add_argument("--logfilePath" , type = str,
help = "Path where logfile will be created (default=" + str(logfilePath) + ")",
default = logfilePath )
parser.add_argument("--logfileName" , type = str,
help = "Filename used for logging (default=" + str(logfileName) + ")",
default = logfileName )
parser.add_argument("--logfileSize" , type = int,
help = "File size in B before rollover (linux only; (default=" + str(logfileSize) + ")",
default = logfileSize )
parser.add_argument("--verbose" , help = "More verbose output",
action = "store_true" )
parser.add_argument("--onScreen" , type = str,
help = "Display logging on screen (options are CRITICAL, ERROR, WARNING, INFO, DEBUG)",
default = False )
parser.add_argument("--targetDir" , type = str,
help = "Where incoming data will be stored to (default=" + str(targetDir) + ")",
default = targetDir )
parser.add_argument("--dataStreamIp" , type = str,
help = "Ip of dataStream-socket to pull new files from (default=" + str(dataStreamIp) + ")",
default = dataStreamIp )
parser.add_argument("--dataStreamPort" , type = str,
help = "Port number of dataStream-socket to pull new files from (default=" + str(dataStreamPort) + ")",
default = dataStreamPort )
parser.add_argument("--logfilePath" , type = str,
help = "Path where logfile will be created (default=" + str(logfilePath) + ")",
default = logfilePath )
parser.add_argument("--logfileName" , type = str,
help = "Filename used for logging (default=" + str(logfileName) + ")",
default = logfileName )
parser.add_argument("--logfileSize" , type = int,
help = "File size in B before rollover (linux only; (default=" + str(logfileSize) + ")",
default = logfileSize )
parser.add_argument("--verbose" , help = "More verbose output",
action = "store_true" )
parser.add_argument("--onScreen" , type = str,
help = "Display logging on screen (options are CRITICAL, ERROR, WARNING, INFO, DEBUG)",
default = False )
parser.add_argument("--whitelist" , type = str,
help = "List of hosts allowed to connect (default=" + str(whitelist) + ")",
default = whitelist )
parser.add_argument("--targetDir" , type = str,
help = "Where incoming data will be stored to (default=" + str(targetDir) + ")",
default = targetDir )
parser.add_argument("--dataStreamIp" , type = str,
help = "Ip of dataStream-socket to pull new files from (default=" + str(dataStreamIp) + ")",
default = dataStreamIp )
parser.add_argument("--dataStreamPort" , type = str,
help = "Port number of dataStream-socket to pull new files from (default=" + str(dataStreamPort) + ")",
default = dataStreamPort )
arguments = parser.parse_args()
......@@ -104,18 +126,27 @@ def argumentParsing():
class DataReceiver:
def __init__(self, outputDir, dataIp, dataPort):
def __init__(self):
arguments = argumentParsing()
self.log = self.getLogger()
self.whitelist = arguments.whitelist
self.log.info("Configured whitelist: " + str(self.whitelist))
self.outputDir = os.path.normpath(outputDir)
self.dataIp = dataIp
self.dataPort = dataPort
self.targetDir = os.path.normpath(arguments.targetDir)
self.dataIp = arguments.dataStreamIp
self.dataPort = arguments.dataStreamPort
self.log = self.getLogger()
self.log.debug("Init")
self.log.info("Writing to directory '" + self.targetDir + "'.")
self.dataTransfer = dataTransfer("stream", useLog = True)
self.dataTransfer = dataTransfer("stream", useLog = True)
self.run()
try:
self.run()
finally:
self.stop()
def getLogger(self):
......@@ -126,7 +157,8 @@ class DataReceiver:
def run(self):
try:
self.dataTransfer.start(self.dataPort)
self.dataTransfer.start([self.dataIp, self.dataPort], self.whitelist)
# self.dataTransfer.start(self.dataPort)
except:
self.log.error("Could not initiate stream", exc_info=True)
raise
......@@ -139,16 +171,11 @@ class DataReceiver:
while continueReceiving:
try:
[payloadMetadata, payload] = self.dataTransfer.get()
except KeyboardInterrupt:
return
except:
self.log.error("Getting data failed.", exc_info=True)
raise
break
try:
self.dataTransfer.store(self.outputDir, [payloadMetadata, payload] )
except KeyboardInterrupt:
return
self.dataTransfer.store(self.targetDir, [payloadMetadata, payload] )
except:
self.log.error("Storing data...failed.", exc_info=True)
raise
......@@ -160,17 +187,15 @@ class DataReceiver:
self.dataTransfer.stop()
self.dataTransfer = None
def __exit__(self):
self.stop()
if __name__ == "__main__":
arguments = argumentParsing()
def __del__(self):
self.stop()
targetDir = arguments.targetDir
dataStreamIp = arguments.dataStreamIp
dataStreamPort = arguments.dataStreamPort
if __name__ == "__main__":
#start file receiver
receiver = DataReceiver(targetDir, dataStreamIp, dataStreamPort)
receiver = DataReceiver()
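
Taken together, the refactored DataReceiver boils down to the dataTransfer calls visible in the hunks above. A condensed sketch of that flow; the import path is not shown in this diff and is assumed, and host, port, whitelist and target values are examples:

from dataTransferAPI import dataTransfer   # import path is an assumption

dt = dataTransfer("stream", useLog=False)  # useLog=False assumed, to skip the logger wiring above
# start([ip, port], whitelist) opens the stream socket and restricts allowed peers
dt.start(["zitpcx19282.desy.de", "50100"], ["localhost", "zitpcx19282"])
try:
    while True:
        [metadata, payload] = dt.get()     # blocks until the sender delivers a file
        dt.store("/space/projects/zeromq-data-transfer/data/zmq_target", [metadata, payload])
finally:
    dt.stop()
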
__author__ = 'Manuela Kuhn <manuela.kuhn@desy.de>', 'Marco Strutz <marco.strutz@desy.de>'
import sys
import argparse
import logging
import os
import json
import ConfigParser
import zmq
import time
from multiprocessing import Process, freeze_support
import shared.helperScript as helperScript
from shared.LiveViewCommunicator import LiveViewCommunicator
from receiverLiveViewer.FileReceiver import FileReceiver
BASE_PATH = os.path.dirname ( os.path.dirname ( os.path.realpath ( __file__ ) ) )
CONFIG_PATH = BASE_PATH + os.sep + "conf"
def argumentParsing():
configFile = CONFIG_PATH + os.sep + "receiverLiveViewer.conf"
config = ConfigParser.RawConfigParser()
config.readfp(helperScript.FakeSecHead(open(configFile)))
logfilePath = config.get('asection', 'logfilePath')
logfileName = config.get('asection', 'logfileName')
targetDir = config.get('asection', 'targetDir')
dataStreamPort = config.get('asection', 'dataStreamPort')
liveViewerComIp = config.get('asection', 'liveViewerComIp')
liveViewerComPort = config.get('asection', 'liveViewerComPort')
liveViewerWhiteList = json.loads(config.get('asection', 'liveViewerWhiteList'))
lvCommunicatorPort = config.get('asection', 'lvCommunicatorPort')
signalIp = config.get('asection', 'signalIp')
maxRingBufferSize = config.get('asection', 'maxRingBufferSize')
maxQueueSize = config.get('asection', 'maxQueueSize')
senderResponseTimeout = config.get('asection', 'senderResponseTimeout')
parser = argparse.ArgumentParser()
parser.add_argument("--logfilePath" , type=str, default=logfilePath,
help="Path where logfile will be created (default=" + str(logfilePath) + ")")
parser.add_argument("--logfileName" , type=str, default=logfileName,
help="Filename used for logging (default=" + str(logfileName) + ")")
parser.add_argument("--targetDir" , type=str, default=targetDir,
help="Where incoming data will be stored to (default=" + str(targetDir) + ")")
parser.add_argument("--dataStreamPort" , type=str, default=dataStreamPort,
help="Port number of dataStream-socket to pull new files from; there needs to be one entry for each streams (default=" + str(dataStreamPort) + ")")
parser.add_argument("--liveViewerComIp" , type=str, default=liveViewerComIp,
help="IP to bind LiveViewer to (default=" + str(liveViewerComIp) + ")")
parser.add_argument("--liveViewerComPort" , type=str, default=liveViewerComPort,
help="TCP port of live viewer (default=" + str(liveViewerComPort) + ")")
parser.add_argument("--liveViewerWhiteList" , type=str, default=liveViewerWhiteList,
help="List of hosts allowed to connect to the receiver (default=" + str(liveViewerWhiteList) + ")")
parser.add_argument("--lvCommunicatorPort" , type=str, default=lvCommunicatorPort,
help="Port to exchange data and signals between receiver and lvcommunicator (default=" + str(lvCommunicatorPort) + ")")
parser.add_argument("--signalIp" , type=str, default=signalIp,
help="Port number of dataStream-socket to send signals back to the sender (default=" + str(signalIp) + ")")
parser.add_argument("--maxRingBufferSize" , type=int, default=maxRingBufferSize,
help="Size of the ring buffer for the live viewer (default=" + str(maxRingBufferSize) + ")")
parser.add_argument("--maxQueueSize" , type=int, default=maxQueueSize,
help="Size of the queue for the live viewer (default=" + str(maxQueueSize) + ")")
parser.add_argument("--senderResponseTimeout" , type=int, default=senderResponseTimeout,
help=argparse.SUPPRESS)
parser.add_argument("--verbose" , action="store_true",
help="More verbose output")
parser.add_argument("--onScreen" , type=str, default=False,
help="Display logging on screen (options are CRITICAL, ERROR, WARNING, INFO, DEBUG)")
arguments = parser.parse_args()
targetDir = str(arguments.targetDir)
logfilePath = str(arguments.logfilePath)
logfileName = str(arguments.logfileName)
logfileFullPath = os.path.join(logfilePath, logfileName)
verbose = arguments.verbose
onScreen = arguments.onScreen
#enable logging
helperScript.initLogging(logfileFullPath, verbose, onScreen)
# check target directory for existence
helperScript.checkDirExistance(targetDir)
helperScript.checkDirEmpty(targetDir)
# check if logfile is writable
helperScript.checkLogFileWritable(logfilePath, logfileName)
return arguments
class ReceiverLiveViewer():
targetDir = None
dataStreamPort = None
liveViewerComIp = None
liveViewerComPort = None
liveViewerWhiteList = None
lvCommunicatorPort = None
signalIp = None
maxRingBufferSize = None
maxQueueSize = None
senderResponseTimeout = None
def __init__(self):
arguments = argumentParsing()
self.targetDir = arguments.targetDir
self.dataStreamPort = arguments.dataStreamPort
self.liveViewerComIp = arguments.liveViewerComIp
self.liveViewerComPort = arguments.liveViewerComPort
self.liveViewerWhiteList = arguments.liveViewerWhiteList
self.lvCommunicatorPort = arguments.lvCommunicatorPort
self.signalIp = arguments.signalIp
self.maxRingBufferSize = arguments.maxRingBufferSize
self.maxQueueSize = arguments.maxQueueSize
self.senderResponseTimeout = arguments.senderResponseTimeout
# self.context = zmq.Context.instance()
# logging.debug("registering zmq global context")
self.run()
def run(self):
# start file receiver
# lvCommunicatorProcess = threading.Thread(target=LiveViewCommunicator, args=(self.lvCommunicatorPort, self.liveViewerComPort, self.liveViewerComIp, self.maxRingBuffersize, self.maxQueueSize))
logging.info("start lvCommunicator process...")
lvCommunicatorProcess = Process(target=LiveViewCommunicator, args=(self.lvCommunicatorPort,
self.liveViewerComPort, self.liveViewerComIp, self.liveViewerWhiteList,
self.maxRingBufferSize, self.maxQueueSize))
lvCommunicatorProcess.start()
#start file receiver
fileReceiver = FileReceiver(self.targetDir,
self.signalIp, self.dataStreamPort,
self.lvCommunicatorPort, self.senderResponseTimeout)
try:
fileReceiver.process()
except KeyboardInterrupt:
logging.debug("Keyboard interruption detected. Shutting down")
# except Exception, e:
# print "unknown exception detected."
# finally:
# try:
# logging.debug("Destroying ZMQ context...")
# self.context.destroy()
# logging.debug("Destroying ZMQ context...done.")
# except:
# logging.debug("Destroying ZMQ context...failed.")
# logging.error(sys.exc_info())
if __name__ == "__main__":
freeze_support() #see https://docs.python.org/2/library/multiprocessing.html#windows
receiver = ReceiverLiveViewer()
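
The live-viewer receiver uses the standard multiprocessing pattern shown above: freeze_support() for frozen Windows executables, then a helper process started via Process(target=..., args=...). A stripped-down illustration of that pattern; the worker below is a placeholder, not LiveViewCommunicator:

from multiprocessing import Process, freeze_support

def worker(port):
    # placeholder for LiveViewCommunicator; runs in its own process
    print "communicator listening on port", port

if __name__ == "__main__":
    freeze_support()               # needed when frozen into a Windows executable
    p = Process(target=worker, args=("50020",))   # 50020 = lvCommunicatorPort
    p.start()
    # ... the main process would now run the FileReceiver loop ...
    p.join()
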
__author__ = 'Manuela Kuhn <manuela.kuhn@desy.de>', 'Marco Strutz <marco.strutz@desy.de>'
import zmq
import sys
import logging