Revision f8fa1217

View differences:

client/python/hlrc_client.egg-info/SOURCES.txt
@@ -12,9 +12,6 @@
 hlrc_client/RobotGaze.py
 hlrc_client/RobotMouth.py
 hlrc_client/__init__.py
-hlrc_client/hlrc_play_animation.py
-hlrc_client/hlrc_set_emotion.py
-hlrc_client/hlrc_speak_utterance.py
 hlrc_client/hlrc_test_gui.py
 hlrc_client/textgrid_hlrc.py
 hlrc_client.egg-info/PKG-INFO
client/python/hlrc_client.egg-info/entry_points.txt
@@ -1,3 +1,3 @@
 [console_scripts]
-
+hlrc_test_gui = hlrc_client.hlrc_test_gui:main
 
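The updated entry_points.txt registers hlrc_test_gui as the package's only console script, consistent with the standalone hlrc_* scripts being dropped from SOURCES.txt above. For orientation, a setuptools declaration that produces exactly this [console_scripts] entry would look roughly like the sketch below; the project's actual setup.py is not part of this diff, so the surrounding metadata is an assumption.

    # Sketch only: the real setup.py is not shown in this revision.
    # It illustrates the setuptools call that generates the
    # [console_scripts] entry seen in entry_points.txt above.
    from setuptools import setup, find_packages

    setup(
        name="hlrc_client",          # assumed project name
        packages=find_packages(),
        entry_points={
            "console_scripts": [
                "hlrc_test_gui = hlrc_client.hlrc_test_gui:main",
            ],
        },
    )
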
client/python/hlrc_client/Middleware.py
@@ -51,7 +51,11 @@
 		self.gaze_target = RobotGaze()
 		self.mouth_target = RobotMouth()
 		self.robot_animation = RobotAnimation()
-		
+			
+	def __del__(self):
+		"""destructor
+		"""
+		self.logger.debug("destructor of Middleware called")
 	
 	def config_logger(self, level):
 		formatter = logging.Formatter('%(asctime)s %(name)-30s %(levelname)-8s > %(message)s')
client/python/hlrc_client/MiddlewareROS.py
@@ -52,6 +52,11 @@
 		Middleware.__init__(self,scope,loglevel)
 		#call mw init
 		self.init_middleware()
+			
+	def __del__(self):
+		"""destructor
+		"""
+		self.logger.debug("destructor of MiddlewareROS called")
 		
 	#######################################################################
         def init_middleware(self):
client/python/hlrc_client/MiddlewareRSB.py
@@ -52,7 +52,12 @@
 		Middleware.__init__(self,scope,loglevel)
 		#call mw init
 		self.init_middleware()
-		
+
+	def __del__(self):
+		"""destructor
+		"""
+		self.logger.debug("destructor of MiddlewareROS called")
+	
 	#######################################################################
         def init_middleware(self):
 		"""initialise middleware
client/python/hlrc_client/RobotController.py
@@ -31,25 +31,43 @@
 
 class RobotController:
 	def __init__(self, mw_name, scope, loglevel=logging.WARNING):
+		"""initialise
+		:param mw_name: which mw to use, currentyl ROS and RSB are supported
+		:param scope: base scope we want to listen on 
+		:param  loglevel: optional log level
+		"""
 		self.logger = logging.getLogger(__name__)
 		
 		# create nice and actually usable formatter and add it to the handler
 		self.config_logger(loglevel)
 		
-		#store scope
+		#store 
 		self.scope = scope
+		self.mw = mw_name
+		self.loglevel = loglevel
 		
-		if (mw_name.upper() == "RSB"):
+		self.middleware = None
+		
+				
+		if (self.mw.upper() == "RSB"):
 			self.logger.info("creating new middleware connection via RSB")
-			self.middleware = MiddlewareRSB(self.scope, loglevel)
-		elif (mw_name.upper() == "ROS"):
+			self.middleware = MiddlewareRSB(self.scope, self.loglevel)
+		elif (self.mw.upper() == "ROS"):
 			self.logger.info("creating new middleware connection via ROS")
-			self.middleware = MiddlewareROS(self.scope, loglevel)
+			self.middleware = MiddlewareROS(self.scope, self.loglevel)
 		else:
-			self.logger.error("invalid middleware requested (%s). supported: {ROS, RSB}\n\n" % (mw_name))
+			self.logger.error("invalid middleware requested (%s). supported: {ROS, RSB}\n\n" % (self.mw))
 			sys.exit(errno.EINVAL)
 	
+	def __del__(self):
+		"""destructor
+		"""
+		self.logger.debug("destructor of RobotController called")
+	
 	def config_logger(self, level):
+		"""initialise a nice logger formatting
+		:param  level: log level
+		"""
 		formatter = logging.Formatter('%(asctime)s %(name)-30s %(levelname)-8s > %(message)s')
 		ch = logging.StreamHandler()
 		#ch.setLevel(level)
@@ -58,30 +76,55 @@
 		self.logger.addHandler(ch)
 
 	def set_current_emotion(self, robot_emotion, blocking = False):
+		"""set the current emotion
+		:param robot_emotion: a RobotEmotion object
+		:param blocking: should this call block during execution?
+		"""
 		self.logger.debug("set_current_emotion(%s) %s" % (robot_emotion, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_current_emotion(robot_emotion, blocking)
 		
 	def set_default_emotion(self, robot_emotion, blocking = False):
+		"""set the default emotion
+		:param robot_emotion: a RobotEmotion object
+		:param blocking: should this call block during execution?
+		"""
 		self.logger.debug("set_default_emotion(%s) %s" % (robot_emotion, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_default_emotion(robot_emotion, blocking)
 	
 	def set_gaze_target(self, robot_gaze, blocking = False):
+		"""set a gaze target
+		:param robot_gaze: a RobotGaze object
+		:param blocking: should this call block during execution?
+		"""
+		
 		self.logger.debug("set_gaze_target(%s) %s" % (robot_gaze, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_gaze_target(robot_gaze, blocking)
 	
 	def set_mouth_target(self, robot_mouth, blocking = False):
+		"""set a mouth target
+		:param robot_mouth: a RobotMouth object
+		:param blocking: should this call block during execution?
+		"""		
 		self.logger.debug("set_mouth_target(%s) %s" % (robot_mouth, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_mouth_target(robot_mouth, blocking)
 	
 	def set_head_animation(self, robot_animation, blocking = False):
+		"""set a head animation
+		:param robot_animation: a RobotAnimation object
+		:param blocking: should this call block during execution?
+		"""		
 		self.logger.debug("set_head_animation(%s) %s" % (robot_animation, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_head_animation(robot_animation, blocking)
 	
 	def set_speak(self, text, blocking = False):
+		"""request the robot to say something using tts
+		:param text: text to synthesize
+		:param blocking: should this call block during execution?
+		"""
 		self.logger.debug("set_speak(%s) %s" % (text, ("BLOCKING" if blocking else "NON_BLOCKING")))
 		self.middleware.set_speak(text, blocking)
 
-	def get_gaze_target(self):
-		result = self.middleware.get_gaze_target()
-		self.logger.debug("get_gaze_target() returned %s" % (result))
-		return self.middleware.get_gaze_target()
+	#def get_gaze_target(self):
+	#	result = self.middleware.get_gaze_target()
+	#	self.logger.debug("get_gaze_target() returned %s" % (result))
+	#	return self.middleware.get_gaze_target()
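With the refactored constructor above, the middleware name and log level are kept on the instance and the connection object starts out as None until a supported middleware is selected. A minimal usage sketch against this API follows; the import paths and the "/flobi1" example scope are assumptions (the scope string only appears in the usage strings of the removed command-line scripts further down), while the constructor and setter signatures are taken directly from this diff.

    # Minimal usage sketch of the RobotController API as it stands after
    # this revision. Import paths and the example scope are assumptions;
    # the method signatures are the ones shown in the diff above.
    import logging
    from hlrc_client.RobotController import RobotController
    from hlrc_client.RobotGaze import RobotGaze

    robot = RobotController("ROS", "/flobi1", logging.INFO)   # or "RSB"
    robot.set_speak("hello world", blocking=True)             # blocking tts request
    robot.set_gaze_target(RobotGaze(), blocking=False)        # fire-and-forget gaze target
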
client/python/hlrc_client/hlrc_play_animation.py
@@ -1,84 +0,0 @@
-#!/usr/bin/python
-#PYTHONPATH="/opt/ros/groovy/lib/python2.7/dist-packages:/vol/csra/releases/nightly/lib/python2.7/:/vol/csra/releases/nightly/lib/python2.7/site-packages/
-import sys
-import logging
-import errno
-
-try:
-    import rsb
-    import rsb.converter
-    import rst
-    import rstsandbox
-    from rst.robot.Animation_pb2 import Animation
-except ImportError as exception:
-    sys.stderr.write("ImportError: {}\n> HINT: try to export PYTHONPATH=$PYTHONPATH:$YOUR_PREFIX/lib/python2.7/site-packages/\n\n".format(exception))
-    sys.exit(errno.ENOPKG)
-
-class hlrc_animation():
-    def __init__(self, _base_scope):
-        #print "> registering rst converter"
-        converter = rsb.converter.ProtocolBufferConverter(messageClass = Animation)
-        rsb.converter.registerGlobalConverter(converter)
-        self.server = None	
-        self.set_scope(_base_scope);
-
-    def __del__(self):
-        if (not self.server is None):
-            self.server.deactivate()
-
-    def set_scope(self, scope):
-        self.base_scope = str(scope) #NOTE: str() is important here, scope is a qstring (?) and gets deleted during call
-        print "> setting scope to '%s'" % self.base_scope
-	if (not self.server is None):
-            self.server.deactivate()
-	try:
-            self.server = rsb.createRemoteServer(self.base_scope + '/set')
-        except ValueError:
-            print "> invalid scope given. server deactivated"
-            self.server.deactivate()
-
-    def trigger_animation(self, ani_id, repetitions, duration_each, scale, blocking):
-        if (self.server is None):
-            print("> invalid server")
-            return
-        #create animation & fill it with values:
-        ani = Animation()
-
-        #select ani
-        ani.target = ani_id
-        ani.repetitions = repetitions
-        ani.duration_each = duration_each
-        ani.scale       = scale
-
-        if (blocking):
-            #blocking:
-            print "> calling the animation rpc (blocking until we finished talking)..."
-            print '> server reply: "%s"' % self.server.animation(ani)
-        else:
-            print "> calling the animation rpc (NON-BLOCKING)..."
-            return self.server.animation.async(ani)
-            #we can block here for a incoming result with a timeout in s
-            #print '> server reply: "%s"' % future.get(timeout = 10);
-
-        print "> blocking call done"
-    
-def main():
-    if (len(sys.argv) != 5):
-        print "> usage: %s <base scope> <animation id> <dur each> <repetitions>\n>     example: %s /flobi1 1 1000 1" % (sys.argv[0] , sys.argv[0])
-        sys.exit(0)
-
-    # Pacify logger.
-    #logging.basicConfig()
-    scope  = sys.argv[1]
-    ani_id = int(sys.argv[2])
-    dur    = int(sys.argv[3])
-    rep    = int(sys.argv[4])
-    scale  = 1.0
-
-    hani = hlrc_animation(scope)
-    hani.trigger_animation(ani_id, rep, dur, scale, 1)
-
-if __name__ == '__main__':
-    main()
-
-
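hlrc_play_animation.py is removed in this revision together with its SOURCES.txt entry. The RobotController API shown earlier exposes set_head_animation(robot_animation, blocking), so a rough replacement for the deleted command-line call could look like the sketch below. Note that the RobotAnimation attribute names are assumptions mirroring the protobuf fields used in the removed script; this diff does not define them.

    # Rough, hypothetical replacement for the deleted script, going through
    # RobotController.set_head_animation() from this revision.
    # The RobotAnimation attribute names (target, repetitions, duration_each,
    # scale) are assumptions copied from the removed script's protobuf use.
    import logging
    from hlrc_client.RobotController import RobotController
    from hlrc_client.RobotAnimation import RobotAnimation

    robot = RobotController("RSB", "/flobi1", logging.WARNING)
    ani = RobotAnimation()
    ani.target = 1            # animation id, as in the removed script
    ani.repetitions = 1
    ani.duration_each = 1000  # milliseconds per repetition
    ani.scale = 1.0
    robot.set_head_animation(ani, blocking=True)
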
client/python/hlrc_client/hlrc_set_emotion.py
@@ -1,74 +0,0 @@
-#!/usr/bin/python
-#PYTHONPATH="/opt/ros/groovy/lib/python2.7/dist-packages:/vol/csra/releases/nightly/lib/python2.7/:/vol/csra/releases/nightly/lib/python2.7/site-packages/
-import sys
-import logging
-import errno
-
-try:
-    import rsb
-    import rsb.converter
-    import rst
-    import rstsandbox
-    from rst.robot.EmotionState_pb2 import EmotionState
-except ImportError as exception:
-    sys.stderr.write("ImportError: {}\n> HINT: try to export PYTHONPATH=$PYTHONPATH:$YOUR_PREFIX/lib/python2.7/site-packages/\n\n".format(exception))
-    sys.exit(errno.ENOPKG)
-
-class hlrc_emotion():
-    def __init__(self, _base_scope):
-        #print "> registering rst converter"
-        converter = rsb.converter.ProtocolBufferConverter(messageClass = EmotionState)
-        rsb.converter.registerGlobalConverter(converter)
-	self.set_scope( _base_scope);
-
-    def set_scope(self, scope):
-        self.base_scope = str(scope) #NOTE: str() is important here, scope is a qstring (?) and gets deleted during call
-        print "> setting scope to '%s'" % self.base_scope
-        try:
-            self.server = rsb.createRemoteServer(self.base_scope + '/set')
-        except ValueError:
-            print "> invalid scope given. server deactivated"
-            self.server.deactivate()
-
-    def set_emotion(self, emotion_id, duration_each, blocking):
-        if (self.server is None):
-            print("> invalid server")
-            return
-
-        #create emotion & fill it with values:
-        em = EmotionState()
-
-        #select ani
-        em.value    = emotion_id
-        em.duration = duration_each
-
-        with rsb.createRemoteServer(self.base_scope + '/set') as server:
-            if (blocking):
-                #blocking:
-                print "> calling the emotion rpc (blocking until we finished talking)..."
-                print '> server reply: "%s"' % server.currentEmotion(em)
-            else:
-               print "> calling the animation rpc (NON-BLOCKING)..."
-               future = server.currentEmotion.async(em)
-               #we can block here for a incoming result with a timeout in s
-               #print '> server reply: "%s"' % future.get(timeout = 10);
-
-        print "> done"
-    
-def main():
-    if (len(sys.argv) != 4):
-        print "> usage: %s <base scope> <emotion id> <duration>\n>     example: %s /flobi1 1 1000" % (sys.argv[0] , sys.argv[0])
-        sys.exit(0)
-
-    # Pacify logger.
-    #logging.basicConfig()
-    em_id = int(sys.argv[2])
-    dur    = int(sys.argv[3])
-    base   = sys.argv[1]
-
-    hlrc_emotion(base).set_emotion(em_id, dur, 1)
-
-if __name__ == '__main__':
-    main()
-
-
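hlrc_set_emotion.py is likewise removed. Its blocking RPC call is visible above, while the non-blocking variant only appears as a commented-out future.get() line; for completeness, here is that pattern spelled out, using only calls the removed script itself makes (Python 2 rsb API, as in the rest of this revision; the scope is an example).

    # Non-blocking RSB RPC pattern, reconstructed from the commented-out
    # lines of the removed script: fire the call, then wait up to 10 s.
    import rsb, rsb.converter
    from rst.robot.EmotionState_pb2 import EmotionState

    rsb.converter.registerGlobalConverter(
        rsb.converter.ProtocolBufferConverter(messageClass=EmotionState))

    em = EmotionState()
    em.value = 1        # emotion id, as in the removed script
    em.duration = 1000  # duration in milliseconds

    with rsb.createRemoteServer('/flobi1/set') as server:   # example scope
        future = server.currentEmotion.async(em)  # returns a future immediately
        reply = future.get(timeout=10)            # block with a timeout in seconds
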
client/python/hlrc_client/hlrc_speak_utterance.py
@@ -1,120 +0,0 @@
-#!/usr/bin/python
-#PYTHONPATH="/opt/ros/groovy/lib/python2.7/dist-packages:/vol/csra/releases/nightly/lib/python2.7/:/vol/csra/releases/nightly/lib/python2.7/site-packages/
-import sys
-import wave
-
-from textgrid_hlc import *
-import logging
-import errno
-import os.path
-
-try:
-    import rsb
-    import rsb.converter
-    import rst
-    import rstsandbox
-    from rst.audition.Utterance_pb2 import Utterance
-    from rst.audition.SoundChunk_pb2 import SoundChunk
-except ImportError as exception:
-    sys.stderr.write("ImportError: {}\n> HINT: try to export PYTHONPATH=$PYTHONPATH:$YOUR_PREFIX/lib/python2.7/site-packages/\n\n".format(exception))
-    sys.exit(errno.ENOPKG)
-
-class hlrc_utterance():
-    def __init__(self, _base_scope):
-        #print "> registering rst converter"
-        converter = rsb.converter.ProtocolBufferConverter(messageClass = Utterance)
-        rsb.converter.registerGlobalConverter(converter)
-	self.set_scope(_base_scope)
-
-    def set_scope(self, scope):
-        self.base_scope = str(scope) #NOTE: str() is important here, scope is a qstring (?) and gets deleted during call
-        print "> setting scope to '%s'" % self.base_scope
-        try:
-            self.server = rsb.createRemoteServer(self.base_scope + '/set')
-        except ValueError:
-            print "> invalid scope given. server deactivated"
-            self.server.deactivate()
-
-    def trigger_utterance(self, filename_praat, filename_wav, blocking):
-        if (self.server is None):
-            print("> invalid server")
-            return
-
-        if (not os.path.isfile(filename_praat)):
-            print "can not open file '%s'" % (filename_praat)
-            return 0
-
-        if (not os.path.isfile(filename_wav)):
-            print "can not open file '%s'" % (filename_wav)
-            return 0
-
-        print "> reading wave file '%s'" % (filename_wav)
-        wav = wave.open(filename_wav, "r")
-
-    
-        print "> parsing praat file '%s'" % (filename_praat)
-        tgrid = TextGrid.load(filename_praat)
-    
-    
-        #create utterance & fill it with values:
-        ut = Utterance()
-    
-        #textual description of audio file
-        ut.text = filename_praat
-        
-        ut.audio.data = wav.readframes(-1)
-        ut.audio.sample_count = wav.getnframes()
-        ut.audio.channels = wav.getnchannels()
-        ut.audio.rate = wav.getframerate()
-    
-        if (wav.getsampwidth() == 1):
-            ut.audio.sample_type = SoundChunk.SAMPLE_U8
-        elif (wav.getsampwidth() == 2):
-            ut.audio.sample_type = SoundChunk.SAMPLE_S16
-        else:
-            print "> invalid sample type. py doc says wave files are either u8 or s16"
-            exit(0)
-    
-        #wave spec says always little endian
-        ut.audio.endianness = SoundChunk.ENDIAN_LITTLE
-    
-        print "> filling phones with data from praat"
-        for tier in tgrid.tiers:
-            idx = (tgrid.tiers.index(tier)) + 1
-            transcript = tier.simple_transcript
-            for (xmin, xmax, utt) in transcript:
-                phoneme = ut.phonemes.add()
-                phoneme.symbol = utt
-                phoneme.duration = int(1000.0*(float(xmax)-float(xmin)))
-    
-        with rsb.createRemoteServer(self.base_scope + '/set') as server:
-            
-            if (blocking):
-                #blocking:
-                print "> calling the utterance rpc (blocking until we finished talking)..."
-                print '> server reply: "%s"' % server.utterance(ut)
-            else:
-               print "> calling the utterance rpc (NON-BLOCKING)..."
-               future = server.utterance.async(ut)
-               #we can block here for a incoming result with a timeout in seconds
-               #print '> server reply: "%s"' % future.get(timeout = 10);
-    
-        print "> done"
-        return 1
-
-def main():
-    # Pacify logger.
-    #logging.basicConfig()
-    if (len(sys.argv) != 4):
-        print "> usage: %s <base_scope> file.praat file.wav\n>     example: %s /flobi1 hello.praat hello.wav" % (sys.argv[0] , sys.argv[0])
-        sys.exit(0)
-
-    filename_praat = sys.argv[2]
-    filename_wav = sys.argv[3]
-    base = sys.argv[1]
-
-    hlc = hlrc_utterance(base)
-    hlc.trigger_utterance(filename_praat, filename_wav, 1)
-
-if __name__ == '__main__':
-    main()
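The bulk of the removed hlrc_speak_utterance.py is wave/TextGrid plumbing: it loads a Praat TextGrid, walks each tier's simple_transcript and converts the interval bounds into millisecond phoneme durations for the Utterance message. Distilled into a standalone helper, that conversion looks like the sketch below; TextGrid.load(), .tiers and .simple_transcript are used exactly as in the code above, while the module name textgrid_hlrc follows SOURCES.txt (the removed script imports it under the apparently misspelled name textgrid_hlc).

    # Distilled from the removed script: turn Praat TextGrid intervals into
    # (symbol, duration_ms) pairs.
    from textgrid_hlrc import TextGrid

    def phoneme_durations(praat_file):
        tgrid = TextGrid.load(praat_file)
        phonemes = []
        for tier in tgrid.tiers:
            for (xmin, xmax, symbol) in tier.simple_transcript:
                duration_ms = int(1000.0 * (float(xmax) - float(xmin)))
                phonemes.append((symbol, duration_ms))
        return phonemes
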
client/python/hlrc_client/hlrc_test_gui.py
 #!/usr/bin/python
+
+"""
+This file is part of hlrc
+
+Copyright(c) sschulz <AT> techfak.uni-bielefeld.de
+http://opensource.cit-ec.de/projects/hlrc
+
+This file may be licensed under the terms of the
+GNU General Public License Version 3 (the ``GPL''),
+or (at your option) any later version.
+
+Software distributed under the License is distributed
+on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
+express or implied. See the GPL for the specific language