hlrc / client / python / hlrc_client / MiddlewareRSB.py @ 9d84f3cb
"""
This file is part of hlrc

Copyright(c) sschulz <AT> techfak.uni-bielefeld.de
http://opensource.cit-ec.de/projects/hlrc

This file may be licensed under the terms of the
GNU General Public License Version 3 (the ``GPL''),
or (at your option) any later version.

Software distributed under the License is distributed
on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
express or implied. See the GPL for the specific language
governing rights and limitations.

You should have received a copy of the GPL along with this
program. If not, go to http://www.gnu.org/licenses/gpl.html
or write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

The development of this software was supported by the
Excellence Cluster EXC 277 Cognitive Interaction Technology.
The Excellence Cluster EXC 277 is a grant of the Deutsche
Forschungsgemeinschaft (DFG) in the context of the German
Excellence Initiative.
"""

from Middleware import *
import errno
import sys

import rsb
import rsb.converter
import rst
import rstsandbox
from rst.robot.EmotionState_pb2 import EmotionState
from rst.robot.Animation_pb2 import Animation
from rst.robot.GazeTarget_pb2 import GazeTarget
from rst.robot.MouthTarget_pb2 import MouthTarget

class MiddlewareRSB(Middleware):
    #######################################################################
    def __init__(self, scope, loglevel=logging.WARNING):
        """initialise
        :param scope: base scope we want to listen on
        """
        # init base settings
        Middleware.__init__(self, scope, loglevel)
        # call mw init
        self.init_middleware()

    def __del__(self):
        """destructor
        """
        self.logger.debug("destructor of MiddlewareRSB called")

    #######################################################################
    def init_middleware(self):
        """initialise middleware
        """
        # mute rsb logging:
        logging.getLogger("rsb").setLevel(logging.ERROR)

        # initialise RSB stuff
        self.logger.info("initialising RSB middleware connection on scope %s, registering rst converters..." % (self.base_scope))

        self.emotionstate_converter = rsb.converter.ProtocolBufferConverter(messageClass=EmotionState)
        rsb.converter.registerGlobalConverter(self.emotionstate_converter)

        self.animation_converter = rsb.converter.ProtocolBufferConverter(messageClass=Animation)
        rsb.converter.registerGlobalConverter(self.animation_converter)

        self.gaze_converter = rsb.converter.ProtocolBufferConverter(messageClass=GazeTarget)
        rsb.converter.registerGlobalConverter(self.gaze_converter)

        self.mouth_converter = rsb.converter.ProtocolBufferConverter(messageClass=MouthTarget)
        rsb.converter.registerGlobalConverter(self.mouth_converter)

        try:
            self.server = rsb.createRemoteServer(self.base_scope + '/set')
        except ValueError:
            # no server was created if the scope is invalid, so just report and exit
            self.logger.error("ERROR: invalid scope given. server deactivated")
            sys.exit(errno.EINVAL)

    #######################################################################
    def publish_emotion(self, em_type, emotion, blocking):
        """publish an emotion via mw
        :param em_type: type of emotion (RobotEmotion::TYPE_DEFAULT or RobotEmotion::TYPE_CURRENT)
        :param emotion: emotion to set
        :param blocking: True if this call should block until execution finished on robot
        """

        # create emotion & fill it with values:
        rsb_em = EmotionState()

        # set value
        rsb_em.value = self.convert_emotiontype_to_rsbval(emotion.value)

        # set duration
        rsb_em.duration = int(emotion.time_ms)

        with rsb.createRemoteServer(self.base_scope + '/set') as server:
            self.logger.debug("calling the emotion rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))

            if (blocking):
                # blocking rpc call:
                if (em_type == RobotEmotion.TYPE_DEFAULT):
                    result = server.defaultEmotion(rsb_em)
                else:
                    result = server.currentEmotion(rsb_em)

                self.logger.debug("server reply: '%s'" % result)
            else:
                if (em_type == RobotEmotion.TYPE_DEFAULT):
                    future = server.defaultEmotion.async(rsb_em)
                else:
                    future = server.currentEmotion.async(rsb_em)

                # we could block here for an incoming result with a timeout in s
                # print '> server reply: "%s"' % future.get(timeout = 10);

            self.logger.debug("emotion rpc done")

    def publish_head_animation(self, animation, blocking):
        """publish a head animation via mw
        :param animation: animation to set
        :param blocking: True if this call should block until execution finished on robot
        """

        self.logger.debug("calling the animation rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))

        # create animation & fill it with values:
        rsb_ani = Animation()

        # select ani
        rsb_ani.target = self.convert_animationtype_to_rsbval(animation.value)
        rsb_ani.repetitions = animation.repetitions
        rsb_ani.duration_each = animation.time_ms
        rsb_ani.scale = animation.scale

        if (blocking):
            # blocking:
            result = self.server.animation(rsb_ani)
            self.logger.debug("server reply: '%s'" % result)
        else:
            future = self.server.animation.async(rsb_ani)
            # we can block here for an incoming result with a timeout in s
            # print '> server reply: "%s"' % future.get(timeout = 10);

        self.logger.debug("animation rpc done")

    def publish_default_emotion(self, emotion, blocking):
        self.publish_emotion(RobotEmotion.TYPE_DEFAULT, emotion, blocking)

    def publish_current_emotion(self, emotion, blocking):
        self.publish_emotion(RobotEmotion.TYPE_CURRENT, emotion, blocking)

    def publish_gaze_target(self, gaze, blocking):
        """publish a gaze target via mw
        :param gaze: gaze to set
        :param blocking: True if this call should block until execution finished on robot
        """
        self.logger.debug("calling the gaze rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))

        # create gaze target & fill it with values:
        rsb_gaze = GazeTarget()

        # fill proto
        rsb_gaze.pan = gaze.pan
        rsb_gaze.tilt = gaze.tilt
        rsb_gaze.roll = gaze.roll
        rsb_gaze.vergence = gaze.vergence
        rsb_gaze.pan_offset = gaze.pan_offset
        rsb_gaze.tilt_offset = gaze.tilt_offset

        if (blocking):
            # blocking:
            result = self.server.gaze(rsb_gaze)
            self.logger.debug("server reply: '%s'" % result)
        else:
            future = self.server.gaze.async(rsb_gaze)
            # we can block here for an incoming result with a timeout in s
            # print '> server reply: "%s"' % future.get(timeout = 10);

        self.logger.debug("gaze rpc done")

    def publish_mouth_target(self, mouth, blocking):
        """publish a mouth target via mw
        :param mouth: mouth value to set
        :param blocking: True if this call should block until execution finished on robot
        """
        self.logger.debug("calling the mouth rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))

        # create mouth state & fill it with values:
        rsb_mouth = MouthTarget()

        # fill proto
        rsb_mouth.opening_left = mouth.opening_left
        rsb_mouth.opening_center = mouth.opening_center
        rsb_mouth.opening_right = mouth.opening_right
        rsb_mouth.position_left = mouth.position_left
        rsb_mouth.position_center = mouth.position_center
        rsb_mouth.position_right = mouth.position_right

        if (blocking):
            # blocking:
            result = self.server.mouth(rsb_mouth)
            self.logger.debug("server reply: '%s'" % result)
        else:
            future = self.server.mouth.async(rsb_mouth)
            # we can block here for an incoming result with a timeout in s
            # print '> server reply: "%s"' % future.get(timeout = 10);

        self.logger.debug("mouth rpc done")

    def publish_speech(self, text, blocking):
        """publish a tts request via mw
        :param text: text to synthesize and speak
        :param blocking: True if this call should block until execution finished on robot
        """
        self.logger.debug("calling the speech rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))

        if (blocking):
            # blocking:
            result = self.server.speech(text)
            self.logger.debug("server reply: '%s'" % result)
        else:
            future = self.server.speech.async(text)
            # we can block here for an incoming result with a timeout in s
            # print '> server reply: "%s"' % future.get(timeout = 10);

        self.logger.debug("speech rpc done")
    #######################################################################
    def is_running(self):
        return True

    #######################################################################
    # some helpers
    def convert_animationtype_to_rsbval(self, value):
        """convert RobotAnimation.value to RSB animation value
        :param value: RobotAnimation.* id to convert to rsb id
        """
        # NOTE: this conversion is important as the actual integer values of
        #       the python api and the protobuf might be different

        if (value == RobotAnimation.IDLE):
            return Animation().IDLE
        elif (value == RobotAnimation.HEAD_NOD):
            return Animation().HEAD_NOD
        elif (value == RobotAnimation.HEAD_SHAKE):
            return Animation().HEAD_SHAKE
        elif (value == RobotAnimation.EYEBLINK_L):
            return Animation().EYEBLINK_L
        elif (value == RobotAnimation.EYEBLINK_R):
            return Animation().EYEBLINK_R
        elif (value == RobotAnimation.EYEBLINK_BOTH):
            return Animation().EYEBLINK_BOTH
        elif (value == RobotAnimation.EYEBROWS_RAISE):
            return Animation().EYEBROWS_RAISE
        elif (value == RobotAnimation.EYEBROWS_LOWER):
            return Animation().EYEBROWS_LOWER
        else:
            self.logger.error("invalid animation type %d\n" % (value))
            return Animation().NEUTRAL

    def convert_emotiontype_to_rsbval(self, value):
        """convert RobotEmotion.value to RSB emotion value
        :param value: RobotEmotion.* id to convert to rsb id
        """
        # NOTE: this conversion is important as the actual integer values of
        #       the python api and the protobuf might be different

        if (value == RobotEmotion.NEUTRAL):
            return EmotionState().NEUTRAL
        elif (value == RobotEmotion.HAPPY):
            return EmotionState().HAPPY
        elif (value == RobotEmotion.SAD):
            return EmotionState().SAD
        elif (value == RobotEmotion.ANGRY):
            return EmotionState().ANGRY
        elif (value == RobotEmotion.SURPRISED):
            return EmotionState().SURPRISED
        elif (value == RobotEmotion.FEAR):
            return EmotionState().FEAR
        else:
            self.logger.error("invalid emotion type %d\n" % (value))
            return EmotionState().NEUTRAL
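
# Minimal usage sketch (not part of the original module): it shows how a client
# could drive the RSB wrapper above. It assumes the star-imported Middleware
# module also provides a RobotEmotion container class whose instances carry the
# .value and .time_ms attributes read by publish_emotion(); the scope "/flobi"
# is only a placeholder.
if __name__ == "__main__":
    mw = MiddlewareRSB("/flobi", logging.DEBUG)

    # speech rpc, blocking until the robot has finished talking
    mw.publish_speech("hello world", True)

    # current-emotion rpc, non-blocking (RobotEmotion() constructor is assumed)
    em = RobotEmotion()
    em.value = RobotEmotion.HAPPY
    em.time_ms = 2000
    mw.publish_current_emotion(em, False)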
|