Revision 2346dcd2

View differences:

client/cpp/examples/random_gaze/main.cpp
 int main(int argc, char** argv) {
 	if (argc != 3) {
 		printf("usage: %s <middleware> <robot scope>\n", argv[0]);
-		printf("       <middleware> is either ROS or RSB\n");
+		printf("       <middleware> is  ROS\n");
 		printf("       <robot scope> e.g. /flobi or /icub\n\n");
 		exit(EXIT_FAILURE);
 	}
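With RSB gone, the client examples accept only ROS as the middleware argument. Below is a minimal, illustrative sketch of how such an example client is wired up after this change; it assumes only the RobotController(middleware, scope) constructor used in speech_test/main.cpp further down, and the gaze/speech calls are placeholders.

// Illustrative sketch only -- assumes RobotController(middleware, scope)
// as used in client/cpp/examples/speech_test/main.cpp below.
#include "RobotController.h"
#include <cstdio>
#include <cstdlib>

int main(int argc, char** argv) {
	if (argc != 3) {
		printf("usage: %s <middleware> <robot scope>\n", argv[0]);
		printf("       <middleware> is ROS\n");
		printf("       <robot scope> e.g. /flobi or /icub\n\n");
		exit(EXIT_FAILURE);
	}
	// Only "ROS" is accepted; any other value makes RobotController
	// print an error and exit (see RobotController.cpp below).
	RobotController* robot_controller = new RobotController(argv[1], argv[2]);

	// ... issue gaze / speech requests via robot_controller here ...

	delete robot_controller;
	return EXIT_SUCCESS;
}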
client/cpp/examples/speech_test/main.cpp
 #define BLOCKING true
 
 int main(int argc, char** argv) {
-	RobotController* robot_controller = new RobotController("RSB", "/flobi");
+	RobotController* robot_controller = new RobotController("ROS", "/flobi");
 
 	int count = 0;
 
client/cpp/include/MiddlewareROS.h
 #ifndef ROS_SUPPORT
 public:
 	MiddlewareROS(std::string scope) : Middleware(scope) {
-		printf("> ERROR: hlrc was compiled without ROS middleware support. Please use MiddlewareRSB() instead!\n\n");
+		printf("> ERROR: hlrc was compiled without ROS middleware support.\n\n");
 		exit(EXIT_FAILURE);
 	}
 
client/cpp/include/MiddlewareRSB.h
1
/*
2
 * This file is part of hlrc
3
 *
4
 * Copyright(c) sschulz <AT> techfak.uni-bielefeld.de
5
 * http://opensource.cit-ec.de/projects/hlrc
6
 *
7
 * This file may be licensed under the terms of the
8
 * GNU General Public License Version 3 (the ``GPL''),
9
 * or (at your option) any later version.
10
 *
11
 * Software distributed under the License is distributed
12
 * on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
13
 * express or implied. See the GPL for the specific language
14
 * governing rights and limitations.
15
 *
16
 * You should have received a copy of the GPL along with this
17
 * program. If not, go to http://www.gnu.org/licenses/gpl.html
18
 * or write to the Free Software Foundation, Inc.,
19
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20
 *
21
 * The development of this software was supported by the
22
 * Excellence Cluster EXC 277 Cognitive Interaction Technology.
23
 * The Excellence Cluster EXC 277 is a grant of the Deutsche
24
 * Forschungsgemeinschaft (DFG) in the context of the German
25
 * Excellence Initiative.
26
 *
27
 */
28

  
29
#pragma once
30
#include "Middleware.h"
31
#ifdef RSB_SUPPORT
32
#define BOOST_SIGNALS_NO_DEPRECATION_WARNING // mute warnings from RSB
33
#include <rsb/Factory.h>
34
#include <rsb/Informer.h>
35
#include <rsb/Listener.h>
36

  
37
#if USE_INPROTK_SYNTHESIS
38
#include <rst/communicationpatterns/TaskState.pb.h>
39
#endif
40
#endif
41

  
42
class MiddlewareRSB : public Middleware {
43
#ifndef RSB_SUPPORT
44
public:
45
	MiddlewareRSB(std::string scope) : Middleware(scope) {
46
		printf("> ERROR: hlrc was compiled without RSB middleware support. Please use MiddlewareROS() instead!\n\n");
47
		exit(EXIT_FAILURE);
48
	}
49

  
50
	void init(){};
51
	void publish_default_emotion(RobotEmotion e, bool blocking){};
52
	void publish_current_emotion(RobotEmotion e, bool blocking){};
53
	void publish_gaze_target(RobotGaze g, bool blocking){};
54
	void publish_lookat_target(float x, float y, float z, const std::string frame_id, bool blocking, float roll){};
55
	void publish_mouth_target(RobotMouth m, bool blocking){};
56
	void publish_head_animation(RobotHeadAnimation a, bool blocking){};
57
	void publish_speech(std::string text, bool blocking){};
58

  
59
#else
60
public:
61
	MiddlewareRSB(std::string scope);
62

  
63
protected:
64
	void init();
65
	void publish_current_emotion(RobotEmotion e, bool blocking);
66
	void publish_default_emotion(RobotEmotion e, bool blocking);
67
	void publish_gaze_target(RobotGaze target, bool blocking);
68
	void publish_lookat_target(float x, float y, float z, const std::string frame_id, bool blocking, float roll);
69
	void publish_mouth_target(RobotMouth target, bool blocking);
70
	void publish_head_animation(RobotHeadAnimation a, bool blocking);
71
	void publish_speech(std::string text, bool blocking);
72

  
73
private:
74
	void publish_emotion(std::string scope_target, RobotEmotion e, bool blocking);
75

  
76
	// rsb::patterns::RemoteServerPtr inprotk_server;
77
	rsb::patterns::RemoteServerPtr hlrc_server;
78
	// rsb::patterns::LocalServerPtr inprotk_server;
79
#if USE_INPROTK_SYNTHESIS
80
	rsb::Informer<rst::communicationpatterns::TaskState>::Ptr inprotk_informer;
81
	rsb::ListenerPtr inprotk_listener;
82
	rsb::ListenerPtr hack_listener;
83

  
84
	std::mutex pending_tasks_mutex;
85
	std::vector<std::shared_ptr<rst::communicationpatterns::TaskState> > pending_tasks;
86
	unsigned int say_task_active;
87
	unsigned int say_task_done;
88

  
89
	std::mutex current_saytask_mutex;
90
	std::string current_saytask;
91
	std::string get_current_saytask();
92
	void set_current_saytask(std::string text);
93

  
94
	void check_for_inprotk();
95
	void incoming_hack(std::shared_ptr<std::string> finished_task);
96
#endif
97
#endif
98
};
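The surviving MiddlewareROS.h uses the same compile-time guard pattern as the removed header above: when built without middleware support the class is reduced to a stub that reports the error and exits, otherwise the real declarations follow. The sketch below is condensed and illustrative only; this revision shows just the stub branch of MiddlewareROS.h, and the #else branch here merely mirrors the structure of the removed MiddlewareRSB.h.

// Condensed sketch of the guard pattern; not the verbatim MiddlewareROS.h.
#include "Middleware.h"
#include <cstdio>
#include <cstdlib>
#include <string>

class MiddlewareROS : public Middleware {
#ifndef ROS_SUPPORT
public:
	// Built without ROS support: constructing the middleware is a hard error.
	MiddlewareROS(std::string scope) : Middleware(scope) {
		printf("> ERROR: hlrc was compiled without ROS middleware support.\n\n");
		exit(EXIT_FAILURE);
	}
#else
public:
	MiddlewareROS(std::string scope);

protected:
	// Same publish_* interface as in the removed MiddlewareRSB.h above.
	void init();
	void publish_current_emotion(RobotEmotion e, bool blocking);
	void publish_gaze_target(RobotGaze target, bool blocking);
	void publish_speech(std::string text, bool blocking);
#endif
};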
client/cpp/src/MiddlewareRSB.cpp
1
/*
2
 * This file is part of hlrc
3
 *
4
 * Copyright(c) sschulz <AT> techfak.uni-bielefeld.de
5
 * http://opensource.cit-ec.de/projects/hlrc
6
 *
7
 * This file may be licensed under the terms of the
8
 * GNU General Public License Version 3 (the ``GPL''),
9
 * or (at your option) any later version.
10
 *
11
 * Software distributed under the License is distributed
12
 * on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
13
 * express or implied. See the GPL for the specific language
14
 * governing rights and limitations.
15
 *
16
 * You should have received a copy of the GPL along with this
17
 * program. If not, go to http://www.gnu.org/licenses/gpl.html
18
 * or write to the Free Software Foundation, Inc.,
19
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20
 *
21
 * The development of this software was supported by the
22
 * Excellence Cluster EXC 277 Cognitive Interaction Technology.
23
 * The Excellence Cluster EXC 277 is a grant of the Deutsche
24
 * Forschungsgemeinschaft (DFG) in the context of the German
25
 * Excellence Initiative.
26
 *
27
 */
28

  
29
#ifdef RSB_SUPPORT
30

  
31
#include "MiddlewareRSB.h"
32
#include <rsb/converter/Repository.h>
33
#include <rsb/converter/ProtocolBufferConverter.h>
34

  
35
#include <rst/animation/EmotionExpression.pb.h>
36
#include <rst/animation/BinocularHeadGaze.pb.h>
37
#include <rst/animation/HeadAnimation.pb.h>
38
#include <rst/audition/Utterance.pb.h>
39
#include <rst/audition/SoundChunk.pb.h>
40
#include <boost/algorithm/string.hpp>
41
#include <boost/range/algorithm_ext/erase.hpp>
42

  
43
using namespace std;
44
using namespace rsb;
45
using namespace rsb::patterns;
46

  
47
MiddlewareRSB::MiddlewareRSB(string scope) : Middleware(scope) {
48
	printf("> new MiddlewareRSB() on base scope '%s'\n", base_scope.c_str());
49
	init();
50
}
51

  
52
void MiddlewareRSB::init(void) {
53
	printf("> MiddlewareRSB::init() registering converters\n");
54

  
55
	try {
56
		// converter for EmotionState
57
		rsb::converter::Converter<string>::Ptr emotionStateConverter(
58
		   new rsb::converter::ProtocolBufferConverter<rst::animation::EmotionExpression>());
59
		rsb::converter::converterRepository<string>()->registerConverter(emotionStateConverter);
60

  
61
		// converter for Utterance
62
		// rsb::converter::Converter<string>::Ptr UtteranceConverter(new
63
		// rsb::converter::ProtocolBufferConverter<rst::audition::Utterance>());
64
		// rsb::converter::converterRepository<string>()->registerConverter(UtteranceConverter);
65

  
66
		// converter for GazeTarget
67
		rsb::converter::Converter<string>::Ptr gazeTargetConverter(
68
		   new rsb::converter::ProtocolBufferConverter<rst::animation::BinocularHeadGaze>());
69
		rsb::converter::converterRepository<string>()->registerConverter(gazeTargetConverter);
70

  
71
		// converter for MouthTarget
72
		/// rsb::converter::Converter<string>::Ptr mouthTargetConverter(new
73
		/// rsb::converter::ProtocolBufferConverter<rst::robot::MouthTarget>());
74
		/// rsb::converter::converterRepository<string>()->registerConverter(mouthTargetConverter);
75

  
76
		// converter for Animation
77
		rsb::converter::Converter<string>::Ptr animationConverter(
78
		   new rsb::converter::ProtocolBufferConverter<rst::animation::HeadAnimation>());
79
		rsb::converter::converterRepository<string>()->registerConverter(animationConverter);
80
	}
81
	catch (std::invalid_argument e) {
82
		printf("> converters already registered\n");
83
	}
84

  
85
	// first get a factory instance that is used to create RSB domain objects
86
	Factory& factory = getFactory();
87

  
88
	// get server
89
	string scope = base_scope + "/set/";
90
	hlrc_server = factory.createRemoteServer(scope);
91

  
92
	printf("> init done\n");
93
}
94

  
95
void MiddlewareRSB::publish_emotion(string scope_target, RobotEmotion e, bool blocking) {
96
	std::shared_ptr<rst::animation::EmotionExpression> request(new rst::animation::EmotionExpression());
97

  
98
	switch (e.value) {
99
		default:
100
			printf("> WANRING: invalid emotion id %d. defaulting to NEUTRAL\n", e.value);
101
			// fall through:
102
		case (RobotEmotion::NEUTRAL):
103
			request->set_emotion(rst::animation::EmotionExpression::NEUTRAL);
104
			break;
105
		case (RobotEmotion::HAPPY):
106
			request->set_emotion(rst::animation::EmotionExpression::HAPPY);
107
			break;
108
		case (RobotEmotion::SAD):
109
			request->set_emotion(rst::animation::EmotionExpression::SAD);
110
			break;
111
		case (RobotEmotion::ANGRY):
112
			request->set_emotion(rst::animation::EmotionExpression::ANGRY);
113
			break;
114
		case (RobotEmotion::SURPRISED):
115
			request->set_emotion(rst::animation::EmotionExpression::SURPRISED);
116
			break;
117
		case (RobotEmotion::FEAR):
118
			request->set_emotion(rst::animation::EmotionExpression::FEAR);
119
			break;
120
	}
121

  
122
	request->set_duration(e.time_ms);
123

  
124
	if (blocking) {
125
		hlrc_server->call<rst::animation::EmotionExpression>(scope_target, request);
126
	}
127
	else {
128
		hlrc_server->callAsync<rst::animation::EmotionExpression>(scope_target, request);
129
	}
130
}
131

  
132
void MiddlewareRSB::publish_current_emotion(RobotEmotion e, bool blocking) {
133
	publish_emotion("currentEmotion", e, blocking);
134
}
135

  
136
void MiddlewareRSB::publish_default_emotion(RobotEmotion e, bool blocking) {
137
	publish_emotion("defaultEmotion", e, blocking);
138
}
139

  
140
void MiddlewareRSB::publish_gaze_target(RobotGaze incoming_target, bool blocking) {
141
	std::shared_ptr<rst::animation::BinocularHeadGaze> request(new rst::animation::BinocularHeadGaze());
142

  
143
	std::shared_ptr<rst::geometry::SphericalDirectionFloat> target(new rst::geometry::SphericalDirectionFloat());
144
	target->set_azimuth(incoming_target.pan);
145
	target->set_elevation(incoming_target.tilt);
146
	request->set_allocated_target(target.get());
147

  
148
	request->set_eye_vergence(incoming_target.vergence);
149

  
150
	std::shared_ptr<rst::geometry::SphericalDirectionFloat> offset(new rst::geometry::SphericalDirectionFloat());
151
	offset->set_azimuth(incoming_target.pan_offset);
152
	offset->set_elevation(incoming_target.tilt_offset);
153
	request->set_allocated_offset(offset.get());
154

  
155
	if (blocking) {
156
		hlrc_server->call<rst::animation::BinocularHeadGaze>("gaze", request);
157
	}
158
	else {
159
		hlrc_server->callAsync<rst::animation::BinocularHeadGaze>("gaze", request);
160
	}
161
}
162

  
163
void MiddlewareRSB::publish_lookat_target(float x, float y, float z, const std::string frame_id, bool blocking, float roll) {
164
	std::cerr << "not yet implemented" << std::endl;
165
}
166

  
167
void MiddlewareRSB::publish_mouth_target(RobotMouth target, bool blocking) {
168
	/*
169
	    std::shared_ptr<rst::robot::MouthTarget> request(new rst::robot::MouthTarget());
170

  
171
	    request->set_position_left(  target.position_left);
172
	    request->set_position_center(target.position_center);
173
	    request->set_position_right( target.position_right);
174

  
175
	    request->set_opening_left(  target.opening_left);
176
	    request->set_opening_center(target.opening_center);
177
	    request->set_opening_right( target.opening_right);
178

  
179
	    if (blocking){
180
	        hlrc_server->call<rst::robot::MouthTarget>("mouth", request);
181
	    }else{
182
	        hlrc_server->callAsync<rst::robot::MouthTarget>("mouth", request);
183
	    }
184
	*/
185
	printf("> ERROR: mouth targets not yet implemented in RSB middleware!\n");
186
}
187

  
188
void MiddlewareRSB::publish_head_animation(RobotHeadAnimation a, bool blocking) {
189
	std::shared_ptr<rst::animation::HeadAnimation> request(new rst::animation::HeadAnimation());
190

  
191
	switch (a.value) {
192
		default:
193
			printf("> WANRING: invalid animation id %d. defaulting to IDLE", a.value);
194
			// fall through:
195
		case (RobotHeadAnimation::IDLE):
196
			request->set_animation(rst::animation::HeadAnimation::IDLE);
197
			break;
198
		case (RobotHeadAnimation::HEAD_NOD):
199
			request->set_animation(rst::animation::HeadAnimation::HEAD_NOD);
200
			break;
201
		case (RobotHeadAnimation::HEAD_SHAKE):
202
			request->set_animation(rst::animation::HeadAnimation::HEAD_SHAKE);
203
			break;
204
		case (RobotHeadAnimation::EYEBLINK_L):
205
			request->set_animation(rst::animation::HeadAnimation::EYEBLINK_LEFT);
206
			break;
207
		case (RobotHeadAnimation::EYEBLINK_R):
208
			request->set_animation(rst::animation::HeadAnimation::EYEBLINK_RIGHT);
209
			break;
210
		case (RobotHeadAnimation::EYEBLINK_BOTH):
211
			request->set_animation(rst::animation::HeadAnimation::EYEBLINK_BOTH);
212
			break;
213
		case (RobotHeadAnimation::EYEBROWS_RAISE):
214
			request->set_animation(rst::animation::HeadAnimation::EYEBROWS_RAISE);
215
			break;
216
		case (RobotHeadAnimation::EYEBROWS_LOWER):
217
			request->set_animation(rst::animation::HeadAnimation::EYEBROWS_LOWER);
218
			break;
219
		case (RobotHeadAnimation::ENGAGEMENT_LEFT):
220
			request->set_animation(rst::animation::HeadAnimation::ENGAGEMENT_LEFT);
221
			break;
222
		case (RobotHeadAnimation::ENGAGEMENT_RIGHT):
223
			request->set_animation(rst::animation::HeadAnimation::ENGAGEMENT_RIGHT);
224
			break;
225
	}
226

  
227
	request->set_repetitions(a.repetitions);
228
	request->set_emphasis_scale(a.scale);
229
	request->set_duration_each(a.time_ms);
230

  
231
	if (blocking) {
232
		hlrc_server->call<rst::animation::HeadAnimation>("animation", request);
233
	}
234
	else {
235
		hlrc_server->callAsync<rst::animation::HeadAnimation>("animation", request);
236
	}
237
}
238

  
239
void MiddlewareRSB::publish_speech(string text, bool blocking) {
240
	// say it
241
	std::shared_ptr<std::string> request(new string(text));
242

  
243
	if (blocking) {
244
		hlrc_server->call<std::string>("speech", request);
245
	}
246
	else {
247
		hlrc_server->callAsync<std::string>("speech", request);
248
	}
249
}
250

  
251
#endif
client/cpp/src/RobotController.cpp
 #include "RobotController.h"
 #include "Middleware.h"
 #include "MiddlewareROS.h"
-#include "MiddlewareRSB.h"
 
 using namespace std;
 
......
 		// intantiate ROS mw
 		middleware = new MiddlewareROS(base_scope);
 	}
-	else if (mw_name == "RSB") {
-		// intantiate RSB mw
-		middleware = new MiddlewareRSB(base_scope);
-	}
 	else {
-		printf("ERROR: invalid middleware requested (%s). supported: {ROS, RSB}\n\n", mw_name.c_str());
+		printf("ERROR: invalid middleware requested (%s). supported: {ROS}\n\n", mw_name.c_str());
 		exit(EXIT_FAILURE);
 	}
 }
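Put differently, the constructor's middleware selection now reduces to the following. This is an illustrative sketch only: the free function and its signature are hypothetical, while mw_name, base_scope and MiddlewareROS are taken from the fragments above (the surrounding constructor is only partially shown in this hunk).

// Hypothetical free-function sketch of the selection logic after RSB removal;
// the real code lives in the RobotController constructor shown (in part) above.
#include "Middleware.h"
#include "MiddlewareROS.h"
#include <cstdio>
#include <cstdlib>
#include <string>

static Middleware* create_middleware(const std::string& mw_name, const std::string& base_scope) {
	if (mw_name == "ROS") {
		// instantiate the ROS middleware
		return new MiddlewareROS(base_scope);
	}
	// Everything else (including the removed "RSB" option) is rejected.
	printf("ERROR: invalid middleware requested (%s). supported: {ROS}\n\n", mw_name.c_str());
	exit(EXIT_FAILURE);
}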
client/java/pom.xml
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>rsb</groupId>
-            <artifactId>rsb</artifactId>
-            <version>[0.11.0,)</version>
-        </dependency>
-        <dependency>
-            <groupId>rsb</groupId>
-            <artifactId>rst-sandbox-csra</artifactId>
-            <version>0.11-SNAPSHOT</version>
-        </dependency>
-        <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
             <version>1.2</version>
client/java/src/main/java/de/uni-bielefeld/cit-ec/opensource/hlrc/client/MiddlewareRSB.java
1
/*
2
* This file is part of hlrc
3
*
4
* Copyright(c) {flier,sschulz} <AT> techfak.uni-bielefeld.de
5
* http://opensource.cit-ec.de/projects/hlrc
6
*
7
* This file may be licensed under the terms of the
8
* GNU General Public License Version 3 (the ``GPL''),
9
* or (at your option) any later version.
10
*
11
* Software distributed under the License is distributed
12
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
13
* express or implied. See the GPL for the specific language
14
* governing rights and limitations.
15
*
16
* You should have received a copy of the GPL along with this
17
* program. If not, go to http://www.gnu.org/licenses/gpl.html
18
* or write to the Free Software Foundation, Inc.,
19
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20
*
21
* The development of this software was supported by the
22
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
23
* The Excellence Cluster EXC 277 is a grant of the Deutsche
24
* Forschungsgemeinschaft (DFG) in the context of the German
25
* Excellence Initiative.
26
*
27
*/
28

  
29
package de.uni_bielefeld.cit_ec.opensource.hlrc.client;
30

  
31
import java.util.ArrayList;
32
import java.util.HashMap;
33
import java.util.Map;
34
import java.util.concurrent.ExecutionException;
35
import java.util.concurrent.TimeoutException;
36
import java.util.logging.Level;
37
import java.util.logging.Logger;
38

  
39
import javax.sound.sampled.AudioFormat;
40
import javax.sound.sampled.AudioFormat.Encoding;
41

  
42
import rsb.Factory;
43
import rsb.RSBException;
44
import rsb.converter.DefaultConverterRepository;
45
import rsb.converter.ProtocolBufferConverter;
46
import rsb.patterns.RemoteServer;
47

  
48
import rst.audition.PhonemeType.Phoneme;
49
import rst.audition.SoundChunkType.SoundChunk;
50
import rst.audition.UtteranceType.Utterance;
51
import rst.robot.AnimationType.Animation;
52
import rst.robot.AnimationType.Animation.AnimationType_t;
53
import rst.robot.EmotionStateType.EmotionState;
54
import rst.robot.EmotionStateType.EmotionState.EmotionType;
55
import rst.robot.GazeTargetType.GazeTarget;
56

  
57
import com.google.protobuf.ByteString;
58

  
59
// Create _abstract_ middleware class and function _interface_ for 
60
// future integration of additional Middleware implementations...
61

  
62
import rsb.Factory;
63
import rsb.Informer;
64

  
65

  
66
public class MiddlewareRSB {
67

  
68
    final RemoteServer server;
69
    final String robotname;
70
    static Map<Emotions, EmotionType> emo_map;
71
    static Map<HeadAnimations, AnimationType_t> han_map;
72
    
73
    public class RSBInformer {
74

  
75
    final Factory factory;
76
    final Informer<Object> informer;
77
        
78
    public RSBInformer(String scope) throws Throwable {
79

  
80
        // Get a factory instance to create RSB objects.
81
        factory = Factory.getInstance();
82

  
83
        // Create an informer on scope "/exmaple/informer".
84
        informer = factory.createInformer(scope);
85

  
86
        // Activate the informer to be ready for work
87
        informer.activate();
88

  
89
    }
90
    
91
    public void pushMsg(){
92
        try {
93
            // Send and event using a method that accepts the data and
94
            // automatically creates an appropriate event internally.
95
            this.informer.send("TODO");
96
        } catch (RSBException ex) {
97
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
98
        }
99
    }
100
    
101
    public void closeInf() throws InterruptedException {
102
        try {
103
            // As there is no explicit removal model in java, always manually
104
            // deactivate the informer if it is not needed anymore
105
            this.informer.deactivate();
106
        } catch (RSBException ex) {
107
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
108
        }
109
    }
110
    
111
}
112
    
113
    
114
    @SuppressWarnings("unchecked")
115
	public MiddlewareRSB(String robotname) {
116
        
117
        this.robotname = robotname;
118
            
119
        server = Factory.getInstance().createRemoteServer(this.robotname+"/set/");
120
        System.out.println("--> Robot: Talking to "+this.robotname);
121
        try {
122
            server.activate();
123
        } catch (RSBException ex) {
124
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
125
        }
126

  
127
        {
128
            final ProtocolBufferConverter<EmotionState> converter = new ProtocolBufferConverter<>(EmotionState.getDefaultInstance());
129
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
130
        }
131
        {
132
            final ProtocolBufferConverter<GazeTarget> converter = new ProtocolBufferConverter<>(GazeTarget.getDefaultInstance());
133
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
134
        }
135
        {
136
            final ProtocolBufferConverter<Animation> converter = new ProtocolBufferConverter<>(Animation.getDefaultInstance());
137
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
138
        }
139
        {
140
            final ProtocolBufferConverter<Utterance> converter = new ProtocolBufferConverter<>(Utterance.getDefaultInstance());
141
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
142
        }
143
        {
144
            final ProtocolBufferConverter<SoundChunk> converter = new ProtocolBufferConverter<>(SoundChunk.getDefaultInstance());
145
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
146
        }
147
        {
148
            final ProtocolBufferConverter<Phoneme> converter = new ProtocolBufferConverter<>(Phoneme.getDefaultInstance());
149
            DefaultConverterRepository.getDefaultConverterRepository().addConverter(converter);
150
        }
151

  
152
        emo_map = new HashMap();
153
        emo_map.put(Emotions.SAD,       EmotionType.SAD);
154
        emo_map.put(Emotions.ANGRY,     EmotionType.ANGRY);
155
        emo_map.put(Emotions.FEAR,      EmotionType.FEAR);
156
        emo_map.put(Emotions.NEUTRAL,   EmotionType.NEUTRAL);
157
        emo_map.put(Emotions.SURPRISED, EmotionType.SURPRISED);
158
        emo_map.put(Emotions.HAPPY,     EmotionType.HAPPY);
159

  
160
        han_map = new HashMap();
161
        han_map.put(HeadAnimations.HEAD_NOD,       AnimationType_t.HEAD_NOD);
162
        han_map.put(HeadAnimations.HEAD_SHAKE,     AnimationType_t.HEAD_SHAKE);
163
        han_map.put(HeadAnimations.IDLE,           AnimationType_t.IDLE);
164
        han_map.put(HeadAnimations.EYEBLINK_R,     AnimationType_t.EYEBLINK_R);
165
        han_map.put(HeadAnimations.EYEBLINK_L,     AnimationType_t.EYEBLINK_L);
166
        han_map.put(HeadAnimations.EYEBLINK_BOTH,  AnimationType_t.EYEBLINK_BOTH);
167
        han_map.put(HeadAnimations.EYEBROWS_LOWER, AnimationType_t.EYEBROWS_LOWER);
168
        han_map.put(HeadAnimations.EYEBROWS_RAISE, AnimationType_t.EYEBROWS_RAISE);
169
    }
170

  
171
    private SoundChunk compileSoundChunk(AudioFormat audio, byte[] data) {
172
       SoundChunk.Builder sc = SoundChunk.newBuilder();
173
        ByteString bs = ByteString.copyFrom(data);
174
        boolean isbig = audio.isBigEndian();
175
        sc.setChannels(audio.getChannels());
176
        sc.setData(bs);
177
        sc.setRate((int) audio.getSampleRate());
178

  
179
        if (isbig) {
180
            sc.setEndianness(SoundChunk.EndianNess.ENDIAN_BIG);
181
        } else {
182
            sc.setEndianness(SoundChunk.EndianNess.ENDIAN_LITTLE);
183
        }
184

  
185
        sc.setSampleCount(data.length / audio.getFrameSize());
186
        Encoding enc = audio.getEncoding();
187

  
188
        if (enc.equals(Encoding.PCM_SIGNED)) {
189
            int size = audio.getSampleSizeInBits();
190
            if (size == 8) {
191
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_S8);
192
            } else if (size == 16) {
193
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_S16);
194
            } else if (size == 24) {
195
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_S24);
196
            } else {
197
                System.err.println("Audio Sample Size Not Valid");
198
            }
199
        } else if (enc.equals(Encoding.PCM_UNSIGNED)) {
200
            int size = audio.getSampleSizeInBits();
201
            if (size == 8) {
202
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_U8);
203
            } else if (size == 16) {
204
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_U16);
205
            } else if (size == 24) {
206
                sc.setSampleType(SoundChunk.SampleType.SAMPLE_U24);
207
            } else {
208
                System.err.println("Audio Sample Size not valid");
209
                // This is too rigorous, maybe.
210
                // System.exit(-1);
211
            }
212
        } else {
213
            System.err.println("Audio Encoding is invalid");
214
        }
215
        return sc.build();
216
    }
217

  
218
    public void deactivateServer() throws InterruptedException {
219
        try {
220
            this.server.deactivate();
221
        } catch (RSBException ex) {
222
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
223
        }
224
    }
225

  
226
    // OBSOLETE
227
    /*
228
     public MouthAnimation createMouthAnimationMessage(int duration, boolean active) {
229
     MouthAnimation.Builder ma = MouthAnimation.newBuilder();
230
     ma.setDuration(duration);
231
     ma.setActive(active);
232
     return ma.build();
233
     }
234
     */
235
    
236
    public Utterance createSetSpeak(SoundChunk soundchunck, ArrayList tuList, String text) {
237
        Utterance.Builder ut = Utterance.newBuilder();
238
        ut.setAudio(soundchunck);
239
        for (int i=0; i< tuList.size();i++) {
240
        	Phoneme.Builder ph = Phoneme.newBuilder();
241
        	Tuple t = (Tuple) tuList.get(i);
242
        	ph.setSymbol(t.s);
243
            ph.setDuration(t.d);
244
            ut.addPhonemes(i, ph);
245
            
246
        }
247
        System.out.println("Phonem count " + ut.getPhonemesCount() + " Phonem list " + ut.getPhonemesList());
248
        ut.setText(text);
249
        return ut.build();
250
    }
251

  
252
    public Animation createHeadAnimationMessage(HeadAnimations ani, int duration, int repetition, float scale) {
253
        Animation.Builder ha = Animation.newBuilder();
254
        ha.setRepetitions(repetition);
255
        ha.setTarget(han_map.get(ani));
256
        ha.setDurationEach(duration);
257
        ha.setScale(scale);
258
        return ha.build();
259
    }
260

  
261
    public EmotionState createCurrentEmotionMessage(Emotions emo, int duration) {
262
        EmotionState.Builder es = EmotionState.newBuilder();
263
        es.setDuration(duration);
264
        es.setValue(emo_map.get(emo));
265
        return es.build();
266
    }
267

  
268
    public EmotionState createDefaultEmotionMessage(Emotions emo) {
269
        EmotionState.Builder es = EmotionState.newBuilder();
270
        es.setValue(emo_map.get(emo));
271
        return es.build();
272
    }
273

  
274
    public GazeTarget createGazeTargetMessage(float pan, float tilt, float roll) {
275
        GazeTarget.Builder gt = GazeTarget.newBuilder();
276
        gt.setPan(pan);
277
        gt.setTilt(tilt);
278
        gt.setRoll(roll);
279
        return gt.build();
280
    }
281

  
282
    public void setSpeak(AudioFormat audio, byte[] bs, ArrayList tu, String text, boolean blocking) throws ExecutionException, TimeoutException {
283
        SoundChunk sc;
284
        sc = compileSoundChunk(audio, bs);
285
        if (blocking) {
286
            try {
287
                server.call("utterance", createSetSpeak(sc, tu, text));
288
                // System.out.println("Server replied: " + server.call("utterance", createSetSpeak(sc, tu)));
289
            } catch (RSBException ex) {
290
                Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
291
            }
292
        } else {
293
            try {             
294
                server.callAsync("utterance", createSetSpeak(sc, tu, text));
295
                // System.out.println("Set Mouth Animation non-blocking, not wating for reply");
296
            } catch (RSBException ex) {
297
                Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
298
            }
299
        }
300
    }
301
    
302
    public void setCurrentEmotion(Emotions emo, int duration, boolean blocking) throws ExecutionException, TimeoutException {
303
        try {
304
            if (blocking) {
305
                server.call("currentEmotion", createCurrentEmotionMessage(emo, duration));
306
                // System.out.println("Server replied: " + server.call("currentEmotion", createEmotionMessage(emo, duration)));
307
            } else {        
308
                server.callAsync("currentEmotion", createCurrentEmotionMessage(emo, duration));
309
                // System.out.println("Set Emotion non-blocking, not wating for reply");
310
            }
311

  
312
        } catch (RSBException ex) {
313
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
314
        }
315
    }
316

  
317
    public void setGazeTarget(float pan, float tilt, float roll, boolean blocking) throws ExecutionException, TimeoutException {
318
        try {
319
            if (blocking) {
320
                server.call("gaze", createGazeTargetMessage(pan, tilt, roll));
321
                // System.out.println("Server replied: " + server.call("gaze", createGazeTargetMessage(pan, tilt, roll)));
322
            } else {               
323
                server.callAsync("gaze", createGazeTargetMessage(pan, tilt, roll));
324
                // System.out.println("Set Gaze non-blocking, not wating for reply");
325
            }
326
        } catch (RSBException ex) {
327
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
328
        }
329
    }
330

  
331
    public void setDefaultEmotion(Emotions emo, boolean blocking) throws ExecutionException, TimeoutException {
332
        try {
333
            if (blocking) {
334
                server.call("defaultEmotion", createDefaultEmotionMessage(emo));
335
                // System.out.println("Server replied: " + server.call("defaultEmotion", createDefaultEmotionMessage(emo)));
336
            } else {              
337
                server.callAsync("defaultEmotion", createDefaultEmotionMessage(emo));
338
                // System.out.println("Set Default Emotion non-blocking, not wating for reply");
339
            }
340
        } catch (RSBException ex) {
341
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
342
        }
343
    }
344

  
345
    public void setHeadAnimation(HeadAnimations ani, int duration, int repetitions, float scale, boolean blocking) throws ExecutionException, TimeoutException {
346
        try {
347
            if (blocking) {
348
                server.call("animation", createHeadAnimationMessage(ani, duration, repetitions, scale));
349
                // System.out.println("Server replied: " + server.call("animation", createHeadAnimationMessage(ani, duration, repetitions, scale)));
350
            } else {
351
                server.callAsync("animation", createHeadAnimationMessage(ani, duration, repetitions, scale));
352
                // System.out.println("Set Head Animation non-blocking, not wating for reply");
353
            }
354
        } catch (RSBException ex) {
355
            Logger.getLogger(MiddlewareRSB.class.getName()).log(Level.SEVERE, null, ex);
356
        }
357
    }
358
}
client/java/src/main/java/de/uni-bielefeld/cit-ec/opensource/hlrc/client/RobotController.java
 
     private final String name;
     private final String middlwarename;
-    public final MiddlewareRSB mw;
 
     
     /**
client/python/hlrc_client/MiddlewareRSB.py
1
"""
2
This file is part of hlrc
3

  
4
Copyright(c) sschulz <AT> techfak.uni-bielefeld.de
5
http://opensource.cit-ec.de/projects/hlrc
6

  
7
This file may be licensed under the terms of the
8
GNU General Public License Version 3 (the ``GPL''),
9
or (at your option) any later version.
10

  
11
Software distributed under the License is distributed
12
on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
13
express or implied. See the GPL for the specific language
14
governing rights and limitations.
15

  
16
You should have received a copy of the GPL along with this
17
program. If not, go to http://www.gnu.org/licenses/gpl.html
18
or write to the Free Software Foundation, Inc.,
19
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20

  
21
The development of this software was supported by the
22
Excellence Cluster EXC 277 Cognitive Interaction Technology.
23
The Excellence Cluster EXC 277 is a grant of the Deutsche
24
Forschungsgemeinschaft (DFG) in the context of the German
25
Excellence Initiative.
26
"""
27

  
28
from .Middleware import *
29
import errno
30

  
31
import rsb
32
import rsb.converter
33
import rst
34
import rstsandbox
35

  
36
from rst.animation.EmotionExpression_pb2 import EmotionExpression
37
from rst.animation.HeadAnimation_pb2 import HeadAnimation
38
from rst.animation.BinocularHeadGaze_pb2 import BinocularHeadGaze
39
from rst.geometry.SphericalDirectionFloat_pb2 import SphericalDirectionFloat
40

  
41

  
42
class MiddlewareRSB(Middleware):
43
    #######################################################################
44
    def __init__(self, scope, loglevel=logging.WARNING):
45
        """initialise
46
        :param scope: base scope we want to listen on
47
        """
48
        # init base settings
49
        Middleware.__init__(self, scope, loglevel)
50
        self.EmotionExpression_converter = None
51
        self.HeadAnimation_converter = None
52
        self.BinocularHeadGaze_converter = None
53
        #call mw init
54
        self.server = None
55
        self.init_middleware()
56

  
57
    def __del__(self):
58
        """destructor
59
        """
60
        self.logger.debug("destructor of MiddlewareROS called")
61

  
62
    #######################################################################
63
    def init_middleware(self):
64
        """initialise middleware
65
        """
66
        # mute rsb logging:
67
        logging.getLogger("rsb").setLevel(logging.ERROR)
68

  
69
        #initialise RSB stuff
70
        self.logger.info(
71
            "initialising RSB middleware connection on scope %s, registering rst converters..." % (self.base_scope))
72

  
73
        self.EmotionExpression_converter = rsb.converter.ProtocolBufferConverter(messageClass=EmotionExpression)
74
        rsb.converter.registerGlobalConverter(self.EmotionExpression_converter)
75

  
76
        self.HeadAnimation_converter = rsb.converter.ProtocolBufferConverter(messageClass=HeadAnimation)
77
        rsb.converter.registerGlobalConverter(self.HeadAnimation_converter)
78

  
79
        self.BinocularHeadGaze_converter = rsb.converter.ProtocolBufferConverter(messageClass=BinocularHeadGaze)
80
        rsb.converter.registerGlobalConverter(self.BinocularHeadGaze_converter)
81

  
82
        try:
83
            self.server = rsb.createRemoteServer(self.base_scope + '/set')
84
        except ValueError:
85
            self.logger.error("ERROR: invalid scope given. server deactivated")
86
            self.server.deactivate()
87
            sys.exit(errno.EINVAL)
88

  
89
    #######################################################################
90
    def publish_emotion(self, em_type, emotion, blocking):
91
        """publish an emotion via mw
92
        :param em_type: type of emotion (RobotEmotion::TYPE_DEFAULT or RobotEmotion::TYPE_CURRENT)
93
        :param emotion: emotion to set
94
        :param blocking: True if this call should block until execution finished on robot
95
        """
96

  
97
        # create emotion & fill it with values:
98
        rsb_em = EmotionExpression()
99
        rsb_em.emotion = self.convert_emotiontype_to_rsbval(emotion.value)
100
        rsb_em.duration = int(emotion.time_ms)
101

  
102
        # with rsb.createRemoteServer(self.base_scope + '/{current,default}Emotion') as server:
103
        self.logger.debug("calling the emotion rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))
104

  
105
        if (blocking):
106
            #blocking rpc call:
107
            if (em_type == EmotionExpression.TYPE_DEFAULT):
108
                result = self.server.defaultEmotion(rsb_em)
109
            else:
110
                result = self.server.currentEmotion(rsb_em)
111
            self.logger.debug("server reply: '%s'" % result)
112
        else:
113
            if (em_type == EmotionExpression.TYPE_DEFAULT):
114
                future = self.server.defaultEmotion.async(rsb_em)
115
            else:
116
                future = self.server.currentEmotion.async(rsb_em)
117
            self.logger.debug("server reply: '%s'" % future)
118
            #we could block here for a incoming result with a timeout in s
119
            #print '> server reply: "%s"' % future.get(timeout = 10);
120
        self.logger.debug("emotion rpc done")
121

  
122
    def publish_head_animation(self, animation, blocking):
123
        """publish an head HeadAnimation via mw
124
        :param HeadAnimation: HeadAnimation to set
125
        :param blocking: True if this call should block until execution finished on robot
126
        """
127

  
128
        self.logger.debug("calling the HeadAnimation rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))
129

  
130
        # create HeadAnimation & fill it with values:
131
        rsb_ani = HeadAnimation()
132

  
133
        #select ani
134
        rsb_ani.animation = self.convert_HeadAnimationtype_to_rsbval(animation.value)
135
        rsb_ani.repetitions = animation.repetitions
136
        rsb_ani.duration_each = animation.time_ms
137
        rsb_ani.emphasis_scale = animation.scale
138

  
139
        if blocking:
140
            #blocking:
141
            result = self.server.animation(rsb_ani)
142
            self.logger.debug("server reply: '%s'" % result)
143
        else:
144
            future = self.server.animation.async(rsb_ani)
145
            #we can block here for a incoming result with a timeout in s
146
            #print '> server reply: "%s"' % future.get(timeout = 10);
147

  
148
        self.logger.debug("HeadAnimation rpc done")
149

  
150
    def publish_default_emotion(self, emotion, blocking):
151
        self.publish_emotion(RobotEmotion.TYPE_DEFAULT, emotion, blocking)
152

  
153
    def publish_current_emotion(self, emotion, blocking):
154
        self.publish_emotion(RobotEmotion.TYPE_CURRENT, emotion, blocking)
155

  
156
    def publish_gaze_target(self, gaze, blocking):
157
        """publish a gaze target via mw
158
        :param gaze: gaze to set
159
        :param blocking: True if this call should block until execution finished on robot
160
        """
161
        self.logger.debug("calling the gaze rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))
162

  
163
        # create gaze target & fill it with values:
164
        hg = BinocularHeadGaze()
165

  
166
        hg.target.elevation = gaze.tilt
167
        hg.target.azimuth = gaze.pan
168
        hg.eye_vergence = gaze.vergence
169

  
170
        hg.offset.elevation = gaze.tilt_offset
171
        hg.offset.azimuth = gaze.pan_offset
172

  
173
        if blocking:
174
            # blocking:
175
            result = self.server.gaze(hg)
176
            self.logger.debug("server reply blocking: '%s'" % result)
177
        else:
178
            future = self.server.gaze.async(hg)
179
            self.logger.debug("server reply non-blocking: '%s'" % future)
180
            # we can block here for a incoming result with a timeout in s
181
            #print '> server reply: "%s"' % future.get(timeout = 10);
182

  
183
        self.logger.debug("gaze rpc done")
184

  
185
    def publish_speech(self, text, blocking):
186
        """publish a tts request via mw
187
        :param text: text to synthesize and speak
188
        :param blocking: True if this call should block until execution finished on robot
189
        """
190
        """
191
        self.logger.debug("calling the speech rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))
192

  
193
        if (blocking):
194
            # blocking:
195
            result = self.server.speech(text)
196
            self.logger.debug("server reply: '%s'" % result)
197
        else:
198
            future = self.server.speech.async(text)
199
            # we can block here for a incoming result with a timeout in s
200
            #print '> server reply: "%s"' % future.get(timeout = 10);
201

  
202
        self.logger.debug("speech rpc done")
203
        """
204
        print("WARNING: Not IMPLEMENTED.")
205

  
206

  
207
    #######################################################################
208
    def is_running(self):
209
        return True
210

  
211

  
212
    #######################################################################
213
    # some helpers
214
    def convert_HeadAnimationtype_to_rsbval(self, value):
215
        """convert RobotHeadAnimation.value to RSB HeadAnimation value
216
        :param value: RobotHeadAnimation.* id to convert to rsb id
217
        """
218
        # NOTE: this convertion is important as the actual integer values of
219
        #      thy python api and the protobuf might be different
220

  
221
        if value == HeadAnimation.IDLE:
222
            return HeadAnimation().IDLE
223
        elif value == HeadAnimation.HEAD_NOD:
224
            return HeadAnimation().HEAD_NOD
225
        elif (value == HeadAnimation.HEAD_SHAKE):
226
            return HeadAnimation().HEAD_SHAKE
227
        elif (value == HeadAnimation.EYEBLINK_LEFT):
228
            return HeadAnimation().EYEBLINK_LEFT
229
        elif (value == HeadAnimation.EYEBLINK_RIGHT):
230
            return HeadAnimation().EYEBLINK_RIGHT
231
        elif (value == HeadAnimation.EYEBLINK_BOTH):
232
            return HeadAnimation().EYEBLINK_BOTH
233
        elif (value == HeadAnimation.EYEBROWS_RAISE):
234
            return HeadAnimation().EYEBROWS_RAISE
235
        elif (value == HeadAnimation.EYEBROWS_LOWER):
236
            return HeadAnimation().EYEBROWS_LOWER
237
        else:
238
            self.logger.error("invalid HeadAnimation type %d\n" % (value))
239
            return HeadAnimation().NEUTRAL
240

  
241
    def convert_emotiontype_to_rsbval(self, value):
242
        """convert RobotEmotion.value to RSB HeadAnimation value
243
        :param value: RobotEmotion.* id to convert to rsb id
244
        """
245
        # NOTE: this convertion is important as the actual integer values of
246
        #      thy python api and the protobuf might be different
247

  
248
        if (value == RobotEmotion.NEUTRAL):
249
            return EmotionExpression().NEUTRAL
250
        elif (value == RobotEmotion.HAPPY):
251
            return EmotionExpression().HAPPY
252
        elif (value == RobotEmotion.SAD):
253
            return EmotionExpression().SAD
254
        elif (value == RobotEmotion.ANGRY):
255
            return EmotionExpression().ANGRY
256
        elif (value == RobotEmotion.SURPRISED):
257
            return EmotionExpression().SURPRISED
258
        elif (value == RobotEmotion.FEAR):
259
            return EmotionExpression().FEAR
260
        else:
261
            self.logger.error("invalid emotion type %d\n" % (value))
262
            return EmotionExpression().NEUTRAL
263

  
264
    """
265
    def publish_mouth_target(self, mouth, blocking):
266
        publish a mouth target via mw
267
        :param mouth: mouth value to set
268
        :param blocking: True if this call should block until execution finished on robot
269

  
270
        self.logger.debug("calling the mouth rpc (%s)..." % ("BLOCKING" if blocking else "NON-BLOCKING"))
271

  
272
        #create mouth state & fill it with values:
273
        rsb_mouth = MouthTarget()
274

  
275
        #fill proto
276
        rsb_mouth.opening_left   = mouth.opening_left
277
        rsb_mouth.opening_center = mouth.opening_center
278
        rsb_mouth.opening_right  = mouth.opening_right
279
        rsb_mouth.position_left  = mouth.position_left
280
        rsb_mouth.position_center = mouth.position_center
281
        rsb_mouth.position_right = mouth.position_right
282

  
283
        if (blocking):
284
            #blocking:
285
            result = self.server.mouth(rsb_mouth)
286
            self.logger.debug("server reply: '%s'" % result)
287
        else:
288
            future = self.server.mouth.async(rsb_mouth)
289
            #we can block here for a incoming result with a timeout in s
290
            #print '> server reply: "%s"' % future.get(timeout = 10);
291

  
292
        self.logger.debug("mouth rpc done")
293
    """
client/python/hlrc_client/RobotController.py
 class RobotController:
     def __init__(self, mw_name, scope, loglevel=logging.WARNING, timeout=None):
         """initialise
-        :param mw_name: which mw to use, currentyl ROS and RSB are supported
+        :param mw_name: which mw to use, currentyl ROS is supported
         :param scope: base scope we want to listen on
         :param  loglevel: optional log level
         """
......
 
         self.middleware = None
 
-        if (self.mw.upper() == "RSB"):
-            self.logger.info("creating new middleware connection via RSB")
-            try:
-                from .MiddlewareRSB import MiddlewareRSB
-            except ImportError as e:
-                self.logger.error("failed to import RSB or the necessary data types: {}".format(e))
-                sys.exit(errno.EINVAL)
-
-            # import worked, safe to intantiate RSB mw now
-            self.middleware = MiddlewareRSB(self.scope, self.loglevel)
-
-        elif (self.mw.upper() == "ROS"):
+        if (self.mw.upper() == "ROS"):
             self.logger.info("creating new middleware connection via ROS")
             try:
                 from .MiddlewareROS import MiddlewareROS
......
             # import worked, safe to instantiate RSB mw now
             self.middleware = MiddlewareROS(self.scope, self.loglevel, timeout)
         else:
-            self.logger.error("invalid middleware requested (%s). supported: {ROS, RSB}\n\n" % (self.mw))
+            self.logger.error("invalid middleware requested (%s). supported: {ROS}\n\n" % (self.mw))
             sys.exit(errno.EINVAL)
 
     def __del__(self):
server/src/Arbiter.cpp
 		audio_player = NULL;
 	}
    else if (iequals(audio_output.substr(0, 3), "rsb")) {
-      printf("> ERROR: hlc is compiled without RSB support, RSB audio transport not available, defaulting to libao (default "
+      printf("> ERROR: hlrc does not support RSB, RSB audio transport not available, defaulting to libao (default "
              "output!)\n");
       audio_player = new AudioPlayerLibAO("");
    }
