From cb30bd1f44fcc3a0642a362afd4eaea0d8a7d199 Mon Sep 17 00:00:00 2001 From: Stefan Radomski Date: Sun, 3 Nov 2013 17:21:13 +0100 Subject: First signs of WebSockets and some changes to miles --- CMakeLists.txt | 3 +- apps/samples/miles/emptyface.jpg | Bin 0 -> 1603 bytes apps/samples/miles/miles.html | 26 +- apps/samples/miles/miles.js | 369 ++++++++++-- apps/samples/miles/miles.scxml | 155 +++-- apps/samples/miles/test1.jpeg | Bin 10996 -> 7350 bytes apps/samples/miles/test2.jpeg | Bin 40332 -> 7871 bytes apps/samples/miles/test3.jpeg | Bin 0 -> 7939 bytes apps/samples/miles/test4.jpeg | Bin 0 -> 7780 bytes apps/samples/websockets/websockets.html | 77 +++ apps/samples/websockets/websockets.scxml | 16 + contrib/dom/idl/TypedArray.idl | 2 + contrib/src/evws | 1 + src/uscxml/DOMUtils.cpp | 54 +- src/uscxml/Interpreter.cpp | 58 +- src/uscxml/Message.h | 2 +- .../JavaScriptCore/dom/JSCArrayBuffer.cpp | 44 ++ .../ecmascript/JavaScriptCore/dom/JSCArrayBuffer.h | 2 + .../plugins/datamodel/ecmascript/TypedArray.h | 10 + .../datamodel/ecmascript/v8/dom/V8ArrayBuffer.cpp | 30 + .../datamodel/ecmascript/v8/dom/V8ArrayBuffer.h | 6 + .../plugins/invoker/miles/MilesSessionInvoker.cpp | 650 +++++++++++++++++---- .../plugins/invoker/miles/MilesSessionInvoker.h | 60 +- .../plugins/invoker/miles/SpatialAudio.cpp.old | 239 -------- .../plugins/invoker/miles/SpatialAudio.h.old | 76 --- src/uscxml/server/WebSocketServer.cpp | 0 src/uscxml/server/WebSocketServer.h | 8 + 27 files changed, 1324 insertions(+), 564 deletions(-) create mode 100755 apps/samples/miles/emptyface.jpg create mode 100644 apps/samples/miles/test3.jpeg create mode 100644 apps/samples/miles/test4.jpeg create mode 100644 apps/samples/websockets/websockets.html create mode 100644 apps/samples/websockets/websockets.scxml create mode 160000 contrib/src/evws delete mode 100644 src/uscxml/plugins/invoker/miles/SpatialAudio.cpp.old delete mode 100644 src/uscxml/plugins/invoker/miles/SpatialAudio.h.old create mode 100644 src/uscxml/server/WebSocketServer.cpp create mode 100644 src/uscxml/server/WebSocketServer.h diff --git a/CMakeLists.txt b/CMakeLists.txt index 397c954..32b1700 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -654,7 +654,8 @@ find_package(FFMPEG) if (FFMPEG_FOUND) include_directories (${FFMPEG_INCLUDE_DIR}) list (APPEND USCXML_OPT_LIBS ${FFMPEG_LIBRARIES}) - + # required with static ffmpeg builds + # set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-Bsymbolic") endif() set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIG}) diff --git a/apps/samples/miles/emptyface.jpg b/apps/samples/miles/emptyface.jpg new file mode 100755 index 0000000..a1b2f32 Binary files /dev/null and b/apps/samples/miles/emptyface.jpg differ diff --git a/apps/samples/miles/miles.html b/apps/samples/miles/miles.html index 9cee058..08cfeca 100644 --- a/apps/samples/miles/miles.html +++ b/apps/samples/miles/miles.html @@ -15,13 +15,31 @@ -
-
-
+ + + + + + + + + +
+
+
+
+
+
+
+
+
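The miles.js diff below replaces the single POST to /miles/connect with a set of GET endpoints (/miles/start, /miles/participants, /miles/thumbnail, /miles/gettext) that the client polls, each handler rescheduling itself with setTimeout for as long as the client is connected. A condensed sketch of that polling pattern, with endpoint names, query parameters and intervals taken from the diff; fetch() stands in here for the dojo xhr.get wiring the actual file uses:

    // Sketch of the repoll pattern used in miles.js below (simplified).
    var repollInterval = { image: 50, chat: 500, participants: 1000 };
    var connected = true;

    function pollThumbnail(server, userid) {
      var url = "http://" + server + "/miles/thumbnail?userid=" + encodeURIComponent(userid);
      fetch(url)
        .then(function(res) { return res.text(); })        // server replies with base64-encoded jpeg data
        .then(function(base64Image) {
          var img = document.querySelector("img.picture");
          if (img) img.src = "data:image/jpeg;base64," + base64Image;
        })
        .catch(function(err) { console.log(err); })
        .then(function() {                                  // reschedule on success and on error alike
          if (connected) setTimeout(function() { pollThumbnail(server, userid); }, repollInterval.image);
        });
    }

    pollThumbnail("localhost:8080", "user@smartvortex.eu");

The real code additionally sends an X-Content-Encoding: base64 request header and runs the same pattern for the chat (/miles/gettext) and participant (/miles/participants) polls at the longer intervals above.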
diff --git a/apps/samples/miles/miles.js b/apps/samples/miles/miles.js index 1b2d0b6..a1d4422 100644 --- a/apps/samples/miles/miles.js +++ b/apps/samples/miles/miles.js @@ -12,11 +12,45 @@ function Miles(element, params) { this.connected = false; this.imageIteration = 0; + this.width = 300; + this.height = 200; + // private attributes var scxmlURL = "localhost:8080" - var reflectorIp = "localhost" - var email = "me@somehost.de"; - var problemName = "some really hard problem"; + var reflectorIp = "88.131.107.12" + var email = "user@smartvortex.eu"; + var problemName = "webconfero"; + var remoteEmail = "other@smartvortex.eu"; + + var participants = []; // empty array + var videoCompressions = [ + { value: 'jpeg', label: "JPEG" }, + { value: 'h263', label: "H.263" }, + { value: 'h264', label: "H.264" }, + ]; + var videoCompression = ""; + + var audioEncodings = [ + { value: 'pcm', label: "PCM" }, + { value: 'ulaw', label: "uLaw" }, + { value: 'ogg', label: "Ogg Theora" }, + ]; + var audioEncoding = ""; + + var repollInterval = { + image: 50, + chat: 500, + participants: 1000 + }; + + var showVideo = true; + var enableAudio = true; + var stopChatScrolling = false; + var activateCamera = true; + var openMicrophone = true; + var videoFramerate = 25; + var videoHeight = self.height; + var videoWidth = self.width; // override with parameters if given this.params = params; @@ -27,68 +61,140 @@ function Miles(element, params) { // called when dojo loaded all requirements below this.connect = function() { - self.xhr.post({ + var query = ""; + query += "?reflector=" + encodeURIComponent(reflectorIp); + query += "&userid=" + encodeURIComponent(email); + query += "&session=" + encodeURIComponent(problemName); + + self.xhr.get({ // The URL to request - url: "http://" + scxmlURL + "/miles/connect", + url: "http://" + scxmlURL + "/miles/start" + query, // handleAs:"text", - contentType: 'application/json', - postData: dojo.toJson({ - reflectorIp: reflectorIp, - email: email, - problemName: problemName - }), - headers:{ - "X-Requested-With": null, - }, error: function(err) { console.log(err); }, load: function(result) { - // we expect nothing in return self.connected = true; - // trigger continuously loading the image + + // toggle connect button to disconnect + self.connectDropDown.dropDown.onCancel(true); + self.controlElem.replaceChild(self.controlDropDown.domNode, self.connectDropDown.domNode); + + // trigger continuous updates refreshImage(); + getChatText(); + getParticipants(); } }); } + this.disconnect = function() { + self.connected = false; + self.controlDropDown.dropDown.onCancel(true); + self.controlElem.replaceChild(self.connectDropDown.domNode, self.controlDropDown.domNode); + } + + var getParticipants = function() { + var query = ""; + self.xhr.get({ + // The URL to request + url: "http://" + scxmlURL + "/miles/participants" + query, + handleAs:"json", + error: function(err) { + console.log(err); + if (self.connected) { + setTimeout(getParticipants, repollInterval.participants); + } + }, + load: function(result) { + if (result.participants) { + participants = result.participants; + } + if (self.connected) { + console.log(participants); + setTimeout(getParticipants, repollInterval.participants); + } + } + }); + } + // fetch a base64 encoded image and set it as the src attribute var refreshImage = function() { + var query = ""; + query += "?userid=" + encodeURIComponent(email); self.xhr.get({ // The URL to request - url: "http://" + scxmlURL + "/miles/image", + url: "http://" + scxmlURL + 
"/miles/thumbnail" + query, + handleAs:"text", headers:{ - "X-Requested-With":null + "X-Content-Encoding": "base64" }, error: function(err) { console.log(err); if (self.connected) { - self.messageElem.innerHTML = self.imageIteration++; - refreshImage(); + setTimeout(refreshImage, repollInterval.image); } }, load: function(result) { self.pictureElem.src = "data:image/jpeg;base64," + result; if (self.connected) { self.messageElem.innerHTML = self.imageIteration++; - refreshImage(); + setTimeout(refreshImage, repollInterval.image); } } }); - }; + var getChatText = function() { + self.xhr.get({ + // The URL to request + url: "http://" + scxmlURL + "/miles/gettext", + handleAs:"json", + error: function(err) { + console.log(err); + if (self.connected) { + setTimeout(getChatText, repollInterval.chat); + } + }, + load: function(result) { + if (result.message) { + self.chatOutputElem.innerHTML += stopChatScrolling + " " + Math.random() + ": " + result.message + '
'; + if (!stopChatScrolling) + self.chatOutputElem.scrollTop = self.chatOutputElem.scrollHeight; + } + if (self.connected) { + setTimeout(getChatText, repollInterval.chat); + } + } + }); + }; + + require(["dojo/dom-construct", "dojo/_base/xhr", "dojo/dom", + "dojo/on", + "dojo/_base/unload", "dijit/form/DropDownButton", "dijit/TooltipDialog", + "dijit/form/TextBox", + "dijit/form/Button", + "dijit/form/CheckBox", + "dijit/form/Select", + "dijit/form/NumberSpinner", "dojo/ready"], function(domConst, xhr, dom, + on, + baseUnload, DropDownButton, TooltipDialog, + TextBox, + Button, + CheckBox, + Select, + NumberSpinner, ready) { ready(function() { self.xhr = xhr; @@ -97,14 +203,19 @@ function Miles(element, params) { if (typeof(element) === 'string') { element = dom.byId(element); } + element.style.width = self.width + "px"; + + baseUnload.addOnWindowUnload(function(){ +// alert("unloading..."); + }); // dynamically assemble the DOM we need element.appendChild(domConst.toDom('\ \ \ -
\ + \
\ - \ + \
\ \ \ @@ -113,9 +224,18 @@ function Miles(element, params) { \ \ \ + \ + \ + \ + \ + \ \ - \ \
\ +
\ +
\ +
\ +
\ -
\ +
\ +
\
\ @@ -123,19 +243,202 @@ function Miles(element, params) { // from the above DOM, fetch some nodes to put dojo widgets in self.pictureElem = dojo.query("img.picture", element)[0]; + self.pictureElem.width = self.width; + self.pictureElem.height = self.height; self.controlElem = dojo.query("td.control", element)[0]; self.messageElem = dojo.query("div.messages", element)[0]; + self.chatOutputElem = dojo.query("div.chatOutput", element)[0]; + self.chatOutputElem.style.fontSize = "0.8em"; + on(self.chatOutputElem, "mouseover", function(evt) { + stopChatScrolling = true; + }); + on(self.chatOutputElem, "mouseout", function(evt) { + stopChatScrolling = false; + }); + + self.chatInputElem = dojo.query("div.chatInput", element)[0]; + + self.chatSendButton = new Button({ + label: "Send", + onClick: function(){ + self.xhr.post({ + // The URL to request + url: "http://" + scxmlURL + "/miles/chat", + contentType: 'text/plain', + postData: dojo.toJson({ + message: chatInputElem.value + }), + error: function(err) { + console.log(err); + }, + load: function(result) {} + }); + + } + }, dojo.query("div.chatSendButton", element)[0]); + + // the chat interface + self.chatInput = new TextBox({ + name: "chatInput", + style: "width: 100%", + }, self.chatInputElem); - // the control dropdown button - self.controlDropDownContent = domConst.toDom('
'); + + // the connect dropdown button + self.connectDropDownContent = domConst.toDom('\ +
\ + \ + \ + \ + \ + \ + \ + \ + \ + '); + self.connectToolTip = new TooltipDialog({ content:self.connectDropDownContent, style:"max-height:320px"}); + self.connectDropDown = new DropDownButton({ label: "Connect", dropDown: self.connectToolTip }); + + // Connect parameters + self.problemNameBox = new TextBox({ + name: "problemName", + value: problemName, + style: "width: 100%", + }); + dojo.query("div.problemName", self.connectToolTip.domNode)[0].appendChild(self.problemNameBox.domNode); + + self.emailBox = new TextBox({ + name: "email", + value: email, + style: "width: 100%", + }); + dojo.query("div.email", self.connectToolTip.domNode)[0].appendChild(self.emailBox.domNode); + + // self.remoteEmailBox = new TextBox({ + // name: "remoteEmail", + // value: remoteEmail, + // style: "width: 100%", + // }); + // dojo.query("div.remoteEmail", self.connectToolTip.domNode)[0].appendChild(self.remoteEmailBox.domNode); + + self.reflectorIpBox = new TextBox({ + name: "reflectorIp", + value: reflectorIp, + style: "width: 100%", + }); + dojo.query("div.reflectorIp", self.connectToolTip.domNode)[0].appendChild(self.reflectorIpBox.domNode); + + self.scxmlURLBox = new TextBox({ + name: "scxmlURL", + value: scxmlURL, + style: "width: 100%", + }); + dojo.query("div.scxmlURL", self.connectToolTip.domNode)[0].appendChild(self.scxmlURLBox.domNode); + + self.connectButton = new Button({ + label: "Connect", + onClick: function(){ + self.connect(); + } + }); + dojo.query("div.connectButton", self.connectToolTip.domNode)[0].appendChild(self.connectButton.domNode); + + // Control parameters + self.controlDropDownContent = domConst.toDom('\ +
\ +
Problem Name:
Your Email:
Reflector Host:
Video Server:
\ + \ + \ + \ + \ + \ + \ + \ + \ + \ + \ + \ + \ + \ + '); self.controlToolTip = new TooltipDialog({ content:self.controlDropDownContent, style:"max-height:320px"}); - self.controlDropDown = new DropDownButton({ label: "Connect", dropDown: self.controlToolTip }); - self.controlElem.appendChild(self.controlDropDown.domNode); + self.controlDropDown = new DropDownButton({ label: "Session", dropDown: self.controlToolTip }); - // many more control widgets to be instantiated here + // Control parameters + self.activateCameraCheckbox = new CheckBox({ + name: "activateCamera", + value: activateCamera, + checked: activateCamera, + }); + dojo.query("div.activateCamera", self.controlToolTip.domNode)[0].appendChild(self.activateCameraCheckbox.domNode); - // connect and start to fetch images from the server - self.connect(); + self.videoCompressionSelect = new Select({ + name: "videoCompression", + value: videoCompression, + options: videoCompressions, + }); + dojo.query("div.videoCompression", self.controlToolTip.domNode)[0].appendChild(self.videoCompressionSelect.domNode); + + self.videoFramerateSpinner = new NumberSpinner({ + name: "videoFramerate", + value: videoFramerate, + style: "width: 50px" + }); + dojo.query("div.videoFramerate", self.controlToolTip.domNode)[0].appendChild(self.videoFramerateSpinner.domNode); + + self.videoWidthSpinner = new NumberSpinner({ + name: "videoWidth", + value: videoWidth, + style: "width: 50px" + }); + dojo.query("div.videoWidth", self.controlToolTip.domNode)[0].appendChild(self.videoWidthSpinner.domNode); + + self.videoHeightSpinner = new NumberSpinner({ + name: "videoHeight", + value: videoHeight, + style: "width: 50px" + }); + dojo.query("div.videoHeight", self.controlToolTip.domNode)[0].appendChild(self.videoHeightSpinner.domNode); + + self.openMicrophoneCheckbox = new CheckBox({ + name: "openMicrophone", + value: openMicrophone, + checked: openMicrophone, + }); + dojo.query("div.openMicrophone", self.controlToolTip.domNode)[0].appendChild(self.openMicrophoneCheckbox.domNode); + + self.audioEncodingSelect = new Select({ + name: "audioEncoding", + value: audioEncoding, + options: audioEncodings, + }); + dojo.query("div.audioEncoding", self.controlToolTip.domNode)[0].appendChild(self.audioEncodingSelect.domNode); + + // session scoped parameters + self.enableAudioCheckbox = new CheckBox({ + name: "enableAudio", + value: enableAudio, + checked: enableAudio, + }); + dojo.query("div.enableAudio", self.controlToolTip.domNode)[0].appendChild(self.enableAudioCheckbox.domNode); + + self.showVideo = new CheckBox({ + name: "showVideo", + value: showVideo, + checked: showVideo, + }); + dojo.query("div.showVideo", self.controlToolTip.domNode)[0].appendChild(self.showVideo.domNode); + + self.disconnectButton = new Button({ + label: "Disconnect", + onClick: function(){ + self.disconnect(); + } + }); + dojo.query("div.disconnectButton", self.controlToolTip.domNode)[0].appendChild(self.disconnectButton.domNode); + + // intially append the connect dropdown + self.controlElem.appendChild(self.connectDropDown.domNode); }) }); diff --git a/apps/samples/miles/miles.scxml b/apps/samples/miles/miles.scxml index 0ecca05..9b1426a 100644 --- a/apps/samples/miles/miles.scxml +++ b/apps/samples/miles/miles.scxml @@ -2,16 +2,25 @@ - -
-
-
- - + + +
+
+ + + + +
+ + + + + + @@ -19,49 +28,125 @@ - -
-
-
+
+
+
- + - - - - - - - - - + + + + + + + + + - -
-
-
- - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + \ No newline at end of file diff --git a/apps/samples/miles/test1.jpeg b/apps/samples/miles/test1.jpeg index e174f89..18c9517 100644 Binary files a/apps/samples/miles/test1.jpeg and b/apps/samples/miles/test1.jpeg differ diff --git a/apps/samples/miles/test2.jpeg b/apps/samples/miles/test2.jpeg index aca2300..7fb9cc0 100644 Binary files a/apps/samples/miles/test2.jpeg and b/apps/samples/miles/test2.jpeg differ diff --git a/apps/samples/miles/test3.jpeg b/apps/samples/miles/test3.jpeg new file mode 100644 index 0000000..6ed85b0 Binary files /dev/null and b/apps/samples/miles/test3.jpeg differ diff --git a/apps/samples/miles/test4.jpeg b/apps/samples/miles/test4.jpeg new file mode 100644 index 0000000..8a623f0 Binary files /dev/null and b/apps/samples/miles/test4.jpeg differ diff --git a/apps/samples/websockets/websockets.html b/apps/samples/websockets/websockets.html new file mode 100644 index 0000000..9a21f46 --- /dev/null +++ b/apps/samples/websockets/websockets.html @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + +
Activate Camera:
Compression:
Framerate:
Width:
Height:
Open Microphone:
Encoding:

Enable Audio:
Show Video:

+ + + +
+
+
+
+ + diff --git a/apps/samples/websockets/websockets.scxml b/apps/samples/websockets/websockets.scxml new file mode 100644 index 0000000..0050091 --- /dev/null +++ b/apps/samples/websockets/websockets.scxml @@ -0,0 +1,16 @@ + + + + + \ No newline at end of file diff --git a/contrib/dom/idl/TypedArray.idl b/contrib/dom/idl/TypedArray.idl index 43dac35..f288361 100644 --- a/contrib/dom/idl/TypedArray.idl +++ b/contrib/dom/idl/TypedArray.idl @@ -14,6 +14,8 @@ interface ArrayBuffer { readonly attribute unsigned long byteLength; attribute DOMString mimeType; + DOMString md5(); + DOMString base64(); ArrayBuffer slice(long begin, optional long end); static boolean isView(any value); }; diff --git a/contrib/src/evws b/contrib/src/evws new file mode 160000 index 0000000..c43488f --- /dev/null +++ b/contrib/src/evws @@ -0,0 +1 @@ +Subproject commit c43488fe46e3fcb971e7d01491b8d82b42b416de diff --git a/src/uscxml/DOMUtils.cpp b/src/uscxml/DOMUtils.cpp index afb806b..e54099e 100644 --- a/src/uscxml/DOMUtils.cpp +++ b/src/uscxml/DOMUtils.cpp @@ -26,41 +26,41 @@ namespace uscxml { std::string DOMUtils::xPathForNode(const Arabica::DOM::Node& node) { - std::string xPath; - + std::string xPath; + if (!node || node.getNodeType() != Arabica::DOM::Node_base::ELEMENT_NODE) return xPath; - - Arabica::DOM::Node curr = node; + + Arabica::DOM::Node curr = node; while(curr) { switch (curr.getNodeType()) { - case Arabica::DOM::Node_base::ELEMENT_NODE: { - if (HAS_ATTR(curr, "id")) { - // we assume ids to be unique and return immediately - xPath.insert(0, "//" + TAGNAME(curr) + "[@id=\"" + ATTR(curr, "id") + "\"]"); - return xPath; - } else { - // check previous siblings to count our index - Arabica::DOM::Node sibling = curr.getPreviousSibling(); - int index = 1; - while(sibling) { - if (sibling.getNodeType() == Arabica::DOM::Node_base::ELEMENT_NODE) { - if (iequals(TAGNAME(sibling), TAGNAME(curr))) { - index++; - } + case Arabica::DOM::Node_base::ELEMENT_NODE: { + if (HAS_ATTR(curr, "id")) { + // we assume ids to be unique and return immediately + xPath.insert(0, "//" + TAGNAME(curr) + "[@id=\"" + ATTR(curr, "id") + "\"]"); + return xPath; + } else { + // check previous siblings to count our index + Arabica::DOM::Node sibling = curr.getPreviousSibling(); + int index = 1; + while(sibling) { + if (sibling.getNodeType() == Arabica::DOM::Node_base::ELEMENT_NODE) { + if (iequals(TAGNAME(sibling), TAGNAME(curr))) { + index++; } - sibling = sibling.getPreviousSibling(); } - xPath.insert(0, "/" + TAGNAME(curr) + "[" + toStr(index) + "]"); + sibling = sibling.getPreviousSibling(); } - break; + xPath.insert(0, "/" + TAGNAME(curr) + "[" + toStr(index) + "]"); } - case Arabica::DOM::Node_base::DOCUMENT_NODE: - return xPath; - default: - LOG(ERROR) << "Only nodes of type element supported for now"; - return ""; - break; + break; + } + case Arabica::DOM::Node_base::DOCUMENT_NODE: + return xPath; + default: + LOG(ERROR) << "Only nodes of type element supported for now"; + return ""; + break; } curr = curr.getParentNode(); } diff --git a/src/uscxml/Interpreter.cpp b/src/uscxml/Interpreter.cpp index b91991b..d14fae4 100644 --- a/src/uscxml/Interpreter.cpp +++ b/src/uscxml/Interpreter.cpp @@ -259,7 +259,7 @@ InterpreterImpl::InterpreterImpl() { _factory = NULL; _capabilities = CAN_BASIC_HTTP | CAN_GENERIC_HTTP; _domEventListener._interpreter = this; - + #ifdef _WIN32 WSADATA wsaData; WSAStartup(MAKEWORD(2, 2), &wsaData); @@ -772,35 +772,35 @@ void InterpreterImpl::processDOMorText(const Arabica::DOM::Node& el void 
InterpreterImpl::processParamChilds(const Arabica::DOM::Node& element, std::multimap& params) { NodeSet paramElems = filterChildElements(_xmlNSPrefix + "param", element); - for (int i = 0; i < paramElems.size(); i++) { - try { - if (!HAS_ATTR(paramElems[i], "name")) { - LOG(ERROR) << "param element is missing name attribute"; - continue; - } - Data paramValue; - if (HAS_ATTR(paramElems[i], "expr") && _dataModel) { - paramValue = _dataModel.getStringAsData(ATTR(paramElems[i], "expr")); - } else if(HAS_ATTR(paramElems[i], "location") && _dataModel) { - paramValue = _dataModel.getStringAsData(ATTR(paramElems[i], "location")); - } else { - LOG(ERROR) << "param element is missing expr or location or no datamodel is specified"; - continue; - } - std::string paramKey = ATTR(paramElems[i], "name"); - params.insert(std::make_pair(paramKey, paramValue)); - } catch(Event e) { - LOG(ERROR) << "Syntax error while processing params " << DOMUtils::xPathForNode(paramElems[i]) << ":" << std::endl << e << std::endl; - // test 343 - std::multimap::iterator paramIter = params.begin(); - while(paramIter != params.end()) { - params.erase(paramIter++); - } - e.name = "error.execution"; - receiveInternal(e); - break; + for (int i = 0; i < paramElems.size(); i++) { + try { + if (!HAS_ATTR(paramElems[i], "name")) { + LOG(ERROR) << "param element is missing name attribute"; + continue; } + Data paramValue; + if (HAS_ATTR(paramElems[i], "expr") && _dataModel) { + paramValue = _dataModel.getStringAsData(ATTR(paramElems[i], "expr")); + } else if(HAS_ATTR(paramElems[i], "location") && _dataModel) { + paramValue = _dataModel.getStringAsData(ATTR(paramElems[i], "location")); + } else { + LOG(ERROR) << "param element is missing expr or location or no datamodel is specified"; + continue; + } + std::string paramKey = ATTR(paramElems[i], "name"); + params.insert(std::make_pair(paramKey, paramValue)); + } catch(Event e) { + LOG(ERROR) << "Syntax error while processing params " << DOMUtils::xPathForNode(paramElems[i]) << ":" << std::endl << e << std::endl; + // test 343 + std::multimap::iterator paramIter = params.begin(); + while(paramIter != params.end()) { + params.erase(paramIter++); + } + e.name = "error.execution"; + receiveInternal(e); + break; } + } } void InterpreterImpl::send(const Arabica::DOM::Node& element) { @@ -2125,7 +2125,7 @@ void InterpreterImpl::DOMEventListener::handleEvent(Arabica::DOM::Events::Event< } } } - + void InterpreterImpl::dump() { if (!_document) return; diff --git a/src/uscxml/Message.h b/src/uscxml/Message.h index 1964f1c..44358be 100644 --- a/src/uscxml/Message.h +++ b/src/uscxml/Message.h @@ -74,7 +74,7 @@ public: Data() : type(INTERPRETED) {} Data(const std::string& atom_, Type type_ = INTERPRETED) : atom(atom_), type(type_) {} - Data(const char* data, size_t size, const std::string& mimeType, bool adopt); + Data(const char* data, size_t size, const std::string& mimeType, bool adopt = false); Data(bool atom_) : type(INTERPRETED) { if (atom_) { atom = "true"; diff --git a/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.cpp b/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.cpp index 3a3d373..b6a262d 100644 --- a/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.cpp +++ b/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.cpp @@ -31,6 +31,8 @@ JSStaticValue JSCArrayBuffer::staticValues[] = { }; JSStaticFunction JSCArrayBuffer::staticFunctions[] = { + { "md5", md5Callback, 
kJSPropertyAttributeDontDelete }, + { "base64", base64Callback, kJSPropertyAttributeDontDelete }, { "slice", sliceCallback, kJSPropertyAttributeDontDelete }, { "isView", isViewCallback, kJSPropertyAttributeDontDelete }, { 0, 0, 0 } @@ -95,6 +97,48 @@ bool JSCArrayBuffer::mimeTypeAttrSetter(JSContextRef ctx, JSObjectRef thisObj, J return true; } +JSValueRef JSCArrayBuffer::md5Callback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception) { + + struct JSCArrayBufferPrivate* privData = (struct JSCArrayBufferPrivate*)JSObjectGetPrivate(thisObj); + + if (false) { + } else if (argumentCount == 0) { + + std::string retVal = privData->nativeObj->md5(); + + JSStringRef jscString = JSStringCreateWithUTF8CString(retVal.c_str()); + JSValueRef jscRetVal = JSValueMakeString(ctx, jscString); + JSStringRelease(jscString); + return jscRetVal; + } + + JSStringRef exceptionString = JSStringCreateWithUTF8CString("Parameter mismatch while calling md5"); + *exception = JSValueMakeString(ctx, exceptionString); + JSStringRelease(exceptionString); + return JSValueMakeUndefined(ctx); +} + +JSValueRef JSCArrayBuffer::base64Callback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception) { + + struct JSCArrayBufferPrivate* privData = (struct JSCArrayBufferPrivate*)JSObjectGetPrivate(thisObj); + + if (false) { + } else if (argumentCount == 0) { + + std::string retVal = privData->nativeObj->base64(); + + JSStringRef jscString = JSStringCreateWithUTF8CString(retVal.c_str()); + JSValueRef jscRetVal = JSValueMakeString(ctx, jscString); + JSStringRelease(jscString); + return jscRetVal; + } + + JSStringRef exceptionString = JSStringCreateWithUTF8CString("Parameter mismatch while calling base64"); + *exception = JSValueMakeString(ctx, exceptionString); + JSStringRelease(exceptionString); + return JSValueMakeUndefined(ctx); +} + JSValueRef JSCArrayBuffer::sliceCallback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception) { struct JSCArrayBufferPrivate* privData = (struct JSCArrayBufferPrivate*)JSObjectGetPrivate(thisObj); diff --git a/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.h b/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.h index 7d75e63..6a6c02a 100644 --- a/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.h +++ b/src/uscxml/plugins/datamodel/ecmascript/JavaScriptCore/dom/JSCArrayBuffer.h @@ -38,6 +38,8 @@ public: JSC_DESTRUCTOR(JSCArrayBufferPrivate); + static JSValueRef md5Callback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception); + static JSValueRef base64Callback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception); static JSValueRef sliceCallback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception); static JSValueRef isViewCallback(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObj, size_t argumentCount, const JSValueRef* arguments, JSValueRef* exception); diff --git a/src/uscxml/plugins/datamodel/ecmascript/TypedArray.h b/src/uscxml/plugins/datamodel/ecmascript/TypedArray.h index cee1ed9..de32078 100644 --- 
a/src/uscxml/plugins/datamodel/ecmascript/TypedArray.h +++ b/src/uscxml/plugins/datamodel/ecmascript/TypedArray.h @@ -93,6 +93,16 @@ public: // memcpy(_buffer->_data + index * sizeof(unsigned char), &value, sizeof(unsigned char)); // } + // non-standard extension + std::string md5() { + return _buffer->md5(); + } + + // non-standard extension + std::string base64() { + return _buffer->base64(); + } + std::string getMimeType() { if (_buffer) return _buffer->mimeType; diff --git a/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.cpp b/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.cpp index 8eccc66..1328b19 100644 --- a/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.cpp +++ b/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.cpp @@ -76,6 +76,36 @@ void V8ArrayBuffer::mimeTypeAttrSetter(v8::Local property, v8::Local privData->nativeObj->setMimeType(*localMimeType); } +v8::Handle V8ArrayBuffer::md5Callback(const v8::Arguments& args) { + + v8::Local self = args.Holder(); + struct V8ArrayBufferPrivate* privData = V8DOM::toClassPtr(self->GetInternalField(0)); + if (false) { + } else if (args.Length() == 0) { + + std::string retVal = privData->nativeObj->md5(); + + return v8::String::New(retVal.c_str()); + } + throw V8Exception("Parameter mismatch while calling md5"); + return v8::Undefined(); +} + +v8::Handle V8ArrayBuffer::base64Callback(const v8::Arguments& args) { + + v8::Local self = args.Holder(); + struct V8ArrayBufferPrivate* privData = V8DOM::toClassPtr(self->GetInternalField(0)); + if (false) { + } else if (args.Length() == 0) { + + std::string retVal = privData->nativeObj->base64(); + + return v8::String::New(retVal.c_str()); + } + throw V8Exception("Parameter mismatch while calling base64"); + return v8::Undefined(); +} + v8::Handle V8ArrayBuffer::sliceCallback(const v8::Arguments& args) { v8::Local self = args.Holder(); diff --git a/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.h b/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.h index 5745095..14b92b0 100644 --- a/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.h +++ b/src/uscxml/plugins/datamodel/ecmascript/v8/dom/V8ArrayBuffer.h @@ -41,6 +41,8 @@ public: V8_DESTRUCTOR(V8ArrayBufferPrivate); static bool hasInstance(v8::Handle); + static v8::Handle md5Callback(const v8::Arguments&); + static v8::Handle base64Callback(const v8::Arguments&); static v8::Handle sliceCallback(const v8::Arguments&); static v8::Handle isViewCallback(const v8::Arguments&); @@ -76,6 +78,10 @@ public: instance->SetAccessor(v8::String::NewSymbol("mimeType"), V8ArrayBuffer::mimeTypeAttrGetter, V8ArrayBuffer::mimeTypeAttrSetter, v8::External::New(0), static_cast(v8::DEFAULT), static_cast(v8::None)); + prototype->Set(v8::String::NewSymbol("md5"), + v8::FunctionTemplate::New(V8ArrayBuffer::md5Callback, v8::Undefined()), static_cast(v8::DontDelete)); + prototype->Set(v8::String::NewSymbol("base64"), + v8::FunctionTemplate::New(V8ArrayBuffer::base64Callback, v8::Undefined()), static_cast(v8::DontDelete)); prototype->Set(v8::String::NewSymbol("slice"), v8::FunctionTemplate::New(V8ArrayBuffer::sliceCallback, v8::Undefined()), static_cast(v8::DontDelete)); prototype->Set(v8::String::NewSymbol("isView"), diff --git a/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.cpp b/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.cpp index a15956e..362d454 100644 --- a/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.cpp +++ 
b/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.cpp @@ -20,6 +20,7 @@ #include #include "MilesSessionInvoker.h" +#include "uscxml/server/HTTPServer.h" #include #ifdef BUILD_AS_PLUGINS @@ -27,6 +28,7 @@ #endif #include +#include /* srand, rand */ namespace uscxml { @@ -41,9 +43,15 @@ bool pluginConnect(pluma::Host& host) { MilesSessionInvoker::MilesSessionInvoker() { /* Initalize Miles */ miles_init(); + + /* media buffers */ + init_media_buffers(); + + _isRunning = false; } MilesSessionInvoker::~MilesSessionInvoker() { + free_media_buffers(); }; boost::shared_ptr MilesSessionInvoker::create(InterpreterImpl* interpreter) { @@ -57,8 +65,135 @@ Data MilesSessionInvoker::getDataModelVariables() { return data; } +void MilesSessionInvoker::init_media_buffers() { + video_out_buf = NULL; + encoded_out_img = NULL; + audio_in_buf = NULL; + render_img = NULL; + render_img_size = 0; + audio_data = NULL; + encoded_out_audio = NULL; + audio_read_buf = NULL; + video_data = (char *)malloc(1000000); +} + +void MilesSessionInvoker::free_media_buffers() { + if(video_out_buf) + free(video_out_buf); + video_out_buf = NULL; + if(encoded_out_img) + free(encoded_out_img); + encoded_out_img = NULL; + if(audio_in_buf) + free(audio_in_buf); + audio_in_buf = NULL; + if(render_img) + free(render_img); + render_img = NULL; + render_img_size = 0; + if(audio_data) + free(audio_data); + audio_data = NULL; + if(video_data) + free(video_data); + video_data = NULL; + if(encoded_out_audio) + free(encoded_out_audio); + encoded_out_audio = NULL; + if(audio_read_buf) + free(audio_read_buf); + audio_read_buf = NULL; +} + void MilesSessionInvoker::send(const SendRequest& req) { // std::cout << req; + std::string origin; + Event::getParam(req.params, "origin", origin); + + if (false) { + } else if (iequals(req.name, "start")) { + + std::string userId, reflector, session; + Event::getParam(req.params, "userid", userId); + Event::getParam(req.params, "reflector", reflector); + Event::getParam(req.params, "session", session); + processEventStart(origin, userId, reflector, session); + + } else if (iequals(req.name, "participants")) { + + processEventParticipants(origin); + + } else if (iequals(req.name, "thumbnail")) { + + std::string userId; + Event::getParam(req.params, "userid", userId); + processEventThumbnail(origin, userId); + + } else if (iequals(req.name, "videoon")) { + + std::string userId; + Event::getParam(req.params, "userid", userId); + processEventVideoOn(origin, userId); + + } else if (iequals(req.name, "videooff")) { + + std::string userId; + Event::getParam(req.params, "userid", userId); + processEventVideoOff(origin, userId); + + } else if (iequals(req.name, "audioon")) { + + std::string userId; + Event::getParam(req.params, "userid", userId); + processEventAudioOn(origin, userId); + + } else if (iequals(req.name, "audiooff")) { + + std::string userId; + Event::getParam(req.params, "userid", userId); + processEventAudioOff(origin, userId); + + } else if (iequals(req.name, "sendvideo")) { + + std::string userId, compression; + size_t height, width, framerate; + Event::getParam(req.params, "userid", userId); + Event::getParam(req.params, "height", height); + Event::getParam(req.params, "width", width); + Event::getParam(req.params, "framerate", framerate); + processEventSendVideo(origin, width, height, framerate, compression); + + } else if (iequals(req.name, "sendvideooff")) { + + processEventSendVideoOff(origin); + + } else if (iequals(req.name, "sendaudio")) { + + std::string userId, encoding; + 
Event::getParam(req.params, "userid", userId); + Event::getParam(req.params, "encoding", encoding); + processEventSendAudio(origin, encoding); + + } else if (iequals(req.name, "sendaudiooff")) { + + processEventSendAudioOff(origin); + + } else if (iequals(req.name, "gettext")) { + + processEventGetText(origin); + + } else if (iequals(req.name, "posttext")) { + + std::string userId, message; + Event::getParam(req.params, "userid", userId); + Event::getParam(req.params, "message", message); + processEventPostText(origin, userId, message); + + } else { + LOG(ERROR) << "Do not know how to handle event " << req.name; + } + +#if 0 if (iequals(req.name, "disconnect")) { std::string reflectorIP = "127.0.0.1"; Event::getParam(req.params, "reflectorip", reflectorIP); @@ -72,28 +207,59 @@ void MilesSessionInvoker::send(const SendRequest& req) { LOG(ERROR) << "Could not disconnect from reflector session"; return; } + free_media_buffers(); + _isRunning = false; } else if (iequals(req.name, "image")) { // client wants an image - URL imageURL1("test1.jpeg"); - URL imageURL2("test2.jpeg"); + URL imageURL1("emptyface.jpg"); + //URL imageURL2("test2.jpeg"); imageURL1.toAbsolute(_interpreter->getBaseURI()); - imageURL2.toAbsolute(_interpreter->getBaseURI()); + //imageURL2.toAbsolute(_interpreter->getBaseURI()); std::stringstream ssImage; - if (alternate) { + ssImage << imageURL1; + /*if (alternate) { ssImage << imageURL1; } else { ssImage << imageURL2; } - alternate = !alternate; + alternate = !alternate;*/ std::string imageContent = ssImage.str(); Event retEv; - retEv.data.compound["base64"] = Data(base64_encode(imageContent.data(), imageContent.size()), Data::VERBATIM); + int has_thumb = 0; + struct miles_rtp_in_stream *rtps; + struct miles_list *p; + struct thumb_entry *te; + _mutex.lock(); + // MJ: first param, void * to image, second size of image + //retEv.data.compound["base64"] = Data(base64_encode(imageContent.data(), imageContent.size()), Data::VERBATIM); + if(video_session->instreams) { + rtps = video_session->instreams->stream; + if(rtps) { + p = thumb_list; + while(p) { + te = (struct thumb_entry *)p->item; + if(te->ssrc == rtps->ssrc) { + break; + } + p = p->next; + } + if(p) { + has_thumb = 1; + retEv.data.compound["base64"] = Data(base64_encode(te->img_buf, te->img_size), Data::VERBATIM); + } + } + } + if(!has_thumb) { + // Return empty face image + retEv.data.compound["base64"] = Data(base64_encode(imageContent.data(), imageContent.size()), Data::VERBATIM); + } + _mutex.unlock(); std::string origin; Event::getParam(req.params, "origin", origin); retEv.data.compound["origin"] = origin; @@ -103,9 +269,13 @@ void MilesSessionInvoker::send(const SendRequest& req) { returnEvent(retEv); } else if (iequals(req.name, "connect")) { - - std::cout << req; - + + //std::cout << req; + if(_isRunning) { + LOG(ERROR) << "already connected"; + return; + } + std::string email = "someSaneDefault"; Event::getParam(req.params, "email", email); @@ -113,9 +283,13 @@ void MilesSessionInvoker::send(const SendRequest& req) { Event::getParam(req.params, "reflectorIp", reflectorIp); std::string problemName = "Generic"; - Event::getParam(req.params, "problemname", problemName); + Event::getParam(req.params, "problemName", problemName); + LOG(ERROR) << "connect called, reflector ip = "; + LOG(ERROR) << reflectorIp; + LOG(ERROR) << problemName; + LOG(ERROR) << email; - return; + //return; int rv; rv = miles_connect_reflector_session((char*)reflectorIp.c_str(), (char*)problemName.c_str()); @@ -123,6 +297,7 @@ void 
MilesSessionInvoker::send(const SendRequest& req) { LOG(ERROR) << "Could not setup reflector session"; return; } + LOG(ERROR) << "session set up"; /* Set up audio and video RTP sockets */ video_rtp_in_socket = miles_net_setup_udp_socket((char*)reflectorIp.c_str(), video_port, video_port, 10, 16000); @@ -130,12 +305,16 @@ void MilesSessionInvoker::send(const SendRequest& req) { video_rtp_out_socket = video_rtp_in_socket; //miles_net_setup_udp_socket((char*)reflectorIP.c_str(), video_port, 0, 10, 16000); audio_rtp_out_socket = audio_rtp_in_socket; //miles_net_setup_udp_socket((char*)reflectorIP.c_str(), audio_port, 0, 10, 16000); + LOG(ERROR) << "rtp sockets set up"; + /* Set up audio and video RTCP sockets */ video_rtcp_in_socket = miles_net_setup_udp_socket((char*)reflectorIp.c_str(), video_port+1, video_port+1, 10, 16000); audio_rtcp_in_socket = miles_net_setup_udp_socket((char*)reflectorIp.c_str(), audio_port+1, audio_port+1, 10, 16000); video_rtcp_out_socket = video_rtcp_in_socket; //miles_net_setup_udp_socket((char*)reflectorIP.c_str(), video_port+1, 0, 10, 16000); audio_rtcp_out_socket = audio_rtcp_in_socket; //miles_net_setup_udp_socket((char*)reflectorIP.c_str(), audio_port+1, 0, 10, 16000); + LOG(ERROR) << "rtcp sockets set up"; + /* Set up RTP audio and video sessions */ video_session = miles_rtp_setup_session(video_rtp_in_socket, MILES_RTP_MEDIA_TYPE_VIDEO); audio_session = miles_rtp_setup_session(audio_rtp_in_socket, MILES_RTP_MEDIA_TYPE_AUDIO); @@ -144,97 +323,145 @@ void MilesSessionInvoker::send(const SendRequest& req) { video_session->rtcp_session = miles_rtp_setup_rtcp_session(video_session, video_rtcp_in_socket); audio_session->rtcp_session = miles_rtp_setup_rtcp_session(audio_session, audio_rtcp_in_socket); - /* Initialize and configure video encoder */ - video_encoder = miles_video_codec_init_encoder(); - video_encoder->codec_id = miles_video_codec_get_encoder_for_rtp_payload_type(MILES_RTP_PAYLOAD_TYPE_JPEG); - video_encoder->width = 320; - video_encoder->height = 240; - video_encoder->qfactor = 50; - rv = miles_video_codec_setup_encoder(video_encoder); - if (!rv) { - LOG(ERROR) << "Could not setup video encoder"; - return; - } + LOG(ERROR) << "rtp/rtcp sessions set up"; - /* Set up video grabber */ - rv = miles_video_grabber_get_supported_grabbers(&supported_video_grabbers); - if(rv<=0) { - /* No video grabber available */ - exit(-1); - } - video_grabber = miles_video_grabber_create_context(supported_video_grabbers[0]); - video_grabber->width = video_encoder->width; - video_grabber->height = video_encoder->height; - miles_video_grabber_setup(video_grabber); - free(supported_video_grabbers); + /* Set up video capture */ + video_grabber_available = setup_video_grabber(); + + /* Set up audio capture/playback */ + audio_available = setup_audio(); /* Set up outgoing RTP stream for video */ - out_rtp_video_stream = miles_rtp_setup_outgoing_stream(video_session, video_rtp_out_socket, 0, MILES_RTP_PAYLOAD_TYPE_JPEG); - - /* Initialize and configure audio encoder */ - audio_encoder = miles_audio_codec_init_encoder(); - audio_encoder->codec_id = miles_audio_codec_get_encoder_for_rtp_payload_type(MILES_RTP_PAYLOAD_TYPE_L16); - audio_encoder->sample_rate = 16000; - audio_encoder->bytes_per_sample = 2; - audio_encoder->chunk_size = 320; /* 20 ms */ - audio_encoder->input_format = MILES_AUDIO_FORMAT_PCM; - rv = miles_audio_codec_setup_encoder(audio_encoder); - if(rv == 0) { - /* Couldn't set up audio codec */ - exit(-1); + if(video_grabber_available) { + out_rtp_video_stream = 
miles_rtp_setup_outgoing_stream(video_session, video_rtp_out_socket, 0, MILES_RTP_PAYLOAD_TYPE_JPEG); + out_rtp_video_stream->codec_ctx = video_encoder; + out_rtcp_video_stream = miles_rtp_setup_outgoing_rtcp_stream(video_session->rtcp_session, video_rtcp_out_socket, out_rtp_video_stream->ssrc); } + /* Set up outgoing RTP stream for audio */ + if(audio_available) { + out_rtp_audio_stream = miles_rtp_setup_outgoing_stream(audio_session, audio_rtp_out_socket, 0, MILES_RTP_PAYLOAD_TYPE_L16); - /* Set up audio grabber */ - int n = miles_audio_device_get_supported_devices(MILES_AUDIO_DEVICE_OPENAL, &supported_audio_devices); - if(n<=0) { - /* No audio device available */ - exit(-1); - } - /* Use first device that supports capture */ - for(int i=0; icodec_ctx = audio_encoder; - /* Find first audio device that supports playback */ - for(int i=0; irtcp_session, audio_rtcp_out_socket, out_rtp_audio_stream->ssrc); } - if(audio_dev_playback == NULL) - exit(-1); - /* Set up outgoing RTP stream for audio */ - out_rtp_audio_stream = miles_rtp_setup_outgoing_stream(audio_session, audio_rtp_out_socket, 0, MILES_RTP_PAYLOAD_TYPE_L16); + _isRunning = true; - /* Associate RTP stream with codec context */ - out_rtp_audio_stream->codec_ctx = audio_encoder; - out_rtp_video_stream->codec_ctx = video_encoder; + if(audio_available) + _audioThread = new tthread::thread(MilesSessionInvoker::runAudio, this); + _videoThread = new tthread::thread(MilesSessionInvoker::runVideo, this); + } +#endif +} + +void MilesSessionInvoker::processEventStart(const std::string& origin, const std::string& userid, const std::string& reflector, const std::string& session) { + Event ev; + ev.name = "start.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} - /* Set up outgoing RTCP streams for audio and video */ - out_rtcp_audio_stream = miles_rtp_setup_outgoing_rtcp_stream(audio_session->rtcp_session, audio_rtcp_out_socket, out_rtp_audio_stream->ssrc); - out_rtcp_video_stream = miles_rtp_setup_outgoing_rtcp_stream(video_session->rtcp_session, video_rtcp_out_socket, out_rtp_video_stream->ssrc); +void MilesSessionInvoker::processEventParticipants(const std::string& origin) { - _isRunning = true; + Event ev; + // create an array with objects inside + for (int i = 0; i < 5; i++) { + Data userInfo; + userInfo.compound["name"] = Data("username" + toStr(i), Data::VERBATIM); + userInfo.compound["email"] = Data("usermail" + toStr(i), Data::VERBATIM); + ev.data.compound["participants"].array.push_back(userInfo); + } -// while(true) { -// rtp_video_receiver(video_session); -// video_transmitter(video_grabber, video_encoder, out_rtp_video_stream, out_rtcp_video_stream); -// rtp_audio_receiver(audio_session); -// audio_transmitter(audio_dev, audio_encoder, out_rtp_audio_stream, out_rtcp_audio_stream); -// } + ev.name = "participants.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); - // don't start threads for mockup -// _audioThread = new tthread::thread(MilesSessionInvoker::runAudio, this); -// _videoThread = new tthread::thread(MilesSessionInvoker::runVideo, this); +} +void MilesSessionInvoker::processEventThumbnail(const std::string& origin, const std::string& userid) { + _imageSeq = (++_imageSeq % 4); + URL imageURL("test" + toStr(_imageSeq + 1) + ".jpeg"); + imageURL.toAbsolute(_interpreter->getBaseURI()); + std::stringstream ssImage; + ssImage << imageURL; + std::string imageContent = ssImage.str(); + + Event ev; + ev.name = "thumbnail.reply"; + ev.data.compound["origin"] = origin; + + // as we support 
ECMAScript TYpedArrays, we can handle blobs in the datamodel + ev.data.compound["image"] = Data(imageContent.data(), imageContent.size(), "image/jpeg"); + returnEvent(ev); +} +void MilesSessionInvoker::processEventVideoOn(const std::string& origin, const std::string& userid) { + Event ev; + ev.name = "videoon.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventVideoOff(const std::string& origin, const std::string& userid) { + Event ev; + ev.name = "videooff.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventAudioOn(const std::string& origin, const std::string& userid) { + Event ev; + ev.name = "audioon.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventAudioOff(const std::string& origin, const std::string& userid) { + Event ev; + ev.name = "audiooff.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventSendVideo(const std::string& origin, size_t width, size_t height, size_t framerate, const std::string& compression) { + Event ev; + ev.name = "sendvideo.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventSendVideoOff(const std::string& origin) { + Event ev; + ev.name = "sendvideooff.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventSendAudio(const std::string& origin, const std::string& encoding) { + Event ev; + ev.name = "sendaudio.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventSendAudioOff(const std::string& origin) { + Event ev; + ev.name = "sendaudiooff.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventPostText(const std::string& origin, const std::string& userid, const std::string& message) { + Event ev; + ev.name = "posttext.reply"; + ev.data.compound["origin"] = origin; + returnEvent(ev); +} +void MilesSessionInvoker::processEventGetText(const std::string& origin) { + Event ev; + ev.name = "gettext.reply"; + ev.data.compound["origin"] = origin; + + if (rand() % 5 == 0) { // return some mocked up chat message + ev.data.compound["message"] = Data(".. 
and then she was all like: aren't we supposed to discuss work related stuff?", Data::VERBATIM); + ev.data.compound["user"] = Data("username1", Data::VERBATIM); } + + returnEvent(ev); } void MilesSessionInvoker::runAudio(void* instance) { @@ -248,7 +475,8 @@ void MilesSessionInvoker::runVideo(void* instance) { void MilesSessionInvoker::processVideo() { while(_isRunning) { rtp_video_receiver(video_session); - video_transmitter(video_grabber, video_encoder, out_rtp_video_stream, out_rtcp_video_stream); + if(video_grabber_available) + video_transmitter(video_grabber, video_encoder, out_rtp_video_stream, out_rtcp_video_stream); } } @@ -259,34 +487,152 @@ void MilesSessionInvoker::processAudio() { } } +int MilesSessionInvoker::setup_audio() { + /* Initialize and configure audio encoder */ + audio_encoder = miles_audio_codec_init_encoder(); + audio_encoder->codec_id = miles_audio_codec_get_encoder_for_rtp_payload_type(MILES_RTP_PAYLOAD_TYPE_L16); + audio_encoder->sample_rate = 16000; + audio_encoder->bytes_per_sample = 2; + audio_encoder->chunk_size = 320; /* 20 ms */ + audio_encoder->input_format = MILES_AUDIO_FORMAT_PCM; + int rv = miles_audio_codec_setup_encoder(audio_encoder); + if(rv == 0) { + /* Couldn't set up audio codec */ + LOG(ERROR) << "Couldn't set up audio codec"; + return 0; + } + LOG(ERROR) << "audio enc set up"; + + /* Set up audio grabber */ + int n = miles_audio_device_get_supported_devices(MILES_AUDIO_DEVICE_OPENAL, &supported_audio_devices); + if(n<=0) { + /* No audio device available */ + LOG(ERROR) << "No audio device available"; + return 0; + } + /* Use first device that supports capture */ + for(int i=0; isample_rate*audio_encoder->bytes_per_sample); + encoded_out_audio = (char *)malloc(audio_encoder->sample_rate*audio_encoder->bytes_per_sample); + audio_read_buf = (char *)malloc(audio_encoder->sample_rate*audio_encoder->bytes_per_sample); + audio_data = (char *)malloc(1000000); + + LOG(ERROR) << "audio device set up"; + return 1; +} + +int MilesSessionInvoker::setup_video_grabber() { + struct miles_video_grabber_description *grabber_description; + + /* Initialize and configure video encoder */ + video_encoder = miles_video_codec_init_encoder(); + video_encoder->codec_id = miles_video_codec_get_encoder_for_rtp_payload_type(MILES_RTP_PAYLOAD_TYPE_JPEG); + video_encoder->width = 320; + video_encoder->height = 240; + video_encoder->qfactor = 50; + int rv = miles_video_codec_setup_encoder(video_encoder); + if (!rv) { + LOG(ERROR) << "Could not setup video encoder"; + return 0; + } + + /* Set up video grabber */ + int n = miles_video_grabber_get_supported_grabbers(&supported_video_grabbers); + if(n<=0) { + /* No video grabber available */ + LOG(ERROR) << "No video grabber available"; + return 0; + } + int use_grabber = 0; + if(n>1) { + /* If more than one grabber, select one that is not 'Test' */ + for(int i=0; iname, "Test") != 0) { + /* Make sure there is a device */ + if(grabber_description->devices != NULL) { + use_grabber = i; + break; + } + } + } + } + video_grabber = miles_video_grabber_create_context(supported_video_grabbers[use_grabber]); + video_grabber->width = video_encoder->width; + video_grabber->height = video_encoder->height; + video_grabber->frame_rate = 25*100; + miles_video_grabber_setup(video_grabber); + free(supported_video_grabbers); + + video_out_buf = (char *)malloc(video_encoder->width*video_encoder->height*4); + encoded_out_img = (char *)malloc(video_encoder->width*video_encoder->height*4); + + return 1; +} + void MilesSessionInvoker::cancel(const 
std::string sendId) { } void MilesSessionInvoker::invoke(const InvokeRequest& req) { video_port = 5566; audio_port = 5568; + thumb_list = NULL; + save_image = 0; } /** - * Do something with an image decoded from an RTP stream (e.g. render to screen) + * Render video image in a window */ -void MilesSessionInvoker::render_video_image(u_int32_t ssrc, char *img, int width, int height, int img_format) { - - if(img_format != MILES_IMAGE_RGBA) { - miles_image_convert(img, render_img, img_format, MILES_IMAGE_RGBA, width, height); - stbi_write_png("/Users/sradomski/Desktop/image.png", width, height, 3, render_img, width * 3); +void MilesSessionInvoker::render_video_image(char *img, int width, int height, int img_format) { + char *img_buf_ptr; + + if(img_format != MILES_IMAGE_RGB) { + if(render_img==NULL || render_img_size < width*height*4) { + if(render_img) + free(render_img); + render_img_size = width*height*4; + render_img = (char *)malloc(render_img_size); + } + miles_image_convert(img, render_img, img_format, MILES_IMAGE_RGB, width, height); + img_buf_ptr = render_img; } else { - stbi_write_png("/Users/sradomski/Desktop/image.png", width, height, 3, img, width * 3); + img_buf_ptr = img; } - /* render image... */ + /* save image to disk */ + if(save_image) + miles_image_file_write(MILES_IMAGE_FILE_FORMAT_PNG, MILES_IMAGE_RGB, "image.png", width, height, img_buf_ptr); + + /* render image in window... to be implementd. */ } + /** * Send an audio chunk decoded from an RTP stream to an audio device */ void MilesSessionInvoker::playback_audio(u_int32_t ssrc, char *buf, int sample_rate, int bps, int audio_format, int size) { - int n; if(size<0) return; @@ -302,7 +648,7 @@ void MilesSessionInvoker::playback_audio(u_int32_t ssrc, char *buf, int sample_r } /* play audio */ - n = miles_audio_device_write(MILES_AUDIO_DEVICE_OPENAL, audio_dev_playback, buf, size); + miles_audio_device_write(MILES_AUDIO_DEVICE_OPENAL, audio_dev_playback, buf, size); } /** @@ -312,6 +658,9 @@ void MilesSessionInvoker::playback_audio(u_int32_t ssrc, char *buf, int sample_r int MilesSessionInvoker::video_receiver(struct miles_rtp_in_stream *rtp_stream, char *data, int bytes_read) { int status, n; struct miles_video_codec_decode_context *codec_ctx; + char *codec_name; + struct miles_list *p; + struct thumb_entry *te; codec_ctx = (struct miles_video_codec_decode_context *)rtp_stream->codec_ctx; @@ -333,10 +682,80 @@ int MilesSessionInvoker::video_receiver(struct miles_rtp_in_stream *rtp_stream, rtp_stream->codec_ctx = (void *)codec_ctx; return 0; } - n = miles_video_codec_decode(codec_ctx, data, decoded_in_img, bytes_read); + + /* Find thumbnail list entry of the stream */ + _mutex.lock(); + p = thumb_list; + while(p) { + te = (struct thumb_entry *)p->item; + if(te->ssrc == rtp_stream->ssrc) { + break; + } + p = p->next; + } + if(p==NULL) { + // Create new thumbnail list entry + te = (struct thumb_entry *)malloc(sizeof(struct thumb_entry)); + if(thumb_list==NULL) + p = thumb_list = miles_list_create(te); + else + p = miles_list_append(thumb_list, te); + te->ssrc = rtp_stream->ssrc; + te->window_ctx = NULL; + te->img_buf = (char *)malloc(bytes_read); + te->buf_size = bytes_read; + te->img_size = 0; + te->decode_buf = NULL; + } + if(te->buf_size < bytes_read) { + // Need bigger image buffer + free(te->img_buf); + te->img_buf = (char *)malloc(bytes_read); + te->buf_size = bytes_read; + } + /* + * If codec is JPEG, thumbnail image can be saved without decoding + */ + codec_name = 
miles_video_codec_get_codec_name(codec_ctx->codec_id); + if(codec_name==NULL) { + _mutex.unlock(); + return 0; + } + if(strcmp(codec_name, "JPEG")==0) { + memcpy(te->img_buf, data, bytes_read); + te->img_size = bytes_read; + te->img_format = WEBCONFERO_THUMB_JPEG; + //miles_image_file_write(MILES_IMAGE_FILE_FORMAT_JPG, MILES_IMAGE_JPEG, "test.jpg", bytes_read, 1, data); + // If we're not going to render the video in a window, we're done now + if(te->window_ctx==NULL) { + _mutex.unlock(); + return 0; + } + } else { + te->img_format = WEBCONFERO_THUMB_PNG; + } + free(codec_name); + + if(te->decode_buf==NULL) { + te->decode_buf = (char *)malloc(1920*1080*4); + } + n = miles_video_codec_decode(codec_ctx, data, te->decode_buf, bytes_read); if(n > 0) { - render_video_image(rtp_stream->ssrc, decoded_in_img, codec_ctx->width, codec_ctx->height, codec_ctx->output_format); + if(te->img_format==WEBCONFERO_THUMB_PNG) { + if(n > te->buf_size) { + free(te->img_buf); + te->img_buf = (char *)malloc(n); + te->buf_size = n; + } + // Need to insert a PNG header here... + memcpy(te->img_buf, te->decode_buf, n); + te->img_size = n; + } + if(te->window_ctx) + render_video_image(te->decode_buf, codec_ctx->width, codec_ctx->height, codec_ctx->output_format); } + _mutex.unlock(); + return n; } @@ -378,7 +797,7 @@ int MilesSessionInvoker::audio_receiver(struct miles_rtp_in_stream *rtp_stream, */ void MilesSessionInvoker::rtp_audio_receiver(struct miles_rtp_session *rtp_session) { - int n, m=0; + int n; struct miles_rtp_in_stream *rtp_stream; /* Poll RTP socket, read all available RTP packets */ @@ -389,37 +808,37 @@ void MilesSessionInvoker::rtp_audio_receiver(struct miles_rtp_session *rtp_sessi /* Read RTP data */ n = miles_rtp_recv(rtp_session, &rtp_stream, audio_data); if(n>0) { - m = audio_receiver(rtp_stream, audio_data, n); + audio_receiver(rtp_stream, audio_data, n); } /* Poll RTCP socket */ n = miles_net_poll_socket(rtp_session->rtcp_session->socket); if(n>0) { - /* Do RTCP packet processing */ + /* Do RTCP packet processEventing */ n = miles_rtp_recv_rtcp(rtp_session->rtcp_session); } } } void MilesSessionInvoker::rtp_video_receiver(struct miles_rtp_session *rtp_session) { - int n, m=0; + int n; struct miles_rtp_in_stream *rtp_stream; /* Poll RTP socket, read all available RTP packets */ while (1) { - n = miles_net_poll_socket(rtp_session->socket); + n = miles_net_wait_socket(rtp_session->socket, 10); if(n<=0) return; /* Read RTP data */ n = miles_rtp_recv(rtp_session, &rtp_stream, video_data); if(n>0) { - m = video_receiver(rtp_stream, video_data, n); + video_receiver(rtp_stream, video_data, n); } /* Poll RTCP socket */ n = miles_net_poll_socket(rtp_session->rtcp_session->socket); if(n>0) { - /* Do RTCP packet processing */ + /* Do RTCP packet processEventing */ n = miles_rtp_recv_rtcp(rtp_session->rtcp_session); } } @@ -430,6 +849,21 @@ void MilesSessionInvoker::rtp_video_receiver(struct miles_rtp_session *rtp_sessi */ int MilesSessionInvoker::video_transmitter(struct miles_video_grabber_context *grabber, struct miles_video_codec_encode_context *codec_ctx, struct miles_rtp_out_stream *rtp_stream, struct miles_rtcp_out_stream *out_rtcp_stream) { int n; + static struct timeval last_time; + static int first_time=1; + struct timeval now; + int tbf; + + if (first_time) { + gettimeofday(&last_time, 0); + first_time = 0; + } + gettimeofday(&now, 0); + tbf = 100000 / grabber->frame_rate; + if (elapsed_time(&last_time, &now) < tbf) + return 0; + + last_time = now; /* Send RTCP packets, if due */ 
miles_rtp_send_rtcp(out_rtcp_stream); @@ -468,4 +902,4 @@ int MilesSessionInvoker::audio_transmitter(struct miles_audio_device *dev, struc } -} \ No newline at end of file +} diff --git a/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.h b/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.h index bd86c5b..11b4be1 100644 --- a/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.h +++ b/src/uscxml/plugins/invoker/miles/MilesSessionInvoker.h @@ -32,11 +32,27 @@ extern "C" { #include "miles/video_grabber.h" #include "miles/session.h" #include "miles/image.h" +#include "miles/list.h" + long elapsed_time(struct timeval *before, struct timeval *after); } #ifdef BUILD_AS_PLUGINS #include "uscxml/plugins/Plugins.h" #endif +#define WEBCONFERO_THUMB_NONE 0 +#define WEBCONFERO_THUMB_JPEG 1 +#define WEBCONFERO_THUMB_PNG 2 + +struct thumb_entry { + char *img_buf; + int buf_size; // The size of the buffer malloced + int img_size; // The size of the image + int img_format; // JPEG or PNG image + char *decode_buf; + u_int32_t ssrc; + void *window_ctx; // The context of the window popped up when the thumbnail is clicked. +}; + namespace uscxml { class MilesSessionInvoker : public InvokerImpl { @@ -58,7 +74,21 @@ public: virtual void invoke(const InvokeRequest& req); protected: - bool alternate; // this is to alternate test1 an test2.jpeg and has no other use! remove me later .. + void processEventStart(const std::string& origin, const std::string& userid, const std::string& reflector, const std::string& session); + void processEventParticipants(const std::string& origin); + void processEventThumbnail(const std::string& origin, const std::string& userid); + void processEventVideoOn(const std::string& origin, const std::string& userid); + void processEventVideoOff(const std::string& origin, const std::string& userid); + void processEventAudioOn(const std::string& origin, const std::string& userid); + void processEventAudioOff(const std::string& origin, const std::string& userid); + void processEventSendVideo(const std::string& origin, size_t width, size_t height, size_t framerate, const std::string& compression); + void processEventSendVideoOff(const std::string& origin); + void processEventSendAudio(const std::string& origin, const std::string& encoding); + void processEventSendAudioOff(const std::string& origin); + void processEventPostText(const std::string& origin, const std::string& userid, const std::string& message); + void processEventGetText(const std::string& origin); + + int _imageSeq; int video_rtp_in_socket, audio_rtp_in_socket; int video_rtp_out_socket, audio_rtp_out_socket; @@ -76,26 +106,34 @@ protected: int video_port, audio_port; std::string ip_address; - char video_out_buf[1000000]; - char encoded_out_img[1000000]; - char decoded_in_img[1000000]; - char audio_in_buf[1000000]; - char render_img[1000000]; - char audio_data[1000000]; - char video_data[1000000]; + char *video_out_buf; + char *encoded_out_img; + char *audio_in_buf; + char *render_img; + int render_img_size; + char *audio_data; + char *video_data; - char encoded_out_audio[1000000]; - char audio_read_buf[1000000]; + char *encoded_out_audio; + char *audio_read_buf; + struct miles_list *thumb_list; + int save_image; struct miles_audio_device *audio_dev_playback; int audio_dev_playback_id; + int audio_available; + int video_grabber_available; static void runAudio(void* instance); static void runVideo(void* instance); void processVideo(); void processAudio(); + int setup_video_grabber(); + int setup_audio(); - void 
render_video_image(u_int32_t ssrc, char *img, int width, int height, int img_format); + void init_media_buffers(); + void free_media_buffers(); + void render_video_image(char *img, int width, int height, int img_format); void playback_audio(u_int32_t ssrc, char *buf, int sample_rate, int bps, int audio_format, int size); int video_receiver(struct miles_rtp_in_stream *rtp_stream, char *data, int bytes_read); int audio_receiver(struct miles_rtp_in_stream *rtp_stream, char *data, int bytes_read); diff --git a/src/uscxml/plugins/invoker/miles/SpatialAudio.cpp.old b/src/uscxml/plugins/invoker/miles/SpatialAudio.cpp.old deleted file mode 100644 index 2f8e032..0000000 --- a/src/uscxml/plugins/invoker/miles/SpatialAudio.cpp.old +++ /dev/null @@ -1,239 +0,0 @@ -/** - * @file - * @author 2012-2013 Stefan Radomski (stefan.radomski@cs.tu-darmstadt.de) - * @copyright Simplified BSD - * - * @cond - * This program is free software: you can redistribute it and/or modify - * it under the terms of the FreeBSD license as published by the FreeBSD - * project. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - * - * You should have received a copy of the FreeBSD license along with this - * program. If not, see . - * @endcond - */ - -#include "uscxml/Common.h" -#include "SpatialAudio.h" -#include "uscxml/Interpreter.h" -#include "uscxml/URL.h" - -#include - -#ifdef _WIN32 -#define _USE_MATH_DEFINES -#endif -#include - -namespace uscxml { - -#ifdef BUILD_AS_PLUGINS -PLUMA_CONNECTOR -bool pluginConnect(pluma::Host& host) { - host.add( new SpatialAudioProvider() ); - return true; -} -#endif - -SpatialAudio::SpatialAudio() { - _audioDevOpen = false; - _audioDev = NULL; - _audioDevIndex = -1; - _pos = new float[3]; - _pos[0] = _pos[1] = _pos[2] = 0.0; - _listener = new float[3]; - _listener[0] = _listener[1] = _listener[2] = 0.0; - _maxPos = new float[3]; - _maxPos[0] = _maxPos[1] = _maxPos[2] = 1.0; - miles_init(); -} - -SpatialAudio::~SpatialAudio() { -}; - -boost::shared_ptr SpatialAudio::create(InterpreterImpl* interpreter) { - boost::shared_ptr invoker = boost::shared_ptr(new SpatialAudio()); - invoker->_interpreter = interpreter; - return invoker; -} - -Data SpatialAudio::getDataModelVariables() { - Data data; -// data.compound["foo"] = Data("32"); - return data; -} - -void SpatialAudio::send(const SendRequest& req) { - if (!_audioDevOpen) { - _audioDev = miles_audio_device_open(MILES_AUDIO_IO_OPENAL, _audioDevIndex, 0, 22050, 2, 1, 1024, false); - if (_audioDev != NULL) { - _audioDevOpen = true; -// float rolloffFactor = 1.0; -// miles_audio_device_control(MILES_AUDIO_IO_OPENAL, _audioDev, MILES_AUDIO_DEVICE_CTRL_SET_ROLLOFF_FACTOR, &rolloffFactor); - } - } - - if (boost::iequals(req.name, "play")) { - if (_audioDevOpen) { - getPosFromParams(req.params, _pos); - -// std::cout << "Source: "; -// for (int i = 0; i < 3; i++) { -// std::cout << _pos[i] << " "; -// } -// std::cout << std::endl; - - miles_audio_device_control(MILES_AUDIO_IO_OPENAL, _audioDev, MILES_AUDIO_DEVICE_CTRL_SET_POSITION, _pos); - - - char* buffer = (char*)malloc(_audioDev->chunk_size); - // skip wav header - _dataStream.seekg(44); - - while(_dataStream.readsome(buffer, _audioDev->chunk_size) != 0) { - int written = 0; - while(written < _audioDev->chunk_size) { - written += miles_audio_device_write(MILES_AUDIO_IO_OPENAL, _audioDev, buffer + written, _audioDev->chunk_size - written); - 
tthread::this_thread::sleep_for(tthread::chrono::milliseconds(10)); - } - } - _dataStream.seekg(0); - free(buffer); - } - } else if (boost::iequals(req.name, "move.listener")) { - if (_audioDevOpen) { - getPosFromParams(req.params, _listener); - -// std::cout << "Listener: "; -// for (int i = 0; i < 3; i++) { -// std::cout << _listener[i] << " "; -// } -// std::cout << std::endl; - - miles_audio_device_control(MILES_AUDIO_IO_OPENAL, _audioDev, MILES_AUDIO_DEVICE_CTRL_SET_LISTENER_POS, _listener); - - } - } -} - -void SpatialAudio::cancel(const std::string sendId) { - assert(false); -} - -void SpatialAudio::sendToParent(SendRequest& req) { - req.invokeid = _invokeId; - assert(false); -} - -void SpatialAudio::invoke(const InvokeRequest& req) { - _invokeId = req.invokeid; - - if (req.src.length() > 0) { - URL scriptUrl(req.src); - if (!scriptUrl.toAbsolute(_interpreter->getBaseURI())) { - LOG(ERROR) << "Source attribute for audio invoker has relative URI " << req.src << " with no base URI set for interpreter"; - return; - } - - _dataStream << scriptUrl; - } - - getPosFromParams(req.params, _pos); - - std::multimap::const_iterator paramIter = req.params.begin(); - while(paramIter != req.params.end()) { - if (boost::iequals(paramIter->first, "maxX")) - _maxPos[0] = strTo(paramIter->second); - if (boost::iequals(paramIter->first, "maxY")) - _maxPos[1] = strTo(paramIter->second); - if (boost::iequals(paramIter->first, "maxZ")) - _maxPos[2] = strTo(paramIter->second); - paramIter++; - } - - struct miles_audio_device_description *devices; - int ndevs; - - ndevs = miles_audio_device_get_supported_devices(MILES_AUDIO_IO_OPENAL, &devices); - - for (int i = 0; i < ndevs; i++) { - if ((devices[i].capabilities & MILES_AUDIO_DEVICE_CAPABILITY_SPATIAL) && - (devices[i].capabilities & MILES_AUDIO_DEVICE_CAPABILITY_OUTPUT)) { - _audioDevIndex = i; - break; - } - } -} - -void SpatialAudio::getPosFromParams(const std::multimap& params, float* position) { - // vector explicitly given - try { - if (params.find("x") != params.end()) - position[0] = boost::lexical_cast(params.find("x")->second); - if (params.find("y") != params.end()) - position[1] = boost::lexical_cast(params.find("y")->second); - if (params.find("z") != params.end()) - position[2] = boost::lexical_cast(params.find("z")->second); - } catch (boost::bad_lexical_cast& e) { - LOG(ERROR) << "Cannot interpret x, y or z as float value in params: " << e.what(); - } - - try { - // right is an alias for x - if (params.find("right") != params.end()) - position[0] = boost::lexical_cast(params.find("right")->second); - // height is an alias for y - if (params.find("height") != params.end()) - position[1] = boost::lexical_cast(params.find("height")->second); - // front is an alias for z - if (params.find("front") != params.end()) - position[2] = boost::lexical_cast(params.find("front")->second); - } catch (boost::bad_lexical_cast& e) { - LOG(ERROR) << "Cannot interpret right, height or front as float value in params: " << e.what(); - } - - // do we have a position on a circle? 
- try { - if (params.find("circle") != params.end()) { - float rad = posToRadian(params.find("circle")->second); - position[0] = cosf(rad); - position[2] = -1 * sinf(rad); // z axis increases to front - } - } catch (boost::bad_lexical_cast& e) { - LOG(ERROR) << "Cannot interpret circle as float value in params: " << e.what(); - } - - position[0] = position[0] / _maxPos[0]; - position[1] = position[1] / _maxPos[1]; - position[2] = position[2] / _maxPos[2]; -// std::cout << _pos[0] << ":" << _pos[1] << ":" << _pos[2] << std::endl; - -} - -float SpatialAudio::posToRadian(const std::string& pos) { - - std::string trimmedPos = boost::trim_copy(pos); - float rad = 0; - - if (trimmedPos.size() > 3 && boost::iequals("deg", trimmedPos.substr(trimmedPos.length() - 3, 3))) { - rad = boost::lexical_cast(trimmedPos.substr(0, trimmedPos.size() - 3)); - rad = fmodf(rad, 360); // into range [0-360] - rad /= 180; // into range [0-2] - rad *= M_PI; // into range [0-2PI] - rad -= M_PI_2; // 0 to top; - rad *= -1; // make clockwise - rad += 2 * M_PI; // make positive - } else if (trimmedPos.size() > 3 && boost::iequals("rad", trimmedPos.substr(trimmedPos.length() - 3, 3))) { - rad = boost::lexical_cast(trimmedPos.substr(0, trimmedPos.size() - 3)); - rad = fmodf(rad, M_PI * 2); // into range [0-2*PI] - } else { - LOG(ERROR) << "Cannot make sense of position value " << trimmedPos << ": does not end in 'deg', 'rad'"; - } - return rad; -} - -} \ No newline at end of file diff --git a/src/uscxml/plugins/invoker/miles/SpatialAudio.h.old b/src/uscxml/plugins/invoker/miles/SpatialAudio.h.old deleted file mode 100644 index d6ca285..0000000 --- a/src/uscxml/plugins/invoker/miles/SpatialAudio.h.old +++ /dev/null @@ -1,76 +0,0 @@ -/** - * @file - * @author 2012-2013 Stefan Radomski (stefan.radomski@cs.tu-darmstadt.de) - * @copyright Simplified BSD - * - * @cond - * This program is free software: you can redistribute it and/or modify - * it under the terms of the FreeBSD license as published by the FreeBSD - * project. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - * - * You should have received a copy of the FreeBSD license along with this - * program. If not, see . 
- * @endcond - */ - -#ifndef SPATIALAUDIO_H_EH11SAQC -#define SPATIALAUDIO_H_EH11SAQC - -#include -#include - -extern "C" { -#include "miles/audio_device.h" -#include "miles/audio_codec.h" -#include "miles/audio_io.h" -#include "miles/miles.h" -} - -namespace uscxml { - -class SpatialAudio : public InvokerImpl { -public: - SpatialAudio(); - virtual ~SpatialAudio(); - virtual boost::shared_ptr create(InterpreterImpl* interpreter); - - virtual std::set getNames() { - std::set names; - names.insert("spatial-audio"); - names.insert("audio"); - names.insert("http://www.smartvortex.eu/mmi/spatial-audio"); - names.insert("http://www.smartvortex.eu/mmi/spatial-audio/"); - return names; - } - - virtual Data getDataModelVariables(); - virtual void send(const SendRequest& req); - virtual void cancel(const std::string sendId); - virtual void invoke(const InvokeRequest& req); - virtual void sendToParent(SendRequest& req); - - void getPosFromParams(const std::multimap& params, float* position); - static float posToRadian(const std::string& position); - -protected: - std::string _invokeId; - Interpreter* _invokedInterpreter; - - std::stringstream _dataStream; - - float* _pos; - float* _listener; - float* _maxPos; - bool _audioDevOpen; - int _audioDevIndex; - struct miles_audio_device* _audioDev; - -}; - -} - -#endif /* end of include guard: SPATIALAUDIO_H_EH11SAQC */ diff --git a/src/uscxml/server/WebSocketServer.cpp b/src/uscxml/server/WebSocketServer.cpp new file mode 100644 index 0000000..e69de29 diff --git a/src/uscxml/server/WebSocketServer.h b/src/uscxml/server/WebSocketServer.h new file mode 100644 index 0000000..438b932 --- /dev/null +++ b/src/uscxml/server/WebSocketServer.h @@ -0,0 +1,8 @@ +#ifndef WEBSOCKETSERVER_H_FG7908O3 +#define WEBSOCKETSERVER_H_FG7908O3 + +namespace uscxml { + +} + +#endif /* end of include guard: WEBSOCKETSERVER_H_FG7908O3 */ -- cgit v0.12
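
Note on the thumbnail bookkeeping introduced in video_receiver() above: per incoming RTP stream the code walks thumb_list under _mutex, looks up the struct thumb_entry whose ssrc matches the stream, and creates (or grows) its image buffer before storing the latest JPEG/PNG frame. A minimal standalone sketch of that lookup-or-create pattern, using std::unordered_map and std::vector in place of the miles_list and malloc'd buffers the patch uses (ThumbRegistry and ThumbEntry are illustrative names, not identifiers from the patch):

#include <cstddef>
#include <cstdint>
#include <mutex>
#include <unordered_map>
#include <vector>

// Mirrors the role of struct thumb_entry: the latest encoded thumbnail
// per RTP source, plus the optional render-window handle.
struct ThumbEntry {
	std::vector<char> imgBuf;   // last encoded image (JPEG or PNG)
	int imgFormat = 0;          // e.g. WEBCONFERO_THUMB_JPEG / WEBCONFERO_THUMB_PNG
	void *windowCtx = nullptr;  // non-NULL once a video window was opened
};

class ThumbRegistry {
public:
	// Store the newest frame for a stream, creating its entry on first sight.
	void update(uint32_t ssrc, const char *data, size_t size, int format) {
		std::lock_guard<std::mutex> lock(_mutex);
		ThumbEntry &te = _entries[ssrc];      // lookup-or-create keyed by SSRC
		te.imgBuf.assign(data, data + size);  // buffer grows or shrinks as needed
		te.imgFormat = format;
	}

private:
	std::mutex _mutex;  // same role as the invoker's _mutex around thumb_list
	std::unordered_map<uint32_t, ThumbEntry> _entries;
};

The list-plus-malloc variant in the patch keeps to the C conventions of the miles library; the map-based sketch is only meant to make the per-SSRC control flow easier to follow.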
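
Note on the frame pacing added to video_transmitter(): the static timeval pair plus elapsed_time() simply skips the grab/encode/send cycle until a per-frame time budget derived from grabber->frame_rate has elapsed. A hedged sketch of the same throttle with std::chrono, assuming a microsecond budget (the patch divides 100000 by the frame rate; the unit returned by elapsed_time() is not visible in this diff, so the constant below is an assumption):

#include <chrono>

// Returns true when the next frame is due at the requested rate,
// false if the caller should skip this polling round. frameDue() is an
// illustrative helper, not a function from the patch.
bool frameDue(int frameRate) {
	using clock = std::chrono::steady_clock;
	static clock::time_point last = clock::now();                       // plays the role of last_time/first_time
	const auto budget = std::chrono::microseconds(1000000 / frameRate); // time between frames
	const auto now = clock::now();
	if (now - last < budget)
		return false;  // too early, try again on the next poll
	last = now;
	return true;
}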