WearScript combines the power of Android development on Glass with the learning curve of a website. Go from concept to demo in a fraction of the time. For an overview, check out the intro video and the sample script below. Visit http://www.wearscript.com for documentation and more information.
One-Line Installer (Linux/OSX): Execute the following in a shell to install WearScript on your Glass and authenticate with our default server.
curl -L http://goo.gl/U1RIHm > install.py && python install.py
// Sample WearScript
<html style="width:100%; height:100%; overflow:hidden">
<body style="width:100%; height:100%; overflow:hidden; margin:0">
<canvas id="canvas" width="640" height="360" style="display:block"></canvas>
<script>
function cb(data) {  // Changes canvas color depending on head rotation
    if (data['type'] == WS.sensor('orientation')) {
        ctx.fillStyle = 'hsl(' + data['values'][0] + ', 90%, 50%)';
        ctx.fillRect(0, 0, 640, 360);
    }
}
function server() {
    WS.log('Welcome to WearScript');  // Write to Android Log and Playground console
    WS.say('Welcome to WearScript');  // Text-to-speech
    // Stream camera images and all sensors to the WearScript Playground Webapp
    var sensors = ['gps', 'accelerometer', 'magneticField', 'orientation', 'gyroscope',
                   'light', 'gravity', 'linearAcceleration', 'rotationVector'];
    for (var i = 0; i < sensors.length; i++)
        WS.sensorOn(WS.sensor(sensors[i]), .15, 'cb');
    WS.cameraOn(2);
    WS.dataLog(false, true, .15);
}
function main() {
    if (WS.scriptVersion(0)) return;
    ctx = document.getElementById('canvas').getContext('2d');
    WS.serverConnect('{{WSUrl}}', 'server');
}
window.onload = main;
</script></body></html>
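If you only want the basic structure without the canvas and sensor streaming, the sketch below strips the sample down to its skeleton. It uses only WS calls that appear in the sample above (WS.scriptVersion, WS.serverConnect, WS.say, WS.log) and is meant as a minimal starting point, not a complete API reference.

// Minimal skeleton (a sketch using only the WS calls shown in the sample above)
<html>
<body>
<script>
function server() {
    // Called once the script has connected to the WearScript server
    WS.say('Hello from WearScript');  // Text-to-speech on Glass
    WS.log('Connected');              // Android Log and Playground console
}
function main() {
    if (WS.scriptVersion(0)) return;            // Stop if the client version is incompatible
    WS.serverConnect('{{WSUrl}}', 'server');    // '{{WSUrl}}' is a template placeholder, as in the sample above
}
window.onload = main;
</script>
</body></html>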
See the contributors list for details; entries are given as Name (IRC nick).