Rajan Patel, engineering lead for Google Lens, said on Twitter that Google Lens will soon get natural world recognition improvements and experimental AR experiences

Google Lens to get useful improvements and AR experiences

For those who don’t know, Google Lens, available on Pixel smartphones, lets users take a photo and get information about what’s visible in that image. So, if you take a picture of the Taj Mahal, it will give you information about the monument. Responding to an inquiry from Android Police, Rajan Patel, engineering lead for Google Lens, said on Twitter, “Shopping (apparel, home goods, etc) in the works. Natural world (flora and fauna) improvements coming. Team just sent me an amazing new OCR model. Experimental AR experiences coming as well…”


The new OCR (optical character recognition) model should help Lens work better under the hood and, in particular, identify text more accurately. The shopping improvements, meanwhile, should help Lens recognize things you may want to purchase, such as apparel and home goods.
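
Google hasn’t published details of the new Lens model, but for a rough sense of what on-device text recognition involves, here is a minimal sketch using Google’s public ML Kit text recognition API on Android. This is an illustrative stand-in, not the model Lens actually ships:

```kotlin
import android.graphics.Bitmap
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.text.TextRecognition
import com.google.mlkit.vision.text.latin.TextRecognizerOptions

// Illustrative sketch: run Google's public ML Kit OCR on a photo.
// This is NOT Lens's internal model, which Google has not published.
fun recognizeText(photo: Bitmap) {
    val recognizer = TextRecognition.getClient(TextRecognizerOptions.DEFAULT_OPTIONS)
    val image = InputImage.fromBitmap(photo, /* rotationDegrees = */ 0)

    recognizer.process(image)
        .addOnSuccessListener { result ->
            // Full recognized text, plus per-block positions for overlays.
            println("Recognized: ${result.text}")
            for (block in result.textBlocks) {
                println("Block '${block.text}' at ${block.boundingBox}")
            }
        }
        .addOnFailureListener { e ->
            println("OCR failed: $e")
        }
}
```

The per-block bounding boxes are what let an app like Lens highlight recognized text directly on top of the camera image rather than just returning a flat string.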


As far as augmented reality is concerned, it isn’t clear how extensive the AR features will be. Google recently released its first ARCore app on the Google Play Store, which lets users place 3D virtual characters, including ones from Star Wars and Stranger Things, into the camera’s feed. The official release says, “Users can team up with “Star Wars: The Last Jedi” characters like BB-8 and R2-D2, or even stage fun moments with Eleven, the Demogorgon, and other characters from Netflix’s ‘Stranger Things’. Or use Foodmoji stickers to make it clear that you’re feeling hungry, 3D Text for when you really want to title your life, or shoot a celebratory video at a New Year’s party complete with animated AR balloons, cake, and champagne.”
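
Google hasn’t detailed what Lens’s AR experiments will look like, but the basic mechanics of pinning a virtual character into the camera feed with the public ARCore Android API look roughly like this. It’s a sketch under assumptions: the `frame` and the `hitX`/`hitY` tap coordinates are presumed to come from the app’s own camera and touch handling, and the rendering of the 3D model is left out:

```kotlin
import com.google.ar.core.Anchor
import com.google.ar.core.Frame
import com.google.ar.core.Plane

// Illustrative sketch: anchor a 3D character to a real-world surface,
// the way AR sticker-style apps do. hitX/hitY are the screen-tap point.
fun placeCharacter(frame: Frame, hitX: Float, hitY: Float): Anchor? {
    // Ray-cast from the tap point into the camera's view of the world.
    for (hit in frame.hitTest(hitX, hitY)) {
        val trackable = hit.trackable
        // Only place the character on a detected surface the tap actually hit.
        if (trackable is Plane && trackable.isPoseInPolygon(hit.hitPose)) {
            // The anchor keeps the character fixed in place as the phone moves;
            // a renderer would then draw the model at the anchor's pose each frame.
            return hit.createAnchor()
        }
    }
    return null // No surface under the tap; nothing to attach the character to.
}
```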

Digit NewsDesk

