{"componentChunkName":"component---src-templates-blog-post-js","path":"/blog/mum-thousand-times-more-powerful-than-bert/","result":{"data":{"site":{"siteMetadata":{"title":"No Frills News"}},"contentfulNfnPost":{"postTitle":"MUM: Thousand Times More Powerful Than BERT","slug":"mum-thousand-times-more-powerful-than-bert","createdLocal":"2021-05-24 14:30:44.548504","publishDate":"2021-05-24 12:30:00+00:00","feedName":"Image Recognition","sourceUrl":{"sourceUrl":"https://analyticsindiamag.com/mum-thousand-times-more-powerful-than-bert/"},"postSummary":{"childMarkdownRemark":{"html":"<p>MUM is a thousand times more powerful than BERT.\nMultitask Unified Model (MUM) — our latest AI milestone — has the potential to transform how Google helps you with complex information tasks.\nLike the popular BERT model, MUM is built on a Transformer architecture.\nUnlike most language models trained on one language, this new language model is trained across 75 different languages.\n3| Understanding information across types: MUM is multimodal, which means understanding information from different formats like web pages, pictures and more, simultaneously.</p>"}}}},"pageContext":{"slug":"mum-thousand-times-more-powerful-than-bert"}}