<!DOCTYPE html><html lang="en" xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" style="font-size:16px;"><head></head><head><meta charset="utf-8"/><!--[if !mso]><!--><meta http-equiv="X-UA-Compatible" content="IE=edge"/><!--<![endif]--><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="x-apple-disable-message-reformatting"/><meta name="format-detection" content="telephone=no,address=no,email=no,date=no,url=no"/><meta name="color-scheme" content="light"/><meta name="supported-color-schemes" content="light"/><title>LayerNorm Is Not Needed In Transformer</title><!--[if mso]><xml><o:OfficeDocumentSettings><o:AllowPNG/><o:PixelsPerInch>96</o:PixelsPerInch></o:OfficeDocumentSettings></xml><![endif]--><style>
:root { color-scheme: light; supported-color-schemes: light; }
body { margin: 0; padding: 0; min-width: 100%!important; -ms-text-size-adjust: 100% !important; -webkit-transform: scale(1) !important; -webkit-text-size-adjust: 100% !important; -webkit-font-smoothing: antialiased !important; }
.body { word-wrap: normal; word-spacing:normal; }
table.mso { width: 100%; border-collapse: collapse; padding: 0; table-layout: fixed; }
img { border: 0; outline: none; }
table { mso-table-lspace: 0px; mso-table-rspace: 0px; }
td, a, span { mso-line-height-rule: exactly; }
#root [x-apple-data-detectors=true],
a[x-apple-data-detectors=true],
#MessageViewBody a { color: inherit !important; text-decoration: inherit !important; font-size: inherit !important; font-family: inherit !important; font-weight: inherit !important; line-height: inherit !important; }
span.MsoHyperlink { color: inherit !important; mso-style-priority: 99 !important; }
span.MsoHyperlinkFollowed { color: inherit !important; mso-style-priority: 99 !important; }
.a { background-color:#dedede; }
.b { background-color:#2a2a2a; }
.c { background-color:#ffffff; }
.d { background-color:#fff0c8; }
.d2 { background-color:#FFFFFF; }
.d3 { background-color:#FFFFFF; }
h1 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h2 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h3 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h4 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h5 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h6 a { text-decoration:none;color:#2C81E5 !important;font-style:italic; }
h1, h1 a, h2, h2 a, h3, h3 a, h4, h4 a, h5, h5 a, h6, h6 a, ul, li, ol, p, p a { margin: 0;padding: 0; }
h1 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:700;font-size:28px;color:#2A2A2A;line-height:42px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h2 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:700;font-size:24px;color:#2A2A2A;line-height:36px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h3 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:20px;color:#2A2A2A;line-height:30px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h4 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:18px;color:#2A2A2A;line-height:27px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h5 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:16px;color:#2A2A2A;line-height:24px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h6 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:14px;color:#2A2A2A;line-height:21px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
p { font-family:'Georgia','Times New Roman',serif;font-weight:400;color:#2D2D2D;font-size:16px;line-height:24px;padding-bottom:8px;padding-top:8px;mso-margin-top-alt:8px;mso-margin-bottom-alt:8px; }
p a, .e a, ul a, li a, .h a, .h2 a, .h3 a { word-break:break-word;color:#2C81E5 !important;text-decoration:none;font-style:italic; }
p a span, .e a span, ul a span, li a span { color: inherit }
p .bold { font-weight:bold;color:#2D2D2D; }
p span[style*="font-size"] { line-height: 1.6; }
.f p { font-size:12px;line-height:15px;color:#2D2D2D;padding:0; }
.f p a { color:#2D2D2D !important; }
.g p { font-family:'Helvetica',Arial,sans-serif;font-size:14px;line-height:20px;font-weight:normal;margin:0; }
.g p a { text-decoration: underline; }
.i p { font-family:'Helvetica',Arial,sans-serif;line-height:23px;font-size:15px;color:#2D2D2D; }
.i p a { color:#2D2D2D !important; }
.i2 p { font-family:'Helvetica',Arial,sans-serif;line-height:23px;font-size:15px;color:#2D2D2D; }
.i2 p a { color:#2D2D2D !important; }
.i3 p { font-family:'Helvetica',Arial,sans-serif;line-height:43px;font-size:24px;color:#2D2D2D; }
.i3 p a { color:#2D2D2D !important; }
.h p a { color:#595959 !important; }
.h2 p a { color:#595959 !important; }
.h3 p a { color:#595959 !important; }
.f p a, .i p a, .i2 p a, .i3 p a, .h p a, .h2 p a, .h3 p a { text-decoration:underline; }
.j { border-top:3px solid #ffeb2d; }
.k p { padding-left:15px;padding-bottom:0px;padding-top:6px;mso-margin-top-alt:6px;mso-margin-bottom-alt:0px;mso-margin-left-alt:15px; }
.o { background-color:#FFFFFF;border:1px solid #F1F1F1;border-radius:5px; }
.o p { font-family:'Helvetica',Arial,sans-serif;padding:0px;margin:0px; }
.l p,
.l p a, .l a { font-size:14px;line-height:20px;font-weight: bold;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.m p,
.m p a { font-size:13px;line-height:18px;font-weight:400;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.n p,
.n p a { font-size:12px;line-height:17px;font-weight:400;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.p { background-color:#FFFFFF;max-width:520px;border:1px solid #E1E8ED;border:1px solid rgba(80, 80, 80, 0.3);border-radius:5px; }
.q { font-size:16px;font-family:Helvetica,Roboto,Calibri,sans-serif !important;border:1px solid #e1e8ed;border:1px solid rgba(80, 80, 80, 0.3);border-radius:10px;background-color:#FFFFFF; }
.q p { font-size:16px;font-family:system-ui,Helvetica,Roboto,Calibri,sans-serif !important;color:#222222;padding:4px 0; }
.r { border:1px solid #E1E8ED !important;border-radius:5px; }
.s p { font-size: 14px; line-height: 17px; font-weight: 400; color: #697882; text-decoration: none; }
.t p { font-family:'Helvetica',Arial,sans-serif;font-size:12px;line-height:18px;font-weight:400;color:#000000;font-style:italic;padding:4px 0px 0px; }
.v { border-radius:10px;border:solid 0px #DFD150;background-color:#2C81E5;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;color:#FFFFFF; }
.v a { text-decoration:none;display:block;color:#FFFFFF; }
.w p { font-size:12px;line-height:15px;font-weight:400;color:#FFFFFF; }
.w p a { text-decoration: underline !important;color:#FFFFFF !important; }
ul { font-family:'Helvetica',Arial,sans-serif;margin:0px 0px 0px 25px !important;padding:0px !important;color:#2D2D2D;line-height:24px;list-style:disc;font-size:16px; }
ul > li { font-family:'Helvetica',Arial,sans-serif;margin:10px 0px 0px 0px !important;padding: 0px 0px 0px 0px !important; color: #2D2D2D; list-style:disc; }
ol { font-family:'Helvetica',Arial,sans-serif;margin: 0px 0px 0px 25px !important;padding:0px !important;color:#2D2D2D;line-height:24px;list-style:decimal;font-size:16px; }
ol > li { font-family:'Helvetica',Arial,sans-serif;margin:10px 0px 0px 0px !important;padding: 0px 0px 0px 0px !important; color: #2D2D2D; }
.e h3,
.e p,
.e span { padding-bottom:0px;padding-top:0px;mso-margin-top-alt:0px;mso-margin-bottom-alt:0px; }
.e span,
.e li { font-family:'Helvetica',Arial,sans-serif;font-size:16px;color:#2D2D2D;line-height:24px; }
.rec { font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji" !important; }
.rec__button:hover { background-color: #f9fafb !important; }
.copyright a {color: inherit !important; text-decoration: none !important; font-size: inherit !important; font-family: inherit !important; font-weight: inherit !important; line-height: inherit !important;}
.txt_social p { padding: 0; word-break: break-all; }
.table, .table-c, .table-h { border: 1px solid #C0C0C0; }
.table-c { padding:5px; background-color:#FFFFFF; }
.table-c p { color: #2D2D2D; font-family:'Helvetica',Arial,sans-serif !important;overflow-wrap: break-word; }
.table-h { padding:5px; background-color:#F1F1F1; }
.table-h p { color: #2A2A2A; font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif !important;overflow-wrap: break-word; }
@media only screen and (max-width:667px) {
.aa, .w100pc { width: 100% !important; }
.bb img { width: 100% !important; height: auto !important; max-width: none !important; }
.cc { padding: 0px 8px !important; }
.ee { padding-top:10px !important;padding-bottom:10px !important; }
.ff ul, .ff ol { margin: 0px 0px 0px 10px !important;padding: 0px !important; }
.ff li { margin:10px 0px 0px 10px !important; }
.r {height:140px !important;}
.s p { font-size:13px !important;line-height:15px !important; }
.mob-hide {display:none !important;}
.mob-show {display: block !important; width: auto !important; overflow: visible !important; float: none !important; max-height: inherit !important; line-height: inherit !important;}
.mob-stack {width:100% !important;display:block !important;}
.mob-w-full {width:100% !important;}
.mob-block {display:block !important;}
.embed-img {padding:0px 0px 12px 0px !important;}
.socialShare {padding-top:15px !important;}
.rec { padding-left:15px!important;padding-right:15px!important; }
.bodyWrapper { padding:7px 4px 7px 4px !important; }
.social-mobile {float:left !important;margin-top:10px !important;}
}
@media screen and (max-width: 480px) {
u + .a .gg { width: 100% !important; width: 100vw !important; }
.tok-heart { padding-top:75% !important; }
.tok-play { padding-top: 250px !important; }
}
@media screen and (max-width: 320px) {
.tok-heart { padding-top:65% !important; }
}
.u { border: 1px solid #CACACA !important; border-radius: 2px !important; background-color: #ffffff !important; padding: 0px 13px 0px 13px !important; font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif !important;font-size: 12px !important; color: #767676 !important; }
.u a { text-decoration: none; display: block !important; color: #767676 !important; margin: 0px !important; }
.u span, .u img { color: #767676 !important;margin:0px !important; max-height:32px !important;background-color:#ffffff !important; }
</style><!--[if mso]><style type="text/css">
h1, h2, h3, h4, h5, h6 {font-family: Arial, sans-serif !important;}
body, table, td, p, a, span {font-family: Arial, sans-serif !important;}
sup { font-size: 100% !important;vertical-align: .5em !important;mso-text-raise: -1.5% !important;line-height: 0 !important; }
ul { margin-left:0px !important; margin-right:10px !important; margin-top:20px !important; margin-bottom:20px !important; }
ul li { margin-left: 0px !important; mso-special-format: decimal; }
ol { margin-left:0px !important; margin-right:10px !important; margin-top:20px !important; margin-bottom:20px !important; }
ol li { margin-left: 0px !important; mso-special-format: decimal; }
li.listItem { margin-left:15px !important; margin-top:0px !important; }
.paddingDesktop { padding: 10px 0 !important; }
.edm_outlooklist { margin-left: -20px !important; }
.embedImage { display:none !important; }
</style><![endif]--><!-- __merge_tags_in_links__ --><style>
@font-face {
font-family: 'Open Sans';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url('https://fonts.gstatic.com/s/opensans/v40/memSYaGs126MiZpBA-UvWbX2vVnXBbObj2OVZyOOSr4dVJWUgsg-1x4gaVIUwaEQbjA.woff2') format('woff2');
}
@font-face {
font-family: 'Open Sans';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url('https://fonts.googleapis.com/css2?family=Open+Sans:ital,wght@1,700&display=swap') format('woff2');
}
</style></head><body class="a" style="margin:0px auto;padding:0px;word-wrap:normal;word-spacing:normal;background-color:#dedede;"><div role="article" aria-roledescription="email" aria-label="email_name" lang="en" style="font-size:1rem"><div style="display:none;max-height:0px;overflow:hidden;"> Scaling Up Diffusion Language Models to 100B, Adding 1 Attention Layer & Make Visual Encoders Generate Images, LayerNorm Is Not Needed In Transformer, and more  ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ </div><table role="none" width="100%" border="0" cellspacing="0" align="center" cellpadding="0" class="gg"><tr><td align="center" valign="top"><table role="none" width="670" border="0" cellspacing="0" cellpadding="0" class="aa" style="width:670px;table-layout:fixed;"><tr><td class="bodyWrapper" align="center" valign="top" style="padding:7px 7px 7px 7px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" style="border-width:0px 0px 0px 0px;border-style: solid; border-color: #2a2a2a;border-radius:10px 10px 0px 0px;background-color:#ffffff;" class="c"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr id="header"><td style="padding:15px 15px 0px 15px;"><div style="padding-top:0px;padding-right:0px;padding-bottom:20px;padding-left:0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td class="f" align="right" valign="top"><p> December 16, 2025 | <a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3EyVlOjf8BY9LxdzM62XjncO55udOHP-TvoE_DNkgm9IeWVKNfaKYU03_zFQdEORVGHDsfdnBBxtrKWX_wtPTL0l_g0M9tF_mizgFkAywLLNn_dL_wggsNwCOBssQjGcVvSCel6Ja3Ufd3ZkRpJr7Dg95zIQBFgq0bJgEBGaPHF4AANuomt07z-gZ067pXiMqkGL7CYzxW8ATcvrxc4pm64m7PW2BazTNZQOEclRhnpUK9wzewQUOqXgi6PvthdkoQKdk_CWCklOGeRZ3wwcUaYWXOFGO5kl55FrYpacxMhFO03FklZNqRtHSpEyO5opoAoAN-dF-GQvMSug3ooIjlspZQz4_tzLF8MB4HFh9bZZs26pUUdwtI6PsKsYy_-_33Em885Ruk4CC5hxiU_LFfVWR4Na5HLhBNlksciU7WmN9lY7P7hqAsMqyXRFIWa7ntSH-derTWyFDJswiuYFHHp69fniA_UtMnzCbkg5YsUv0tqmIH7eXrwW9fMXURgJndR6TsxHl0rPbtDGt7NlCcXn4VvFJTNhe-I6MqyH50dU9lhE-zUgOvfvcE_nKrbXubdGJXn0SW2iq_AeuVWeljLtY7zI1iwjZFx4W_6anndHEB8FY5fS88TI7IH2VRkXi3UKPA2bVEjVexPNXq0q9T0sPqtS5ZHVqvswfYAySxQZvT_dtuSJVYWHqYBkDXa-OjS9Ni8_mT_N0go7EQgnOdB5dQczeinoYD_yYu1kcUkxCZOILMmOc96uA-jxFN8QjGzyOgqixlIwyQFz1ws1NBrFyjSvWhntL6GeG0IGrc1ZSDad5sGu-04uXeCkJbWZE9U/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h0/h001.__jyWOze_nqq-lLdpSriIEJq-R59Mqv3Mz06BYf27Bo"><span class="translation_missing" title="translation missing: en.templates.posts.email.header.read_online">Read Online</span></a></p></td></tr><tr><td class="dd" align="center" valign="top" style="padding:15px 0;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top"><h1 style="text-align:left;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-weight:Bold;font-size:32px;color:#2A2A2A;padding:2px 0;line-height:38px;"> Scaling Up Diffusion Language Models to 100B! 
</h1><p style="text-align:left;font-family:'Helvetica',Arial,sans-serif;font-weight:normal;font-size:20px;color:#3E3E3E;padding:5px 0;line-height:24px;"> Scaling Up Diffusion Language Models to 100B, Adding 1 Attention Layer & Make Visual Encoders Generate Images, LayerNorm Is Not Needed In Transformer, and more </p></td></tr></table></td></tr><tr><td style="line-height:0;"><div data-open-tracking="true"> <img src="https://elink4f7.mail.bycloud.ai/ss/o/u001.3wmUuY8gEWd4_869a_eXcg/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/ho.gif" alt="" width="1" height="1" border="0" style="height:1px !important;width:1px !important;border-width:0 !important;margin-top:0 !important;margin-bottom:0 !important;margin-right:0 !important;margin-left:0 !important;padding-top:0 !important;padding-bottom:0 !important;padding-right:0 !important;padding-left:0 !important;"/> </div></td></tr></table></div></td></tr><tr id="content-blocks"><td class="email-card-body" align="center" valign="top" style="padding-bottom:15px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td id="nov-18-th-nov-24-th-33-latest-ai-re" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:normal;padding:0px 28px;text-align:left;"><h6 style="color:#2A2A2A;font-weight:normal;mso-line-height-alt:87.5%;"><i>Dec 8th ~ Dec 15th</i><br><i>#86 Latest AI Research Explained Simply</i></h6></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="industry-news-in-1-line" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">🗞️ Industry News in 1 Line</h2></td></tr><tr><td style="padding-bottom:12px;padding-left:50px;padding-right:40px;padding-top:12px;" class="ee"><div style="margin-left:0px;" class="edm_outlooklist"><ol start="1" style="list-style-type:decimal;margin:0px 0px;padding:0px 0px 0px 0px;"><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 1.3k</span></span> Alibaba’s Qwen Team has released <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.wcXdj6dB6nd1Cx4inzJNk-Z-6whDJYtGFT-dqE7VdZ9gkefCkXPTZ-8Xs8F1V7Jtv96v5uk0dV3smO0L-bhU8Xv5ALrgx_rFR4gpWQplZJOgrB7Hy6MGbjwU2_iEYjqrDCKJMDrXOFFT5PNLajfHIgJO50SoQm6N9y-2TdxgbQ1TJ--pjb4p9FE9Dv61pEFZCSvX96zvGY_NeBdSIsPtC8ShhJCjEOh7JE0K9c--8yOQHodIRyiY_Rs3v8gUKaPLh_x0wPH0M87SibTT104ZdWw6Rr4eMkLdeSM8f6KzYqc/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h1/h001.ubBCQHOJNPdE7V5GpX7NQD8846Q-ZF21aCvtfnh4Zx8" target="_blank" rel="noopener noreferrer nofollow"><span>Qwen3-Omni-Flash</span></a>, which is capable of processing text, audio, and video with seamless real-time responses. This upgraded iteration significantly enhances audio-visual interactions by resolving previous stability issues and offering precise control over system prompts and personas. 
Furthermore, it seamlessly integrates natural multi-turn video and audio understanding with <b>indistinguishable human voices</b> and fully customizable personalities, all backed by robust support for 119 text and 19 speech languages. Try it on <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSgLllRgYzaVYZNtPjegsbHh6CsDG9iRltGzStsABUifb-5Xd5dtUVsNGP6tncxvpELLDj5bkmW02n4X3dv9EeAYJxyhhD1fkeqypJHhytP_KGVvjue8CHFqBOPi-SQ4BvoSoWwW8eJota6iAxbGzd8askWa1V7bYiHPu8DBdLE0JC_h6uaeAlkz_y8GHU_czoDQ1wz73oNtFIpITq8EiqcH0AQ3D7Z-moDIIsgdGktOfc4myYboPkvtOPITOK1vjitd0xZRhnx55QjNEJrNr4Q/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h2/h001.HEH5cZJe9xHCeLt8hsnsWbkEW828JvaEsPFIZL8PS6w" target="_blank" rel="noopener noreferrer nofollow"><span>Modelscope</span></a> or <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWnaw_ucPVFH8tIgnu1JziLUfcu_cm6Snb_gyn20cdUygjHj8Dw-IilBkGIAZL5iB8sqeGXJEFhiBzxuZUxkeeXYslOZ-0WZ1idj4LxDyWnjwb0AleFSM90dDjiBSlaY8UR0JYVSAT7zz-YixlFr84G6CJoOo7djeJvD_gxPMYehgb8_alJcUlXgjSAG28wntfAb1VfYuN2KS5WGDlDizyzDlwj74gh1IsuF_9VgnW2BnHn15TJuopLF_z3a-pd0VHOVrz0ggu1CpqvWIQnMGDj0/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h3/h001.-k2jIXrqclKrxXuiVgE_UpwJsdPutUEVzY2v_-clv-M" target="_blank" rel="noopener noreferrer nofollow"><span>HuggingFace</span></a>. </p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/8a80f748-07a5-40cb-9e61-be51df4d58b8/q3o251201_metric.png?t=1765886772" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 712</span></span> Ai2 has announced the release of <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtobMN2eQnGMKl-9GGuROuhD2V3UW7BDvwCH52soRINNKl4HYFez46lcY6N7Min3ORW3rq9UxnBqo-lsX1UReXx7hOf3v1bjQDWvGN2PA5pRimPuadCnSorI_A9sXG868PdSx4sVxVek_iyCPkMH6ac40vfWlwTyK_xY9Bz3zr0SQ1HW_YQDiFKwN3zuTJffWV-EtNHxZYF4HeVEn5ODiWXfR3XBdLZx3AxcwweA37VdXnatqXxe0KVg217lyNf3md1A/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h4/h001.jfXzB1fCDJrxIYqJVUxycl7imctBYF1O30meo3sutGs" target="_blank" rel="noopener noreferrer nofollow"><span>Olmo 3.1</span></a>, which is their most capable model to date with the new Think 32B and Instruct 32B variants. The Olmo 3.1 Think 32B model achieves significant performance gains in logic and reasoning benchmarks. They also launched updated <b>7B models optimized for math and code</b> along with full weights, data, and training recipes for the entire suite. 
<a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.zNfxTwpJFmrsCuJJphGRkFBkltPW_jmZQr447Xmo9m98DguGrusy4dLgr_gNCYRP-9JL283fBLQZV-uGIw6eFf-6perowm0C_XvkUgP0DP87B6q_SKJ0VhBSS7Hf_gDnnABYhXMxiNmxglVTibJL8bxDx1WRpAljJQonHWmRn9rrlOkKuwN0EthrLrTseVQG_0O0ga84sac0BU0MmZPbbOQgFwd-HlrPyb6CYxVgOr5jp2qlN0K4BDGzqzxYSEpgCo7XLvzLkhsVCmMdheH_Ig/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h5/h001.2LjVtMj6jp42tIC3CaEmBjUFyyecc8H_ff_61FBVXuI" target="_blank" rel="noopener noreferrer nofollow"><span>Try it online</span></a> or download from <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWno2StfHgkdXyV9QnRyXhkv_5BPQzT_MEvw_cwdA3YwmsUoyC6JamQYp5Yq_dpd4MMJaKryP81hOyLcQbkWjfeDsp_HZEijSPu3kAczPiR08eFjF_3hdpHvQWh_zFj2nQmkASDuR1Z_DliZxBpSmW3z_droClXIBBzKxpckMDvsPzpEjvS4qa4AZl2VqjthboOR6bPKvfMzBMBMvoC6AM3QPhIijB8PAIZu8N-cCc9reWH9Wf3xYvX8h1nxgV9Ju_BvaS5RobsRIJ3blZl-22xY/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h6/h001.Yg6BEWThNnO69e4GXoNxIgAckY6ugEbOj3sa6DNgXak" target="_blank" rel="noopener noreferrer nofollow"><span>HuggingFace</span></a>. </p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/0ebbb83d-4627-4395-bf99-2df44762f619/1765558559-unnamed-2025-12-12t115545-244.png?t=1765887021" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 1k</span></span> <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j2ygXWEcjeGviVY71yr8FD-YqdGivtprEC_2BpuRd5kwjWDfkRGVpAQT83gRkhwzxnZ-j_B1LHJfb8xRqSJaG7v_58jNbSLMVCEQjhMs918VCho7kdbHTDMojPj9k_fM2Yuo7X183AjvNLER9XjkHjpv8SUYCWsQEXbJA--XfcI9_1R0PJRVA121B8FgpkFzx_nTYtiBMQk4HZjCLB-UMsqaqLu-oEOAI7IJtBDT2RpO4RTylvrF7vFVwAjzm2SMmBtThP7_9xQuOIcy18cYxHk9bdXhM0BEnca0klnfzUhgA/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h7/h001.ibzdhOrAdJ368yN415YPuO0lwCSZgUj7R9nRFW8Nwvg" target="_blank" rel="noopener noreferrer nofollow"><span>NVIDIA has launched the Nemotron 3 family</span></a> with a suite of open models, datasets, and libraries designed to advance specialized agentic AI. <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.sa7HIrCkEzyny8lstY55mLTYRGG0aKtvMtefff0l07NG3KYl0Y3pWd0dFTxa0u8WHxQZfUpNdImDIAndTaBROXcOHWArxPn8YAwiX-UciMs4DMv_8DfqDHVNsxK9nXEiaBXxd5UTHfZ5Nesf4a6eQCxgxKZHY-ah0YPhHcuyjnbdF-KMWzE0fAmPMQ4xOCqRj3qcbdoHOXX81pqQyPKgjfv_XA89Ajs7SHXp2NVlUj6qXDBWOXAED4-aLjmdhDoqccb36sHMXNB52ZPO2vP-mT9IqH9Y-YYaLbR8TTr_UCFEoRIB5xTqH30I2AXAlG9yU-3HTGRlWNjVznGUUyBwDw/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h8/h001.MA9JsCgPSwKCJEn1Zs-hMFmwC00AhrN0cW1ftX7-ZYM" target="_blank" rel="noopener noreferrer nofollow"><span>Nemotron 3 Nano</span></a> is a hybrid <b>Mamba-Transformer mixture-of-experts</b> (MoE) model that delivers highly efficient inference and a 1 million token context window while using only roughly 3B active parameters. 
NVIDIA has released the model under an open license alongside <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.9ggl6Mt0xphuuMReR5gVpd2xPJvDq2TWHZu_HvzRFxpkRB3eOAzeoOsyAqx5HfTzKk44pJgXIDetTmTlhrZ_DGJUZew4GgbX4RpBYr4_38JCxTwqxTst_irDhXrmdPzvUW8xehiFljDN3cq3ZT8RPRlPEgEPMHzJ6WHWV10OCbuyOZAtGEA8ym54TrC-BQbZFJSmRZjtbKl-GO5ClDyhLoZ-tAL_WNOM84HEPia1QZPqHOUg2Kfhlkdk8W0X4ZieOjwjMfU9OIoD9QjXvyF5KQ/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h9/h001.JwK3BHbGHk_jswhTtzqjTN6oJQkP5ttS4UjoIAAgsbk" target="_blank" rel="noopener noreferrer nofollow"><span>NeMo Gym</span></a>, a new open-source reinforcement learning library for scalable agent training. If you are feeling adventurous, why not <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.9ggl6Mt0xphuuMReR5gVpaGZ5YTLEGekup_X5btk2Dzyk-r2K0Op1moRCXB2Ng0uVr8POOFCMnfNCqGuQz1rYLgbGNI0l0rYbDMAE7xy2xbLYqe0xKa_fKd74f1gQ0AgNzpjTwoRVNwoQuiBSHaHO-LtvyOungNI7ZRaOzZ5TCOocD2GEWyEvCi3ocy71uKQEKug2z_UvBsSXLgMzyuhBLh0CW9XD-zLybs_IW5xexbvjoskOry-bjIYxWxmqhk0G0ncq9q9tSGynwN8STa-bYHdokvvX2V3VuQdUCc9omT8nymqeyYUkV4btdv1_aLgv_MQN5uyPiWNELX27PkebEKayADBckc-iojDeUd4xPV32ez5qDkN8nCSdvFqt8SfkS97luIod6Z8HJ1qXcE06_goYlvgu0-RcEdm058dzxnZEbOQNd8cy6EoUYGA48Y-/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h10/h001.R-wmSl2uUvqQNf2jwrRi2pnCgpFWyS2-7LaFQcYnW2E" target="_blank" rel="noopener noreferrer nofollow"><span>join the Nemotron Model Reasoning challenge</span></a>? </p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/9739627d-b608-4666-9740-388cc5f68c4f/Nemotron-3-Fig-5-png.jpg?t=1765887698" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li></ol></div></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="transparent" style="background-color:transparent;border-color:#2C81E5;border-style:solid;border-width:5px;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;"><span style="">New Premium Insights release</span></h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="">For context, Premium Insights is where I write down longer-form content that I think is interesting but not long enough to be made into YouTube videos. 
</span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="">Last week, I published the blog below:</span></p></td></tr><tr class="embed-gen-img-r"><td align="center" valign="top" style="padding:12px 27px 12px 27px;" class="dd"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" class="o" style="padding:12px 12px 12px 12px;;background-color:#FFFFFF;border-color:#F1F1F1;border-radius:5px 5px 5px 5px;border-width:1px 1px 1px 1px;"><!--[if !mso]><!--><div style="display:none; float:left; overflow:hidden; width:0; max-height:0; line-height:0;" class="mob-show"><table role="none" border="0" cellspacing="0" cellpadding="0" align="right" width="100%"><tr><td align="center" valign="top"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3ExvEJjT2Stk8Shs_WP0OMsML830bjr633m6HwK1LNQtyvf2iBr7Shhj6Tv349xuuwuKfT8r1BI6DEyUBU61UAXgVA7PVg7mtwU4sfuiGVUe9mbPymKyBF3WgAf8AiRzqltK3LhPHcBJVfaLwgcz7KAUXKxIInFAp_A--E3VLW_p1iT-xDXQ6fyPigeM0ffBi1YJace9CPJP2Np4Dmz4pxyVWh4fPTzwfWxDcv5iNeyqHKIEQiq6vvMoFqpziYbHvZyzWtqOZc8Ptt7v_PDu8m6eaam4D6_kuCwDDoge0PU8yRRHjJ6V7Kc9qsq4vBiN9up69VHuwXE3x9KIZr5WBj19oGzuwoS-mpMe7BdK_dBcsGecY1kuEljQByQhCnnjSsZLTHjj54OPwyUaiygAaQXlf1OIVyOnxgkKzcpfEF1aFKPKDoeSkHo1dIl-f_NzLqsqBUiimxbQ8S-agS6XcKfmTw0YaalVmv1UAMqUvLcEPDmlCueynb3YEsEANOAlEPrb5CaDFESIH9xt4fsAj_Ls0nn1zxV_ImwfQuIst92aOstXDM-j4FveNFedHEUSnH5vgT8VM7vxJ8JgNrOo-cUmQTDzFl7DPPH8rbF7caYl_L8WPDEo02aczbN1gSOQ8XCc3PJa5Mo5aH3T8IUuhjMzbryPLleEiFZk3-LXHDwkKqGxuwpDApftC2xs-WE60J8yIK5vBhapyFwTov1mHyCJKpn4xOEFAUeueO-KwhT7SMdznofbJiXYMfzEekkILOoBoXozvavW4HZYKfTxXWqt8SVtYooZncfTvWWlhxvLSbG1RgfbNECwk7Y1813J5Cw/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h11/h001.lj2Y1lY-wVEDE9RkKt_B9gtJ9NVD7drJ8AySYqU5BXc" target="_blank"><img src="https://beehiiv-images-production.s3.amazonaws.com/uploads/asset/file/fdf3146d-19cc-4b7e-96fb-b003bb0ccd1f/Q4_2025_research_trend_report.jpg?t=1765514046" width="100%" style="height:auto;display:block;"/></a></td></tr><tr><td height="16" style="font-size:16px;line-height:16px;"> </td></tr></table></div><!--<![endif]--><table role="none" border="0" cellspacing="0" cellpadding="0" align="right" width="100%"><tr><td width="57%" align="center" valign="middle" class="mob-stack"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="left" valign="middle" class="l"><p><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3ExvEJjT2Stk8Shs_WP0OMsML830bjr633m6HwK1LNQtyvf2iBr7Shhj6Tv349xuuwuKfT8r1BI6DEyUBU61UAXgVA7PVg7mtwU4sfuiGVUe9mbPymKyBF3WgAf8AiRzqltK3LhPHcBJVfaLwgcz7KAUXKxIInFAp_A--E3VLW_p1iT-xDXQ6fyPigeM0ffBi1buIgTYPzX6sseaopb6GZ5K-zaA6EGzcMX0Vqj0B-Z443UTxgRvpqtTy4JykF7kiQj0IxsaAuXczQ8CtjOTnx8W8oqmthDaGi_QNAgRSnOkfQWZER2XNLYdCPCm0OTJhiYkuR7RIkRPVIWX1mdotdPAYIDNyI-6kVC6211ToyA2YCH7xDUrd-0aDBQTlsPmVakjWZoSsUhtLqsMo_A6uKow-3hP__b8Zp2H7SOoM7OpRjsu3NHtw0IOEkn5GcAFwf-pJQPWfe8VmyEzXE1f11KF6TABGA6bhZwB9Ukbaw_5BatWusJNnrKIsWcFufNrxfaiU9R_XWlNhrh_KffeGfPdrywA69P_ORVs64a02odyMwBtlpRBfRArM0Bjt7-FC_06oseh470MkP7E_edSen1nZiIxTNVop123iyEQPS9eJ5UhDd5ZI9l1Q5j5n7NXXKQ37_-EX8a7OLnz4wRqs93w1BBAVExmdQLkk_rAquwFklCJoPsFdq5qmAma8ytm4lTxjQkL7BRgrLN7gEzbsKzeFMrcxtks2qgwaKGVFnNXw_az0NlChxfMcRxIkApHY83siiJgxzKqTedWwOFnGDE5wR6dSRn4hWhASHZ_JjW2J6oQOIAyWQpp9h8Afkzy3HQ/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h12/h001.hzkq5dDiW7FC0a_VJFtQjykdQJ-XZ60BkDYmHWXDcUc" 
style="text-decoration:none;font-style:normal;color:#2D2D2D !important;font-size:14px;line-height:20px;" target="_blank"> Aug~Nov AI Research Trend Report <tr><td align="left" valign="top" class="m"><p style="font-size:13px;line-height:19px;color:#2D2D2D;"> Basically recapping what I missed in the last 4 months </p></td></tr><tr><td align="left" valign="bottom" class="n" style="vertical-align:bottom;padding-top:12px;"><p style="word-break:break-word;">mail.bycloud.ai/p/aug-nov-ai-research-trend-report</p></td></tr></a></p></td></tr></table></td><td width="3%" style="font-size:16px;line-height:16px;" class="mob-hide"> </td><td width="40%" align="left" valign="top" class="mob-hide"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3ExvEJjT2Stk8Shs_WP0OMsML830bjr633m6HwK1LNQtyvf2iBr7Shhj6Tv349xuuwuKfT8r1BI6DEyUBU61UAXgVA7PVg7mtwU4sfuiGVUe9mbPymKyBF3WgAf8AiRzqltK3LhPHcBJVfaLwgcz7KAUXKxIInFAp_A--E3VLW_p1iT-xDXQ6fyPigeM0ffBi1Y9iV6vWSE_52qHYvZqh18W23NN6xoaykbGTzTe8qZbS6ZL9Y3vaBf7-uQ4pZeIzO9phmMeosdXAfAbbo_C_kiL5OT-LVbVSRMNGOUdp__MkGvGpebgbmrq-ok_G8IINXzIuqdqIGee_sk9K84gmqaa_5zDJidkY3N1gsk4wRQi-HxtToUMb1I7bYmfkW7Vt5UXUNOjXTzjjVUpgUzD0se98pU3SuSulpPV3sMDz1Qk4YEXHw8-80cuTa9BSFvvoPErwcrAA1jW2OcR94bVvUVYD_Vh96wuPVvDjWDeqvOOAy008QhvpdXiP0Ud53JdkrOxL_lZv-hcof-NSc6hkONocXaJH0dj7vdGWs2QERdskit1noX7HwFHzN1_XhmJiyX1ktbhUmCiNXESBHstsY8WfQsWXicn4LJYGshdSQyu3wa974awECmXQAl_S2MFky608CeBBVg1cF1Rkjl_34BjuECjCGstVVFIu1GjV0kaNDANSWG0MdiN93UVC0AvSs7YuoKkmYQDx7Y6-CwANo7HByiEIUhD7UQbXYYFNy1VBjIYNfXtH7XZ_3a3RGXRq47Yi4TquOfHDqzC6rOu-EjLs-ldX3q9AuxaGVFcXAmWRmNjqXcNC4iz24pP1HAw3UM/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h13/h001.wILwyVX_Tslz4cM4H8Pn0Sy9lxrXfeYWcsEIEyubPco" target="_blank"><img src="https://beehiiv-images-production.s3.amazonaws.com/uploads/asset/file/fdf3146d-19cc-4b7e-96fb-b003bb0ccd1f/Q4_2025_research_trend_report.jpg?t=1765514046" width="230" style="height:auto;display:block;"/></a></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="">I spent a lot of time on this quarterly(?) AI research trend report (~4000 words), so don’t miss out! 
</span></p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3EzPyKM-qWYD2kOl7BDZ-_rrxEwCEAfAc9jvNkesapA9vx3ePpFnC2QfvH5kYdUIkV8Hj0_OBOScuDLoVQQE-iuvSDWMfPaLTb4gZ_g2XGxO9Ej8aMP6sgWJsmusTTSo3CC8JaNV57QELdP5eS3Yz-pOa55EmEkw6hkIbHz_wcq-kqAFx3C6xjw5DCLJkokmLxrrLz9EieLkhUv0V_PXit97GIDl3EAUYEdhOA8f_FLNoZJ2lV-WTeSfwbr4QuWC97FQrPeO2M2n1XiuYfo8ehGc5uxMT-T_VzhEED-FHAvZBHs6LiYYLxSG02-O45TbGv5K0vmqwdepNl3oUzuH_3lbVt63JJ8guipVE7Qqx3i_1fl74e-qA0Mine2YX1zw2tKuIVpHI1PBnVjBy7-N4kG3ONtuRLEW7fR6NJE-WKd1Gnpcr1FXYkrspjyIhEj2rGuWUZfmKiaxPKt6ZzD44EdDt7Ldsjccvv1HEXsVihqzsIJeVRbnFByibMAl1QiityH0ER1bdX-mpoSuU7qxPmDcJTYiwWrdHOQroe2FrEDgualNcbXFk10XBoCP9aRnqyNYN-0e533WfSB08E83JdEfwfbDA1njHF5O6Nx5SDcbRIK4BPt8J0AXwVi5ZtgdW8tgXqzSZ6fQXFEtnfW3omiJ_IizQcL37L_vTt07nNsCsyXGc6nWurqk9aQbG9tNh7pMmb3sFrNnOBjczppWBVN20LaMB4kT7rQ4rFrasaRl1O_ANYFFzLLLlconeFKmibQ7sDOby51oSm9XVo676mq3aSldFgLY63w_WCKyH0wgH_srVDVl36JU0jprAJog04xtR5eGvWG3x3wGjHs0exir/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h14/h001.fPAV_1yHJSYwco8x1gg_OL_Xh2yLA0P8EI-Hul7PWwQ" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Check It Out Now </a></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style=""><a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.tLfGW26lAwaS9gFg17HSoGymQ3NNPtd5dE5MV_8UgjLbPKYFbBPtV6oAT4VYSncNiXOMe0ETHKViEemkGKRuti97gDsqlNJXOC9cMEoZt4vqGEMzd3CYIoAvubE-GTMMk0OmuDNW05VtsL0QiJeSOE1GJ1KMk-7CLBnG0UETptfphXnw6vwTX1NhNcnhGlzEFQwrQH5vdSAMd-SjSomXxTfTd1uKL1lVyXFil-64nqDzcPi8Wmz-9X6mZSvvVDVcTo5NM6l_wqET1xX8_BIdbA/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h15/h001.gyuRMmaqZFfmgrgyW-_OvoSh9k0CgrMR2hV-Cw-eFGc" target="_blank" rel="noopener noreferrer nofollow"><span>Advertise with The AI Timeline! 
</span></a></span></p></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="one-layer-is-enough-adapting-pretra" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h1 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:175.0%;"><b>One Layer Is Enough: Adapting Pretrained Visual Encoders for Image Generation</b></h1></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Gao<i> et al. [Apple]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 450 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> Image Generation </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Using powerful, pre-trained visual AI models with image generators has always been tricky. These understanding models produce rich, high-dimensional features, but today’s best generators need to work in a much smaller, more stable space to create images efficiently. This mismatch usually forces complex solutions. But a new method shows we might have been overcomplicating things. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/e5809b93-83f1-4d2c-831e-9721f79ec639/CleanShot_2025-12-16_at_16.53.13_2x.png?t=1765884203" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Comparison between standard VAE, VA-VAE, RAE, and FAE.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Instead of forcing the generator to work with the bulky original features or building a complex translator, the method, called FAE, uses a minimal encoder (just <b>a single attention layer</b>) to gently compress those features into a compact, generation-friendly space. The real cleverness is in the double-decoder setup. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> First, a dedicated decoder faithfully reconstructs the original high-quality features from this compact code. 
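</p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> For intuition, here is what that first half could look like as a minimal PyTorch sketch. This is hypothetical code with invented names and sizes, not the paper's implementation: a single attention layer squeezes the frozen encoder's features into a compact latent, and a feature decoder learns to reconstruct them. </p><pre style="background-color:#f7f7f7;border:1px solid #E1E8ED;border-radius:5px;padding:12px;font-family:Consolas,Monaco,monospace;font-size:13px;line-height:18px;overflow-x:auto;"><code>import torch
import torch.nn as nn

class FAESketch(nn.Module):
    """Hypothetical sketch of FAE's compress-then-reconstruct idea."""
    def __init__(self, feat_dim=768, latent_dim=32, num_heads=8):
        super().__init__()
        # Minimal encoder: one attention layer over the frozen
        # encoder's tokens, then a linear squeeze to a compact latent.
        self.attn = nn.MultiheadAttention(feat_dim, num_heads, batch_first=True)
        self.to_latent = nn.Linear(feat_dim, latent_dim)
        # Dedicated decoder that reconstructs the original features from
        # the compact code; the pixel decoder is a second, separate module.
        self.feat_decoder = nn.Sequential(
            nn.Linear(latent_dim, feat_dim), nn.GELU(),
            nn.Linear(feat_dim, feat_dim),
        )

    def forward(self, feats):        # feats: (B, N, feat_dim), e.g. from DINO
        h, _ = self.attn(feats, feats, feats)
        z = self.to_latent(h)        # compact, generation-friendly latent
        recon = self.feat_decoder(z) # reconstructed high-dimensional features
        return z, recon

feats = torch.randn(2, 256, 768)     # stand-in for frozen DINO features
z, recon = FAESketch()(feats)
loss = nn.functional.mse_loss(recon, feats)  # feature-reconstruction objective</code></pre></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> 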
Then, a second, separate decoder uses those reconstructed features as its guide to generate the final image pixels. This separation of duties means the system retains the semantic understanding from the powerful pre-trained model while giving the image decoder the clear, low-dimensional signals it needs to work reliably. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/401f7067-19d0-4391-bff7-9e791de59c5c/CleanShot_2025-12-16_at_16.54.11_2x.png?t=1765884259" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>An illustration of Training Stages of FAE. Stage Ia and Ib can be trained independently.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> It can plug into different types of generators, like diffusion models or normalizing flows, and can use features from various popular pre-trained models like DINO or SigLIP. The results are impressive. On the standard ImageNet 256x256 benchmark, a diffusion model using FAE achieved top-tier image quality, scoring a <b>near state-of-the-art FID</b> of 1.29. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/63fdde1d-0c88-4924-9141-85d664b64e3c/CleanShot_2025-12-16_at_16.54.52_2x.png?t=1765884303" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>FID results of different models on MS-COCO validation (256 × 256).</p></td></tr></table></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j28yjf9KIXZdsXoh1WlHvvKlOQpnZ3458J0q124-1IBnyV1WQHTNmgsENTmhfSZXZ_Zy_L9WhfEPw3uxLI2HubVuTK7GgdQm1TqpQoljP9XfN5UvTH7OSlXKzYUN4g2mbZRllNVR62RBHY4R-iROiMDptKa1_P3hKEwVBLH1j_1RQDmLyb_pW1uMrPgc9zw3n2s4XW6z2lre6cPSP-s7rL5luewXWpmQ45HA0Mzi2IUBB1TSRirts4AjqMY6Uc4bF0Q/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h16/h001.T3HyVTksKCmMtCqu2Y3q1b9sOQEZYgsaDPkrzlX4LP4" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans 
Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="stronger-normalization-free-transfo" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Stronger Normalization-Free Transformers</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Chen et al. [</i>Princeton University, NYU, Carnegie Mellon University<i>]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 1k </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> Transformers </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> bycloud’s pick </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Normalization layers are a common trick to help deep neural networks train smoothly, but they come with a cost. They need extra computation to track statistics and can be sensitive to settings like batch size. Researchers have been looking for simpler, drop-in replacements, and point-wise functions like Dynamic Tanh (DyT) showed it was possible to match normalization performance. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/7a6db0f4-0a58-4794-a829-71ed83150fcc/CleanShot_2025-12-16_at_16.58.16_2x.png?t=1765884511" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Structure of Dynamic erf (Derf), a point-wise function, that outperforms normalization layers and other point-wise functions.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> To find an optimal design, the researchers first identified what makes a point-wise function work well as a normalization replacement. 
They tested four key properties: the function should be zero-centered, bounded in its output, sensitive to small changes around zero, and monotonic (always increasing or decreasing). Functions that broke these rules often led to unstable training or worse performance. With these principles as a guide, they performed a large-scale search across many candidate S-shaped functions. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/ee396e45-cd8a-4463-bbc7-d6859292482c/CleanShot_2025-12-16_at_16.59.01_2x.png?t=1765884549" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Results of zero-centeredness on ViT-Base.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> The search identified a clear winner: a dynamically parameterized version of the error function, called Derf. This function, related to the Gaussian cumulative distribution, naturally has all the desired properties. When integrated into a model, it simply transforms each neuron's activation using learnable scaling and shifting parameters. The team tested Derf extensively, replacing normalization layers in Transformers for vision, speech, DNA, and language tasks. In nearly every case, <b>Derf outperformed both standard normalization layers</b> and the previous best alternative, DyT. 
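</p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> To see how drop-in such a layer is, here is a minimal PyTorch sketch. It assumes the DyT-like form gamma * erf(alpha * x) + beta that the description implies ("learnable scaling and shifting parameters" around erf); it is an illustration, not the authors' code. </p><pre style="background-color:#f7f7f7;border:1px solid #E1E8ED;border-radius:5px;padding:12px;font-family:Consolas,Monaco,monospace;font-size:13px;line-height:18px;overflow-x:auto;"><code>import torch
import torch.nn as nn

class Derf(nn.Module):
    """Sketch of a Derf-style point-wise normalization replacement.

    Assumes the DyT-like form gamma * erf(alpha * x) + beta; the real
    parameterization may differ in details.
    """
    def __init__(self, dim, alpha0=1.0):
        super().__init__()
        self.alpha = nn.Parameter(torch.full((1,), alpha0))  # input scale
        self.gamma = nn.Parameter(torch.ones(dim))           # per-channel scale
        self.beta = nn.Parameter(torch.zeros(dim))           # per-channel shift

    def forward(self, x):
        # erf is zero-centered, bounded, steep around zero, and monotonic:
        # exactly the four properties the search selected for.
        return self.gamma * torch.erf(self.alpha * x) + self.beta

# Drop-in usage: swap nn.LayerNorm(dim) for Derf(dim) inside a block.
x = torch.randn(4, 16, 512)
print(Derf(512)(x).shape)  # torch.Size([4, 16, 512])</code></pre></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;">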
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j28yjf9KIXZdsXoh1WlHvvKk75QHS662En5DsVJG6x0kXe2_R-66cOixWglOWC7kxaPtsZ220y757hmrklWnpDuRNAJQ6pj6riIEpWG5NKNyXYgYIvRyU9rrd0j3wb6YnDLDFtLyoYtEXBXDRn3m5ntTZdqqsPGiOnrvjIjai4y44oL8C3PMMhoB-MpXGcjtLcLZ3P3O6piXPo197gs00ecWH9jiA6Ar7p-kOoGFpc4iGWyUgF0Lz96KObL2GJdFcqA/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h17/h001.JTpXDQ5pFpxfefFFN9SOjn1dlICjcOS6HpRAAMA2rMk" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="on-the-interplay-of-pre-training-mi" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">On the Interplay of Pre-Training, Mid-Training, and RL on Reasoning Language Models</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Zhang et al. [</i>Carnegie Mellon University, Language Technologies Institute<i>]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 1.3k </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> LLM Reasoning </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> We often see language models get better at reasoning after reinforcement learning, but it's hard to tell if RL is teaching them new skills or just polishing what they already learned in pre-training. To solve this, researchers built a fully controlled test using synthetic math problems. This let them isolate and study the distinct roles of pre-training, mid-training, and RL. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/a3735c4f-802f-41bc-949a-09c4b7cbee00/CleanShot_2025-12-16_at_17.03.09_2x.png?t=1765884809" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Overview of the data generation framework, task setup, and process-verified evaluation.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> The study shows <b>RL can create new reasoning ability</b>, but only under specific conditions. If a task is already well understood from pre-training, RL just fine-tunes the model's existing skill. For RL to teach something genuinely new, the task must be slightly beyond the model's current ability. The model also needs a seed of knowledge from pre-training. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> For example, to solve a problem in a new context like a "zoo" scenario, the model must have seen that context at least a little bit during pre-training. Even just 1% exposure gives RL enough material to work with and generalize from. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/1f84341a-ab22-4ce2-9aee-1fd059862adc/findings.png?t=1765884778" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> The researchers found that mixing mid-training with RL leads to the best generalization. Mid-training, which uses supervised learning on data from the model's edge of competence, builds a strong foundation. RL then explores from that foundation. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Allocating most compute to mid-training with a little RL is great for known tasks, while dedicating more budget to RL is better for tackling completely new, harder problems. Adding rewards for correct reasoning steps, not just the final answer, further reduced errors and improved the reliability of the model's solutions. 
</p></td></tr><tr class="embed-gen-img-top"><td align="center" valign="top" style="padding:12px 27px 12px 27px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" class="o" style="padding:12px 12px 12px 12px;;background-color:#FFFFFF;border-color:#F1F1F1;border-radius:5px 5px 5px 5px;border-width:1px 1px 1px 1px;"><table role="none" border="0" cellspacing="0" cellpadding="0" align="right" width="100%"><tr><td class="embed-img" align="center" valign="top" style="vertical-align:middle;padding:0px 0px 12px 0px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWsAo_aXgAmaLJcAOK1i4ZEtjzKRcJWeAnq2fZ09Nu2mHLbDCMs-k0xvr6mnubwtZe94RsXnA7oQOXpnglAz_p_vdh38itBwlWDAGkFMpcQOgREE0hLjunSGhYlWbA7yLHdjXjxjKSzlbvpRF23dr42AirINu7mi9Vdu5VfmQtK7LloNfQVBpPVVuAmiuxME_kplMukk7XRa0QtgKiR2eJ-1S4QqIltLXNkPBSRF--ZLZj_HDiakNFQWLYi1Zv5w1ew/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h18/h001.sZUHNAgAvoAgLAHHixcVHzB83hv_jtWOmHMTsDFPyug" style="text-decoration:none;" target="_blank"><img src="https://cdn-thumbnails.huggingface.co/social-thumbnails/Interplay-LM-Reasoning.png" width="576" style="height:auto;display:block;" class="w100pc"/></a></td></tr><tr><td align="center" valign="top" class="cc"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="left" valign="top" class="l"><p><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWsAo_aXgAmaLJcAOK1i4ZEtjzKRcJWeAnq2fZ09Nu2mHLbDCMs-k0xvr6mnubwtZe94RsXnA7oQOXpnglAz_p_vdh38itBwlWDAGkFMpcQOgREE0hLjunSGhYlWbA7yLHdjXjxjKSzlbvpRF23dr42AirINu7mi9Vdu5VfmQtK7LloNfQVBpPVVuAmiuxME_kr94oIkwhKze00Nknumiy6pSFSO69eLXlYokhdbWh7tUuBF2yNsotoplQB-DrOChGg/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h19/h001.4Ng0KFqmiWha3xmgDN1bccdhRxRe6WsRpi2_5Krrsj0" style="text-decoration:none;font-style:normal;color:#2D2D2D !important;font-size:14px;line-height:20px;" target="_blank"> Interplay-LM-Reasoning (Interplay-LM-Reasoning) <tr><td align="left" valign="top" class="m"><p style="font-size:13px;line-height:19px;color:#2D2D2D;"> Org profile for Interplay-LM-Reasoning on Hugging Face, the AI community building the future. </p></td></tr><tr><td align="left" valign="bottom" class="n" style="vertical-align:bottom;padding-top:12px;"><p style="word-break:break-word;">huggingface.co/Interplay-LM-Reasoning</p></td></tr></a></p></td></tr></table></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> In tests, a well-calibrated RL approach improved performance on harder tasks by up to 42%, and proper pre-training seeding allowed contextual generalization improvements of up to 60%. These findings show us that when we design training pipelines, we need to ensure broad pre-training coverage of basic concepts, use mid-training to build robust priors, and then apply RL to explore just beyond the model's current limits. 
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j28yjf9KIXZdsXoh1WlHvvKkHZTQ3B56fROYr3E51IwdxNaZbCtGlAOlvqleKjC0pmD8Zxbe9hNUzRhU41YCEIfkc8JHpF_ehZJaEvhFitbk5uxhEAkuj7H47Xv_Wy1lm3ep-dRvK8dPWuGKrhwsC1nWznYU4PA75K_QQqxiFFof5L_u4zlvVF5ANpHoQJRroY2QMdoW6Zr7pk5KlJ-bxif5FwhIQdT4Xrc67Vpo6lyC63xPKHqvLioWPxCR1BIlqoA/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h20/h001.vtSC9uKiHE2yske1rV3aT7iyx8cYY_XjS0oFT6j7ock" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="not-all-bits-are-equal-scale-depend" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">LLaDA2.0: Scaling Up Diffusion Language Models to 100B</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Bie et al. [</i>Ant Group, Renmin University of China, Zhejiang University, Westlake University, HongKong University of Science and Technology<i>]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 757 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> Diffusion LLM </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Today's powerful language models generate text one word at a time, which creates a bottleneck. This new approach aims to break that bottleneck by converting existing models into a different kind that can predict many words at once. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> It uses a <b>three-stage</b> training method. 
Instead of building a parallel model from scratch, which is very costly, the researchers start with a strong existing model trained for sequential generation. They then carefully retrain it using a process called Warmup-Stable-Decay. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/ab68b450-1058-461e-a10b-499e673dcf4d/CleanShot_2025-12-16_at_17.10.30_2x.png?t=1765885238" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>A schematic of the progressive training framework for transforming an AR model into an MDLM.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> First, in the Warmup phase, the model slowly learns to reconstruct small, masked blocks of text instead of just the next word. The block size gradually increases until the model is reconstructing entire sequences at once in the Stable phase. Finally, in the Decay phase, the model is tuned back to working with smaller blocks, which makes it much faster for practical use while keeping its new parallel skills. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/6f231383-cc3d-44fa-a5ac-c25a36e72964/CleanShot_2025-12-16_at_17.11.14_2x.png?t=1765885283" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> To make this conversion stable and efficient, the team introduces two key techniques. They use a special document-level attention mask during training. This prevents the model from getting confused by attending to unrelated text when multiple documents are packed together for efficiency, ensuring it learns clean, coherent reconstructions. For the final instruction-tuning phase, they also apply a method called complementary masking. This clever trick ensures nearly every token in the training data contributes to the learning signal in each step, speeding up training and improving the model's grasp of language. 
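</p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> A minimal sketch of the complementary-masking idea, assuming the simplest two-copy variant: pair each sequence with the complement of its mask, so every token is supervised exactly once across the pair. The 50/50 split is illustrative, not the paper's schedule. </p><pre style="background-color:#f4f4f4;border-radius:6px;font-size:13px;line-height:19px;overflow-x:auto;padding:12px;"><code>import torch

def complementary_masks(seq_len, mask_ratio=0.5):
    """Toy sketch of complementary masking: sample one Boolean mask and
    pair the sequence with its complement, so every token is masked
    (and therefore supervised) in exactly one of the two copies."""
    mask_a = torch.rand(seq_len) &lt; mask_ratio
    mask_b = ~mask_a  # complement covers all remaining positions
    return mask_a, mask_b

m_a, m_b = complementary_masks(10)
assert torch.all(m_a ^ m_b)  # every position is masked exactly once</code></pre><p style="mso-line-height-alt:150.0%;">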
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/2f028488-c0e2-4da6-9447-9d6f59fef7d5/llada2_flash_main_bench.png?t=1765885133" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> The final models, including a 100B-parameter version called LLaDA2.0-flash, show strong performance on reasoning, coding, and general knowledge benchmarks. It also shows a significant boost in inference speed, generating many more tokens per second than comparable auto-regressive models. </p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.VomAAYwkCjux8i_FMc4kJaoaRcPhi6oj0LL4dkZdoHRSNUnjPgeAHCnmS-R6teqm9zM6ksQ_goYMA2XSXSBWhjxY0CK3GPR9k7Xy7369g4ENFGJxi2FuYNton696Tu2VUqyxfLRbtdQiUw0-_lqpEWkJDbdW_tRX7YOx2jBTUKLL2aGEN4ze2B4YsnHGpC5AjEnvPPqX-9NlHYD8X3KGkRZ5A4AC9cd6qUTbj3Q8uhf_rfJMYPTxuszUzNUm0hYUBZYjvw9whUjWbVzv6Rr8tQ/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h21/h001.IMEk-TdXoQzXcF1yFvmNd-QI8_BQ9iag32ueea3S2ks" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="closing-the-train-test-gap-in-world" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Closing the Train-Test Gap in World Models for Gradient-Based Planning</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Parthasarathy et al. 
[</i>Columbia University, New York University<i>]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 1.3k </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> LLM</span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Gradient-based planning with world models can be used for intelligent robot control, but in practice its performance has often fallen short of slower, search-based alternatives. This is because of train-test mismatch: these models learn to predict the next state from expert demonstrations, but are later used to optimize sequences of actions, which often leads them into unfamiliar and unreliable territory. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/be06feb2-b7bb-4f44-a4ec-ebcf6f9b84e2/524776366-2ad42535-1e5b-474a-9217-efba54f24b18.png?t=1765885371" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Overview of the two proposed methods.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> To bridge this gap, the researchers developed two clever fine-tuning methods. Online World Modeling tackles the problem of the model venturing into unknown states during planning. It works by using a simulator to correct the trajectories that gradient-based planning produces, and then training the model on these new, corrected paths. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/7b1cab7c-1428-4b54-a83d-ff1998227a6d/CleanShot_2025-12-16_at_17.13.55_2x.png?t=1765885442" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> This teaches the model to be accurate even for the non-expert actions it will encounter during optimization. Separately, Adversarial World Modeling focuses on smoothing the optimization landscape itself. It trains the model on deliberately perturbed versions of expert data, making the model more robust and creating a loss surface that is easier for gradient descent to navigate. 
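</p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> As a rough sketch of the adversarial idea (our assumptions: FGSM-style perturbations of the expert actions and a simulator that relabels the perturbed transitions; the paper's exact scheme may differ): </p><pre style="background-color:#f4f4f4;border-radius:6px;font-size:13px;line-height:19px;overflow-x:auto;padding:12px;"><code>import torch
import torch.nn as nn

# Sketch of adversarial world-model fine-tuning. Assumptions (ours, not
# the paper's confirmed recipe): FGSM-style perturbations of expert
# actions, and a simulator that relabels each perturbed transition.

world_model = nn.Sequential(nn.Linear(6, 64), nn.ReLU(), nn.Linear(64, 4))
opt = torch.optim.Adam(world_model.parameters(), lr=1e-3)

def sim(state, action):  # stand-in for the true simulator dynamics
    return state + 0.1 * action.sum(-1, keepdim=True).expand(-1, 4)

def adversarial_step(state, action, eps=0.05):
    # Find a worst-case action near the expert action via one FGSM step.
    action = action.clone().requires_grad_(True)
    target = sim(state, action).detach()  # simulator label, held fixed
    pred = world_model(torch.cat([state, action], dim=-1))
    loss = (pred - target).pow(2).mean()
    grad, = torch.autograd.grad(loss, action)
    adv_action = (action + eps * grad.sign()).detach()
    # Train on the perturbed transition, relabeled by the simulator.
    pred = world_model(torch.cat([state, adv_action], dim=-1))
    loss = (pred - sim(state, adv_action)).pow(2).mean()
    opt.zero_grad()
    loss.backward()
    opt.step()
    return loss.item()

state, action = torch.randn(32, 4), torch.randn(32, 2)
print(adversarial_step(state, action))</code></pre><p style="mso-line-height-alt:150.0%;">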
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/fc1ac9ea-9aec-49d5-b827-5da29f8ebe37/CleanShot_2025-12-16_at_17.14.07_2x.png?t=1765885471" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> These techniques significantly close the train-test performance gap. In tests on object manipulation and navigation tasks, gradient-based planning with an adversarially fine-tuned model matched or exceeded the success rates of the powerful but computationally heavy Cross-Entropy Method (CEM) <b>using only 10% of the computation time</b>. </p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoZr-f6keVrG1GKuVv1wQKvr00n8dJU8CHd80kZKefB2wwCQqf7zKYiY9WNlFdfZzeJGfR4tEpU3k5tI80iGA-HAbRzOKSoApnIGAGUdOhpBljLkTCaWN6BOZ_VZRF2Or1i0Ie8hVxPEWLcx0WiIocaEEpYxxm3LetEC4ZvQaiEzoS6x6jjk3BGc7Z-kBud3Fvcl6KIu8n2t69Fi3X0tGYqvGFIhswqv2VF7n6l9K_ZqRF7SnyD6BhAcDF0DOjBFEyg/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h22/h001.L1GJ1Lg3UtKbN8C05HCv79bknAq2FYns3JA1ixkrVmc" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="center" valign="top" style="padding:20px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmNzU1H97qA8YFPjRS4my4ShBXAoqOTmNC2kNiHALS9DOQFqopVD1JOdRMlcaPQyVUnITZkE8_g4Lb2oivs-Zh1zqSFcm3emYvRyFWfx-55LgV2OKr7KZYaN_hDgTvxQ9VpLkUBaCbKFkafSW9hPXkwQoj1PxsmFeUraOrhLnqv51-3bXh_osa64SDetjgIgs8a5-9gueegYw6YR-zt3Ova5RKaqX_UIe8_jRvQ7HgMvRXZ-mspDFDfSX5PTqnc2FOyeF44jhr4AHDrBY7V_etTg/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h23/h001.FrsUE_zR491ycEL_8S55dugdfuCGD6qp5uT__t5ZKPc" style="text-decoration:none;"><table align="center" width="100%" cellpadding="0" cellspacing="0" border="0" role="none" style="max-width:520px;margin:0 auto;"><tr><td class="p" width="100%" 
style="padding:2px;border:none;"><table width="100%" cellpadding="0" cellspacing="0" border="0" role="none"><tr><td align="center" valign="top" style="width:100%;"><div style="max-height:0;position:relative;opacity:0.999;width:100%;mso-hide:all;"><div style="display:inline-block;width:100%;padding-top:25%;"><img width="20%" height="auto" loading="lazy" alt="" style="border:0;" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/youtube_play_icon.png"/></div></div><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmNzU1H97qA8YFPjRS4my4ShBXAoqOTmNC2kNiHALS9DOQFqopVD1JOdRMlcaPQyVUnITZkE8_g4Lb2oivs-Zh1zqSFcm3emYvRyFWfx-55LgV2OKr7KZYaN_hDgTvxQ9VpLkUBaCbKFkafSW9hPXkwQoj1PxsmFeUraOrhLnqv51-3bXh_osa64SDetjgIgs8bMIpaWqy3OQtDL74or-FVYe8twOECj0KPC1r1Bq44QcUdAsFr8jN0m22PTNZ9PfMQh0cs_4GNIMpUJEXPTj1UA/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h24/h001.i5RJbDV4TFJTleG5pEqSdYPHzFPvOSssmdj1V5qfl6k" style="text-decoration:none;"><img src="https://i.ytimg.com/vi/pljoUcBniPQ/maxresdefault.jpg" width="480" height="auto" loading="lazy" alt="YouTube video by bycloud" style="display:block;height:auto;border:0;outline:none;text-decoration:none;background-color:#000000;width:100%;"/></a></td></tr><tr><td><p style="font-size:12px;font-weight:500;font-style:italic;font-family:Helvetica, Calibri, sans-serif;color: #686a6d; padding-top:0 !important;padding-bottom:6px !important; padding-left:4px !important;"> DeepSeek V3.2 Just Broke SoTA Again… But How? </p></td></tr></table></td></tr></table></a></td></tr></table></td></tr></table></td></tr><tr><td align="center" valign="top"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td><tr><td class="b" align="center" valign="top" bgcolor="#2a2a2a" style="padding:0px 0px 0px 0px;border-style:solid;border-width: 0px 0px 0px 0px;border-color: #2a2a2a;border-bottom-left-radius:10px;border-bottom-right-radius:10px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" bgcolor="#73ddff" style="padding:12px"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td><span style="padding-left:1px;"></span></td><td align="center" valign="middle" width="75" style="width:75px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.1muhFWIqieRYpaJ-FbWSCQqcWoV4NNHHr5SkP9THApWUO4S9eWSDBFDMKQ83N4CY1l4kXQTU9YnEEqXRrg_2uhS94rQOKDl60C6UO57Zu1mJCFi_zhfD-a_hnJHdTQ7EB7lAlfmnNFt0AP_EKKsDipaMnrN5NNJbnU912XoXRYh3PhgI_e41QjnG5NTpUGnF2VJ7OMN7ZS44NGiBxB3nTcDXrfbZ-kquCcAH94vNvsL4rASL-hiIFzb4Y3ep1XR8/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h25/h001.g8thP1hZQFeRM18rURT-cpCl0SxcRkSirI83dt64j5M" style="text-decoration:none;"><img width="22" height="22" alt="tw" border="0" style="display:block;max-width:22px;color:Dark" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/x_dark.png"/></a></td><td align="center" valign="middle" width="75" style="width:75px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmBoQnQ9VXnB2zTxBG4HeHBi5iti4l06m5fR1UTFq_vFgQaGMmutCjJbuBFU8WHbRj6heToGsiZHlry3dxu5DEimeQbpBAMyhKdSbaWrmIf3bAefTyWSrIfZNTChX0x6PM-YfkKcf2DHWQrBM_xdrT_7zEOWqgBT8iy0bIBscu_zbq15SkXJ02Yc094ZsTmqNBaIhXY3OMxAEDHP6-l9S2-HYLQS4e3j4xtgJw2nlNlgS/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h26/h001.nLI7cy2hAe92N0ScZiwhIH7mso65BG4r_7XJkzi_gKw" style="text-decoration:none;"><img width="22" height="16" alt="yt" border="0" 
style="display:block;max-width:22px;color:Dark" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/youtube_dark.png"/></a></td><td><span style="padding-left:1px;"></span></td></tr></table></td></tr><tr><td height="10" style="line-height:1px;font-size:1px;height:10px;"> </td></tr><tr><td class="w" align="center" valign="top" style="padding:15px 15px 15px 15px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top"><p style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> Update your email preferences or unsubscribe <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxWc4htTObwdorovK0nFHVH-4pUdVE0ELYH5DsNemk732SjNwhPNJ25r0O8B5vYifsBhEpz-DJgyVFmavJPa0OyKRRnvw4o7XGyvIv7PRofnmswl7o_HwCBK77NCPG_-k4R3J2p8w1iekFg_xRXm8Cw60n1NbV5q-xN_168akKGWUrn4gPU-4wb9xNwIn02FgdDxnF17Nq71b93x1pQi2equRBeXeugZgtTZL5dGnA4ahF-ZG8YtOebomAbt7wG7K-xlgMfoGFJY0L4wdzTVT6WYO3uMYFsSrr1RyRQF1IlZHrRaxqCOvZkM_n0pLILzW9nTMswTBxjqm9I4XWtvM9TT1ynELeJArTvlSiKpFQmJoRgSZjuCqwFYEAgs6X_6ubPhEjZQkE_86p4hDMHMjzmqD7d4aln6lMG2gT0yLDLYY3AlkxEKUsB2Tb3bcO6yVsGc2TmWO29RQbO-sdlRr4-FNnyHO3Nvwxt2ErqSKK4ON4QoUjHR5xZJQ0dwnZNMH7L2bzqb3mJueYWpxguJ1fIwazXfh1zCdo9EcojiJc1-ro2D8ch6fKlC4Jx55cgzl4XkCzCJLU2316FJ15NVKGDA61JDI4CuzgbLlmQFyndVx9_Iwd4-O-V3ZhZI1SJyNZDHPTyinupY8R3a5vvPFVPw8j8OoVNnLPF2e9bipP9FvYHZ6eH5o7xr5PwBeYYafzy8kn2zBnNHCrkeXWfGmNjOuyrZulURDB1mPr9UnqWy_QZ5QYewSZZrwa7G0xqx4aqMa3G2ovVve8LTHYLN0Lt2RRlMQm1fKgD2lT52ZdWGbifHKlhH0lN5Suq5eEkaUUuAMysV3oRCftV4sLtzjv5NrwAp1ucmvwELciRs_mAb4IKRaZ5xzRTL_gFq6j_5Vd-AJ7pFlmsC5vBXCyiHvcm3QzEJLp3Y8xty8fwUFqwtWSfWkaOCC5R_wQP1LQM3HUWA4NuFgdHOmdqygcM5VKv7yahpdBLB04iOe1yvNT1Z1rrPuJv4-Xqec8se-U-Rgow/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h27/h001.aNShifcZN_Mz03ZWALqGk3Ut0kCuvJJ_yMLN-XeVGxg" style="text-decoration:underline;text-decoration-color:#FFFFFF!important;color:#FFFFFF!important;"> here</a></p><p class="copyright" style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> © 2025 bycloudai </p><p style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> 228 Park Ave S, #29976, New York, New York 10003, United States </p></td></tr><tr style="display: table-row !important;"><td align="center" valign="top" style="padding-top:20px;" style="display:table-cell !important;"><table role="none" border="0" cellspacing="0" cellpadding="0" align="center" style="display:table !important;"><tr style="display:table-row !important;"><td class="u" align="center" valign="middle" height="32" style="height:32px;display:table-cell !important; max-height: 32px !important;margin:0px !important; background-color: #ffffff !important;"><a style="line-height:32px !important;text-decoration:none;display:block !important;" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j28olDWFpV5DDKfdk_OdOKOj5RcHMPGAbILds_PzdWtD3HdUa5S1ln3KbsUR0cvLTXlkARqpclavkCpJgeGW743Clbv_a025IIpDsC0yWp8vUa4nBFd2psXCEQDz3rNuO6NI5OMXAEXSeSjT4DmeOc0-NLJbYObEhCgmD-eZAv1cVUXWA12sUC-Ci-2z_Os_y3z3r-5-x-04gEv5CAlhLf-NBcPFyVzEONr7A6_CTGyyN/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h28/h001._Wp0EJon5X50Wpq1EcTiZGO9vFJRjscFpaLO6V5ZT8g"><img src="https://media.beehiiv.com/output-onlinepngtools.png" width="16" alt="beehiiv logo" style="display:inline-block !important;max-width:16px !important; vertical-align:-3px !important;width: 16px !important;" border="0"/><span style="padding-left:11px !important;display: inline-block !important;">Powered by 
beehiiv</span></a></td></tr></table></td></tr><tr><td align="left" valign="top" height="2" style="height:2px;"><a href='https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWsHIaP4XNp0WgUYqLvHcKk_3uqk_KIkz4ddLinhFbud6JuxLFdSUhYnR7b1NSsmbtzXNGNblnEEMKUtkCAjkn8Y/4mh/w1GedkC-Q4WzRf7Wj2N1BQ/h29/h001.zCp5CmExC5bKsYP2b1ooJSPXmhUm5y_2sWY8fgLDilA' style="color: #2a2a2a !important; cursor: default; font-size: 1px; text-decoration: none;"> Terms of Service </a></td></tr></table></td></tr></table></td></tr></td></tr></table></td></tr></table></td></tr></table></td></tr></table></div></body></html>