<!DOCTYPE html><html lang="en" xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" style="font-size:16px;"><head><meta charset="utf-8"/><!--[if !mso]><!--><meta http-equiv="X-UA-Compatible" content="IE=edge"/><!--<![endif]--><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="x-apple-disable-message-reformatting"/><meta name="format-detection" content="telephone=no,address=no,email=no,date=no,url=no"/><meta name="color-scheme" content="light"/><meta name="supported-color-schemes" content="light"/><title>Last Week's Trending Papers 📈</title><!--[if mso]><xml><o:OfficeDocumentSettings><o:AllowPNG/><o:PixelsPerInch>96</o:PixelsPerInch></o:OfficeDocumentSettings></xml><![endif]--><style>
:root { color-scheme: light; supported-color-schemes: light; }
body { margin: 0; padding: 0; min-width: 100%!important; -ms-text-size-adjust: 100% !important; -webkit-transform: scale(1) !important; -webkit-text-size-adjust: 100% !important; -webkit-font-smoothing: antialiased !important; }
.body { word-wrap: normal; word-spacing:normal; }
table.mso { width: 100%; border-collapse: collapse; padding: 0; table-layout: fixed; }
img { border: 0; outline: none; }
table { mso-table-lspace: 0px; mso-table-rspace: 0px; }
td, a, span { mso-line-height-rule: exactly; }
#root [x-apple-data-detectors=true],
a[x-apple-data-detectors=true],
#MessageViewBody a { color: inherit !important; text-decoration: inherit !important; font-size: inherit !important; font-family: inherit !important; font-weight: inherit !important; line-height: inherit !important; }
span.MsoHyperlink { color: inherit !important; mso-style-priority: 99 !important; }
span.MsoHyperlinkFollowed { color: inherit !important; mso-style-priority: 99 !important; }
.a { background-color:#dedede; }
.b { background-color:#2a2a2a; }
.c { background-color:#ffffff; }
.d { background-color:#fff0c8; }
.d2 { background-color:#FFFFFF; }
.d3 { background-color:#FFFFFF; }
h1 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h2 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h3 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h4 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h5 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h6 a { text-decoration:none;color:#2C81E5;font-style:italic; }
h1, h1 a, h2, h2 a, h3, h3 a, h4, h4 a, h5, h5 a, h6, h6 a, ul, li, ol, p, p a { margin: 0;padding: 0; }
h1 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:700;font-size:28px;color:#2A2A2A;line-height:42px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h2 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:700;font-size:24px;color:#2A2A2A;line-height:36px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h3 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:20px;color:#2A2A2A;line-height:30px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h4 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:18px;color:#2A2A2A;line-height:27px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h5 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:16px;color:#2A2A2A;line-height:24px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
h6 { font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif;font-weight:400;font-size:14px;color:#2A2A2A;line-height:21px;padding-bottom:4px;padding-top:16px;mso-margin-top-alt:16px;mso-margin-bottom-alt:4px }
p { font-family:'Georgia','Times New Roman',serif;font-weight:400;color:#2D2D2D;font-size:16px;line-height:24px;padding-bottom:8px;padding-top:8px;mso-margin-top-alt:8px;mso-margin-bottom-alt:8px; }
p a, .e a, ul a, li a, .h a, .h2 a, .h3 a { word-break:break-word;color:#2C81E5 !important;text-decoration:none;font-style:italic; }
p a span, .e a span, ul a span, li a span { color: inherit }
p .bold { font-weight:bold;color:#2D2D2D; }
p span[style*="font-size"] { line-height: 1.6; }
.f p { font-size:12px;line-height:15px;color:#2D2D2D;padding:0; }
.f p a { color:#2D2D2D !important; }
.g p { font-family:'Helvetica',Arial,sans-serif;font-size:14px;line-height:20px;font-weight:normal;margin:0; }
.g p a { text-decoration: underline; }
.i p { font-family:'Helvetica',Arial,sans-serif;line-height:23px;font-size:15px;color:#2D2D2D; }
.i p a { color:#2D2D2D !important; }
.i2 p { font-family:'Helvetica',Arial,sans-serif;line-height:23px;font-size:15px;color:#2D2D2D; }
.i2 p a { color:#2D2D2D !important; }
.i3 p { font-family:'Helvetica',Arial,sans-serif;line-height:43px;font-size:24px;color:#2D2D2D; }
.i3 p a { color:#2D2D2D !important; }
.h p a { color:#595959 !important; }
.h2 p a { color:#595959 !important; }
.h3 p a { color:#595959 !important; }
.f p a, .i p a, .i2 p a, .i3 p a, .h p a, .h2 p a, .h3 p a { text-decoration:underline; }
.j { border-top:3px solid #ffeb2d; }
.k p { padding-left:15px;padding-bottom:0px;padding-top:6px;mso-margin-top-alt:6px;mso-margin-bottom-alt:0px;mso-margin-left-alt:15px; }
.o { background-color:#FFFFFF;border:1px solid #F1F1F1;border-radius:5px; }
.o p { font-family:'Helvetica',Arial,sans-serif;padding:0px;margin:0px; }
.l p,
.l p a, .l a { font-size:14px;line-height:20px;font-weight: bold;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.m p,
.m p a { font-size:13px;line-height:18px;font-weight:400;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.n p,
.n p a { font-size:12px;line-height:17px;font-weight:400;color:#2D2D2D;padding-bottom:6px;mso-margin-bottom-alt:6px;text-decoration:none; }
.p { background-color:#FFFFFF;max-width:520px;border:1px solid #E1E8ED;border:1px solid rgba(80, 80, 80, 0.3);border-radius:5px; }
.q { font-size:16px;font-family:Helvetica,Roboto,Calibri,sans-serif !important;border:1px solid #e1e8ed;border:1px solid rgba(80, 80, 80, 0.3);border-radius:10px;background-color:#FFFFFF; }
.q p { font-size:16px;font-family:system-ui,Helvetica,Roboto,Calibri,sans-serif !important;color:#222222;padding:4px 0; }
.r { border:1px solid #E1E8ED !important;border-radius:5px; }
.s p { font-size: 14px; line-height: 17px; font-weight: 400; color: #697882; text-decoration: none; }
.t p { font-family:'Helvetica',Arial,sans-serif;font-size:12px;line-height:18px;font-weight:400;color:#000000;font-style:italic;padding:4px 0px 0px; }
.v { border-radius:10px;border:solid 0px #DFD150;background-color:#2C81E5;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;color:#FFFFFF; }
.v a { text-decoration:none;display:block;color:#FFFFFF; }
.w p { font-size:12px;line-height:15px;font-weight:400;color:#FFFFFF; }
.w p a { text-decoration: underline !important;color:#FFFFFF !important; }
ul { font-family:'Helvetica',Arial,sans-serif;margin:0px 0px 0px 25px !important;padding:0px !important;color:#2D2D2D;line-height:24px;list-style:disc;font-size:16px; }
ul > li { font-family:'Helvetica',Arial,sans-serif;margin:10px 0px 0px 0px !important;padding: 0px 0px 0px 0px !important; color: #2D2D2D; list-style:disc; }
ol { font-family:'Helvetica',Arial,sans-serif;margin: 0px 0px 0px 25px !important;padding:0px !important;color:#2D2D2D;line-height:24px;list-style:decimal;font-size:16px; }
ol > li { font-family:'Helvetica',Arial,sans-serif;margin:10px 0px 0px 0px !important;padding: 0px 0px 0px 0px !important; color: #2D2D2D; }
.e h3,
.e p,
.e span { padding-bottom:0px;padding-top:0px;mso-margin-top-alt:0px;mso-margin-bottom-alt:0px; }
.e span,
.e li { font-family:'Helvetica',Arial,sans-serif;font-size:16px;color:#2D2D2D;line-height:24px; }
.rec { font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji" !important; }
.rec__button:hover { background-color: #f9fafb !important; }
.copyright a {color: inherit !important; text-decoration: none !important; font-size: inherit !important; font-family: inherit !important; font-weight: inherit !important; line-height: inherit !important;}
.txt_social p { padding: 0; word-break: break-all; }
.table, .table-c, .table-h { border: 1px solid #C0C0C0; }
.table-c { padding:5px; background-color:#FFFFFF; }
.table-c p { color: #2D2D2D; font-family:'Helvetica',Arial,sans-serif !important;overflow-wrap: break-word; }
.table-h { padding:5px; background-color:#F1F1F1; }
.table-h p { color: #2A2A2A; font-family:'Trebuchet MS','Lucida Grande',Tahoma,sans-serif !important;overflow-wrap: break-word; }
@media only screen and (max-width:667px) {
.aa, .w100pc { width: 100% !important; }
.bb img { width: 100% !important; height: auto !important; max-width: none !important; }
.cc { padding: 0px 8px !important; }
.ee { padding-top:10px !important;padding-bottom:10px !important; }
.ff ul, .ff ol { margin: 0px 0px 0px 10px !important;padding: 0px !important; }
.ff li { margin:10px 0px 0px 10px !important; }
.r {height:140px !important;}
.s p { font-size:13px !important;line-height:15px !important; }
.mob-hide {display:none !important;}
.mob-show {display: block !important; width: auto !important; overflow: visible !important; float: none !important; max-height: inherit !important; line-height: inherit !important;}
.mob-stack {width:100% !important;display:block !important;}
.mob-w-full {width:100% !important;}
.mob-block {display:block !important;}
.embed-img {padding:0px 0px 12px 0px !important;}
.socialShare {padding-top:15px !important;}
.rec { padding-left:15px!important;padding-right:15px!important; }
.bodyWrapper { padding:7px 4px 7px 4px !important; }
.social-mobile {float:left !important;margin-top:10px !important;}
}
@media screen and (max-width: 480px) {
u + .a .gg { width: 100% !important; width: 100vw !important; }
.tok-heart { padding-top:75% !important; }
.tok-play { padding-top: 250px !important; }
}
@media screen and (max-width: 320px) {
.tok-heart { padding-top:65% !important; }
}
.u { border: 1px solid #CACACA !important; border-radius: 2px !important; background-color: #ffffff !important; padding: 0px 13px 0px 13px !important; font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif !important;font-size: 12px !important; color: #767676 !important; }
.u a { text-decoration: none; display: block !important; color: #767676 !important; margin: 0px !important; }
.u span, .u img { color: #767676 !important;margin:0px !important; max-height:32px !important;background-color:#ffffff !important; }
</style><!--[if mso]><style type="text/css">
h1, h2, h3, h4, h5, h6 {font-family: Arial, sans-serif !important;}
body, table, td, p, a, span {font-family: Arial, sans-serif !important;}
sup { font-size: 100% !important;vertical-align: .5em !important;mso-text-raise: -1.5% !important;line-height: 0 !important; }
ul { margin-left:0px !important; margin-right:10px !important; margin-top:20px !important; margin-bottom:20px !important; }
ul li { margin-left: 0px !important; mso-special-format: decimal; }
ol { margin-left:0px !important; margin-right:10px !important; margin-top:20px !important; margin-bottom:20px !important; }
ol li { margin-left: 0px !important; mso-special-format: decimal; }
li.listItem { margin-left:15px !important; margin-top:0px !important; }
.paddingDesktop { padding: 10px 0 !important; }
.edm_outlooklist { margin-left: -20px !important; }
.embedImage { display:none !important; }
</style><![endif]--><!-- __merge_tags_in_links__ --><style>
@font-face {
font-family: 'Open Sans';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url('https://fonts.gstatic.com/s/opensans/v40/memSYaGs126MiZpBA-UvWbX2vVnXBbObj2OVZyOOSr4dVJWUgsg-1x4gaVIUwaEQbjA.woff2') format('woff2');
}
@font-face {
font-family: 'Open Sans';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url('https://fonts.googleapis.com/css2?family=Open+Sans:ital,wght@1,700&display=swap') format('woff2');
}
</style></head><body class="a" style="margin:0px auto;padding:0px;word-wrap:normal;word-spacing:normal;background-color:#dedede;"><div role="article" aria-roledescription="email" aria-label="email_name" lang="en" style="font-size:1rem"><div style="display:none;max-height:0px;overflow:hidden;"> RAE, Reasoning with Sampling, and more  ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ ‌ </div><table role="none" width="100%" border="0" cellspacing="0" align="center" cellpadding="0" class="gg"><tr><td align="center" valign="top"><table role="none" width="670" border="0" cellspacing="0" cellpadding="0" class="aa" style="width:670px;table-layout:fixed;"><tr><td class="bodyWrapper" align="center" valign="top" style="padding:7px 7px 7px 7px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" style="border-width:0px 0px 0px 0px;border-style: solid; border-color: #2a2a2a;border-radius:10px 10px 0px 0px;background-color:#ffffff;" class="c"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr id="header"><td style="padding:15px 15px 0px 15px;"><div style="padding-top:0px;padding-right:0px;padding-bottom:20px;padding-left:0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td class="f" align="right" valign="top"><p> October 21, 2025 | <a 
href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3EzqrOr3Rsfw5M0VmIuo8O-IQaeISn3eymkefN7vXKwoO1xj4tJpJpbi3FgZD2EO03Y2Kd8otINTclPclSMKf1md3WPwfbZ4_HiBKMaRu26MD6-8eATd5e0dfqiuYf6axSUrdVQTTLM7k03addppzU5I2GDc1HBQV3RBylD3jwv_WTm2g5mn5ZWuCNVsDvqKNaOIsrJsH0rEwhxxiOQ0aSB1B6hxbakmU6RJtUEgYlqJy2EzPka-oaHv90JTQVbEWZQHBGZLxUcusccWOnXqYBFtu7YoN1uOMNgxlL5vVdZ3aKVCftUqtFpOiym_YT5XJzbOu-CzatZPPaqA6ITBUQNm8W31ezRjc4v-BMxtSfb5LLzdUuLcpRGNtgg_dLvwJhxyD68IFePiSNezLCzofz2uxaeuLZVUl5JUEd5fxLkCPOPxIBJoAqHuTB-dqZZ3VBl1q9pO0YOhYphXavzyFpbCvWNY5HbOBao5uza7fTC1ORmQ0_bh9rx_fBx4LDHy_ntE8A5p42zCeti4rup4kdjhFhaOSvzGew22dwSr67Dn-X2FgkkTnfKJMS5JCpzE0xrD7goAOuAd1WwVXI8CtoDpbBy3gRiujvBCKZwcDAeuVANIdidSM6H2gPlGTG2zgTb0nYJHBY502ReBPSkOdGC1z9HX4gHBC82YKQfS2hRs1mKuEn_NEDyqBJSfZDdKhaYz2YC_SBpstwzTz5rZIAiEV06jR6yalJyh_YESXg_EzugSeT57Mymyo5CCJI-Ye0jfNm9zZV0K8C4qrnv7GLCK4yBE2RSBX4bgQK3nRf1vig/4kx/DsOcx53nT0SjJTKBXx05Nw/h0/h001.9hbILCAhNEKkv8yAwl_5-xCr5KKEFNhDCD9w6_5QDGM"><span class="translation_missing" title="translation missing: en.templates.posts.email.header.read_online">Read Online</span></a></p></td></tr><tr><td class="dd" align="center" valign="top" style="padding:15px 0;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top"><h1 style="text-align:left;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-weight:Bold;font-size:32px;color:#2A2A2A;padding:2px 0;line-height:38px;"> Last Week's Trending Papers 📈 </h1><p style="text-align:left;font-family:'Helvetica',Arial,sans-serif;font-weight:normal;font-size:20px;color:#3E3E3E;padding:5px 0;line-height:24px;"> RAE, Reasoning with Sampling, and more </p></td></tr></table></td></tr><tr><td style="line-height:0;"><div data-open-tracking="true"> <img src="https://elink4f7.mail.bycloud.ai/ss/o/u001.3wmUuY8gEWd4_869a_eXcg/4kx/DsOcx53nT0SjJTKBXx05Nw/ho.gif" alt="" width="1" height="1" 
border="0" style="height:1px !important;width:1px !important;border-width:0 !important;margin-top:0 !important;margin-bottom:0 !important;margin-right:0 !important;margin-left:0 !important;padding-top:0 !important;padding-bottom:0 !important;padding-right:0 !important;padding-left:0 !important;"/> </div></td></tr></table></div></td></tr><tr id="content-blocks"><td class="email-card-body" align="center" valign="top" style="padding-bottom:15px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td id="nov-18-th-nov-24-th-33-latest-ai-re" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:normal;padding:0px 28px;text-align:left;"><h6 style="color:#2A2A2A;font-weight:normal;mso-line-height-alt:87.5%;"><i>Oct 13th ~ Oct 19th</i><br><i>#78 Latest AI Research Explained Simply</i></h6></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="industry-news-in-1-line" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">🗞️ Industry News in 1 Line</h2></td></tr><tr><td style="padding-bottom:12px;padding-left:50px;padding-right:40px;padding-top:12px;" class="ee"><div style="margin-left:0px;" class="edm_outlooklist"><ol start="1" style="list-style-type:decimal;margin:0px 0px;padding:0px 0px 0px 0px;"><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 
58);font-size:0.6rem;">♥ 7.2k</span></span> Anthropic has <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.1muhFWIqieRYpaJ-FbWSCTi0onEKBR8Up0PTJP-1o6TWUbixSim5TZozPyMiOwAxo2dBfvk1p3ByLP1gf6TwpaMuqGqugnJDBE69ttI21kYYiWrLCt7c9jbyZq-NEYy7a-FCGSFYiNl9oMR8h_cuPdmbgW9QTKJhx0clZYSReWAtT7UTMDCc7C-eX9fMNAl7cq5qCQY5zY8q64EJKHYFvUQtIzvl3FmS1hpV6cRjcnd-D0-7JCGUPHAHpA3A4jjGJMac2hru0z2mNefgarNiLQ/4kx/DsOcx53nT0SjJTKBXx05Nw/h1/h001.Kpou6hx1qfDF1OAJaOLrxsNYST6u-_CnXNq66eP1Q2A" target="_blank" rel="noopener noreferrer nofollow"><span>released Claude Haiku 4.5</span></a>, a new small AI model that matches the coding performance of the previous state-of-the-art Sonnet 4 model but at a fraction of the cost and more than double the speed. It is available as a drop-in replacement on the Claude API and major cloud platforms like Amazon Bedrock and Google Cloud. </p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/83cd16c6-3634-4262-a50b-1914ccdd0161/1a27d7a85f953c5a0577dc19b507d6e1b93444d5-1920x1080.jpg?t=1761060490" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 1.5k</span></span> NVIDIA CEO Jensen Huang has personally delivered a <a class="link" 
href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmKSuJqszsfWP9RLF3XuWJjxAewqoshNhFxn3SIrfX_IaLq2vLbApI3FFDRaZm4wK5Fx12_8bQbqP97gpNs9oGJiO-vrr5U6WCGEGiQY_l3sgk4M49eRvcbIz2yflEI75faWzcqqbmOQrUEYPhWxVK6nfwPQyAi7hljPl1lXikYDr1gTBEenKChpAzQPJpJTOChhNfPil3BVNXu8dfPNWuspjkINEdiPKn2IepQRUYRsd7Tp_YC6oNqsiQy0v7-0Hkw/4kx/DsOcx53nT0SjJTKBXx05Nw/h2/h001.02-tAM3mAgrut2rclFihofh3lvGbo-3sKCqJL2W-AbY" target="_blank" rel="noopener noreferrer nofollow"><span>new DGX Spark AI</span></a> supercomputer to Elon Musk at SpaceX's Starbase rocket factory. The DGX Spark is a desktop-sized computer that can perform <b>one petaFLOP</b> of AI operations at FP4 precision, delivering five times the performance of its predecessor for creators, researchers, and developers. <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmIgsFIztuwh75hhSzRPk8iTdC82GiUOUJ1VLbrWnKf-dt6a8gC6A3ioaj1YQgKL5jfcU_v-U_P8naNPbWeUJMpsccb60tFqRiCp-WFCPXyyq2IMXG6OE85DJOWZnevS3WDACf5-OMaIMl9bRKzz-o7wf80GGUKd8o0ajA_5ObR1UcwzY0D7rUEj_DBwtM3oXpkq8wD4qOE3xBmEa2oePBZff63IhGuri8mIAFb4C7Z8SiBdpG-5pWCM8ITel3RLuwg/4kx/DsOcx53nT0SjJTKBXx05Nw/h3/h001.P3jDRKuvs6nB0jpjnSIB31vBx1wdhB5Xkqb2M-uN7LU" target="_blank" rel="noopener noreferrer nofollow"><span>View early reviews</span></a> of DGX Spark AI or <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxbwLygMPq9zY_5H8ImVBe_51tVgku7xpgZRsSKtsxZX641adHBzIAa5sWQ9Gz53k0QxdNuXK1HxxqmhhlyKgOAWuLnzTT19FKWIuXRM3aZKJDndnTAuSrlZZC0kRBPZ75xn7tVo0N4g_asoMx78rDF1oGoDHbzlZ0TmZbrGQFhsKX6q_B3apo0QjDv0EXZ0zxzimE83gd6icBZHbSkkQMctGXnoxcxRPzuL8pWAr0_pA1GUy-wkHcMpW_XmHbFbU4g/4kx/DsOcx53nT0SjJTKBXx05Nw/h4/h001.Uzs_MrppBM05vaSqJ59iVMeYUCfyP_adzYO9-5E5-x4" target="_blank" rel="noopener noreferrer nofollow"><span>buy it today at $4K</span></a>. 
</p></li><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 11k</span></span> Anthropic has launched <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j2yo571wOI2ayM48Skcu9lJJj7o_Ci0RmxtHtQOI9A-8Vx9ktWycVG7aD1En9omwKqtEZlouLLFAmn-jGl-KYkCyIrytUOU4KI-S-Qs7pq_1NlujlOXv_JZxN0LPOH1CR9jOQZ-KODseKTAw8e2TqGQIZKzlMi6dh3SivIgt1yNMWykLVRk4_bo4j4IKmcrT_WqaNpa5ehBBQKFahjdaAWJ2qTqsDENn1hH7L_nGW1wRGY6TYsoBmNtdQfNJJRP4DgZzmhjvWNwCMZ7uq8-2-7vMLnLGhoem3vrTkMBfVaJSa/4kx/DsOcx53nT0SjJTKBXx05Nw/h5/h001.1GkqXAtKE9HyAw9T7Fw02WvwPrSqKQoTDMXN54EdtTI" target="_blank" rel="noopener noreferrer nofollow"><span>"Skills" for its Claude AI</span></a>, a new feature that allows the model to instantly load specialized knowledge on-demand. These Skills are simple folders containing instructions and resources that are loaded using "progressive disclosure" (a technique that prevents context window overload by only pulling in information as needed). This architecture empowers anyone to customize the AI for specific tasks without complex programming. <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.VomAAYwkCjux8i_FMc4kJalDpT-xk6aT6KOUkSuRmIFaFjB7fn2j6BC-hIGG9y_4Xh2rbqZCZGKPLZdyUHQK1gGrXrCrN57lfjXcpuJzBo75UQeu82yokI3vH1zFqUGBx_D_gCylooBGvyDQabV8u_A62IzeQaxHugYp2tyV1ii0HMHVo7KUSSibiPkFVBLPgVmjS3sH_VYkiIuc3ZESlc-q8ITt8MIZZeOK9MxecZlBkHEDtlJoD_Ihm3Knn8PJgVm3FJmtYArbLsNT_KrH5TQuWo_6BPXRZzWQdSZ7HUI/4kx/DsOcx53nT0SjJTKBXx05Nw/h6/h001.Vn2bA_gz6hz_8NOAH8vhFq2SCE-0Yitpik_RuXGHiuI" target="_blank" rel="noopener noreferrer nofollow"><span>Read the Anthropic cookbook to get started with skills</span></a>. 
</p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/8f2b4ff5-9662-454c-9172-3c6a24f55908/441b9f6cc0d2337913c1f41b05357f16f51f702e-1650x929.jpg?t=1761060684" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li><li class="listItem ultext"><p style="mso-line-height-alt:150.0%;padding:0px;text-align:left;word-break:break-word;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;">♥ 1.4k</span></span> <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxS86YXgmr-f30Z3A4na2GWANa2UrGEhPkqm-FjAnu34OyPONLAX8pFoZ4IXpHyH-0Rf0AmHQrChG1SVzBKmfQufocxGv4hyDboLgcT1dhKQq6KV3LX-U7UU-z1iJI_iow9zQ-1zHWmxiv0VrgRkcOQ0mwcnRS8xqoLSjwuMPWmRcKFhpYdP3YX2TO2CuO3ox-fD1XbRmP778xNoNTIpfdCghHbrVkHIHa1MACeamfdUC/4kx/DsOcx53nT0SjJTKBXx05Nw/h7/h001.Zd1CL8o1z7gZEc4Jq2PBiPMJmCzQEiHY6y8p9piVojw" target="_blank" rel="noopener noreferrer nofollow"><span>ManusAI has launched an upgraded</span></a> no-code platform that generates, builds, and deploys full-stack websites and AI-native applications from a single prompt. The new version cuts development tasks from fifteen minutes down to approximately four. 
Users can now <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxRKpBeUaiNjx5uvJ0-oWM5qqPyOxhSuxgwUB8rIzzEKp3Iuz24rgzkRGrvG3vnlq9BEwtp7c6WrZvWyFOF0sVB7Ai-VA0fcdz0-r045Jfvi3Lqrli2QN1F6VlkjmzQXQInCdhQZuHLy7t6XYn4z2daWD7aIgZtTOH3EvmED4qMbx08UNilzRXjKEN8Fp6z-Ldc81JewJCflAwIiGiOJNQHvTqqF5zntQfu8eqUV5alMk/4kx/DsOcx53nT0SjJTKBXx05Nw/h8/h001.RaH0PvssSvXKQHotuU0c6Yn776IcJieEQ6YAPSj6ilg" target="_blank" rel="noopener noreferrer nofollow"><span>embed sophisticated AI features</span></a> directly into their sites, including brand-specific chatbots, image recognition cameras, and dynamic content generators. </p><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/27836997-e18c-4f3b-8049-b7c5bd8f9eed/3175cd7de1036e9ee5f310f67dc47f979029c1d99e0d4c1764cd4724771f4fc6.jpg?t=1761060926" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></li></ol></div></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="transparent" style="background-color:transparent;border-color:#2C81E5;border-style:solid;border-width:5px;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" valign="top" 
style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;"><span style="">Support My Newsletter</span></h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="color:rgb(34, 34, 34);font-family:Georgia, "Times New Roman", serif;font-size:16px;">As I aim to keep this newsletter free forever, your support means a lot. If you like reading The AI Timeline, consider forwarding it to another research enthusiast. It helps us keep this up for free!</span></p></td></tr><tr><td align="center" valign="top"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" style="font-size:0px;line-height:0px;padding:30px 0px 30px;" class="dd"><table class="j" role="none" width="50%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td> </td></tr></table></td></tr><tr><td class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Share The AI Timeline</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> You currently have <strong>0</strong> referrals. 
</p></td></tr><tr><td align="left" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; display:none;width:0px;max-height:0px;overflow:hidden;mso-hide:all;height:0;font-size:0;max-height:0;line-height:0;margin:0 auto;" class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 0;"><tr><td align="center" valign="top" style="width:313px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxWc4htTObwdorovK0nFHVH-4pUdVE0ELYH5DsNemk732SjNwhPNJ25r0O8B5vYifsGNUqyW5TiZkyMsF1yreu0byy2KW36J1wDdpoLuXg2TU1F1OW8OHoHaU4-ZmrZpPU4RN-crQCEimD190CSn9fPuxpIRojBJyu1VfV5KtQD3QMVdSg2JrjEj5-xm4r4E12Whf08itqPCb9Q5W0X4rt3ubYkqCmWnLeZpmb3_RZcbIk0UE5wZnFLCQJHLFs0qZ0OGpXp89o1HU4mWIBur5Or4tQGm5M_Y8m5PvTEfYfxLRyrcRv7GyVs5oLtFfiySZ2SqtZypLA-h50h61p0uPiA7iA_PiMqlVLtM-87XL33VZi05_O3UTpWE_0nAzFRJ4TW1ayz3_vn4Zlp9IERdbnnD61McS5GXBelKUeBLoqNKgcYTm8jBGhEIGBJ873Uvp3cAjt7ndntRNWYvyp1zxe5apH1UdzrulnMcFevQTFPY4Sq9GxBBCY5I0T5yGrPWwC8U4q9cVenZxLoCfX9T7TySQCVzMluXoxeIyvdXzBkU_PgBPMUKjq3JBMrX_heyQrsVgIkgLrNFBMHEN1-W3GgHWfviovF9HXLqBmGu6lkL4ED7bYHnCeMbb4TPloycV6ZsHEHxB8rKhlVrmKhIrq9w/4kx/DsOcx53nT0SjJTKBXx05Nw/h9/h001.sN7VdRdoV64zgVOorkqOHofRa3XPGuvk1shlmqjr5MI" rel="noopener noreferrer nofollow" style="text-decoration:none;" target="_blank"><img src="" alt="" height="auto" width="313" style="display:block;width:100%;" border="0"/></a></td></tr></table></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:left;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="left" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a 
href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxWc4htTObwdorovK0nFHVH-4pUdVE0ELYH5DsNemk732SjNwhPNJ25r0O8B5vYifsGNUqyW5TiZkyMsF1yreu0byy2KW36J1wDdpoLuXg2TU1F1OW8OHoHaU4-ZmrZpPU4RN-crQCEimD190CSn9fPuxpIRojBJyu1VfV5KtQD3QMVdSg2JrjEj5-xm4r4E12Whf08itqPCb9Q5W0X4rt3ubYkqCmWnLeZpmb3_RZcbIk0UE5wZnFLCQJHLFs0qZ0OGpXp89o1HU4mWIBur5Or4tQGm5M_Y8m5PvTEfYfxLRyrcRv7GyVs5oLtFfiySZ2SqtZypLA-h50h61p0uPiA7iA_PiMqlVLtM-87XL33VZi05_O3UTpWE_0nAzFRJ4TW1ayz3_vn4Zlp9IERdbnnD61McS5GXBelKUeBLoqNKgcYTm8jBGhEIGBJ873Uvp3cAjt7ndntRNWYvyp1zxe5apH1UdzrulnMcFevQTFPY4Sq9GxBBCY5I0T5yGrPWwC8U4q9cVenZxLoCfX9T7TySQCVzMluXoxeIyvdXzBkU_PgBPMUKjq3JBMrX_heyQrsVgIkgLrNFBMHEN1-W3GgHWfviovF9HXLqBmGu6lkL4ED7bYHnCeMbb4TPloycV6ZsHEHxB8rKhlVrmKhIrq9w/4kx/DsOcx53nT0SjJTKBXx05Nw/h10/h001.sZzcx_nCWaCx3hBatHVuQwRIHxo2kMKR_2dQ-bTWx9M" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Click to Share </a></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Or copy and paste this link to others: <a class="link" href="https://mail.bycloud.ai/subscribe?ref=6SqUHb8KiF&_bhlid=bf7a73b936aab597b0df9777ef50b28c5a049d32" target="_blank" rel="noopener noreferrer nofollow" clicktracking="off"><span>https://mail.bycloud.ai/subscribe?ref=6SqUHb8KiF</span></a></p></td></tr><tr><td align="center" valign="top" style="font-size:0px;line-height:0px;padding:30px 0px 30px;" class="dd"><table class="j" role="none" width="50%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td> </td></tr></table></td></tr></table></td></tr><tr class="btn_row"><td valign="top" 
style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.zNfxTwpJFmrsCuJJphGRkKSrCVph9-fOYkcjx4VfJRyUw-Iv7GHKoTyxc57iFdcabeJrUAXVgdJXAkTcc7bS82ZF6NEkQHkUBgqGaM66RDbyMBpTK8pOBl6aVCc1cb8uD9Bn9drFBN33x2aDU1J9X3kTveoSvtyfttYdAE2qZIfONyDBhkNJndJ0oXPMkSE9a_Ndz2MVoZ9kXWPClKobsz6oWBdGGlVVvbG0CeDyhs4/4kx/DsOcx53nT0SjJTKBXx05Nw/h11/h001.ZfyezPyYaCH2rnlQycBBcPt672brxrEhpUTak4SQ158" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Check Out My Patreon </a></td></tr></table></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style=""><a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.tLfGW26lAwaS9gFg17HSoGymQ3NNPtd5dE5MV_8UgjLbPKYFbBPtV6oAT4VYSncNiXOMe0ETHKViEemkGKRuti97gDsqlNJXOC9cMEoZt4vqGEMzd3CYIoAvubE-GTMM2UNeaPl29JLtIcANUeu-4Gd7NHMwUxLV1htRP8o8Vdp51Vf9kz65CWStOaDQoquxwOsmett-lS2EI0988cXB-VqFLsNLjaBDTCy88p_umgjKhZbMNCYI9JND0L2VQz-n/4kx/DsOcx53nT0SjJTKBXx05Nw/h12/h001.lwzBZqzRsra0fNDlEFTOsvE6PxWx-2IWKCdgg9fmzbg" target="_blank" rel="noopener noreferrer nofollow"><span>Advertise with The AI Timeline! 
</span></a></span></p></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="recursive-language-models" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Recursive Language Models</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Zhang and Khattab [MIT CSAIL]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 22k </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> Long Context </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Recursive Language Models (RLMs) offer a fresh way to handle extremely long inputs by allowing language models to break down tasks and call themselves or other models as needed. This approach helps overcome "context rot," where model performance drops as the context grows too large. 
</p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> RLMs work by wrapping a language model in a system that can manage and explore the input context step by step. The root model starts with just the query and interacts with an environment where the full context is stored as a variable. It never sees the entire context all at once, which keeps its own input window clear. The model can write and run code to examine parts of the context, search for patterns, or summarize sections. When it needs deeper analysis, it can launch recursive calls to itself or a smaller model, passing along specific portions of the context to work on. </p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/791b7910-f7d5-4752-bc1f-ab432f8d3d82/teaser.png?t=1761057288" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>An example of a recursive language model (RLM) call, which acts as a mapping from text → text, but is more flexible than a standard language model call and can scale to near-infinite context lengths.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> This setup lets the model adaptively handle huge amounts of information. 
For example, it might start by scanning the first few lines, then use regular expressions to find relevant entries, or split the context into chunks and assign each to a recursive call. The model builds up its answer gradually, using the notebook environment to store intermediate results. When ready, it outputs a final response either directly or by referencing a variable from the notebook. This flexible, program-like interaction allows RLMs to scale to near-infinite context lengths without overloading any single model call. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> In tests, RLMs showed strong results on challenging long-context tasks. On the OOLONG benchmark, an RLM using GPT-5-mini <b>more than doubled the performance of GPT-5</b> itself, while keeping costs comparable. Even as context length increased to around 263,000 tokens, the RLM maintained a significant lead. On the BrowseComp-Plus benchmark, which involves searching through thousands of documents, the RLM achieved <b>perfect accuracy</b> with 1,000 documents in context. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/a1a2a988-0c1f-4bf1-a116-c3e176c8a99f/repl.png?t=1761057309" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>The RLM framework provides the root LM with the ability to analyze the context in a Python notebook environment and launch recursive LM calls (depth=1) over any string stored in a variable. </p></td></tr></table></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoaIwcNyyjSB6oJuzJxue6R6cd5rPbLIbW-t1s3krAjxhJDKHEZ6-itUgm0l7B-0V585l1X3Q5s6AmVZp9RjH12pdViJBdQsf3sG1i0hclkfqnBj_GtvYtiXeAoRz44B9jzjiMHuy812Df7dSBrU9zn85jidTGZN_jRNMokPNccUoXinylUt632shdq88oKatRZATiHxqPhNqQA6P2wT1FpmPNT-HhDgssMwvCeIxBxKa/4kx/DsOcx53nT0SjJTKBXx05Nw/h13/h001.tIJ7eM3XSIi0beiRGbT7xR7a0_SCHncKl8iaD2_xf6Q" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans 
Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="diffusion-transformers-with-represe" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Diffusion Transformers with Representation Autoencoders</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Zheng et al. 
[New York University]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 424 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> VAE </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> bycloud’s pick </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Latent generative modeling relies on autoencoders to compress images for diffusion, but the standard VAE approach has seen little improvement. This paper introduces a new method called Representation Autoencoders (RAEs) that replaces VAEs with pretrained encoders like DINO and offers richer latent spaces and better performance for diffusion transformers. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> RAEs use a frozen pretrained encoder, such as DINOv2, to convert images into high-dimensional tokens. These tokens capture both fine details and semantic information. A separate decoder, trained with reconstruction losses, maps these tokens back to images, which results in sharper outputs than VAEs without heavy compression. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/268f9b74-e12d-4fcf-aae7-db26cba2db63/image.png?t=1761057779" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Comparison of SD-VAE and RAE</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Diffusion transformers need adjustments to handle these high-dimensional tokens. The model's width must be at least as large as the token dimension to learn the noise patterns during training properly. A dimension-aware noise schedule shift is applied, adjusting the timing of noise addition based on the token size to maintain training stability. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/7ac13e24-0e29-466c-bb06-8dcaa094a248/image.png?t=1761057820" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Changing model depth has marginal effect on overfitting results</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> While RAEs excel in quality and efficiency, they require careful tuning of model width and noise schedules, which may add complexity. The decoder's noise augmentation slightly reduces reconstruction fidelity, though it benefits generation overall. 
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoZr-f6keVrG1GKuVv1wQKvr8Wvr7Rmxs3BTanHP1rFFD_wFYke3CvK5YIoFuPgzEyWwHmBlTXiWNeOtJxRYr56wZ4gUYqsZrGnBfOzDigOcMCprV1pYSQpQwB8PJ7GFGsgXPeM-ZrxLF3dN0nYTwCg-xQpUlmve6UU80JNs2bjlA8rpVAfUIKH_dMhd1HnAne2immCuJSDPfzmZAAuCJRzHPLlOc0aewqafpA_EfD6DE/4kx/DsOcx53nT0SjJTKBXx05Nw/h14/h001.fswfe0iyGy0aUBO7ghvBECPMDxH3lrEs_WZpIjjuWzU" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="not-all-bits-are-equal-scale-depend" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Not All Bits Are Equal: 
Scale-Dependent Memory Optimization Strategies for Reasoning Models</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Kim et al. [KRAFTON, University of Wisconsin–Madison, UC Berkeley, Microsoft Research]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 430 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> LLM Scaling Law </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> When we deploy LLMs for reasoning tasks, it is important to manage memory, especially since the key-value (KV) cache can consume more space than the model weights themselves. This research explores how to best allocate limited memory between model parameters and generation length to maximize accuracy. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/bb1d25f8-b157-49c2-97f2-098a21583ce9/image.png?t=1761059077" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> This study finds that smaller models, with effective sizes below an 8-bit 4B parameter model, achieve better performance by dedicating more memory to higher-precision weights rather than extending the generation length. This is because their limited capacity benefits more from precise computations than from additional tokens. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> In contrast, larger models see greater accuracy gains when memory is allocated to longer generations, as their robust parameter sets can leverage extended reasoning chains. The research also examines KV cache compression techniques like eviction, which selectively retains important tokens, and quantization, which reduces precision. Eviction works well for smaller models by preserving critical information without numerical errors, while quantization becomes competitive for larger models that are more tolerant to precision loss. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/9b080b97-7bc9-4dce-bdb2-be2ab8c91459/image.png?t=1761059125" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Latency vs. Accuracy trade-offs and Throughput vs. Accuracy trade-offs</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Experimental results show that models smaller than 8-bit 4B perform best with weight-focused allocations, while larger models excel with extended generations. For mathematical reasoning, higher weight precision (8-bit or 16-bit) works well, whereas 4-bit suffices for knowledge-intensive tasks. 
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoZr-f6keVrG1GKuVv1wQKvr_fXCv4ws5lRLXhCqT_1lpgYHnB4X9Nn5X2PXSXLKnqgIy_iir51iia8LkxKZDabZzb2tliBT0uZXgaNu_H3Zy08j4tKnS1Htg9DVRMihzbcQvDUbqwfir8QDQWEefwNTxJh8EknUyOJB9XUXixrf8iJ42AH8VR5JQYmwShlXQLNblSF3CztSizrPNEdX3pnRmOusNa1z7eaNegczb5a3o/4kx/DsOcx53nT0SjJTKBXx05Nw/h15/h001.zWBuwB5HNy_CPeghRUu_pOF9LfNdrS0EadriQ_94xSg" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="reasoning-with-sampling-your-base-m" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">Reasoning with Sampling: 
Your Base Model is Smarter Than You Think</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Karan and Du [Harvard University]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 855 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> LLM Sampling </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Large language models often improve at reasoning tasks after reinforcement learning training, but a new study asks whether we can achieve similar improvements just by sampling more cleverly from the base model. </p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> This method works by repeatedly resampling parts of a generated text sequence and deciding whether to keep the new version. It selects a random segment in the current output, generates a fresh replacement for that part using the base model, and then compares the overall likelihood of the new sequence to the old one. If the new text is more likely under the model, it is kept; otherwise, the process continues with the original. This iterative refinement allows the sampling process to explore different reasoning paths while favoring those the model finds more plausible. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/baa5ae43-46f3-439e-881c-465d2fb54f2c/teaser.png?t=1761056524" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Single-shot reasoning performance of power sampling and GRPO relative to the base model for Qwen2.5-Math-7B.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> This approach is inspired by a technique called Metropolis-Hastings sampling. The algorithm breaks the full sequence into blocks and progressively applies the resampling process block by block. At each step, it uses the current block as a starting point and runs several iterations of resampling and acceptance to sharpen the output. The procedure does not require any external reward or training data, relying only on the model’s own probability estimates to guide the sampling toward higher-quality reasoning traces. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/e7890e93-e30c-436f-af33-55b1c057150b/passatk.png?t=1761056571" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Pass@k performance of power sampling and GRPO relative to the base model for Qwen2.5-Math-7B.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> On the MATH500 benchmark, it matched the performance of reinforcement learning post-training and <b>outperformed the RL approach on HumanEval</b> and GPQA. It also maintained better output diversity across multiple samples, avoiding the mode collapse often seen with RL. A key limitation is increased inference-time computation, as the resampling process requires generating more tokens. 
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoZr-f6keVrG1GKuVv1wQKvqDI0ugNRmorT1C0nsvVk9BZp6GeCRsHa3YnGOdM-nvHJNk_C2JjH-nR-XbyUvRzHqoXdjax5l_bxwsyCtFZ33yEFDBY0qELO_Zva7B2b5vSPCuiIUY9TkbZBo2ayh_roDdjB7eueuzcCLV4MmbK1504OMKSMzMrRNsFGxvd2nY4eCWsLxUgHBDiHb_qqzhlv42T5VGMofFH08DLPaq8FEe/4kx/DsOcx53nT0SjJTKBXx05Nw/h16/h001.IuuDqZITJ5c5zoazGH67tgNcaaUAETe0Pk8X3JRPTxw" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td id="the-art-of-scaling-reinforcement-le" class="dd" align="left" valign="top" style="color:#2A2A2A;font-weight:Bold;padding:0px 28px;text-align:left;"><h2 style="color:#2A2A2A;font-weight:Bold;mso-line-height-alt:150.0%;">The Art of Scaling 
Reinforcement Learning Compute for LLM</h2></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><i>Khatri et al. [Meta, UT Austin, UCL, UC Berkeley, Harvard University, Periodic Labs]</i></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"><span style="background-color:#e0e0e0;"><span style="color:rgb(255, 58, 58);font-size:0.6rem;"> ♥ 805 </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span><span style="background-color:#e0e0e0;"><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> LLM RL </span></span><span style="color:rgb(44, 129, 229);font-size:0.6rem;"> </span></p></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> Reinforcement learning enhances LLMs, but scaling it effectively has been more of an art than a science. A new study introduces a systematic framework to predict how RL performance improves with increased compute, which makes the process more reliable and efficient. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/b9a2b39c-203e-4543-aca2-9281b28673e7/image.png?t=1761059629" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>ScaleRL is more scalable than prevalent RL methods.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> The researchers developed a method that uses a sigmoidal curve to model the relationship between training compute and model performance. This approach allows them to estimate future performance based on early training data, helping identify which RL recipes will scale well without needing extensive, costly runs. They tested this across numerous design choices, such as loss functions and normalization techniques, to see how each influences both the final performance ceiling and the efficiency of reaching it. 
</p></td></tr><tr><td align="center" valign="top" style="padding-bottom:20px;padding-left:15px;padding-right:15px;padding-top:20px; " class="dd"><table role="none" border="0" cellspacing="0" cellpadding="0" style="margin:0 auto 0 auto;"><tr><td align="center" valign="top" style="width:626px;"><img src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/uploads/asset/file/462f077b-bc03-44bc-b9df-9bb2e5bd9684/image.png?t=1761059716" alt="" height="auto" width="626" style="display:block;width:100%;" border="0"/></td></tr><tr><td align="center" valign="top" class="t" style="width:626px; padding: 4px 0px 4px 0px;"><p>Scaling RL Generation Length.</p></td></tr></table></td></tr><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"> By analyzing these factors, they found that certain elements, like the type of loss aggregation or advantage normalization, mainly affect how quickly the model learns rather than changing its ultimate capabilities. This insight helps distinguish between methods that look promising at small scales but may not hold up under larger compute budgets. 
</p></td></tr><tr class="btn_row"><td valign="top" style="padding-bottom:14px;padding-left:28px;padding-right:28px;padding-top:14px;text-align:center;width:100%;word-break:break-word;" class="dd"><table width="100%" role="none" border="0" cellspacing="0" cellpadding="0" style="margin:14px auto 14px auto;"><tr><td align="center" valign="middle"><table role="none" border="0" cellspacing="0" cellpadding="0"><tr><td style="background-color:#2C81E5;border-radius:8px;mso-padding-alt:14px 20px;" class="btn"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.fUNb4GdFo9D3F8WuLArtoZr-f6keVrG1GKuVv1wQKvqGlyKVEinA5c4HWgFDVyVhCWWCrS-gmQUKu6oPqVe8qbPOdSdF6z4pVhFFu3QgYTZs7XB4QBShHteAL3z1iFy-BJLkrWnAqVwoSAJovLRYFv7q_w4S-wcCHUboBOZyPXy77p_zaGmhe9M88sZkLydRtxf-VYd0HR7hbFKBMVXIyigh9zskzepOWvIn9RQkk3dBUzbheH52wan_9MBcfZLD/4kx/DsOcx53nT0SjJTKBXx05Nw/h17/h001.YS_GzGwMUAx_r6MaZNmY3CMfEggmeb2NgtIW4d8VcUc" target="_blank" rel="noopener noreferrer nofollow" style="background-color:#2C81E5;border-radius:8px;color:#FFFFFF;display:inline-block;font-family:'Open Sans','Segoe UI','Apple SD Gothic Neo','Lucida Grande','Lucida Sans Unicode',sans-serif;font-size:16px;font-weight:normal;line-height:18px;padding:14px 20px;text-decoration:none;"> Read Full Paper </a></td></tr></table></td></tr></table></td></tr><tr><td><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" style=""><tr><td bgcolor="#222222" style="background-color:#222222;padding:0.0px 0.0px 0.0px 0.0px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0"><tr><td class="dd" align="left" style="padding:0px 28px;text-align:left;word-break:break-word;"><p style="mso-line-height-alt:150.0%;"></p></td></tr></table></td></tr></table></td></tr><tr><td class="e" align="left" valign="top" style="padding:28px 28px 8px;"><h3 style="">Do you prefer this new format?</h3></td></tr><tr><td class="ee e " style="padding:0px 28px 28px;"><div style="margin-left:0px;" class="edm_outlooklist"><table role="none" 
border="0" cellspacing="4" cellpadding="0" align="left" style="min-width:300px;"><tr><td style="width:100%;border:1px solid rgba(50, 50, 50, 0.17);padding:6px 12px; border-radius:4px;"><a style="font-style:normal;text-decoration:none;" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3ExYC6NhsEFZgZDGIeNrDJnMnXdpm5sjjUeyC0zbhSb3PnmXi_Swp0oogDWm_n0S4LC1ZiwqGLOArVQgzCKFtTaFLVS0yIAZYSTB-ExT9Wfbjt373MCNcc4-DfzCwXpnByzYJlsS1nYXdTeDkWVq2ts5Sv6lZZxnS1-DB68NLuF9Y7wctGV3PnWVsalRrjcmETsehDJDPmDKdg7Mjl5V7X3R3C59PRoezZsPyaVnN0F3CV-HJZLpab6AybtC9acEDoCtj-cZXOWYG4qbb_-5dSwizGSXzaAU8Z2JX2m-XFLkvJPkekCQfr5toumWrTVAG952xzCg2OlJ1KyUy6u5bbJp64ZnElb_nC3A733BdOgkDjvzmgySepQWIOV9E400P3dm0RbOhLPVFMLrKE9uB_DtDyKYzmvCPSC9IMOxCTVLLkcN7wQ3pUwYjYLr7toqoJO8UuvEFrtv9jSLRbTbIt-mXbFWj8ITIh5SpfkDhWwmMd3S_y9SwHs2X8VVBpPb8d8kMkTb6L3ZixnY9K_KW7P33o2EnbQdOr9NXEYZFEY9ZWCSsWy1quHqCH4Xj5qctQJZBmnjIX8sByog6-I9-t8VDALZT8w_P01G-R_pwJDsTL2_Mp4qqrwHDZ_336PKkTjUTgeumS-OhsN7zYQ7rnAZledslm8qj09yoqAsL_Yj-PI_AmjsOxRL8qHV8bIV7pHO2R1YOUMv7mjroOh0Y5-kEZdZRzAKAXfgOBePRJcAhR_pgUK5JKXZp0eypsbmgIv4qy1NjcvAiqEVc9FIu_Qz5UzdvWXoHsGzuBZham10VUK9_6PTS9U--IsI6GdBhyZ0_FXeFHn5Z2yMP8hd5p2WxlqdmVCIazqzDF1jqB6NrzepjkO4v5jlMMY36ixrcqgAJwTIHCLvs0dSuJtaN4Os-GZnFsOQMse8KioG5xyK_PqDsIR9G2Nvx2Ek0zfBkhPZ82le7zQ0V_4nm7wC05LWBVYiCiQCG2C0LIZ2UVSIqg/4kx/DsOcx53nT0SjJTKBXx05Nw/h18/h001.qpgUVWqVuNGW9L1k-32A4lbjnT_gnaJQRrUeaQV5uhE"><p style="font-size:14px;color:inherit;"> Yes, I like the short format </p></a></td></tr><tr><td style="width:100%;border:1px solid rgba(50, 50, 50, 0.17);padding:6px 12px; border-radius:4px;"><a style="font-style:normal;text-decoration:none;" 
href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxSdB5RCIH6yy1Fm1CYma3ExYC6NhsEFZgZDGIeNrDJnMnXdpm5sjjUeyC0zbhSb3PnmXi_Swp0oogDWm_n0S4LA5vDxgHvExb4780Nunpr7fqwX24sED9WPfjgiPzZK91jz8O5K-pBp3An7ji9a16mlDGRc21sj46Dh1JFbVTF86VbRjinth5FwvxgX4OdxPRAmu5MARtY2NfPPAJ1E5GQ6d__yu_t10CxxOi6zSSsfzUyXxWQV7VLWOKBU07VMXBz2W7YgX2prN5FFdxMOqhofJ-d8RnJjcQeC3Uw-2Vqs5nE1W5Ft5km7HqhuwhRmdPjAsNE8zEoJTMJA23YZY3O3jcZgJC6E94Bz_y_TdY9JxFeiGrkvuKv3x6iBDao_ZzgY2MALaGAsdcQ4N-YIzbS8uEEJ8CSQ2SPOe_E2OVZqk4O5v3ctymz6tf3EIorUjWfZZmSgrQxBlA2Y1dcAZ5f1Hgs4Pr9DgDqB12P84HHv8QbEiicW-3QPO3NR_2cLWquthjAjzzErG7sshsmFEr6VcpoTtssXKwkF08Tc4n8WsPWxJUrNSouD6clRaAjmeuCldGszZwIQld5qROEjzvj0NsM0Gpne5NwlytRbbFcJJQP2nWOIR91zioJFumMF3b0-UbaArjjYP3gYVwI-XFlLI8DuYre_wCAcK7ekEEN8RaBiCd5fmL7lNKFA02ghqJzBAwhbkS3k4rhjEwSmO7F5A-3-CHX9i6Y7KYWd6k9ZNqvUhxkzFYxlzezgt_Ks-ZrC1b2ipx7b3swFWH8B6jBIgQ9vxq2h3rGLhujkQBmL7D-MKbuG8b3eo3Qzz706NIfrsFRE7-aZtFs92aw2VaFc_LFS5muSAPCHO95G3MP3hSJ9rEmapS0fVNLqMPa0jEGYeu7eYk5n-aDtZeXJMMo8eqSQGaoEKJqN1MdGcq5XK6ajUEmoWdkDhkds8LdRHQUZnTfkfQl-fLIPKc-4sJfCZqXwvSMEXw6qQ9DOj7YNov8nBM27bElWlkWyuYjSXUw/4kx/DsOcx53nT0SjJTKBXx05Nw/h19/h001.FmjPDYJrlRURD1EnlnBObNK50TlBCi_ssm5MIqX28Z4"><p style="font-size:14px;color:inherit;"> No, bring back long posts </p></a></td></tr></table></div></td></tr><tr><td class="dd" align="center" valign="top" style="padding:20px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmAPHMvrLw3w8tuKyltuTkQm643TrxHchBtcLezTQOdXe8wYEL-DTZa8ZCNAKX_I2Ah77PCpLTtpxSsuUCsZeEuMQ44BYlW0m2FBSM_1r4GMgogP7YXRyV_J2QfNcSUPJTLyrapf7qYvynuW82eENcIMF0bYiFaz3ScPOJ4Wg6LFay4BWhibgZ6-ztO9k6yw2qC_ostRBwXicA3eiSWREbSv_pn2zOgc01XLvfdK3fzVl1m1f-2WFoRGFlMghn74sgQ/4kx/DsOcx53nT0SjJTKBXx05Nw/h20/h001.rcC1kLdCKOCcim-Ov60L02dO5liwFA-lyS8TLyY5khU" style="text-decoration:none;"><table align="center" width="100%" cellpadding="0" cellspacing="0" border="0" role="none" style="max-width:520px;margin:0 auto;"><tr><td class="p" width="100%" style="padding:2px;border:none;"><table 
width="100%" cellpadding="0" cellspacing="0" border="0" role="none"><tr><td align="center" valign="top" style="width:100%;"><div style="max-height:0;position:relative;opacity:0.999;width:100%;mso-hide:all;"><div style="display:inline-block;width:100%;padding-top:25%;"><img width="20%" height="auto" loading="lazy" alt="" style="border:0;" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/youtube_play_icon.png"/></div></div><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmAPHMvrLw3w8tuKyltuTkQm643TrxHchBtcLezTQOdXe8wYEL-DTZa8ZCNAKX_I2Ah77PCpLTtpxSsuUCsZeEuMQ44BYlW0m2FBSM_1r4GMgogP7YXRyV_J2QfNcSUPJTLyrapf7qYvynuW82eENcIMF0bYiFaz3ScPOJ4Wg6LFaKQdNdb4E7bV8rhn060r9ukwvnfrxAujikLAMlSgZJ338Fgf0OzP-eTU3_ymtyG6q8JFVVup0DCPsJfiGUCWvhA/4kx/DsOcx53nT0SjJTKBXx05Nw/h21/h001.SHRKC44-KNYCnT8kvZ8zdOmtpUuome3sokb24d953I4" style="text-decoration:none;"><img src="https://i.ytimg.com/vi/sgIB7l6hW3Q/maxresdefault.jpg" width="480" height="auto" loading="lazy" alt="YouTube video by bycloud" style="display:block;height:auto;border:0;outline:none;text-decoration:none;background-color:#000000;width:100%;"/></a></td></tr><tr><td><p style="font-size:12px;font-weight:500;font-style:italic;font-family:Helvetica, Calibri, sans-serif;color: #686a6d; padding-top:0 !important;padding-bottom:6px !important; padding-left:4px !important;"> Researchers Are Getting Really Creative Training LLMs [Token Order Prediction] </p></td></tr></table></td></tr></table></a></td></tr></table></td></tr></table></td></tr><tr><td align="center" valign="top"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td><tr><td class="b" align="center" valign="top" bgcolor="#2a2a2a" style="padding:0px 0px 0px 0px;border-style:solid;border-width: 0px 0px 0px 0px;border-color: #2a2a2a;border-bottom-left-radius:10px;border-bottom-right-radius:10px;"><table role="none" width="100%" border="0" 
cellspacing="0" cellpadding="0" align="center"><tr><td align="center" valign="top" bgcolor="#73ddff" style="padding:12px"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td><span style="padding-left:1px;"></span></td><td align="center" valign="middle" width="75" style="width:75px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.1muhFWIqieRYpaJ-FbWSCQqcWoV4NNHHr5SkP9THApWUO4S9eWSDBFDMKQ83N4CY1l4kXQTU9YnEEqXRrg_2uhS94rQOKDl60C6UO57Zu1mJCFi_zhfD-a_hnJHdTQ7ErgbLSFXCh8ilK7ym_aEzMFNNd-oxO80Yc2Zr1efOxoWeOc-_8MOG3nSUTr-yzEOFQxvSjGwHBZecnW35D7V9gADFOlBxxsTKBoP-767yHY8/4kx/DsOcx53nT0SjJTKBXx05Nw/h22/h001.3UykmrzHaDcnQuOd5rI_S5ZZlQygPh04iGrJWfmwPRU" style="text-decoration:none;"><img width="22" height="22" alt="tw" border="0" style="display:block;max-width:22px;color:Dark" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/x_dark.png"/></a></td><td align="center" valign="middle" width="75" style="width:75px;"><a href="https://elink4f7.mail.bycloud.ai/ss/c/u001.amatuKKICSickUKplYJXmBoQnQ9VXnB2zTxBG4HeHBi5iti4l06m5fR1UTFq_vFgQaGMmutCjJbuBFU8WHbRj6heToGsiZHlry3dxu5DEimeQbpBAMyhKdSbaWrmIf3bFoTCANszDFzbcUnD4jemZTaObBSl_ALeEZXdVcPrmjFBOzT0cVBZ8AaFQ56d-lY4uVTwBEOj1yV7mAP8B-MbjJ_xs2XO97g5z8hlSvO5_cw/4kx/DsOcx53nT0SjJTKBXx05Nw/h23/h001.tXTRyB2PxM8ezkpNykptlQTdLas7G4iR3diIqhBbGeU" style="text-decoration:none;"><img width="22" height="16" alt="yt" border="0" style="display:block;max-width:22px;color:Dark" src="https://media.beehiiv.com/cdn-cgi/image/fit=scale-down,format=auto,onerror=redirect,quality=80/static_assets/youtube_dark.png"/></a></td><td><span style="padding-left:1px;"></span></td></tr></table></td></tr><tr><td height="10" style="line-height:1px;font-size:1px;height:10px;"> </td></tr><tr><td class="w" align="center" valign="top" style="padding:15px 15px 15px 15px;"><table role="none" width="100%" border="0" cellspacing="0" cellpadding="0" align="center"><tr><td 
align="center" valign="top"><p style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> Update your email preferences or unsubscribe <a class="link" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.c6q0w4g5sodbtO4I1B_pxWc4htTObwdorovK0nFHVH-4pUdVE0ELYH5DsNemk732SjNwhPNJ25r0O8B5vYifsBhEpz-DJgyVFmavJPa0OyKRRnvw4o7XGyvIv7PRofnmE6sjPjCoKgGZl-nnjLpDwZ2N7Eg5F1PaLaU-GnTi4CBygTn5kabT8G4zpF60IXCk6Usc23-6TzKvvuj7GdJNFSdnVCRI72l_uoFBCf-w_Ipu00z2nryKwYSpfdn5AMl9zoAAma-6PPRUFORKkR_isY-ukzY2uste1qNDvUvaLr0tQyDkKwmAc6I1_ri1o_qUo1CLEN7r61-rtWtNLBEmupVdTc90q3YbU6oNtcdWkDJtVtANmjAYo5WiWbMKKltxOoKq1WHN70VBEhV0KWb0qRmRwt-wLwpCSx4kcyo95JMhHxFZ0x5GZtz60hQ6KQbIi9b-yE8Ilu4v7ex5bOEVigpnf2SYqdNoIyQ-3e06YLAp8wSqlb9l3v5quyKsgRiD2ScVFAw5y0OvRUO2SS8KuU0VkcVdZULMmCj7kwrAXTPEuRwfo7vK5DwDdK-JFh0E7BXt8KI2mUy94rNO0Vfm8I8yppAGTV8QgP83FovNrOHZwFyZiwlt8yuU6dzeLfgZHPjPjpoYk6BiGS7EUrHIu5OAM1UDN5IvmRY54YXi0bNUzvTLTjG9444XZ1ewTxLEoJgpp6cCSxkff5RgHS2oxjh-LuZD-03sVds7JLzZG6ivn4tsBqI-f0ZvCRTHAklLn9vTZJrsZLydR5y2jDwiREJNu5cI5_m5YvnhcpxkHBDlKavHQ1HDoes5v_RirdgR2eI0L3t4LelzV19J7aixT8K9f9Ub8dk6nrMIWJFQYPJ4-w74LrCnNbiUjEujb2oCHiqRb_dz4Pe4E7OK9XT4XHWvtitVbQA6QcjsBfeBqvMP6SCXazYTqZox-D3ycYZ62LuJeKpKEmWHMnbqB72ndbZpBzVPiYZ8pR8C63B-K9MeXGjBdI0_aOiVsI13oYTU/4kx/DsOcx53nT0SjJTKBXx05Nw/h24/h001.D6Hg1l65SZQaoonRsw90CyurWStc77otU41_3TZBySo" style="text-decoration:underline;text-decoration-color:#FFFFFF!important;color:#FFFFFF!important;"> here</a></p><p class="copyright" style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> © 2025 bycloudai </p><p style="font-family:'Verdana',Geneva,sans-serif;color:#FFFFFF!important;"> 228 Park Ave S, #29976, New York, New York 10003, United States </p></td></tr><tr style="display: table-row !important;"><td align="center" valign="top" style="padding-top:20px;display:table-cell !important;"><table role="none" border="0" cellspacing="0" cellpadding="0" align="center" style="display:table !important;"><tr style="display:table-row
!important;"><td class="u" align="center" valign="middle" height="32" style="height:32px;display:table-cell !important; max-height: 32px !important;margin:0px !important; background-color: #ffffff !important;"><a style="line-height:32px !important;text-decoration:none;display:block !important;" href="https://elink4f7.mail.bycloud.ai/ss/c/u001.DUiN96-Eq7pUHzwEhy5j28olDWFpV5DDKfdk_OdOKOgfy2HwGjD-R9NNS38835moR9o0MFqWQiZd2IVwUbMWHRJkYHoUJFwN6rYX82oCL9oANZw9_GXeSXhSOGqfuoEBx5AMVyuRN2W8AXLPpsNBnLeauEeBIq2fe0UI7bSoLN2hjV0r5YxHAtG9b8_3Y8tqoEpl66k7LbU4zjhZ5oT5Zb8Sr7-WubwuxWOvqA4qLVMy0oUUdT0bYJwbQ-1dR71q/4kx/DsOcx53nT0SjJTKBXx05Nw/h25/h001.DNpC09e5BQExwLGTESQ56d2x2F451qQ2PSOWq2ZlS4U"><img src="https://media.beehiiv.com/output-onlinepngtools.png" width="16" alt="beehiiv logo" style="display:inline-block !important;max-width:16px !important; vertical-align:-3px !important;width: 16px !important;" border="0"/><span style="padding-left:11px !important;display: inline-block !important;">Powered by beehiiv</span></a></td></tr></table></td></tr><tr><td align="left" valign="top" height="2" style="height:2px;"><a href='https://elink4f7.mail.bycloud.ai/ss/c/u001.CxDkkVpJsBdVoe83c_tBWsHIaP4XNp0WgUYqLvHcKk_3uqk_KIkz4ddLinhFbud6JuxLFdSUhYnR7b1NSsmbtzXNGNblnEEMKUtkCAjkn8Y/4kx/DsOcx53nT0SjJTKBXx05Nw/h26/h001._aFI51PGSjKdusYcIYJsbaIf6mthwRy95Huq-pgKvfg' style="color: #2a2a2a !important; cursor: default; font-size: 1px; text-decoration: none;"> Terms of Service </a></td></tr></table></td></tr></table></td></tr></td></tr></table></td></tr></table></td></tr></table></td></tr></table></div></body></html>