garrethlee commited on
Commit
249a678
Β·
verified Β·
1 Parent(s): c9de898

Fix chart bugs

Browse files
404.html CHANGED
@@ -1 +1 @@
1
- <!DOCTYPE html><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="/_next/static/media/a34f9d1faa5f3315-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="/_next/static/css/b279119c7cf0bf4b.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="/_next/static/chunks/webpack-2ef856c14afe7005.js"/><script src="/_next/static/chunks/4bd1b696-3f4296d5f7280832.js" async=""></script><script src="/_next/static/chunks/517-7e8cf213e6b3ad2a.js" async=""></script><script src="/_next/static/chunks/main-app-224e8c8447f5164d.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust"/><title>404: This page could not be found.</title><title>From Digits to Decisions</title><meta name="description" content="How Tokenization Impacts Arithmetic in LLMs"/><script src="/_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__className_d65c78 bg-white"><main class="relative min-h-screen"><div style="font-family:system-ui,&quot;Segoe UI&quot;,Roboto,Helvetica,Arial,sans-serif,&quot;Apple Color Emoji&quot;,&quot;Segoe UI Emoji&quot;;height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div></main><script src="/_next/static/chunks/webpack-2ef856c14afe7005.js" 
async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"3:\"$Sreact.fragment\"\n4:I[15244,[],\"\"]\n5:I[43866,[],\"\"]\n6:I[86213,[],\"OutletBoundary\"]\n8:I[86213,[],\"MetadataBoundary\"]\na:I[86213,[],\"ViewportBoundary\"]\nc:I[34835,[],\"\"]\n1:HL[\"/_next/static/media/a34f9d1faa5f3315-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/_next/static/css/b279119c7cf0bf4b.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"5gq-4W1KcImTPfDCkpFnL\",\"p\":\"\",\"c\":[\"\",\"_not-found\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$3\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/_next/static/css/b279119c7cf0bf4b.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_d65c78 bg-white\",\"children\":[\"$\",\"main\",null,{\"className\":\"relative min-h-screen\",\"children\":[\"$\",\"$L4\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L5\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[]}]}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$3\",\"c\",{\"children\":[null,[\"$\",\"$L4\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\",\"/_not-found\",\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L5\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"notFoundStyles\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$3\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:style\",\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media 
(prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:1:props:style\",\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:2:props:style\",\"children\":[\"$\",\"h2\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:2:props:children:props:style\",\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L6\",null,{\"children\":\"$L7\"}]]}],{},null]},null]},null],[\"$\",\"$3\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[\"$\",\"$3\",\"LqnvCKpzHeXH5HNZ6m4rw\",{\"children\":[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"$La\",null,{\"children\":\"$Lb\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\"}]]}]]}]]],\"m\":\"$undefined\",\"G\":[\"$c\",\"$undefined\"],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"b:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"1\",{\"children\":\"From Digits to Decisions\"}],[\"$\",\"meta\",\"2\",{\"name\":\"description\",\"content\":\"How Tokenization Impacts Arithmetic in LLMs\"}]]\n"])</script><script>self.__next_f.push([1,"7:null\n"])</script></body></html>
 
1
+ <!DOCTYPE html><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="/_next/static/media/a34f9d1faa5f3315-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="/_next/static/css/b279119c7cf0bf4b.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="/_next/static/chunks/webpack-2ef856c14afe7005.js"/><script src="/_next/static/chunks/4bd1b696-3f4296d5f7280832.js" async=""></script><script src="/_next/static/chunks/517-7e8cf213e6b3ad2a.js" async=""></script><script src="/_next/static/chunks/main-app-224e8c8447f5164d.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust"/><title>404: This page could not be found.</title><title>From Digits to Decisions</title><meta name="description" content="How Tokenization Impacts Arithmetic in LLMs"/><script src="/_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__className_d65c78 bg-white"><main class="relative min-h-screen"><div style="font-family:system-ui,&quot;Segoe UI&quot;,Roboto,Helvetica,Arial,sans-serif,&quot;Apple Color Emoji&quot;,&quot;Segoe UI Emoji&quot;;height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div></main><script src="/_next/static/chunks/webpack-2ef856c14afe7005.js" 
async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"3:\"$Sreact.fragment\"\n4:I[15244,[],\"\"]\n5:I[43866,[],\"\"]\n6:I[86213,[],\"OutletBoundary\"]\n8:I[86213,[],\"MetadataBoundary\"]\na:I[86213,[],\"ViewportBoundary\"]\nc:I[34835,[],\"\"]\n1:HL[\"/_next/static/media/a34f9d1faa5f3315-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/_next/static/css/b279119c7cf0bf4b.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"WFHEiSUV1U35H1Zx_2aiS\",\"p\":\"\",\"c\":[\"\",\"_not-found\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$3\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/_next/static/css/b279119c7cf0bf4b.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_d65c78 bg-white\",\"children\":[\"$\",\"main\",null,{\"className\":\"relative min-h-screen\",\"children\":[\"$\",\"$L4\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L5\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[]}]}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$3\",\"c\",{\"children\":[null,[\"$\",\"$L4\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\",\"/_not-found\",\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L5\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"notFoundStyles\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$3\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:style\",\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media 
(prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:1:props:style\",\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:2:props:style\",\"children\":[\"$\",\"h2\",null,{\"style\":\"$0:f:0:1:1:props:children:1:props:children:props:children:props:children:props:notFound:1:props:children:props:children:2:props:children:props:style\",\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L6\",null,{\"children\":\"$L7\"}]]}],{},null]},null]},null],[\"$\",\"$3\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[\"$\",\"$3\",\"Zh3AR8nccYYN6GQlcafQs\",{\"children\":[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"$La\",null,{\"children\":\"$Lb\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\"}]]}]]}]]],\"m\":\"$undefined\",\"G\":[\"$c\",\"$undefined\"],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"b:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"1\",{\"children\":\"From Digits to Decisions\"}],[\"$\",\"meta\",\"2\",{\"name\":\"description\",\"content\":\"How Tokenization Impacts Arithmetic in LLMs\"}]]\n"])</script><script>self.__next_f.push([1,"7:null\n"])</script></body></html>
_next/static/WFHEiSUV1U35H1Zx_2aiS/_buildManifest.js ADDED
@@ -0,0 +1 @@
 
 
1
+ self.__BUILD_MANIFEST=function(r,e,t){return{__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},__routerFilterStatic:{numItems:3,errorRate:1e-4,numBits:58,numHashes:14,bitArray:[1,1,0,1,0,r,r,0,r,r,e,r,r,r,e,r,e,e,r,e,e,e,r,e,e,e,e,e,r,e,r,r,r,r,e,r,r,e,r,r,r,e,r,e,r,e,e,r,r,r,e,e,r,r,r,e,r,r]},__routerFilterDynamic:{numItems:e,errorRate:1e-4,numBits:e,numHashes:null,bitArray:[]},"/_error":["static/chunks/pages/_error-94b8133dd8229633.js"],sortedPages:["/_app","/_error"]}}(1,0,0),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
_next/static/WFHEiSUV1U35H1Zx_2aiS/_ssgManifest.js ADDED
@@ -0,0 +1 @@
 
 
1
+ self.__SSG_MANIFEST=new Set([]);self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()
_next/static/chunks/app/page-740b26fd4b65b4be.js ADDED
@@ -0,0 +1 @@
 
 
1
+ (self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[974],{31654:(e,a,t)=>{Promise.resolve().then(t.bind(t,52364)),Promise.resolve().then(t.bind(t,19202)),Promise.resolve().then(t.bind(t,9443)),Promise.resolve().then(t.bind(t,88587)),Promise.resolve().then(t.bind(t,64300)),Promise.resolve().then(t.bind(t,70089)),Promise.resolve().then(t.bind(t,32856)),Promise.resolve().then(t.bind(t,31391)),Promise.resolve().then(t.bind(t,79607)),Promise.resolve().then(t.bind(t,5180)),Promise.resolve().then(t.bind(t,94388)),Promise.resolve().then(t.bind(t,96305)),Promise.resolve().then(t.bind(t,10944)),Promise.resolve().then(t.bind(t,52107)),Promise.resolve().then(t.bind(t,15467)),Promise.resolve().then(t.bind(t,50193)),Promise.resolve().then(t.bind(t,4549)),Promise.resolve().then(t.bind(t,59370))},52364:(e,a,t)=>{"use strict";t.d(a,{Citation:()=>s});var r=t(95155);let s=()=>(0,r.jsxs)("section",{className:"footer-section",children:[(0,r.jsx)("h2",{id:"citation",className:"responsive-text-lg font-semibold mb-4",children:"Citation"}),(0,r.jsxs)("div",{className:"footer-content",children:[(0,r.jsx)("p",{className:"responsive-text-sm mb-4",children:"For attribution in academic contexts, please cite this work as"}),(0,r.jsx)("p",{className:"responsive-text-sm mb-6 pl-4 border-l-2 border-gray-200",children:'Lee, et al., "From Digits to Decisions: How Tokenization Impacts Arithmetic in LLMs", 2024.'}),(0,r.jsx)("p",{className:"responsive-text-sm mb-2",children:"BibTeX citation"}),(0,r.jsx)("pre",{className:"responsive-text-sm bg-gray-50 p-4 rounded-md overflow-x-auto text-sm whitespace-pre-wrap",children:"@misc{lee2024digitstodecisions,\n title={From Digits to Decisions: How Tokenization Impacts Arithmetic in LLMs},\n author={Garreth Lee, Guilherme Penedo, Leandro von Werra and Thomas Wolf},\n url={https://huggingface.co/spaces/huggingface/number-tokenization-blog},\n}"})]})]})},19202:(e,a,t)=>{"use strict";t.d(a,{Footnotes:()=>i});var r=t(95155),s=t(31391);let 
i=()=>{let{footnotes:e,getFootnoteNumber:a}=(0,s.L)(),t=e=>{e.preventDefault();let a=e.currentTarget.getAttribute("href");if(a){let e=document.querySelector(a);e&&e.scrollIntoView({behavior:"smooth"})}};return(0,r.jsxs)("footer",{className:"mt-12 border-t border-zinc-200 dark:border-zinc-700",children:[(0,r.jsx)("h2",{id:"footnotes",className:"responsive-text-lg font-semibold mt-8 mb-4",children:"Footnotes"}),(0,r.jsx)("ol",{className:"responsive-text-sm space-y-4",children:Object.entries(e).map(e=>{let[s,i]=e;return(0,r.jsxs)("li",{id:"footnote-".concat(s),className:"text-gray-700",children:[(0,r.jsxs)("span",{children:[a(s),". "]}),i.text,(0,r.jsx)("a",{href:"#".concat(s),onClick:t,className:"text-primary hover:text-primary/80 ml-1",children:"[↩]"})]},s)})})]})}},9443:(e,a,t)=>{"use strict";t.d(a,{default:()=>s});var r=t(95155);function s(e){let{title:a}=e;return(0,r.jsxs)("div",{className:"relative overflow-hidden",children:[(0,r.jsxs)("div",{className:"absolute inset-0",children:[(0,r.jsx)("img",{src:"https://wallpapers.com/images/hd/renaissance-aesthetic-9qhcaymsmode7v14.jpg",alt:"Hero background",className:"absolute inset-0 w-full h-full object-cover"}),(0,r.jsx)("div",{className:"absolute inset-0 bg-black opacity-55"})]}),(0,r.jsx)("div",{className:"relative max-w-6xl mx-auto px-4 sm:px-6 lg:px-8",children:(0,r.jsx)("div",{className:"pt-48 pb-48 text-center lg:pt-48",children:(0,r.jsx)("h1",{className:"font-bold tracking-tight text-white sm:text-6xl",children:a})})})]})}},88587:(e,a,t)=>{"use strict";t.d(a,{InlineCitation:()=>n});var r=t(95155),s=t(31391),i=t(53601);function n(e){let{id:a}=e,{citations:t,getCitationNumber:n}=(0,s.g)(),l=t[a],o=n(a);return(0,r.jsx)(i.Bc,{children:(0,r.jsxs)(i.m_,{delayDuration:100,children:[(0,r.jsx)(i.k$,{asChild:!0,children:(0,r.jsx)("span",{id:"".concat(a),children:(0,r.jsx)("sup",{className:"text-xs text-gray-500",children:(0,r.jsxs)("a",{href:"#citation-".concat(a),className:"hover:text-blue-600","aria-label":"Citation 
".concat(o),children:["[",o,"]"]})})})}),(0,r.jsx)(i.ZI,{side:"top",sideOffset:4,className:"max-w-[400px]",children:(0,r.jsxs)("p",{className:"text-xs whitespace-normal",children:[(0,r.jsxs)("span",{className:"font-bold",children:[l.title," (",l.year,")"]}),(0,r.jsx)("br",{}),l.authors.join(", ")]})})]})})}},64300:(e,a,t)=>{"use strict";t.d(a,{InlineFootnote:()=>n});var r=t(95155),s=t(31391),i=t(53601);function n(e){let{id:a}=e,{footnotes:t,getFootnoteNumber:n}=(0,s.L)(),l=n(a);return(0,r.jsx)(i.Bc,{children:(0,r.jsxs)(i.m_,{delayDuration:100,children:[(0,r.jsx)(i.k$,{asChild:!0,children:(0,r.jsx)("span",{id:"".concat(a),className:"pl-0.5",children:(0,r.jsx)("sup",{className:"text-xs text-gray-500",children:(0,r.jsx)("a",{href:"#footnote-".concat(a),className:"hover:text-blue-600","aria-label":"Footnote ".concat(l),children:l})})})}),(0,r.jsx)(i.ZI,{side:"top",sideOffset:6,className:"max-w-[400px]",children:(0,r.jsx)("p",{className:"text-xs whitespace-normal",children:t[a].text})})]})})}},70089:(e,a,t)=>{"use strict";t.d(a,{References:()=>l});var r=t(95155),s=t(67396),i=t(41507),n=t(31391);let l=()=>{let{citations:e}=(0,n.g)(),a=Object.entries(e).map(e=>{let[a,t]=e;return{id:a,...t}});return(0,r.jsxs)("section",{className:"footer-section",children:[(0,r.jsx)("h2",{id:"references",className:"responsive-text-lg font-semibold",children:"References"}),(0,r.jsx)("div",{className:"footer-content",children:(0,r.jsx)("ol",{className:"list-decimal list-outside ml-6 space-y-2",children:a.map(e=>(0,r.jsx)("li",{id:"citation-".concat(e.id),className:"text-gray-700 text-xs",children:(0,r.jsxs)("div",{children:[e.authors.join(", "),', "',e.title,'"',e.journal&&(0,r.jsxs)("span",{className:"italic",children:[" ",e.journal]}),e.publisher&&(0,r.jsxs)("span",{className:"italic",children:[" ",e.publisher]}),", ",e.year,".",e.doi&&(0,r.jsxs)("span",{children:[" DOI: ",e.doi]}),e.url&&(0,r.jsxs)(s.default,{href:e.url,className:"inline-flex items-center ml-2 text-primary 
hover:text-primary/80",children:["[",(0,r.jsx)(i.A,{className:"h-4 w-4 mx-0.5"}),"Link]"]})]})},e.id))})})]})}},32856:(e,a,t)=>{"use strict";t.d(a,{TableOfContents:()=>n});var r=t(95155),s=t(12115),i=t(67396);let n=()=>{let[e,a]=(0,s.useState)([]),[t,n]=(0,s.useState)("");(0,s.useEffect)(()=>{let e=document.querySelectorAll("h2, h3, h4, h5, h6");a(Array.from(e).map(e=>({id:e.id,text:e.textContent||"",level:parseInt(e.tagName[1])})).slice(1));let t=new IntersectionObserver(e=>{e.forEach(e=>{e.isIntersecting&&e.target.id&&n(e.target.id)})},{rootMargin:"-20px 0px -80% 0px"});return e.forEach(e=>{t.observe(e)}),()=>{e.forEach(e=>{t.unobserve(e)})}},[]);let l=(e,a)=>{e.preventDefault();let t=document.getElementById(a);if(t){let e=document.body.getBoundingClientRect().top,a=t.getBoundingClientRect().top;window.scrollTo({top:a-e-80-20,behavior:"smooth"})}};return(0,r.jsxs)("nav",{className:"w-full",children:[(0,r.jsx)("h2",{className:"text-sm font-bold mb-3",children:"Table of Contents"}),(0,r.jsx)("ul",{className:"text-sm space-y-1.5",children:e.map(e=>{let a=t===e.id;return(0,r.jsx)("li",{className:"\n ".concat(2===e.level?"ml-0":"ml-3","\n "),children:(0,r.jsx)(i.default,{href:"#".concat(e.id),className:"\n text-gray-600 hover:text-gray-900 transition-colors duration-200 \n block py-1 px-2 rounded-md hover:bg-gray-50 min-w-[20ch]\n ".concat(a?"bg-gray-100 text-gray-900 font-medium":"","\n "),onClick:a=>l(a,e.id),children:e.text})},e.id)})})]})}},22130:(e,a,t)=>{"use strict";t.d(a,{Wu:()=>d,ZB:()=>o,Zp:()=>n,aR:()=>l});var r=t(95155),s=t(12115),i=t(21567);let n=s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("rounded-xl border bg-card text-card-foreground shadow",t),...s})});n.displayName="Card";let l=s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("flex flex-col space-y-1.5 p-6",t),...s})});l.displayName="CardHeader";let 
o=s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("font-semibold leading-none tracking-tight",t),...s})});o.displayName="CardTitle",s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("text-sm text-muted-foreground",t),...s})}).displayName="CardDescription";let d=s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("p-6 pt-0",t),...s})});d.displayName="CardContent",s.forwardRef((e,a)=>{let{className:t,...s}=e;return(0,r.jsx)("div",{ref:a,className:(0,i.cn)("flex items-center p-6 pt-0",t),...s})}).displayName="CardFooter"},53601:(e,a,t)=>{"use strict";t.d(a,{Bc:()=>l,ZI:()=>c,k$:()=>d,m_:()=>o});var r=t(95155),s=t(12115),i=t(12134),n=t(21567);let l=i.Kq,o=i.bL,d=i.l9,c=s.forwardRef((e,a)=>{let{className:t,sideOffset:s=4,...l}=e;return(0,r.jsx)(i.ZL,{children:(0,r.jsx)(i.UC,{ref:a,sideOffset:s,className:(0,n.cn)("z-50 overflow-hidden rounded-md bg-primary px-3 py-1.5 text-xs text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",t),...l})})});c.displayName=i.UC.displayName},31391:(e,a,t)=>{"use strict";t.d(a,{CitationProvider:()=>o,g:()=>d,L:()=>c});var r=t(95155),s=t(12115);let i={mirzadeh2024:{authors:["Mirzadeh, I.","Alizadeh, K.","Shahrokhi, H.","Tuzel, O.","Bengio, S.","Farajtabar, M."],year:2024,title:"GSM-Symbolic: Understanding the Limitations of Mathematical Reasoning in Large Language Models",archivePrefix:"arXiv",eprint:"2410.05229",primaryClass:"cs.LG",url:"https://arxiv.org/pdf/2410.05229"},xu2024:{authors:["Xu, N.","Ma, X."],year:2024,title:"LLM The Genius Paradox: A Linguistic and Math Expert's Struggle with Simple Word-based Counting 
Problems",archivePrefix:"arXiv",eprint:"2410.14166",primaryClass:"cs.CL",url:"https://arxiv.org/pdf/2410.14166"},radford2019:{authors:["Radford, A.","Wu, J.","Child, R.","Luan, D.","Amodei, D.","Sutskever, I."],year:2019,title:"Language Models are Unsupervised Multitask Learners",url:"https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf"},beren2024:{authors:["Millidge, B."],year:2023,title:"Integer Tokenization is Insane",url:"https://www.beren.io/2023-02-04-Integer-tokenization-is-insane/"},touvron2023llama:{authors:["Touvron, H.","Lavril, T.","Izacard, G.","Martinet, X.","Lachaux, M.","Lacroix, T.","Rozi\xe8re, B.","Goyal, N.","Hambro, E.","Azhar, F.","Rodriguez, A.","Joulin, A.","Grave, E.","Lample, G."],year:2023,title:"LLaMA: Open and Efficient Foundation Language Models",eprint:"2302.13971",archivePrefix:"arXiv",primaryClass:"cs.CL",url:"https://arxiv.org/pdf/2302.13971"},touvron2023llama2:{authors:["Touvron, H.","Martin, L.","Stone, K.","Albert, P.","Almahairi, A.","Babaei, Y.","Bashlykov, N.","Batra, S.","Bhargava, P.","Bhosale, S.","Bikel, D.","Blecher, L.","Canton Ferrer, C.","Chen, M.","Cucurull, G.","Esiobu, D.","Fernandes, J.","Fu, J.","Fu, W.","Fuller, B.","Gao, C.","Goswami, V.","Goyal, N.","Hartshorn, A.","Hosseini, S.","Hou, R.","Inan, H.","Kardas, M.","Kerkez, V.","Khabsa, M.","Kloumann, I.","Korenev, A.","Koura, P. S.","Lachaux, M.","Lavril, T.","Lee, J.","Liskovich, D.","Lu, Y.","Mao, Y.","Martinet, X.","Mihaylov, T.","Mishra, P.","Molybog, I.","Nie, Y.","Poulton, A.","Reizenstein, J.","Rungta, R.","Saladi, K.","Schelten, A.","Silva, R.","Smith, E. M.","Subramanian, R.","Tan, X. E.","Tang, B.","Taylor, R.","Williams, A.","Kuan, J. 
X.","Xu, P.","Yan, Z.","Zarov, I.","Zhang, Y.","Fan, A.","Kambadur, M.","Narang, S.","Rodriguez, A.","Stojnic, R.","Edunov, S.","Scialom, T."],year:2023,title:"Llama 2: Open Foundation and Fine-Tuned Chat Models",eprint:"2307.09288",archivePrefix:"arXiv",primaryClass:"cs.CL",url:"https://arxiv.org/pdf/2307.09288"},deepseek2024:{authors:["DeepSeek-AI","Liu, A.","Feng, B.","Wang, B.","Wang, B.","Liu, B.","Zhao, C.","Dengr, C.","Ruan, C.","Dai, D.","Guo, D.","Yang, D.","Chen, D.","Ji, D.","Li, E.","Lin, F.","Luo, F.","Hao, G.","Chen, G.","Li, G.","Zhang, H.","Xu, H.","Yang, H.","Zhang, H.","Ding, H.","Xin, H.","Gao, H.","Li, H.","Qu, H.","Cai, J. L.","Liang, J.","Guo, J.","Ni, J.","Li, J.","Chen, J.","Yuan, J.","Qiu, J.","Song, J.","Dong, K.","Gao, K.","Guan, K.","Wang, L.","Zhang, L.","Xu, L.","Xia, L.","Zhao, L.","Zhang, L.","Li, M.","Wang, M.","Zhang, M.","Zhang, M.","Tang, M.","Li, M.","Tian, N.","Huang, P.","Wang, P.","Zhang, P.","Zhu, Q.","Chen, Q.","Du, Q.","Chen, R. J.","Jin, R. L.","Ge, R.","Pan, R.","Xu, R.","Chen, R.","Li, S. S.","Lu, S.","Zhou, S.","Chen, S.","Wu, S.","Ye, S.","Ma, S.","Wang, S.","Zhou, S.","Yu, S.","Zhou, S.","Zheng, S.","Wang, T.","Pei, T.","Yuan, T.","Sun, T.","Xiao, W. L.","Zeng, W.","An, W.","Liu, W.","Liang, W.","Gao, W.","Zhang, W.","Li, X. Q.","Jin, X.","Wang, X.","Bi, X.","Liu, X.","Wang, X.","Shen, X.","Chen, X.","Chen, X.","Nie, X.","Sun, X.","Wang, X.","Liu, X.","Xie, X.","Yu, X.","Song, X.","Zhou, X.","Yang, X.","Lu, X.","Su, X.","Wu, Y.","Li, Y. K.","Wei, Y. X.","Zhu, Y. X.","Xu, Y.","Huang, Y.","Li, Y.","Zhao, Y.","Sun, Y.","Li, Y.","Wang, Y.","Zheng, Y.","Zhang, Y.","Xiong, Y.","Zhao, Y.","He, Y.","Tang, Y.","Piao, Y.","Dong, Y.","Tan, Y.","Liu, Y.","Wang, Y.","Guo, Y.","Zhu, Y.","Wang, Y.","Zou, Y.","Zha, Y.","Ma, Y.","Yan, Y.","You, Y.","Liu, Y.","Ren, Z. 
Z.","Ren, Z.","Sha, Z.","Fu, Z.","Huang, Z.","Zhang, Z.","Xie, Z.","Hao, Z.","Shao, Z.","Wen, Z.","Xu, Z.","Zhang, Z.","Li, Z.","Wang, Z.","Gu, Z.","Li, Z.","Xie, Z."],year:2024,title:"DeepSeek-V2: A Strong, Economical, and Efficient Mixture-of-Experts Language Model",eprint:"2405.04434",archivePrefix:"arXiv",primaryClass:"cs.CL",url:"https://arxiv.org/pdf/2405.04434"},grattafiori2024:{authors:["Grattafiori, A.","Dubey, A.","Jauhri, A.","Pandey, A.","Kadian, A.","Al-Dahle, A.","Letman, A.","Mathur, A.","Schelten, A.","Vaughan, A.","Yang, A.","Fan, A.","Goyal, A.","Hartshorn, A.","Yang, A.","Mitra, A.","Sravankumar, A.","Korenev, A.","Hinsvark, A.","Rao, A.","Zhang, A.","Rodriguez, A.","Gregerson, A.","Spataru, A.","Roziere, B.","Biron, B.","Tang, B.","Chern, B.","Caucheteux, C.","Nayak, C.","Bi, C.","Marra, C.","McConnell, C.","Keller, C.","Touret, C.","Wu, C.","Wong, C.","Canton Ferrer, C.","Nikolaidis, C.","Allonsius, D.","Song, D.","Pintz, D.","Livshits, D.","Wyatt, D.","Esiobu, D.","Choudhary, D.","Mahajan, D.","Garcia-Olano, D.","Perino, D.","Hupkes, D.","Lakomkin, E.","AlBadawy, E.","Lobanova, E.","Dinan, E.","Smith, E. 
M.","Radenovic, F.","Guzm\xe1n, F.","Zhang, F.","Synnaeve, G.","Lee, G.","Lewis Anderson, G.","Thattai, G.","Nail, G.","Mialon, G.","Pang, G.","Cucurell, G.","Nguyen, H.","Korevaar, H.","Xu, H.","Touvron, H.","Zarov, I.","Arrieta Ibarra, I.","Kloumann, I.","Misra, I.","Evtimov, I.","Zhang, J.","Copet, J.","Lee, J.","Geffert, J.","Vranes, J.","Park, J.","Mahadeokar, J.","Shah, J.","van der Linde, J.","Billock, J.","Hong, J.","Lee, J.","Fu, J.","Chi, J.","Huang, J.","Liu, J.","Wang, J.","Yu, J.","Bitton, J.","Spisak, J.","Park, J.","Rocca, J.","Johnstun, J.","Saxe, J.","Jia, J.","Alwala, K.","Prasad, K.","Upasani, K.","Plawiak, K.","Li, K.","Heafield, K.","Stone, K.","El-Arini, K.","Iyer, K.","Malik, K.","Chiu, K.","Bhalla, K.","Lakhotia, K.","Rantala-Yeary, L.","van der Maaten, L.","Chen, L.","Tan, L.","Jenkins, L.","Martin, L.","Madaan, L.","Malo, L.","Blecher, L.","Landzaat, L.","de Oliveira, L.","Muzzi, M.","Pasupuleti, M.","Singh, M.","Paluri, M.","Kardas, M.","Tsimpoukelli, M.","Oldham, M.","Rita, M.","Pavlova, M.","Kambadur, M.","Lewis, M.","Si, M.","Singh, M.","Hassan, M.","Goyal, N.","Torabi, N.","Bashlykov, N.","Bogoychev, N.","Chatterji, N.","Zhang, N.","Duchenne, O.","\xc7elebi, O.","Alrassy, P.","Zhang, P.","Li, P.","Vasic, P.","Weng, P.","Bhargava, P.","Dubal, P.","Krishnan, P.","Koura, P. S.","Xu, P.","He, Q.","Dong, Q.","Ganapathy, R.","Calderer, R.","Silveira Cabral, R.","Stojnic, R.","Raileanu, R.","Maheswari, R.","Girdhar, R.","Patel, R.","Sauvestre, R.","Polidoro, R.","Sumbaly, R.","Taylor, R.","Silva, R.","Hou, R.","Wang, R.","Hosseini, S.","Chennabasappa, S.","Singh, S.","Bell, S.","Kim, S. 
S.","Edunov, S.","Nie, S.","Narang, S.","Raparthy, S.","Shen, S.","Wan, S.","Bhosale, S.","Zhang, S.","Vandenhende, S.","Batra, S.","Whitman, S.","Sootla, S.","Collot, S.","Gururangan, S.","Borodinsky, S.","Herman, T.","Fowler, T.","Sheasha, T.","Georgiou, T.","Scialom, T.","Speckbacher, T.","Mihaylov, T.","Xiao, T.","Karn, U.","Goswami, V.","Gupta, V.","Ramanathan, V.","Kerkez, V.","Gonguet, V.","Do, V.","Vogeti, V.","Albiero, V.","Petrovic, V.","Chu, W.","Xiong, W.","Fu, W.","Meers, W.","Martinet, X.","Wang, X.","Wang, X.","Tan, X. E.","Xia, X.","Xie, X.","Jia, X.","Wang, X.","Goldschlag, Y.","Gaur, Y.","Babaei, Y.","Wen, Y.","Song, Y.","Zhang, Y.","Li, Y.","Mao, Y.","Delpierre Coudert, Z.","Yan, Z.","Chen, Z.","Papakipos, Z.","Singh, A.","Srivastava, A.","Jain, A.","Kelsey, A.","Shajnfeld, A.","Gangidi, A.","Victoria, A.","Goldstand, A.","Menon, A.","Sharma, A.","Boesenberg, A.","Baevski, A.","Feinstein, A.","Kallet, A.","Sangani, A.","Teo, A.","Yunus, A.","Lupu, A.","Alvarado, A.","Caples, A.","Gu, A.","Ho, A.","Poulton, A.","Ryan, A.","Ramchandani, A.","Dong, A.","Franco, A.","Goyal, A.","Saraf, A.","Chowdhury, A.","Gabriel, A.","Bharambe, A.","Eisenman, A.","Yazdan, A.","James, B.","Maurer, B.","Leonhardi, B.","Huang, B.","Loyd, B.","De Paola, B.","Paranjape, B.","Liu, B.","Wu, B.","Ni, B.","Hancock, B.","Wasti, B.","Spence, B.","Stojkovic, B.","Gamido, B.","Montalvo, B.","Parker, C.","Burton, C.","Mejia, C.","Liu, C.","Wang, C.","Kim, C.","Zhou, C.","Hu, C.","Chu, C.","Cai, C.","Tindal, C.","Feichtenhofer, C.","Gao, C.","Civin, D.","Beaty, D.","Kreymer, D.","Li, D.","Adkins, D.","Xu, D.","Testuggine, D.","David, D.","Parikh, D.","Liskovich, D.","Foss, D.","Wang, D.","Le, D.","Holland, D.","Dowling, E.","Jamil, E.","Montgomery, E.","Hahn, E.","Wood, E.","Le, E.","Brinkman, E.","Arcaute, E.","Dunbar, E.","Smothers, E.","Sun, F.","Kreuk, F.","Tian, F.","Kokkinos, F.","Ozgenel, F.","Caggioni, F.","Kanayet, F.","Seide, F.","Medina Florez, G.","Schwarz, 
G.","Badeer, G.","Swee, G.","Sizov, G.","Zhang, G.","Lakshminarayanan, G.","Inan, H.","Shojanazeri, H.","Zou, H.","Wang, H.","Zha, H.","Habeeb, H.","Rudolph, H.","Suk, H.","Aspegren, H.","Goldman, H.","Zhan, H.","Damlaj, I.","Molybog, I.","Tufanov, I.","Leontiadis, I.","Veliche, I.","Gat, I.","Weissman, J.","Geboski, J.","Kohli, J.","Lam, J.","Asher, J.","Gaya, J.","Marcus, J.","Tang, J.","Chan, J.","Zhen, J.","Reizenstein, J.","Teboul, J.","Yang, J.","Cummings, J.","Carvill, J.","Shepard, J.","McPhie, J.","Torres, J.","Ginsburg, J.","Wang, J.","Wu, K.","U, K.","Saxena, K.","Khandelwal, K.","Zand, K.","Veeraraghavan, K.","Michelena, K.","Li, K.","Jagadeesh, K.","Huang, K.","Chawla, K.","Huang, K.","Chen, L.","Garg, L.","A, L.","Silva, L.","Bell, L.","Zhang, L.","Guo, L.","Yu, L.","Moshkovich, L.","Wehrstedt, L.","Khabsa, M.","Avalani, M.","Bhatt, M.","Mankus, M.","Hasson, M.","Lennie, M.","Reso, M.","Groshev, M.","Naumov, M.","Lathi, M.","Keneally, M.","Liu, M.","Seltzer, M. L.","Valko, M.","Restrepo, M.","Patel, M.","Vyatskov, M.","Samvelyan, M.","Clark, M.","Macey, M.","Wang, M.","Jubert Hermoso, M.","Metanat, M.","Rastegari, M.","Bansal, M.","Santhanam, N.","Parks, N.","White, N.","Bawa, N.","Singhal, N.","Egebo, N.","Usunier, N.","Mehta, N.","Laptev, N. P.","Dong, N.","Cheng, N.","Chernoguz, O.","Hart, O.","Salpekar, O.","Kalinli, O.","Kent, P.","Parekh, P.","Saab, P.","Balaji, P.","Rittner, P.","Bontrager, P.","Roux, P.","Dollar, P.","Zvyagina, P.","Ratanchandani, P.","Yuvraj, P.","Liang, Q.","Alao, R.","Rodriguez, R.","Ayub, R.","Murthy, R.","Nayani, R.","Mitra, R.","Parthasarathy, R.","Li, R.","Hogan, R.","Wang, R.","Howes, R.","Rinott, R.","Mehta, S.","Siby, S.","Bondu, S. J.","Datta, S.","Chugh, S.","Hunt, S.","Dhillon, S.","Sidorov, S.","Pan, S.","Mahajan, S.","Verma, S.","Yamamoto, S.","Lindsay, S.","Lindsay, S.","Feng, S.","Lin, S.","Zha, S. 
C.","Patil, S.","Shankar, S.","Zhang, S.","Zhang, S.","Wang, S.","Agarwal, S.","Sajuyigbe, S.","Chintala, S.","Max, S.","Chen, S.","Kehoe, S.","Satterfield, S.","Govindaprasad, S.","Gupta, S.","Deng, S.","Cho, S.","Virk, S.","Subramanian, S.","Choudhury, S.","Goldman, S.","Remez, T.","Glaser, T.","Best, T.","Koehler, T.","Robinson, T.","Li, T.","Zhang, T.","Matthews, T.","Chou, T.","Shaked, T.","Vontimitta, V.","Ajayi, V.","Montanez, V.","Mohan, V.","Kumar, V. S.","Mangla, V.","Ionescu, V.","Poenaru, V.","Mihailescu, V. T.","Ivanov, V.","Li, W.","Wang, W.","Jiang, W.","Bouaziz, W.","Constable, W.","Tang, X.","Wu, X.","Wang, X.","Wu, X.","Gao, X.","Kleinman, Y.","Chen, Y.","Hu, Y.","Jia, Y.","Qi, Y.","Li, Y.","Zhang, Y.","Zhang, Y.","Adi, Y.","Nam, Y.","Wang, Y.","Zhao, Y.","Hao, Y.","Qian, Y.","Li, Y.","He, Y.","Rait, Z.","DeVito, Z.","Rosnbrick, Z.","Wen, Z.","Yang, Z.","Zhao, Z.","Ma, Z."],year:2024,title:"The Llama 3 Herd of Models",eprint:"2407.21783",archivePrefix:"arXiv",primaryClass:"cs.AI",url:"https://arxiv.org/abs/2407.21783"},singh2024:{authors:["Singh, A. K.","Strouse, D. J."],year:2024,title:"Tokenization counts: the impact of tokenization on arithmetic in frontier LLMs",eprint:"2402.14903",archivePrefix:"arXiv",primaryClass:"cs.CL",url:"https://arxiv.org/pdf/2402.14903"},claude2024:{authors:["Buckley, M."],year:2024,title:"Right to Left (R2L) Integer Tokenization",url:"https://www.beren.io/2024-07-07-Right-to-Left-Integer-Tokenization/"},madaan2024:{authors:["Madaan, L.","Singh, A. K.","Schaeffer, R.","Poulton, A.","Koyejo, S.","Stenetorp, P.","Narang, S.","Hupkes, D."],year:2024,title:"Quantifying Variance in Evaluation Benchmarks",eprint:"2406.10229",archivePrefix:"arXiv",primaryClass:"cs.LG",url:"https://arxiv.org/pdf/2406.10229"}},n={"r2l-footnote":{text:"For example, 1234 will be tokenized to [1,2,3,4] regardless if we tokenize from the left or right"},"same-tokens-footnote":{text:"This only applies to non-carry additions. 
For carry additions, the resulting number will be tokenized differently depending on the direction. For example, 999 + 111 = 1110 would be tokenized as [1, 110] in L2R but [111, 0] in R2L."}},l=(0,s.createContext)(null);function o(e){let{children:a}=e,t=Object.keys(i),s=Object.keys(n);return(0,r.jsx)(l.Provider,{value:{citations:i,footnotes:n,getCitationNumber:e=>t.indexOf(e)+1,getFootnoteNumber:e=>s.indexOf(e)+1},children:a})}function d(){let e=(0,s.useContext)(l);if(!e)throw Error("useCitations must be used within CitationProvider");return e}function c(){let e=(0,s.useContext)(l);if(!e)throw Error("useFootnotes must be used within CitationProvider");return e}},21567:(e,a,t)=>{"use strict";t.d(a,{cn:()=>i});var r=t(43463),s=t(69795);function i(){for(var e=arguments.length,a=Array(e),t=0;t<e;t++)a[t]=arguments[t];return(0,s.QP)((0,r.$)(a))}},79607:(e,a,t)=>{"use strict";t.d(a,{default:()=>u});var r=t(95155),s=t(22130),i=t(58581),n=t(394),l=t(15255),o=t(12800),d=t(66963),c=t(27112),h=t(86354),m=t(87162),x=t(67820);let u=()=>(0,r.jsx)("div",{className:"w-full h-[500px]",children:(0,r.jsx)(s.Zp,{className:"w-full h-full",children:(0,r.jsx)(s.Wu,{className:"w-full h-full",children:(0,r.jsx)(i.u,{width:"100%",height:"100%",children:(0,r.jsxs)(n.E,{data:[{operation:"add",l2r:.30685,r2l:.31925,l2r_std:.07,r2l_std:.07},{operation:"divide",l2r:.3184,r2l:.29065,l2r_std:.05,r2l_std:.05},{operation:"multiply",l2r:.15735,r2l:.16205,l2r_std:.01,r2l_std:.01},{operation:"subtract",l2r:.1812,r2l:.1737,l2r_std:.07,r2l_std:.075}],margin:{top:20,right:30,left:20,bottom:5},children:[(0,r.jsx)(l.d,{strokeDasharray:"3 3"}),(0,r.jsx)(o.W,{dataKey:"operation"}),(0,r.jsx)(d.h,{domain:[0,.45],label:{value:"Accuracy",angle:-90,position:"insideLeft"}}),(0,r.jsx)(c.m,{content:(0,r.jsx)(e=>{let{active:a,payload:t,label:s}=e;return a&&t&&t.length?(0,r.jsx)("div",{className:"bg-white p-2 border border-gray-200 rounded 
shadow",children:t.map((e,a)=>(0,r.jsx)("p",{className:"text-sm",children:"".concat(e.name,": ").concat(e.value.toFixed(3)," \xb1 ").concat(e.payload["".concat(e.dataKey,"_std")].toFixed(3))},a))}):null},{})}),(0,r.jsx)(h.s,{}),(0,r.jsx)(m.y,{dataKey:"l2r",name:"Pure-BPE L2R",fill:"#ff9999",opacity:.8,children:(0,r.jsx)(x.u,{dataKey:"l2r_std",width:4,strokeWidth:1.5,stroke:"#374151"})}),(0,r.jsx)(m.y,{dataKey:"r2l",name:"Pure-BPE R2L",fill:"#99e6e6",opacity:.8,children:(0,r.jsx)(x.u,{dataKey:"r2l_std",width:4,strokeWidth:1.5,stroke:"#374151"})})]})})})})})},5180:(e,a,t)=>{"use strict";t.d(a,{default:()=>n});var r=t(95155),s=t(12115),i=t(22130);let n=()=>{let[e,a]=(0,s.useState)("Pure-BPE Tokenizer Model"),[t,n]=(0,s.useState)(null),[l,o]=(0,s.useState)(null),d=[.82,.79,.85,.77];return(0,r.jsx)(i.Zp,{className:"w-full px-4",children:(0,r.jsxs)(i.Wu,{className:"space-y-4",children:[(0,r.jsxs)("div",{className:"py-4",children:[(0,r.jsx)("p",{className:"responsive-text-sm text-muted-foreground mb-2",children:"Select model with highest standard deviation across evals"}),(0,r.jsx)("div",{className:"grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-2",children:["Three-Digit L2R Model","Three-Digit R2L Model","Pure-BPE Tokenizer Model"].map((t,s)=>(0,r.jsxs)("div",{onClick:()=>a(t),className:"border rounded-md p-2 flex flex-col items-center justify-center min-h-[60px] cursor-pointer transition-all hover:scale-[1.02] active:scale-[0.98] ".concat(t===e?"border-2 border-blue-500 bg-blue-50/50":"hover:bg-gray-50"),children:[(0,r.jsx)("div",{className:"responsive-text-sm text-muted-foreground text-center",children:t}),(0,r.jsxs)("div",{className:"responsive-text-sm text-muted-foreground mt-1",children:["Οƒ = \xb1",(.02+.01*s).toFixed(3)]}),t===e&&(0,r.jsx)("div",{className:"responsive-text-xs text-blue-500 mt-0.5",children:"Highest variation"})]},s))})]}),(0,r.jsxs)("div",{className:"py-4",children:[(0,r.jsx)("p",{className:"text-xs text-muted-foreground mb-2",children:"Train 
same model with different random seeds"}),(0,r.jsx)("div",{className:"grid grid-cols-4 gap-2",children:[1,2,3,4].map(a=>(0,r.jsxs)("div",{onMouseEnter:()=>n(a),onMouseLeave:()=>n(null),className:"border rounded-md p-2 flex flex-col items-center justify-center h-[60px] transition-all hover:scale-105 ".concat(t===a?"shadow-md":""," ").concat(1===a?"bg-blue-50":2===a?"bg-green-50":3===a?"bg-yellow-50":"bg-red-50"),children:[(0,r.jsx)("div",{className:"text-xs text-muted-foreground",children:e}),(0,r.jsxs)("div",{className:"text-xs text-muted-foreground mt-1",children:["Seed ",a]})]},a))})]}),(0,r.jsxs)("div",{className:"py-4",children:[(0,r.jsxs)("p",{className:"text-xs text-muted-foreground mb-2",children:[(0,r.jsx)("strong",{children:"For each task, "})," evaluate all models"]}),(0,r.jsx)("div",{className:"space-y-1",children:["A","B","..."].map((e,a)=>(0,r.jsxs)("div",{onMouseEnter:()=>o(e),onMouseLeave:()=>o(null),className:"grid grid-cols-5 gap-2 transition-transform hover:translate-x-1",children:[(0,r.jsx)("div",{className:"flex items-center justify-center h-[40px]",children:(0,r.jsxs)("span",{className:"text-xs text-muted-foreground",children:["Task ",e]})}),d.map((t,s)=>(0,r.jsx)("div",{className:"rounded-md p-2 flex items-center justify-center h-[40px] transition-transform hover:scale-105 ".concat(l===e?"shadow-sm":""," ").concat(0===s?"bg-blue-50":1===s?"bg-green-50":2===s?"bg-yellow-50":"bg-red-50"),children:(0,r.jsx)("span",{className:"text-xs text-muted-foreground",children:"..."===e?"...":0===a?t.toFixed(2):(.87*t).toFixed(2)})},s))]},e))})]}),(0,r.jsxs)("div",{className:"py-4",children:[(0,r.jsx)("p",{className:"text-xs text-muted-foreground mb-2",children:"Calculate task-specific standard deviations"}),(0,r.jsx)("div",{className:"space-y-1",children:["A","B","..."].map(e=>(0,r.jsxs)("div",{className:"grid grid-cols-5 gap-2 transition-transform hover:translate-x-1",children:[(0,r.jsx)("div",{className:"flex items-center justify-center 
h-[40px]",children:(0,r.jsxs)("span",{className:"text-xs text-muted-foreground",children:["Task ",e]})}),(0,r.jsx)("div",{className:"col-span-4 rounded-md p-2 flex items-center justify-center h-[40px] bg-blue-50 transition-transform hover:scale-[1.02]",children:(0,r.jsx)("span",{className:"text-xs text-muted-foreground",children:"..."===e?"...":"Οƒ = \xb1".concat(("A"===e?.034:.0299).toFixed(3))})})]},e))})]}),(0,r.jsxs)("p",{className:"py-4 text-xs text-muted-foreground italic",children:["For task A, we report Οƒ = 0.034 as the standard deviation for ",(0,r.jsx)("strong",{children:"all models"})]})]})})}},94388:(e,a,t)=>{"use strict";t.d(a,{default:()=>n});var r=t(95155),s=t(12115),i=t(22130);let n=()=>{let[e,a]=(0,s.useState)(null);return(0,r.jsxs)(i.Zp,{className:"w-full max-w-2xl bg-gradient-to-br from-background to-muted/20",children:[(0,r.jsx)(i.aR,{className:"p-4 md:p-6",children:(0,r.jsx)(i.ZB,{className:"text-lg md:text-xl lg:text-2xl font-bold text-center",children:"Average Accuracy in Addition Tasks"})}),(0,r.jsx)(i.Wu,{className:"p-4 md:p-6 mt-4",children:(0,r.jsx)("div",{className:"h-[300px] md:h-[400px] w-full relative",children:(0,r.jsx)("div",{className:"h-full flex items-end justify-around",children:[{name:"Left-to-Right Tokenizer (used in Llama 3)",value:.699},{name:"Right-to-Left Tokenizer",value:.791}].map(t=>(0,r.jsxs)("div",{className:"flex flex-col items-center",children:[(0,r.jsx)("div",{className:"w-12 md:w-20 lg:w-24 rounded-t-sm transition-all duration-200 ".concat(e===t.name?"bg-[#99e6e6] scale-105":"bg-[#99e6e6]"),style:{height:"".concat((t.value-.6)/(.8-.6)*300,"px")},onMouseEnter:()=>a(t.name),onMouseLeave:()=>a(null),children:(0,r.jsx)("div",{className:"text-center mt-2 text-xs md:text-sm lg:text-base transition-opacity duration-200 font-bold ".concat(e===t.name?"opacity-100":"opacity-80"),children:t.value.toFixed(3)})}),(0,r.jsx)("div",{className:"mt-4 text-xs md:text-sm lg:text-base text-center whitespace-normal 
md:whitespace-nowrap max-w-[120px] md:max-w-none transition-all duration-200 ".concat(e===t.name?"font-bold scale-105":""),children:t.name})]},t.name))})})})]})}},96305:(e,a,t)=>{"use strict";t.d(a,{default:()=>n});var r=t(95155),s=t(12115),i=t(22130);let n=()=>{let e=Array.from({length:40},(e,a)=>25*a+1),a=e=>new Set(["521","527","531","532","534","539","541","542","543","547","564","566","567","569","611","619","621","622","624","631","632","633","634","636","637","638","639","664","711","715","716","717","719","721","722","723","724","726","731","732","734","735","737","738","739","741","742","743","744","746","749","761","764","766","769","788","791","811","812","813","814","816","817","818","819","821","822","823","824","826","827","828","829","831","832","834","835","836","837","838","839","841","842","843","844","845","846","847","848","849","851","852","853","854","856","857","858","859","861","862","863","865","867","868","869","871","872","873","874","876","878","879","881","890","891","892","894","895","897","898","902","903","904","906","907","908","912","913","914","917","918","919","921","922","923","924","926","927","928","929","931","932","933","934","935","936","937","938","939","941","942","943","944","945","946","947","948","955","957","958","959","961","962","963","964","965","966","967","971","972","973","974","976","977","979","981","982","983","984","988","991"]).has(e.toString())?"bg-red-200":"bg-[#99e6e6]";return(0,r.jsxs)(i.Zp,{className:"w-full",children:[(0,r.jsx)(i.aR,{children:(0,r.jsx)(i.ZB,{className:"text-lg sm:text-xl font-semibold text-center",children:"BPE (GPT2) Tokenization Heatmap for Numbers 1-1000"})}),(0,r.jsxs)(i.Wu,{children:[(0,r.jsxs)("div",{className:"flex flex-wrap justify-center gap-2 sm:gap-4 mb-2 sm:mb-4",children:[(0,r.jsx)("div",{className:"flex items-center gap-1 sm:gap-2",children:(0,r.jsx)("span",{className:"text-[10px] sm:text-xs",children:"This number consists of "})}),(0,r.jsxs)("div",{className:"flex 
items-center gap-1 sm:gap-2",children:[(0,r.jsx)("div",{className:"w-3 h-3 sm:w-4 sm:h-4 bg-[#99e6e6]"}),(0,r.jsx)("span",{className:"text-[10px] sm:text-xs",children:"1 token"})]}),(0,r.jsxs)("div",{className:"flex items-center gap-1 sm:gap-2",children:[(0,r.jsx)("div",{className:"w-3 h-3 sm:w-4 sm:h-4 bg-red-200"}),(0,r.jsx)("span",{className:"text-[10px] sm:text-xs",children:"2 tokens"})]})]}),(0,r.jsx)("div",{className:"overflow-x-auto flex justify-center",children:(0,r.jsx)("div",{className:"min-w-[300px] text-xs sm:min-w-[800px] sm:text-sm",children:(0,r.jsx)("div",{className:"grid",style:{gridTemplateColumns:"repeat(40, 22px)"},children:e.map(e=>(0,r.jsx)(s.Fragment,{children:Array.from({length:25},(t,s)=>{let i=e+s;return(0,r.jsx)("div",{className:"w-4 h-4 sm:w-6 sm:h-6 border border-gray-100 ".concat(a(i)," flex items-center justify-center"),children:(0,r.jsx)("span",{className:"text-[6px] sm:text-[8px] md:text-[8px] font-bold",children:i})},"cell-".concat(i))})},"row-".concat(e)))})})})]})]})}},10944:(e,a,t)=>{"use strict";t.d(a,{default:()=>o});var r=t(95155),s=t(12115),i=t(22130);let n={MAWPS:{"Pure BPE":[.003,.014,.042,.076,.087,.121,.135,.155,.166,.203,.234,.217,.237,.234,.251,.265,.27,.299,.282,.296,.287,.293,.335,.33,.327,.307,.335,.315,.33,.321],"Three-Digit L2R":[0,.034,.062,.068,.115,.135,.158,.172,.172,.228,.186,.248,.254,.299,.282,.332,.31,.327,.341,.33,.349,.352,.335,.383,.383,.369,.392,.392,.406,.394],"Three-Digit R2L":[0,.025,.059,.076,.096,.115,.13,.155,.144,.203,.231,.268,.239,.242,.245,.273,.273,.282,.293,.304,.318,.335,.33,.335,.346,.361,.344,.355,.355,.361],"Single-Digit":[0,.042,.037,.051,.082,.127,.161,.166,.189,.214,.234,.268,.254,.27,.301,.346,.31,.301,.344,.349,.358,.341,.355,.344,.363,.358,.372,.352,.377,.358]},SVAMP:{"Pure BPE":[0,.02,.055,.06,.065,.09,.13,.115,.14,.115,.14,.13,.16,.16,.16,.15,.2,.2,.205,.2,.22,.185,.205,.185,.245,.22,.215,.175,.205,.205],"Three-Digit 
L2R":[.005,.02,.03,.055,.085,.095,.09,.105,.115,.135,.15,.135,.165,.18,.185,.215,.25,.195,.225,.205,.2,.22,.205,.21,.24,.22,.255,.225,.235,.27],"Three-Digit R2L":[0,.025,.035,.06,.06,.105,.12,.13,.135,.16,.155,.165,.155,.2,.195,.215,.25,.21,.225,.235,.205,.27,.255,.22,.285,.225,.26,.25,.26,.265],"Single-Digit":[0,.01,.02,.06,.08,.105,.1,.135,.165,.17,.12,.16,.185,.165,.19,.17,.185,.205,.185,.205,.22,.22,.19,.21,.23,.2,.2,.23,.23,.265]}},l=(e,a)=>{if(0===a)return e;let t=[];for(let r=0;r<e.length;r++){let s=0,i=0;for(let t=Math.max(0,r-a);t<=Math.min(e.length-1,r+a);t++)s+=e[t],i++;t[r]=s/i}return t},o=()=>{let[e,a]=(0,s.useState)("MAWPS"),[t,o]=(0,s.useState)(null),[d,c]=(0,s.useState)(null),[h,m]=(0,s.useState)(new Set([])),[x,u]=(0,s.useState)(5),g=Object.entries(n[e]).map(e=>{let[a,t]=e;return{label:a,values:l(t,x),color:({"Pure BPE":"rgb(255, 99, 132)","Three-Digit L2R":"rgb(75, 192, 192)","Three-Digit R2L":"rgb(153, 102, 255)","Single-Digit":"rgb(255, 159, 64)"})[a]}}),p=Math.max(...g.flatMap(e=>e.values)),f=e=>Math.min(740,Math.max(60,60+e/29*680)),v=e=>Math.min(340,Math.max(60,340-e/p*280)),y=e=>e.map((e,a)=>"".concat(0===a?"M":"L"," ").concat(f(a)," ").concat(v(e))).join(" "),b=Array.from({length:15},(e,a)=>2*a),j=e=>{let a=new Set(h);a.has(e)?a.delete(e):a.add(e),m(a)};return(0,r.jsxs)(i.Zp,{className:"w-full max-w-5xl",children:[(0,r.jsxs)(i.aR,{children:[(0,r.jsxs)("div",{className:"flex items-center justify-center gap-4",children:[(0,r.jsxs)("select",{className:"border rounded p-1",value:e,onChange:e=>a(e.target.value),children:[(0,r.jsx)("option",{value:"MAWPS",children:"MAWPS"}),(0,r.jsx)("option",{value:"SVAMP",children:"SVAMP"})]}),(0,r.jsxs)("div",{className:"flex items-center 
gap-2",children:[(0,r.jsx)("span",{className:"text-sm",children:"Smoothing:"}),(0,r.jsx)("input",{type:"range",min:"0",max:"5",value:x,onChange:e=>u(parseInt(e.target.value)),className:"w-24"}),(0,r.jsx)("span",{className:"text-sm",children:x})]})]}),(0,r.jsx)("div",{className:"text-gray-500 text-center",children:"Evaluated on 5-shot problems; Quasi-Exact Match (Accuracy)"})]}),(0,r.jsx)(i.Wu,{className:"flex justify-center",children:(0,r.jsxs)("svg",{width:800,height:400,children:[(0,r.jsx)("line",{x1:60,y1:340,x2:740,y2:340,stroke:"black"}),(0,r.jsx)("line",{x1:60,y1:60,x2:60,y2:340,stroke:"black"}),b.map(e=>(0,r.jsxs)("g",{transform:"translate(".concat(f(e),", ").concat(340,")"),children:[(0,r.jsx)("line",{y2:"6",stroke:"black"}),(0,r.jsxs)("text",{y:"20",textAnchor:"middle",children:[2*e,"k"]})]},e)),g.map((e,a)=>!h.has(e.label)&&(0,r.jsxs)("g",{children:[(0,r.jsx)("path",{d:y(e.values),stroke:e.color,fill:"none",strokeWidth:t===e.label?3:2,opacity:null===t||t===e.label?1:.3,onMouseEnter:()=>o(e.label),onMouseLeave:()=>o(null)}),null!==d&&(0,r.jsx)("circle",{cx:f(d),cy:v(e.values[d]),r:4,fill:e.color})]},a)),g.map((e,a)=>(0,r.jsxs)("g",{transform:"translate(".concat(400-120*g.length/2+120*a,", ").concat(30,")"),style:{cursor:"pointer"},opacity:h.has(e.label)||null!==t&&t!==e.label?.3:1,onMouseEnter:()=>o(e.label),onMouseLeave:()=>o(null),onClick:()=>j(e.label),children:[(0,r.jsx)("line",{x1:0,y1:0,x2:20,y2:0,stroke:e.color,strokeWidth:2}),(0,r.jsx)("text",{x:25,y:4,fontSize:12,children:e.label})]},a)),(0,r.jsx)("text",{x:400,y:395,textAnchor:"middle",children:"Training Steps"}),(0,r.jsx)("text",{transform:"rotate(-90) translate(-".concat(200,", 25)"),textAnchor:"middle",children:"Quasi-Exact Match 
(Accuracy)"}),null!==d&&(0,r.jsx)("g",{transform:"translate(".concat(Math.min(610,Math.max(60,f(d))),",").concat(Math.min(340-(12+12*g.filter(e=>!h.has(e.label)).length),Math.max(60,Math.min(...g.filter(e=>!h.has(e.label)).map(e=>v(e.values[d])))-15)),")"),children:(()=>{let e=g.filter(e=>!h.has(e.label)),a=[(0,r.jsxs)("text",{x:70,y:0,textAnchor:"middle",fontWeight:"bold",fontSize:11,children:[2*d,"k steps"]},"header"),...e.map((e,a)=>(0,r.jsxs)("text",{x:70,y:18+12*a,textAnchor:"middle",fill:e.color,fontSize:11,children:[e.label,": ",e.values[d].toFixed(3)]},a+1))],t=Math.max(...a.map(e=>6.5*e.props.children.toString().length))+5,s=25+12*e.length;return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)("rect",{x:10,y:-12,width:t,height:s,rx:4,fill:"white",stroke:"black",strokeWidth:.5,opacity:.95}),a]})})()}),(0,r.jsx)("rect",{x:60,y:60,width:680,height:280,fill:"transparent",onMouseMove:e=>{let a=e.currentTarget.getBoundingClientRect(),t=Math.round((e.clientX-a.left)/680*29);t>=0&&t<30&&c(t)},onMouseLeave:()=>c(null)})]})})]})}},52107:(e,a,t)=>{"use strict";t.d(a,{default:()=>u});var r=t(95155);t(12115);var s=t(22130),i=t(58581),n=t(394),l=t(15255),o=t(12800),d=t(66963),c=t(27112),h=t(86354),m=t(87162),x=t(67820);let u=()=>(0,r.jsx)("div",{className:"w-full h-[500px]",children:(0,r.jsx)(s.Zp,{className:"w-full h-full",children:(0,r.jsx)(s.Wu,{className:"w-full h-full",children:(0,r.jsx)(i.u,{width:"100%",height:"100%",children:(0,r.jsxs)(n.E,{data:[{operation:"add",l2r:.37,r2l:.4,l2r_std:.04,r2l_std:.03},{operation:"divide",l2r:.31,r2l:.35,l2r_std:.02,r2l_std:.02},{operation:"multiply",l2r:.17,r2l:.17,l2r_std:.01,r2l_std:.01},{operation:"subtract",l2r:.19,r2l:.23,l2r_std:.03,r2l_std:.03}],margin:{top:20,right:30,left:20,bottom:5},children:[(0,r.jsx)(l.d,{strokeDasharray:"3 
3"}),(0,r.jsx)(o.W,{dataKey:"operation"}),(0,r.jsx)(d.h,{domain:[0,.45],label:{value:"Accuracy",angle:-90,position:"insideLeft"}}),(0,r.jsx)(c.m,{content:(0,r.jsx)(e=>{let{active:a,payload:t,label:s}=e;return a&&t&&t.length?(0,r.jsx)("div",{className:"bg-white p-2 border border-gray-200 rounded shadow",children:t.map((e,a)=>(0,r.jsx)("p",{className:"text-sm",children:"".concat(e.name,": ").concat(e.value.toFixed(2)," \xb1 ").concat(e.payload["".concat(e.dataKey,"_std")].toFixed(2))},a))}):null},{})}),(0,r.jsx)(h.s,{}),(0,r.jsx)(m.y,{dataKey:"l2r",name:"Three-digit L2R",fill:"#ff9999",opacity:.8,children:(0,r.jsx)(x.u,{dataKey:"l2r_std",width:4,strokeWidth:1.5,stroke:"#374151"})}),(0,r.jsx)(m.y,{dataKey:"r2l",name:"Three-digit R2L",fill:"#99e6e6",opacity:.8,children:(0,r.jsx)(x.u,{dataKey:"r2l_std",width:4,strokeWidth:1.5,stroke:"#374151"})})]})})})})})},15467:(e,a,t)=>{"use strict";t.d(a,{default:()=>g});var r=t(95155),s=t(12115),i=t(22130),n=t(58581),l=t(94973),o=t(15255),d=t(12800),c=t(66963),h=t(27112),m=t(86354),x=t(16161);let 
u=[{digits:1,l2r:1,r2l:1,l2r_carry:1,r2l_carry:1},{digits:2,l2r:1,r2l:1,l2r_carry:1,r2l_carry:1},{digits:3,l2r:1,r2l:1,l2r_carry:1,r2l_carry:.886},{digits:4,l2r:.942,r2l:.922,l2r_carry:.974,r2l_carry:.941},{digits:5,l2r:.95,r2l:.939,l2r_carry:.953,r2l_carry:.981},{digits:6,l2r:.959,r2l:.959,l2r_carry:.951,r2l_carry:.719},{digits:7,l2r:.871,r2l:.906,l2r_carry:.829,r2l_carry:.867},{digits:8,l2r:.922,r2l:.93,l2r_carry:.782,r2l_carry:.964},{digits:9,l2r:.956,r2l:.956,l2r_carry:.837,r2l_carry:.6},{digits:10,l2r:.828,r2l:.857,l2r_carry:.275,r2l_carry:.8},{digits:11,l2r:.859,r2l:.893,l2r_carry:.638,r2l_carry:.905},{digits:12,l2r:.886,r2l:.886,l2r_carry:.707,r2l_carry:.662},{digits:13,l2r:.761,r2l:.808,l2r_carry:.218,r2l_carry:.74},{digits:14,l2r:.748,r2l:.814,l2r_carry:.382,r2l_carry:.824},{digits:15,l2r:.762,r2l:.762,l2r_carry:.439,r2l_carry:.448},{digits:16,l2r:.483,r2l:.695,l2r_carry:.286,r2l_carry:.654},{digits:17,l2r:.485,r2l:.683,l2r_carry:.278,r2l_carry:.692},{digits:18,l2r:.484,r2l:.484,l2r_carry:.291,r2l_carry:.144},{digits:19,l2r:.474,r2l:.59,l2r_carry:.151,r2l_carry:.548},{digits:20,l2r:.446,r2l:.583,l2r_carry:.171,r2l_carry:.578}],g=()=>{let[e,a]=(0,s.useState)({l2r:!0,r2l:!0,l2r_carry:!1,r2l_carry:!1}),t=e=>{a(a=>({...a,[e]:!a[e]}))};return(0,r.jsxs)(i.Zp,{className:"w-full max-w-4xl mx-auto",children:[(0,r.jsxs)(i.aR,{children:[(0,r.jsx)(i.ZB,{className:"text-xl font-bold text-center",children:"R2L Tokenization achieves better performance in addition tasks"}),(0,r.jsx)("div",{className:"flex justify-center mt-4",children:(0,r.jsxs)("div",{className:"flex flex-wrap gap-2 p-1 bg-muted/20 rounded-lg",children:[(0,r.jsx)("button",{onClick:()=>t("l2r"),className:"px-4 py-1.5 text-sm font-medium rounded-md transition-all duration-200 ease-out transform hover:scale-105 active:scale-95 ".concat(e.l2r?"bg-primary text-primary-foreground shadow-md":"bg-background/80 text-muted-foreground 
hover:bg-background"),children:"L2R"}),(0,r.jsx)("button",{onClick:()=>t("r2l"),className:"px-4 py-1.5 text-sm font-medium rounded-md transition-all duration-200 ease-out transform hover:scale-105 active:scale-95 ".concat(e.r2l?"bg-primary text-primary-foreground shadow-md":"bg-background/80 text-muted-foreground hover:bg-background"),children:"R2L"}),(0,r.jsx)("button",{onClick:()=>t("l2r_carry"),className:"px-4 py-1.5 text-sm font-medium rounded-md transition-all duration-200 ease-out transform hover:scale-105 active:scale-95 ".concat(e.l2r_carry?"bg-primary text-primary-foreground shadow-md":"bg-background/80 text-muted-foreground hover:bg-background"),children:"L2R Carry"}),(0,r.jsx)("button",{onClick:()=>t("r2l_carry"),className:"px-4 py-1.5 text-sm font-medium rounded-md transition-all duration-200 ease-out transform hover:scale-105 active:scale-95 ".concat(e.r2l_carry?"bg-primary text-primary-foreground shadow-md":"bg-background/80 text-muted-foreground hover:bg-background"),children:"R2L Carry"})]})})]}),(0,r.jsx)(i.Wu,{children:(0,r.jsx)(n.u,{width:"100%",height:500,children:(0,r.jsxs)(l.b,{data:u,margin:{top:20,right:30,left:20,bottom:10},children:[(0,r.jsx)(o.d,{strokeDasharray:"3 3"}),(0,r.jsx)(d.W,{dataKey:"digits",label:{value:"Digits",position:"insideBottomRight",offset:-10}}),(0,r.jsx)(c.h,{label:{value:"Accuracy",angle:-90,position:"insideLeft"},domain:[0,1]}),(0,r.jsx)(h.m,{}),(0,r.jsx)(m.s,{iconSize:12,wrapperStyle:{fontSize:"12px",paddingTop:"10px"}}),e.l2r&&(0,r.jsx)(x.N,{type:"monotone",dataKey:"l2r",name:"Left to Right Without Carry",stroke:"hsl(var(--chart-1))",activeDot:{r:8}}),e.r2l&&(0,r.jsx)(x.N,{type:"monotone",dataKey:"r2l",name:"Right to Left Without Carry",stroke:"hsl(var(--chart-2))",activeDot:{r:8}}),e.l2r_carry&&(0,r.jsx)(x.N,{type:"monotone",dataKey:"l2r_carry",name:"Left to Right with Carry",stroke:"hsl(var(--chart-3))",activeDot:{r:8}}),e.r2l_carry&&(0,r.jsx)(x.N,{type:"monotone",dataKey:"r2l_carry",name:"Right to Left with 
Carry",stroke:"hsl(var(--chart-4))",activeDot:{r:8}})]})})})]})}},50193:(e,a,t)=>{"use strict";t.d(a,{default:()=>i});var r=t(95155);t(12115);var s=t(22130);let i=()=>(0,r.jsx)(s.Zp,{className:"w-{3/4} max-w-4xl pt-8 px-2 bg-gradient-to-br from-background to-muted/20 mx-2",children:(0,r.jsx)(s.Wu,{className:"flex flex-col items-center",children:[{problem:"3789 + 8791 = 12580",l2r:{steps:[{tokens:["378","9"],highlight:"9"},{tokens:["879","1"],highlight:"1"},{tokens:["125","80"],highlight:"80"}]},r2l:{steps:[{tokens:["3","789"],highlight:"789"},{tokens:["8","791"],highlight:"791"},{tokens:["12","580"],highlight:"580"}]}}].map(e=>(0,r.jsxs)("div",{className:"w-full",children:[(0,r.jsx)("div",{className:"grid grid-cols-1 md:grid-cols-2 gap-4 md:gap-8 max-w-3xl mx-auto",children:[{title:"Three-digit L2R Tokenization",steps:e.l2r.steps,color:"red-200"},{title:"Three-digit R2L Tokenization",steps:e.r2l.steps,color:"[#99e6e6]"}].map(e=>{let{title:a,steps:t,color:s}=e;return(0,r.jsxs)("div",{className:"border rounded-lg p-4 md:p-8 bg-white/50 transition-all duration-200 hover:shadow-lg hover:scale-[1.02]",children:[(0,r.jsx)("p",{className:"text-lg md:text-xl font-bold mb-4 md:mb-6 text-center tracking-wide",children:a}),(0,r.jsxs)("div",{className:"font-mono flex flex-col items-center text-base md:text-lg",children:[(0,r.jsx)("div",{className:"flex flex-col items-center border-b border-gray-300 pb-1 mb-3 w-full md:w-1/2",children:t.slice(0,-1).map((e,a)=>(0,r.jsxs)("div",{className:"flex items-center space-x-2 mb-2",children:[1===a&&(0,r.jsx)("span",{className:"text-gray-500",children:"\xa0"}),(0,r.jsx)("span",{className:"bg-gray-100 px-2 py-1 transition-colors duration-200 hover:bg-gray-200",children:e.tokens[0]}),(0,r.jsx)("span",{className:"bg-".concat(s," px-2 py-1 transition-colors duration-200 hover:bg-").concat(s,"-200"),children:e.highlight}),1===a&&(0,r.jsx)("span",{className:"text-gray-500",children:"+"})]},a))}),(0,r.jsxs)("div",{className:"flex items-center 
space-x-2",children:[(0,r.jsx)("span",{className:"bg-gray-100 px-2 py-1 transition-colors duration-200 hover:bg-gray-200",children:t[2].tokens[0]}),(0,r.jsx)("span",{className:"bg-".concat(s," px-2 py-1 transition-colors duration-200 hover:bg-").concat(s,"-200"),children:t[2].highlight})]})]})]},a)})}),(0,r.jsxs)("p",{className:"text-sm md:text-base text-gray-600 mt-5 max-w-3xl mx-auto leading-relaxed bg-white/50 rounded-lg",children:["In the three-digit L2R example, ",(0,r.jsx)("span",{className:"text-red-600",children:"9"})," + ",(0,r.jsx)("span",{className:"text-red-600",children:"1"})," should map to the digit ",(0,r.jsx)("span",{className:"text-red-600",children:"0"})," but ends up grouped together with ",(0,r.jsx)("span",{className:"text-red-600",children:"8"})," to form ",(0,r.jsx)("span",{className:"text-red-600",children:"80"}),", since the first three tokens (125) were already grouped together. ",(0,r.jsx)("span",{className:"text-red-600 font-medium",children:"This 'shift' in the tokenization boundary produces additional complexity"})," in the learning process which has been shown to be detrimental to accuracy.",(0,r.jsx)("br",{}),(0,r.jsx)("br",{}),"In the three-digit R2L example, each digit of ",(0,r.jsx)("span",{className:"text-teal-600",children:"580"})," aligns neatly with its corresponding sub-operands ",(0,r.jsx)("span",{className:"text-teal-600",children:"789"})," and ",(0,r.jsx)("span",{className:"text-teal-600",children:"791"}),", which is a more intuitive grouping for the model to learn."]})]},e.problem))})})},4549:(e,a,t)=>{"use strict";t.d(a,{default:()=>m});var r=t(95155);t(12115);var s=t(22130),i=t(58581),n=t(394),l=t(15255),o=t(12800),d=t(66963),c=t(27112),h=t(87162);let m=()=>(0,r.jsxs)(s.Zp,{className:"w-full h-[500px] bg-gradient-to-br from-background to-muted/20",children:[(0,r.jsxs)(s.aR,{className:"p-4 md:p-6",children:[(0,r.jsx)(s.ZB,{className:"text-lg md:text-xl lg:text-2xl font-bold text-center",children:"How Different Tokenizers 
Perform in Arithmetic"}),(0,r.jsx)("p",{className:"text-muted-foreground text-center text-xs md:text-sm",children:"Average Accuracy Evaluated on 5-shot Questions"})]}),(0,r.jsx)(s.Wu,{className:"w-full h-[400px]",children:(0,r.jsx)(i.u,{width:"100%",height:"100%",children:(0,r.jsxs)(n.E,{data:[{method:"Pure BPE",accuracy:.24},{method:"Three-digit L2R",accuracy:.26},{method:"Three-digit R2L",accuracy:.29},{method:"Single Digit",accuracy:.35}],margin:{top:20,right:30,left:20,bottom:20},barSize:100,barGap:8,children:[(0,r.jsx)(l.d,{strokeDasharray:"3 3"}),(0,r.jsx)(o.W,{dataKey:"method",angle:0,textAnchor:"middle",height:80,interval:0,tick:e=>{let{x:a,y:t,payload:s}=e,i=s.value.split(" ");return(0,r.jsx)("g",{transform:"translate(".concat(a,",").concat(t,")"),children:i.map((e,a)=>(0,r.jsx)("text",{x:0,y:20,dy:15*a,textAnchor:"middle",fill:"#666",fontSize:12,className:"text-xs md:text-sm",children:e},a))})}}),(0,r.jsx)(d.h,{domain:[0,.4],tickFormatter:e=>e.toFixed(2),label:{value:"Accuracy",angle:-90,position:"insideLeft"}}),(0,r.jsx)(c.m,{content:(0,r.jsx)(e=>{let{active:a,payload:t,label:s}=e;return a&&t&&t.length?(0,r.jsx)("div",{className:"bg-white p-2 border border-gray-200 rounded shadow",children:(0,r.jsx)("p",{className:"text-sm",children:"".concat(t[0].payload.method,": ").concat(t[0].value.toFixed(2))})}):null},{})}),(0,r.jsx)(h.y,{dataKey:"accuracy",fill:"#99e6e6",radius:[2,2,2,2]})]})})})]})},59370:(e,a,t)=>{"use strict";t.d(a,{default:()=>n});var r=t(95155),s=t(12115),i=t(22130);let n=()=>{let[e,a]=(0,s.useState)("performance"),t=[{digits:"1 digit",gpt2:.72,llama3:.72,deepseek:.76,llama3_r2l:.75},{digits:"2 digits",gpt2:.37,llama3:.43,deepseek:.44,llama3_r2l:.46},{digits:"3 digits",gpt2:.06,llama3:.07,deepseek:.26,llama3_r2l:.11},{digits:"4 digits",gpt2:.03,llama3:.04,deepseek:.18,llama3_r2l:.06},{digits:"5 
digits",gpt2:.02,llama3:.03,deepseek:.14,llama3_r2l:.05}],n=[{operation:"add",gpt2:.31,llama3:.37,deepseek:.58,llama3_r2l:.4},{operation:"divide",gpt2:.32,llama3:.31,deepseek:.41,llama3_r2l:.35},{operation:"multiply",gpt2:.16,llama3:.17,deepseek:.18,llama3_r2l:.17},{operation:"subtract",gpt2:.18,llama3:.19,deepseek:.26,llama3_r2l:.23}],l=[{type:"float",gpt2:.14,llama3:.17,deepseek:.24,llama3_r2l:.18},{type:"integer",gpt2:.35,llama3:.41,deepseek:.47,llama3_r2l:.43}],o=["gpt2","llama3","llama3_r2l","deepseek"],d=e=>e>=.7?"bg-teal-200":e>=.4?"bg-teal-100":e>=.2?"bg-red-100":"bg-red-200",c=e=>o.reduce((a,t)=>e[t]>e[a]?t:a,o[0]);return(0,r.jsxs)(i.Zp,{className:"w-full max-w-4xl p-4 md:p-8 bg-gradient-to-br from-background to-muted/20",children:[(0,r.jsxs)(i.aR,{className:"space-y-2",children:[(0,r.jsx)(i.ZB,{className:"text-xl md:text-2xl font-semibold text-center",children:"Comparing the Same Model Trained with Various Tokenizers"}),(0,r.jsx)("p",{className:"text-muted-foreground text-center text-xs md:text-sm",children:"Average accuracy; Evaluted with 5-shot arithmetic questions"}),(0,r.jsx)("div",{className:"text-center",children:(0,r.jsx)("div",{children:(0,r.jsxs)("select",{onChange:e=>{a(e.target.value)},className:"mt-4 p-2 border rounded text-sm md:text-base",children:[(0,r.jsx)("option",{value:"digits",children:"Number of Digits"}),(0,r.jsx)("option",{value:"operation",children:"Operation"}),(0,r.jsx)("option",{value:"type",children:"Number Type"})]})})})]}),(0,r.jsx)(i.Wu,{children:(0,r.jsx)("div",{className:"relative overflow-x-auto rounded-lg border border-gray-200",children:(0,r.jsx)("div",{className:"w-full overflow-x-scroll",children:(0,r.jsxs)("table",{className:"w-full border-collapse border min-w-[600px]",children:[(0,r.jsx)("thead",{className:"border-b",children:(0,r.jsxs)("tr",{className:"border-b",children:[(0,r.jsxs)("th",{className:"px-2 md:px-4 py-2 text-[10px] sm:text-xs md:text-sm text-gray-600 border-b 
border-r",children:[(()=>{switch(e){case"digits":default:return"Number of Digits";case"operation":return"Operation";case"type":return"Number Type"}})()," ",(0,r.jsx)("span",{className:"font-bold text-gray-800 mx-1",children:"\\"})," Tokenizer"]}),o.map((e,a)=>(0,r.jsx)("th",{className:"px-2 md:px-4 py-2 text-[10px] sm:text-xs md:text-sm text-gray-600 border-b ".concat(a<o.length-1?"border-r":""),children:"gpt2"===e?"Pure BPE":"llama3"===e?"Three-digit L2R":"llama3_r2l"===e?"Three-digit R2L":"deepseek"===e?"Single-digit":""},e))]})}),(0,r.jsx)("tbody",{children:(()=>{switch(e){case"digits":default:return t;case"operation":return n;case"type":return l}})().map(e=>{let a=c(e),t="digits"in e?e.digits:"operation"in e?e.operation:e.type;return(0,r.jsxs)("tr",{className:"hover:bg-teal-50 transition-colors border-b",children:[(0,r.jsx)("td",{className:"px-2 md:px-6 py-2 md:py-4 font-medium border-r text-gray-800 text-center text-[10px] sm:text-xs md:text-sm",children:t}),o.map((t,s)=>(0,r.jsx)("td",{className:"px-2 md:px-6 py-2 md:py-4 text-center font-mono text-[10px] sm:text-xs md:text-sm ".concat(d(e[t])," transition-colors ").concat(s<o.length-1?"border-r":""),children:t===a?(0,r.jsx)("b",{children:e[t].toFixed(2)}):e[t].toFixed(2)},t))]},t)})})]})})})})]})}}},e=>{var a=a=>e(e.s=a);e.O(0,[508,441,517,358],()=>a(31654)),_N_E=e.O()}]);
index.html CHANGED
The diff for this file is too large to render. See raw diff
 
index.txt CHANGED
@@ -25,7 +25,7 @@ f:I[52107,["508","static/chunks/508-cf9c2705c445ae41.js","974","static/chunks/ap
25
  1e:I[34835,[],""]
26
  1:HL["/_next/static/media/a34f9d1faa5f3315-s.p.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
27
  2:HL["/_next/static/css/b279119c7cf0bf4b.css","style"]
28
- 0:{"P":null,"b":"5gq-4W1KcImTPfDCkpFnL","p":"","c":["",""],"i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",["$","$3","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/b279119c7cf0bf4b.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_d65c78 bg-white","children":["$","main",null,{"className":"relative min-h-screen","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]}]]}],{"children":["__PAGE__",["$","$3","c",{"children":[["$","$L6",null,{"children":[["$","$L7",null,{"title":"From Digits to 
Decisions: How Tokenization Impacts Arithmetic in LLMs"}],["$","div",null,{"className":" bg-white","children":["$","div",null,{"className":"min-h-screen","children":["$","div",null,{"className":"w-full max-w-[83rem] mx-auto px-4 sm:px-6 lg:px-8 py-4 sm:py-6 lg:py-8 flex flex-col lg:flex-row","children":[["$","aside",null,{"className":"w-full lg:w-48 xl:w-56 mb-6 sm:mb-8 lg:mb-0 lg:mr-6 xl:mr-8 flex-shrink-0","children":["$","div",null,{"className":"sticky top-16 sm:top-20 lg:top-24","children":["$","$L8",null,{}]}]}],["$","main",null,{"className":"flex-1 min-w-0","children":[["$","header",null,{"className":"mb-12 border-t border-b border-gray-100 py-6","children":["$","div",null,{"className":"flex flex-col gap-2","children":[["$","div",null,{"className":"flex items-center gap-2 text-sm","children":[["$","p",null,{"className":"font-semibold text-gray-800","children":[["$","$3","0",{"children":[false,["$","a",null,{"href":"https://huggingface.co/garrethlee","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Garreth Lee"}]]}],["$","$3","1",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/guipenedo","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Guilherme Penedo"}]]}],["$","$3","2",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/lvwerra","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Leandro von Werra"}]]}],["$","$3","3",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/thomwolf","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Thomas Wolf"}]]}]]}],["$","span",null,{"className":"text-gray-300","children":"β€’"}],["$","time",null,{"className":"text-gray-500","children":"November 29, 2024"}]]}],["$","div",null,{"className":"text-sm text-gray-500 italic","children":"Hugging Face"}]]}]}],["$","article",null,{"className":"prose prose-sm sm:prose lg:prose-lg 
xl:prose-xl","children":[["$","p",null,{"className":"responsive-text-base","children":["Recently, there has been a lot of buzz around a seemingly simple question that even state-of-the-art large language models (LLM) fail to answer correctly: ",["$","em",null,{"children":"\"Which is bigger? 9.9 or 9.11\""}]]}],["$","p",null,{"className":"responsive-text-base","children":["Despite various attempts and variations of prompting techniques, most frontier models still struggle to make an accurate comparison of the two numbers. This highlights a broader issue many of today's models encounter: they have limited mathematical reasoning capabilities",["$","$L9",null,{"id":"mirzadeh2024"}],". While there are multiple conjectures of why this is the case, including the composition of pretraining data and the model architecture itself",["$","$L9",null,{"id":"xu2024"}],", we investigate one of the most fundamental processes in LLMs,",["$","strong",null,{"children":"tokenization"}],", and how it affects a model's ability to do math, specifically arithmetic problems."]}],["$","p",null,{"className":"responsive-text-base","children":"In this blog post, we discuss:"}],["$","ol",null,{"className":"list-decimal pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"Our detailed approach in comparing different methods of number tokenization"}],["$","li",null,{"children":"Why reading from right to left is sometimes better than from left to right"}],["$","li",null,{"children":"A clear frontrunner of tokenization methods for arithmetic in LLMs"}]]}],["$","h2",null,{"id":"a-brief-history","className":"responsive-text-2xl","children":"A Brief History of Number Tokenization"}],["$","p",null,{"className":"responsive-text-base","children":["Back in 2019, The GPT2 paper detailed its use of BPE (byte-pair encoding) as a tokenization method for language models ",["$","$L9",null,{"id":"radford2019"}],". 
This approach works by merging frequently occurring subwords into single units until the vocabulary reaches a target size."]}],["$","p",null,{"className":"responsive-text-base","children":["Because of how this algorithm operates, the resulting vocabulary depends heavily on the training data fed into the tokenizer. This led to ",["$","strong",null,{"children":"inconsistencies "}],"in how numbers are encoded ",["$","$L9",null,{"id":"beren2024"}],". Commonly seen numbers (i.e. 1-100, years like 1945, etc.) in the training data will likely be represented as a single token, while less frequently seen numbers are split into multiple tokens like below:"]}],["$","div",null,{"className":"visualization-container","children":["$","$La",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":["Four years later, the herd of llamas began their stampede! Llama and Llama 2 used SentencePiece's BPE implementation with a notable tweak for numbers: ",["$","em",null,{"children":"they split all numbers into individual digits"}]," ",["$","$L9",null,{"id":"touvron2023llama"}],["$","$L9",null,{"id":"touvron2023llama2"}],". This meant there were only 10 unique tokens to represent any number, simplifying numerical representation for LLMs. Deepseek released a model much later (DeepSeek-V2) with a similar single-digit tokenizer ",["$","$L9",null,{"id":"deepseek2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":["Later on, Llama 3 took a different approach for handling numbers, tokenizing them in groups of three digits ",["$","$L9",null,{"id":"grattafiori2024"}],". 
As a result, numbers from 1 to 999 each have unique tokens, while numbers from 1000 onward are composed of these tokens."]}],["$","h3",null,{"id":"right-to-left-tokenization","className":"responsive-text-2xl","children":"A New Paradigm: Right-to-Left Tokenization"}],["$","p",null,{"className":"responsive-text-base","children":"So far, the tokenization methods we've seen \"processed\" text from left to right. For instance, if the three-digit tokenizer encounters the sequence \"12345,\" it will scan from the beginning, breaking it down into segments like \"123\" and \"45\"."}],["$","p",null,{"className":"responsive-text-base","children":[["$","strong",null,{"children":"Right-to-left (R2L) tokenization"}],", on the other hand, processes text from the end to the beginning in groups of three. Using R2L, the sequence \"12345\" would be tokenized by scanning from the right, first splitting off \"345\" and then moving to \"12.\" Recently, there has been some exploration too of forcing this R2L tokenization behaviour in frontier closed-source models, which has shown to benefit certain arithmetic operations since the R2L representation prevents the misalignment of the operands ",["$","$L9",null,{"id":"singh2024"}],". 
It has also been rumored that Claude uses this R2L tokenization method ",["$","$L9",null,{"id":"claude2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":"To better understand what misalignment looks like, let's take 3789 + 8791 as an example:"}],["$","div",null,{"className":"visualization-container","children":["$","$Lb",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"This insight suggests that three-digit R2L tokenization could potentially be improved over the standard three-digit L2R tokenization used by Llama 3."}],["$","p",null,{"className":"responsive-text-base","children":"To recap, here's an overview of the techniques used to handle number tokenization:"}],["$","div",null,{"className":"overflow-x-auto my-5 rounded-lg","children":["$","table",null,{"className":"w-full border bg-white/50 rounded-lg","children":[["$","thead",null,{"children":["$","tr",null,{"className":"border border-gray-200","children":[["$","th",null,{"className":"px-4 py-3 text-left text-sm font-semibold","children":"How numbers are tokenized"}],["$","th",null,{"className":"px-4 py-3 text-left text-sm font-semibold","children":"tokenizer (model)"}]]}]}],["$","tbody",null,{"children":[["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"pure BPE; no special handling"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"gpt2"}]]}],["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"split to single digits"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"llama, llama2, deepseek"}]]}],["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"1-999 has unique tokens"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"llama3"}]]}],["$","tr",null,{"className":"border 
border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"split to groups of three digits (R2L)"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"Claude (?)"}]]}]]}]]}]}],["$","h2",null,{"id":"fair-comparison","className":"responsive-text-2xl","children":"Creating a fair comparison of different methods"}],["$","p",null,{"className":"responsive-text-base","children":"The goal of this investigation is to compare these tokenizers and their different ways of processing numbers in a way that minimizes the influence of external factors such as model architecture, training configurations, and pre-training data in evaluation results."}],["$","p",null,{"className":"responsive-text-base","children":["Thus, one important design decision we made to address this goal was to evaluate",["$","strong",null,{"children":" models trained from scratch,"}]," where each model has the same data mixture, training configs, and a roughly equal compute budget (number of model parameters and training tokens). The only meaningful difference that each model should have with one another is the ",["$","strong",null,{"children":"tokenizer used"}]," to tokenize the training data."]}],["$","h3",null,{"id":"experimental-setup","className":"responsive-text-2xl","children":"Experimental Setup"}],["$","p",null,{"className":"responsive-text-base","children":"We picked 3 tokenizers mentioned previously, namely GPT2's BPE tokenizer, Llama 3's three-digit tokenizer, and Deepseek's single-digit tokenizer."}],["$","p",null,{"className":"responsive-text-base","children":["To test right-to-left tokenization, we created R2L versions of the Pure-BPE and three-digit tokenizers, where numbers would be chunked into groups of 3 digits from the right before being tokenized. We didn't create an R2L version for single-digit tokenization since it would produce the same result, as numbers are tokenized to individual digits ",["$","$Lc",null,{"id":"r2l-footnote"}],". 
To achieve this, we added an extra preprocessing step which forces the R2L behaviour without producing additional tokens during inference:"]}],["$","div",null,{"id":"code-snippet","className":"my-5 bg-gray-100 p-4 rounded-lg font-mono text-xs whitespace-pre overflow-auto max-h-[400px] ","children":["$","code",null,{"children":"from transformers import AutoTokenizer\nfrom tokenizers import pre_tokenizers, Regex\n\n# Initialize all tokenizers\ntokenizer = AutoTokenizer.from_pretrained(\"meta-llama/Meta-Llama-3-8B\")\n\n# Add an extra step to the existing pre-tokenizer steps\ntokenizer._tokenizer.pre_tokenizer = pre_tokenizers.Sequence(\n [\n # Added step: split by R2L digits\n pre_tokenizers.Split(pattern = Regex(r\"\\d{1,3}(?=(\\d{3})*\\b)\"), \n behavior=\"isolated\", invert = False),\n # Below: Existing steps from Llama 3's tokenizer\n pre_tokenizers.Split(pattern=Regex(r\"(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\\r\\n\\p{L}\\p{N}]?\\p{L}+|\\p{N}{1,3}| ?[^\\s\\p{L}\\p{N}]+[\\r\\n]*|\\s*[\\r\\n]+|\\s+(?!\\S)|\\s+\"), \n behavior=\"isolated\", invert=False), \n pre_tokenizers.ByteLevel(add_prefix_space=False, trim_offsets=True, use_regex=False)\n ]\n )\n\nprint(tokenizer.tokenize(\"42069\")) # [42, 069]\n\n"}]}],["$","h3",null,{"id":"model-configuration","className":"responsive-text-xl","children":"Model Configuration"}],["$","p",null,{"className":"responsive-text-base","children":["The models we train use the original Llama architecture. 
To account for the differences in each tokenizer's vocabulary size, we ",["$","em",null,{"children":"modified the number of hidden layers so that each model has roughly the same number of parameters"}]," (~1.45B)."]}],["$","div",null,{"className":"visualization-container","children":["$","div",null,{"ref":"$undefined","className":"rounded-xl border bg-card text-card-foreground w-full max-w-2xl p-8 bg-gradient-to-br from-background to-muted/20 shadow-xl","children":[["$","div",null,{"ref":"$undefined","className":"flex flex-col p-6 space-y-3","children":[["$","div",null,{"ref":"$undefined","className":"tracking-tight text-2xl font-bold text-center","children":"Larger Tokenizer Vocabulary = More Parameters"}],["$","p",null,{"className":"text-muted-foreground text-center text-sm","children":"Visualizing embedding matrix size (vocab_size Γ— hidden_dim)"}]]}],["$","div",null,{"ref":"$undefined","className":"p-6 pt-0","children":[["$","div",null,{"className":"flex justify-center items-start gap-16 mt-4","children":[["$","div",null,{"className":"flex flex-col items-center transition-all duration-200 hover:scale-105","children":[["$","div",null,{"className":"text-base font-bold mb-4 text-center bg-teal-50 px-4 py-1 rounded-full","children":"Model A"}],["$","div",null,{"className":"grid grid-cols-4 gap-1 mb-3 bg-white p-4 rounded-lg shadow-md","children":[["$","div","0",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","1",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","2",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","3",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 
hover:bg-teal-300","title":"Embedding parameter"}],["$","div","4",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","5",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","6",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","7",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","8",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","9",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","10",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","11",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","12",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","13",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","14",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","15",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 
hover:bg-teal-300","title":"Embedding parameter"}],["$","div","16",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","17",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","18",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","19",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","20",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","21",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","22",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","23",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","24",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","25",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","26",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","27",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors 
duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","28",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","29",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","30",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","31",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}]]}],["$","div",null,{"className":"text-sm font-medium text-muted-foreground","children":"32 parameters"}]]}],["$","div",null,{"className":"flex flex-col items-center transition-all duration-200 hover:scale-105","children":[["$","div",null,{"className":"text-base font-bold mb-4 text-center bg-red-50 px-4 py-1 rounded-full","children":"Model B"}],["$","div",null,{"className":"grid grid-cols-4 gap-1 mb-3 bg-white p-4 rounded-lg shadow-md","children":[["$","div","0",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","1",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","2",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","3",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","4",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding 
parameter"}],["$","div","5",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","6",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","7",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","8",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","9",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","10",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","11",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","12",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","13",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","14",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","15",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","16",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","17",{"className":"w-5 
h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","18",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","19",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","20",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","21",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","22",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","23",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","24",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","25",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","26",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","27",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","28",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","29",{"className":"w-5 h-5 bg-red-200 border border-red-300 
rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","30",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","31",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","32",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","33",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","34",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","35",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","36",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","37",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","38",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","39",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","40",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","41",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 
hover:bg-red-300","title":"Embedding parameter"}],["$","div","42",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","43",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","44",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","45",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","46",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","47",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","48",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","49",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","50",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","51",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","52",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","53",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding 
parameter"}],["$","div","54",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","55",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","56",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","57",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","58",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","59",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","60",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","61",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","62",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","63",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}]]}],["$","div",null,{"className":"text-sm font-medium text-muted-foreground","children":"64 parameters"}]]}]]}],["$","div",null,{"className":"text-sm text-center mt-12 mx-auto max-w-2xl text-muted-foreground leading-relaxed","children":["For a fixed number of training tokens, Model B \"learns\" more due to its larger embedding matrix. 
To maintain a constant compute budget, we ",["$","strong",null,{"children":"reduce the number of hidden layers"}]," in models with larger vocabularies."]}]]}]]}]}],["$","h3",null,{"id":"pretraining-data","className":"responsive-text-xl","children":"Pretraining Data"}],["$","p",null,{"className":"responsive-text-base","children":"We used a mixture of general web data and math & code data for model pretraining:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":["42% ",["$","a",null,{"href":"https://huggingface.co/datasets/HuggingFaceFW/fineweb","className":"text-blue-600 hover:text-blue-800","children":"Fineweb"}]]}],["$","li",null,{"children":["40% ",["$","a",null,{"href":"https://huggingface.co/datasets/Infi-MM/InfiMM-WebMath-40B","className":"text-blue-600 hover:text-blue-800","children":"InfiMM-WebMath"}]]}],["$","li",null,{"children":["10% ",["$","a",null,{"href":"https://huggingface.co/datasets/open-web-math/open-web-math","className":"text-blue-600 hover:text-blue-800","children":"open-web-math"}]]}],["$","li",null,{"children":["8% ",["$","a",null,{"href":"https://huggingface.co/datasets/EleutherAI/proof-pile-2","className":"text-blue-600 hover:text-blue-800","children":"Proof-Pile-2"}]," (algebraic-stack subset)"]}]]}],["$","p",null,{"className":"responsive-text-base","children":"The relative weights are based on the token counts of each dataset. The resulting mixture amounted to ~120B tokens. We also had R2L-tokenized version of these datasets for the corresponding R2L tokenizers."}],["$","h2",null,{"id":"evaluation","className":"responsive-text-2xl","children":"Evaluation"}],["$","p",null,{"className":"responsive-text-base","children":["It's important to note that an LLM's training process is non-deterministic. 
Even after controlling the pretraining data mix, model architecture, and compute budget, different model initialization settings can still produce noise in evaluation results",["$","$L9",null,{"id":"madaan2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":["To account for this, we trained three additional models with the tokenizer that produced the highest variation in model scores on the same data mixture using",["$","strong",null,{"children":" different seeds"}],". For each task, we then take the standard deviation of model scores and assume that this value is the upper-bound of variability across all model architectures and training datasets."]}],["$","div",null,{"className":"visualization-container","children":["$","$Ld",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"We divide our evaluation suite into two main categories:"}],["$","p",null,{"className":"responsive-text-base font-bold mt-4 mb-2","children":"Word-based problems"}],["$","p",null,{"className":"responsive-text-base","children":["For a model of this size, it is quite difficult to obtain any meaningful signal using traditional mathematical reasoning benchmarks, so we opted for easier benchmarks with relatively straightforward solutions that only require a few simple mathematical operations, namely ",["$","a",null,{"href":"https://huggingface.co/datasets/mwpt5/MAWPS","className":"text-blue-600 hover:text-blue-800","children":"MAWPS"}]," and ",["$","a",null,{"href":"https://huggingface.co/datasets/ChilleD/SVAMP","className":"text-blue-600 hover:text-blue-800","children":"SVAMP"}],". 
For evaluation, we standardized a question-answer format and parsed a single numerical answer from the model's output to compute the accuracy."]}],["$","p",null,{"className":"responsive-text-base font-bold mt-4 mb-2","children":"Simple arithmetic problems"}],["$","p",null,{"className":"responsive-text-base","children":"We created a custom benchmark containing simple arithmetic problems (+, - , x, Γ·). These problems are split across various axes, which allows us to perform a more detailed assessment of the pitfalls of LLMs when doing math. We create different splits based on:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"The difficulty of the problem (harder problems = more decimals, more digits)"}],["$","li",null,{"children":"Whether it was a float / int operation"}],["$","li",null,{"children":"Whether or not commas were used to demarcate digits into groups of 3"}],["$","li",null,{"children":"The arithmetic operator used in the problem"}]]}],["$","p",null,{"className":"responsive-text-base mt-4","children":"Given the relatively fixed-form answers in the evaluation tasks above, the evaluation metric we chose is a quasi-exact match (QEM) of the gold answer and the model generation, with normalizations to account for minor differences in floating point equality (127.60 and 127.6 should be a match) as well as usage of commas (1234 and 1,234 should be a match)."}],["$","h2",null,{"id":"results","className":"responsive-text-2xl","children":"Results"}],["$","h3",null,{"id":"results-arithmetic","className":"responsive-text-lg","children":"Arithmetic Problems"}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Single-digit tokenization outperforms all other tested tokenizers in arithmetic problems"}],["$","div",null,{"className":"visualization-container","children":["$","$Le",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"While the differences for easier 
problems are less pronounced, as the complexity of the problems increases, there is an increasing gap between the best-performing tokenizer (single-digit) and the rest. This suggests that single-digit tokenization is more robust to variations in input data length and can better capture intricate patterns, leading to improved performance in challenging scenarios where other tokenization methods struggle."}],["$","p",null,{"className":"responsive-text-base","children":["Additionally, while it's intuitive to assume that integer problems will have better performance (which is also confirmed by the results in the grid), we also found that the ",["$","strong",null,{"children":"performance gap"}]," between floats and integers is similar across all tokenizers. This shows that there aren't any inherent tradeoffs when choosing tokenizers in these two categories (i.e. the optimal tokenizer for integers is also optimal for floats)."]}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Three-digit R2L tokenization has better performance over standard three-digit L2R tokenization"}],["$","div",null,{"className":"visualization-container","children":["$","$Lf",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":["We found that a model trained on R2L tokenized data resulted in mostly",["$","strong",null,{"children":" significant"}]," improvements (except for multiplication) when compared to training on the default L2R tokenized data. 
This shows that it is an optimal setting for arithmetic operations compared to the typical left-to-right encoding."]}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Pure-BPE tokenizers show inconsistent performance when numbers are chunked in groups of 3 from right to left"}],["$","div",null,{"className":"visualization-container","children":["$","$L10",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"Evidently, pure BPE-based tokenizers without any additional number preprocessing don't benefit much from using R2L tokenization. A possible explanation of why this might be the case is the lack of 'structure' in how digits are grouped together in these tokenizers."}],["$","p",null,{"className":"responsive-text-base","children":"Unlike the three-digit tokenizer, due to the peculiarities of pure BPE-based tokenization mentioned above, there are other numbers that are also grouped less consistently. This inconsistency undermines the advantages of R2L, which we have shown to work best when numbers are uniformly tokenized in 3-digit chunks from least to most significant digit."}],["$","h3",null,{"id":"results-word-based","className":"responsive-text-lg","children":"Word-based problems"}],["$","p",null,{"className":"responsive-text-base","children":"In addition to analysing arithmetic performance, we also wanted to see if a tokenizer's performance in pure arithmetic also translates to word problems."}],["$","div",null,{"className":"visualization-container","children":["$","$L11",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"While the performance gap between tokenizers is less pronounced in word-based problems, we see that single-digit and three-digit tokenizers generally outperform the BPE-based tokenizers, which indicates that the trend is consistent across both types of problems."}],["$","h3",null,{"id":"results-bonus","className":"responsive-text-lg","children":"Bonus: R2L inference on Llama 
3"}],["$","p",null,{"className":"responsive-text-base","children":["One of the other things we wanted to test was how existing pretrained/instruct models performed when it was subjected to a tokenization scheme that was different to what it was originally trained on ",["$","strong",null,{"children":" without having to re-train or fine-tune it"}],". So, we took Llama3 8B Instruct and used the same code above to modify its tokenizer to perform R2L tokenization during inference, instead of re-training a new model with R2L data."]}],["$","p",null,{"className":"responsive-text-base","children":"One important thing to note when adding two numbers in a three-digit tokenization scheme: the result can sometimes produce more tokens than the input numbers. One such example is when we add 999 and 111, which individually only require a single token, but when added together produce 1110, which requires two tokens (1 and 110). We wanted to explore how much of a difference this makes when performing addition with both L2R and R2L tokenization on varying token lengths."}],["$","p",null,{"className":"responsive-text-base","children":"(Going forward, we will refer to additions that result in an extra token as \"carry\" additions, and those that do not as \"without carry\")"}],["$","p",null,{"className":"responsive-text-base","children":"We performed few-shot arithmetic tasks of varying digit lengths and carry settings with Llama3 8B Instruct. 
We did not find any significant performance difference for subtraction, multiplication, or division, so we only show the results for addition."}],["$","div",null,{"className":"visualization-container","children":["$","$L12",null,{}]}],["$","div",null,{"className":"responsive-text-base","children":[["$","p",null,{"className":"font-semibold mb-2","children":"A few things to note from the results:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2","children":[["$","li",null,{"children":["For non-carry additions, digits that are multiples of 3 produce the exact same results, given that numbers like 528 491 have the same tokens regardless of tokenization direction",["$","$Lc",null,{"id":"same-tokens-footnote"}]]}],["$","li",null,{"children":"We see an interesting cyclic trend every three digits when comparing L2R and R2L carry additions. For multiples of three, L2R slightly outperforms R2L, but in the next digit, R2L performance shoots up and overtakes L2R, further widening the performance gap in the digit after that before dropping again in the next multiple of three."}],["$","li",null,{"children":"We found that the model's output tokens perfectly formatted numbers as R2L with just a few R2L examples, despite being trained on L2R formatted numbers. This is super important because it shows that these models aren't just \"memorizing\" patterns seen from its training data. 
When given an entirely new distribution of R2L number tokens, it was still able to reason and perform even better than the data it was originally trained on."}]]}]]}],["$","p",null,{"className":"responsive-text-base","children":["When averaging across all digit lengths and carry settings, there was a",["$","strong",null,{"children":" ~10% improvement in accuracy"}]," over the standard L2R tokenization used in Llama 3, just by swapping the tokenizer with the few lines of code above."]}],["$","div",null,{"className":"visualization-container","children":["$","$L13",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"We tried to perform a similar experiment with single-digit tokenization, but performance dropped off really quickly after a few digits. This makes intuitive sense since the shift in token distribution of the training data and single-digit tokenized data is way larger than the difference in token distribution between the training data and the R2L data."}],["$","h2",null,{"id":"conclusion","className":"responsive-text-2xl","children":"So, which tokenization method is best for math?"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"πŸ”„ While Byte-Pair Encoding remains a popular tokenization method, there should be preprocessing steps to fix the inconsistencies and improve numerical reasoning."}],["$","li",null,{"children":["πŸ“ If you have to use a tokenizer that has a token for numbers up to 3 digits, make sure you tokenize your data R2L (",["$","a",null,{"href":"#code-snippet","className":"text-blue-600 hover:text-blue-800","children":"with the code snippet above"}],")"]}],["$","li",null,{"children":"πŸš€ If you already have a trained model where the data was tokenized L2R, you can get better math performance by running inference with R2L"}],["$","li",null,{"children":"πŸ“š Word-based problems have less apparent performance differences between tokenizers, but we 
found that single-digit and three-digit tokenizers outperform the BPE-based tokenizers."}],["$","li",null,{"children":["πŸ‘‘ ",["$","mark",null,{"children":"Most importantly, for arithmetic operations, single-digit tokenization has significantly better performance than other methods"}]]}]]}],["$","div",null,{"className":"visualization-container","children":["$","$L14",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"Our work shows that tokenization significantly impact arithmetic performance in language models. With careful selection, we can optimize tokenization strategies based on problem type, improving LLM performance on mathematical tasks. We hope to see more research in the ways that tokenization affects other aspects of mathematical reasoning πŸ€—."}],["$","$L15",null,{}],["$","$L16",null,{}],["$","$L17",null,{}]]}]]}]]}]}]}]]}],null,["$","$L18",null,{"children":"$L19"}]]}],{},null]},null],["$","$3","h",{"children":[null,["$","$3","4PNfema34Y-vWYFYzchxh",{"children":[["$","$L1a",null,{"children":"$L1b"}],["$","$L1c",null,{"children":"$L1d"}],["$","meta",null,{"name":"next-size-adjust"}]]}]]}]]],"m":"$undefined","G":["$1e","$undefined"],"s":false,"S":true}
29
  1d:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
30
  1b:[["$","meta","0",{"charSet":"utf-8"}],["$","title","1",{"children":"From Digits to Decisions"}],["$","meta","2",{"name":"description","content":"How Tokenization Impacts Arithmetic in LLMs"}],["$","link","3",{"rel":"icon","href":"/favicon.ico","type":"image/x-icon","sizes":"16x16"}]]
31
  19:null
 
25
  1e:I[34835,[],""]
26
  1:HL["/_next/static/media/a34f9d1faa5f3315-s.p.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
27
  2:HL["/_next/static/css/b279119c7cf0bf4b.css","style"]
28
+ 0:{"P":null,"b":"WFHEiSUV1U35H1Zx_2aiS","p":"","c":["",""],"i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",["$","$3","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/b279119c7cf0bf4b.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_d65c78 bg-white","children":["$","main",null,{"className":"relative min-h-screen","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]}]]}],{"children":["__PAGE__",["$","$3","c",{"children":[["$","$L6",null,{"children":[["$","$L7",null,{"title":"From Digits to 
Decisions: How Tokenization Impacts Arithmetic in LLMs"}],["$","div",null,{"className":" bg-white","children":["$","div",null,{"className":"min-h-screen","children":["$","div",null,{"className":"w-full max-w-[83rem] mx-auto px-4 sm:px-6 lg:px-8 py-4 sm:py-6 lg:py-8 flex flex-col lg:flex-row","children":[["$","aside",null,{"className":"w-full lg:w-48 xl:w-56 mb-6 sm:mb-8 lg:mb-0 lg:mr-6 xl:mr-8 flex-shrink-0","children":["$","div",null,{"className":"sticky top-16 sm:top-20 lg:top-24","children":["$","$L8",null,{}]}]}],["$","main",null,{"className":"flex-1 min-w-0","children":[["$","header",null,{"className":"mb-12 border-t border-b border-gray-100 py-6","children":["$","div",null,{"className":"flex flex-col gap-2","children":[["$","div",null,{"className":"flex items-center gap-2 text-sm","children":[["$","p",null,{"className":"font-semibold text-gray-800","children":[["$","$3","0",{"children":[false,["$","a",null,{"href":"https://huggingface.co/garrethlee","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Garreth Lee"}]]}],["$","$3","1",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/guipenedo","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Guilherme Penedo"}]]}],["$","$3","2",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/lvwerra","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Leandro von Werra"}]]}],["$","$3","3",{"children":[", ",["$","a",null,{"href":"https://huggingface.co/thomwolf","className":"hover:text-blue-600","target":"_blank","rel":"noopener noreferrer","children":"Thomas Wolf"}]]}]]}],["$","span",null,{"className":"text-gray-300","children":"β€’"}],["$","time",null,{"className":"text-gray-500","children":"November 29, 2024"}]]}],["$","div",null,{"className":"text-sm text-gray-500 italic","children":"Hugging Face"}]]}]}],["$","article",null,{"className":"prose prose-sm sm:prose lg:prose-lg 
xl:prose-xl","children":[["$","p",null,{"className":"responsive-text-base","children":["Recently, there has been a lot of buzz around a seemingly simple question that even state-of-the-art large language models (LLM) fail to answer correctly: ",["$","em",null,{"children":"\"Which is bigger? 9.9 or 9.11\""}]]}],["$","p",null,{"className":"responsive-text-base","children":["Despite various attempts and variations of prompting techniques, most frontier models still struggle to make an accurate comparison of the two numbers. This highlights a broader issue many of today's models encounter: they have limited mathematical reasoning capabilities",["$","$L9",null,{"id":"mirzadeh2024"}],". While there are multiple conjectures of why this is the case, including the composition of pretraining data and the model architecture itself",["$","$L9",null,{"id":"xu2024"}],", we investigate one of the most fundamental processes in LLMs,",["$","strong",null,{"children":" tokenization"}],", and how it affects a model's ability to do math, specifically arithmetic problems."]}],["$","p",null,{"className":"responsive-text-base","children":"In this blog post, we discuss:"}],["$","ol",null,{"className":"list-decimal pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"Our detailed approach in comparing different methods of number tokenization"}],["$","li",null,{"children":"Why reading from right to left is sometimes better than from left to right"}],["$","li",null,{"children":"A clear frontrunner of tokenization methods for arithmetic in LLMs"}]]}],["$","h2",null,{"id":"a-brief-history","className":"responsive-text-2xl","children":"A Brief History of Number Tokenization"}],["$","p",null,{"className":"responsive-text-base","children":["Back in 2019, The GPT2 paper detailed its use of BPE (byte-pair encoding) as a tokenization method for language models ",["$","$L9",null,{"id":"radford2019"}],". 
This approach works by merging frequently occurring subwords into single units until the vocabulary reaches a target size."]}],["$","p",null,{"className":"responsive-text-base","children":["Because of how this algorithm operates, the resulting vocabulary depends heavily on the training data fed into the tokenizer. This led to ",["$","strong",null,{"children":"inconsistencies "}],"in how numbers are encoded ",["$","$L9",null,{"id":"beren2024"}],". Commonly seen numbers (i.e. 1-100, years like 1945, etc.) in the training data will likely be represented as a single token, while less frequently seen numbers are split into multiple tokens like below:"]}],["$","div",null,{"className":"visualization-container","children":["$","$La",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":["Four years later, the herd of llamas began their stampede! Llama and Llama 2 used SentencePiece's BPE implementation with a notable tweak for numbers: ",["$","em",null,{"children":"they split all numbers into individual digits"}]," ",["$","$L9",null,{"id":"touvron2023llama"}],["$","$L9",null,{"id":"touvron2023llama2"}],". This meant there were only 10 unique tokens to represent any number, simplifying numerical representation for LLMs. Deepseek released a model much later (DeepSeek-V2) with a similar single-digit tokenizer ",["$","$L9",null,{"id":"deepseek2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":["Later on, Llama 3 took a different approach for handling numbers, tokenizing them in groups of three digits ",["$","$L9",null,{"id":"grattafiori2024"}],". 
As a result, numbers from 1 to 999 each have unique tokens, while numbers from 1000 onward are composed of these tokens."]}],["$","h3",null,{"id":"right-to-left-tokenization","className":"responsive-text-2xl","children":"A New Paradigm: Right-to-Left Tokenization"}],["$","p",null,{"className":"responsive-text-base","children":"So far, the tokenization methods we've seen \"processed\" text from left to right. For instance, if the three-digit tokenizer encounters the sequence \"12345,\" it will scan from the beginning, breaking it down into segments like \"123\" and \"45\"."}],["$","p",null,{"className":"responsive-text-base","children":[["$","strong",null,{"children":"Right-to-left (R2L) tokenization"}],", on the other hand, processes text from the end to the beginning in groups of three. Using R2L, the sequence \"12345\" would be tokenized by scanning from the right, first splitting off \"345\" and then moving to \"12.\" Recently, there has been some exploration too of forcing this R2L tokenization behaviour in frontier closed-source models, which has shown to benefit certain arithmetic operations since the R2L representation prevents the misalignment of the operands ",["$","$L9",null,{"id":"singh2024"}],". 
It has also been rumored that Claude uses this R2L tokenization method ",["$","$L9",null,{"id":"claude2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":"To better understand what misalignment looks like, let's take 3789 + 8791 as an example:"}],["$","div",null,{"className":"visualization-container","children":["$","$Lb",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"This insight suggests that three-digit R2L tokenization could potentially be improved over the standard three-digit L2R tokenization used by Llama 3."}],["$","p",null,{"className":"responsive-text-base","children":"To recap, here's an overview of the techniques used to handle number tokenization:"}],["$","div",null,{"className":"overflow-x-auto my-5 rounded-lg","children":["$","table",null,{"className":"w-full border bg-white/50 rounded-lg","children":[["$","thead",null,{"children":["$","tr",null,{"className":"border border-gray-200","children":[["$","th",null,{"className":"px-4 py-3 text-left text-sm font-semibold","children":"How numbers are tokenized"}],["$","th",null,{"className":"px-4 py-3 text-left text-sm font-semibold","children":"tokenizer (model)"}]]}]}],["$","tbody",null,{"children":[["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"pure BPE; no special handling"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"gpt2"}]]}],["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"split to single digits"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"llama, llama2, deepseek"}]]}],["$","tr",null,{"className":"border border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"1-999 has unique tokens"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"llama3"}]]}],["$","tr",null,{"className":"border 
border-gray-200","children":[["$","td",null,{"className":"px-4 py-3 text-sm","children":"split to groups of three digits (R2L)"}],["$","td",null,{"className":"px-4 py-3 text-sm","children":"Claude (?)"}]]}]]}]]}]}],["$","h2",null,{"id":"fair-comparison","className":"responsive-text-2xl","children":"Creating a fair comparison of different methods"}],["$","p",null,{"className":"responsive-text-base","children":"The goal of this investigation is to compare these tokenizers and their different ways of processing numbers in a way that minimizes the influence of external factors such as model architecture, training configurations, and pre-training data in evaluation results."}],["$","p",null,{"className":"responsive-text-base","children":["Thus, one important design decision we made to address this goal was to evaluate",["$","strong",null,{"children":" models trained from scratch,"}]," where each model has the same data mixture, training configs, and a roughly equal compute budget (number of model parameters and training tokens). The only meaningful difference that each model should have with one another is the ",["$","strong",null,{"children":"tokenizer used"}]," to tokenize the training data."]}],["$","h3",null,{"id":"experimental-setup","className":"responsive-text-2xl","children":"Experimental Setup"}],["$","p",null,{"className":"responsive-text-base","children":"We picked 3 tokenizers mentioned previously, namely GPT2's BPE tokenizer, Llama 3's three-digit tokenizer, and Deepseek's single-digit tokenizer."}],["$","p",null,{"className":"responsive-text-base","children":["To test right-to-left tokenization, we created R2L versions of the Pure-BPE and three-digit tokenizers, where numbers would be chunked into groups of 3 digits from the right before being tokenized. We didn't create a R2L version for single-digit tokenization since it would produce the same result since numbers are tokenized to individual digits ",["$","$Lc",null,{"id":"r2l-footnote"}],". 
To achieve this, we added an extra preprocessing step which forces the R2L behaviour without producing additional tokens during inference:"]}],["$","div",null,{"id":"code-snippet","className":"my-5 bg-gray-100 p-4 rounded-lg font-mono text-xs whitespace-pre overflow-auto max-h-[400px] ","children":["$","code",null,{"children":"from transformers import AutoTokenizer\nfrom tokenizers import pre_tokenizers, Regex\n\n# Initialize all tokenizers\ntokenizer = AutoTokenizer.from_pretrained(\"meta-llama/Meta-Llama-3-8B\")\n\n# Add an extra step to the existing pre-tokenizer steps\ntokenizer._tokenizer.pre_tokenizer = pre_tokenizers.Sequence(\n [\n # Added step: split by R2L digits\n pre_tokenizers.Split(pattern = Regex(r\"\\d{1,3}(?=(\\d{3})*\\b)\"), \n behavior=\"isolated\", invert = False),\n # Below: Existing steps from Llama 3's tokenizer\n pre_tokenizers.Split(pattern=Regex(r\"(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\\r\\n\\p{L}\\p{N}]?\\p{L}+|\\p{N}{1,3}| ?[^\\s\\p{L}\\p{N}]+[\\r\\n]*|\\s*[\\r\\n]+|\\s+(?!\\S)|\\s+\"), \n behavior=\"isolated\", invert=False), \n pre_tokenizers.ByteLevel(add_prefix_space=False, trim_offsets=True, use_regex=False)\n ]\n )\n\nprint(tokenizer.tokenize(\"42069\")) # [42, 069]\n\n"}]}],["$","h3",null,{"id":"model-configuration","className":"responsive-text-xl","children":"Model Configuration"}],["$","p",null,{"className":"responsive-text-base","children":["The models we train use the original Llama architecture. 
To account for the differences in each tokenizer's vocabulary size, we ",["$","em",null,{"children":"modified the number of hidden layers so that each model has roughly the same number of parameters"}]," (~1.45B)."]}],["$","div",null,{"className":"visualization-container","children":["$","div",null,{"ref":"$undefined","className":"rounded-xl border bg-card text-card-foreground w-full max-w-2xl p-8 bg-gradient-to-br from-background to-muted/20 shadow-xl","children":[["$","div",null,{"ref":"$undefined","className":"flex flex-col p-6 space-y-3","children":[["$","div",null,{"ref":"$undefined","className":"tracking-tight text-2xl font-bold text-center","children":"Larger Tokenizer Vocabulary = More Parameters"}],["$","p",null,{"className":"text-muted-foreground text-center text-sm","children":"Visualizing embedding matrix size (vocab_size Γ— hidden_dim)"}]]}],["$","div",null,{"ref":"$undefined","className":"p-6 pt-0","children":[["$","div",null,{"className":"flex justify-center items-start gap-16 mt-4","children":[["$","div",null,{"className":"flex flex-col items-center transition-all duration-200 hover:scale-105","children":[["$","div",null,{"className":"text-base font-bold mb-4 text-center bg-teal-50 px-4 py-1 rounded-full","children":"Model A"}],["$","div",null,{"className":"grid grid-cols-4 gap-1 mb-3 bg-white p-4 rounded-lg shadow-md","children":[["$","div","0",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","1",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","2",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","3",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 
hover:bg-teal-300","title":"Embedding parameter"}],["$","div","4",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","5",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","6",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","7",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","8",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","9",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","10",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","11",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","12",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","13",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","14",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","15",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 
hover:bg-teal-300","title":"Embedding parameter"}],["$","div","16",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","17",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","18",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","19",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","20",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","21",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","22",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","23",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","24",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","25",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","26",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","27",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors 
duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","28",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","29",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","30",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}],["$","div","31",{"className":"w-5 h-5 bg-[#99e6e6] border border-teal-300 rounded-sm transition-colors duration-150 hover:bg-teal-300","title":"Embedding parameter"}]]}],["$","div",null,{"className":"text-sm font-medium text-muted-foreground","children":"32 parameters"}]]}],["$","div",null,{"className":"flex flex-col items-center transition-all duration-200 hover:scale-105","children":[["$","div",null,{"className":"text-base font-bold mb-4 text-center bg-red-50 px-4 py-1 rounded-full","children":"Model B"}],["$","div",null,{"className":"grid grid-cols-4 gap-1 mb-3 bg-white p-4 rounded-lg shadow-md","children":[["$","div","0",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","1",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","2",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","3",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","4",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding 
parameter"}],["$","div","5",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","6",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","7",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","8",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","9",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","10",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","11",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","12",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","13",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","14",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","15",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","16",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","17",{"className":"w-5 
h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","18",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","19",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","20",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","21",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","22",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","23",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","24",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","25",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","26",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","27",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","28",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","29",{"className":"w-5 h-5 bg-red-200 border border-red-300 
rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","30",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","31",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","32",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","33",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","34",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","35",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","36",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","37",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","38",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","39",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","40",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","41",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 
hover:bg-red-300","title":"Embedding parameter"}],["$","div","42",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","43",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","44",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","45",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","46",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","47",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","48",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","49",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","50",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","51",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","52",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","53",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding 
parameter"}],["$","div","54",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","55",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","56",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","57",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","58",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","59",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","60",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","61",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","62",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}],["$","div","63",{"className":"w-5 h-5 bg-red-200 border border-red-300 rounded-sm transition-colors duration-150 hover:bg-red-300","title":"Embedding parameter"}]]}],["$","div",null,{"className":"text-sm font-medium text-muted-foreground","children":"64 parameters"}]]}]]}],["$","div",null,{"className":"text-sm text-center mt-12 mx-auto max-w-2xl text-muted-foreground leading-relaxed","children":["For a fixed number of training tokens, Model B \"learns\" more due to its larger embedding matrix. 
To maintain a constant compute budget, we ",["$","strong",null,{"children":"reduce the number of hidden layers"}]," in models with larger vocabularies."]}]]}]]}]}],["$","h3",null,{"id":"pretraining-data","className":"responsive-text-xl","children":"Pretraining Data"}],["$","p",null,{"className":"responsive-text-base","children":"We used a mixture of general web data and math & code data for model pretraining:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":["42% ",["$","a",null,{"href":"https://huggingface.co/datasets/HuggingFaceFW/fineweb","className":"text-blue-600 hover:text-blue-800","children":"Fineweb"}]]}],["$","li",null,{"children":["40% ",["$","a",null,{"href":"https://huggingface.co/datasets/Infi-MM/InfiMM-WebMath-40B","className":"text-blue-600 hover:text-blue-800","children":"InfiMM-WebMath"}]]}],["$","li",null,{"children":["10% ",["$","a",null,{"href":"https://huggingface.co/datasets/open-web-math/open-web-math","className":"text-blue-600 hover:text-blue-800","children":"open-web-math"}]]}],["$","li",null,{"children":["8% ",["$","a",null,{"href":"https://huggingface.co/datasets/EleutherAI/proof-pile-2","className":"text-blue-600 hover:text-blue-800","children":"Proof-Pile-2"}]," (algebraic-stack subset)"]}]]}],["$","p",null,{"className":"responsive-text-base","children":"The relative weights are based on the token counts of each dataset. The resulting mixture amounted to ~120B tokens. We also had R2L-tokenized version of these datasets for the corresponding R2L tokenizers."}],["$","h2",null,{"id":"evaluation","className":"responsive-text-2xl","children":"Evaluation"}],["$","p",null,{"className":"responsive-text-base","children":["It's important to note that an LLM's training process is non-deterministic. 
Even after controlling the pretraining data mix, model architecture, and compute budget, different model initialization settings can still produce noise in evaluation results",["$","$L9",null,{"id":"madaan2024"}],"."]}],["$","p",null,{"className":"responsive-text-base","children":["To account for this, we trained three additional models with the tokenizer that produced the highest variation in model scores on the same data mixture using",["$","strong",null,{"children":" different seeds"}],". For each task, we then take the standard deviation of model scores and assume that this value is the upper-bound of variability across all model architectures and training datasets."]}],["$","div",null,{"className":"visualization-container","children":["$","$Ld",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"We divide our evaluation suite into two main categories:"}],["$","p",null,{"className":"responsive-text-base font-bold mt-4 mb-2","children":"Word-based problems"}],["$","p",null,{"className":"responsive-text-base","children":["For a model of this size, it is quite difficult to obtain any meaningful signal using traditional mathematical reasoning benchmarks, so we opted for easier benchmarks with relatively straightforward solutions that only require a few simple mathematical operations, namely ",["$","a",null,{"href":"https://huggingface.co/datasets/mwpt5/MAWPS","className":"text-blue-600 hover:text-blue-800","children":"MAWPS"}]," and ",["$","a",null,{"href":"https://huggingface.co/datasets/ChilleD/SVAMP","className":"text-blue-600 hover:text-blue-800","children":"SVAMP"}],". 
For evaluation, we standardized a question-answer format and parsed a single numerical answer from the model's output to compute the accuracy."]}],["$","p",null,{"className":"responsive-text-base font-bold mt-4 mb-2","children":"Simple arithmetic problems"}],["$","p",null,{"className":"responsive-text-base","children":"We created a custom benchmark containing simple arithmetic problems (+, - , x, Γ·). These problems are split across various axes, which allows us to perform a more detailed assessment of the pitfalls of LLMs when doing math. We create different splits based on:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"The difficulty of the problem (harder problems = more decimals, more digits)"}],["$","li",null,{"children":"Whether it was a float / int operation"}],["$","li",null,{"children":"Whether or not commas were used to demarcate digits into groups of 3"}],["$","li",null,{"children":"The arithmetic operator used in the problem"}]]}],["$","p",null,{"className":"responsive-text-base mt-4","children":"Given the relatively fixed-form answers in the evaluation tasks above, the evaluation metric we chose is a quasi-exact match (QEM) of the gold answer and the model generation, with normalizations to account for minor differences in floating point equality (127.60 and 127.6 should be a match) as well as usage of commas (1234 and 1,234 should be a match)."}],["$","h2",null,{"id":"results","className":"responsive-text-2xl","children":"Results"}],["$","h3",null,{"id":"results-arithmetic","className":"responsive-text-lg","children":"Arithmetic Problems"}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Single-digit tokenization outperforms all other tested tokenizers in arithmetic problems"}],["$","div",null,{"className":"visualization-container","children":["$","$Le",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"While the differences for easier 
problems are less pronounced, as the complexity of the problems increases, there is an increasing gap between the best-performing tokenizer (single-digit) and the rest. This suggests that single-digit tokenization is more robust to variations in input data length and can better capture intricate patterns, leading to improved performance in challenging scenarios where other tokenization methods struggle."}],["$","p",null,{"className":"responsive-text-base","children":["Additionally, while it's intuitive to assume that integer problems will have better performance (which is also confirmed by the results in the grid), we also found that the ",["$","strong",null,{"children":"performance gap"}]," between floats and integers is similar across all tokenizers. This shows that there aren't any inherent tradeoffs when choosing tokenizers in these two categories (i.e. the optimal tokenizer for integers is also optimal for floats)."]}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Three-digit R2L tokenization has better performance over standard three-digit L2R tokenization"}],["$","div",null,{"className":"visualization-container","children":["$","$Lf",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":["We found that a model trained on R2L tokenized data resulted in mostly",["$","strong",null,{"children":" significant"}]," improvements (except for multiplication) when compared to training on the default L2R tokenized data. 
This shows that it is an optimal setting for arithmetic operations compared to the typical left-to-right encoding."]}],["$","p",null,{"className":"responsive-text-lg font-bold","children":"Pure-BPE tokenizers show inconsistent performance when numbers are chunked in groups of 3 from right to left"}],["$","div",null,{"className":"visualization-container","children":["$","$L10",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"Evidently, pure BPE-based tokenizers without any additional number preprocessing don't benefit much from using R2L tokenization. A possible explanation of why this might be the case is the lack of 'structure' in how digits are grouped together in these tokenizers."}],["$","p",null,{"className":"responsive-text-base","children":"Unlike the three-digit tokenizer, due to the peculiarities of pure BPE-based tokenization mentioned above, there are other numbers that are also grouped less consistently. This inconsistency undermines the advantages of R2L, which we have shown to work best when numbers are uniformly tokenized in 3-digit chunks from least to most significant digit."}],["$","h3",null,{"id":"results-word-based","className":"responsive-text-lg","children":"Word-based problems"}],["$","p",null,{"className":"responsive-text-base","children":"In addition to analysing arithmetic performance, we also wanted to see if a tokenizer's performance in pure arithmetic also translates to word problems."}],["$","div",null,{"className":"visualization-container","children":["$","$L11",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"While the performance gap between tokenizers is less pronounced in word-based problems, we see that single-digit and three-digit tokenizers generally outperform the BPE-based tokenizers, which indicates that the trend is consistent across both types of problems."}],["$","h3",null,{"id":"results-bonus","className":"responsive-text-lg","children":"Bonus: R2L inference on Llama 
3"}],["$","p",null,{"className":"responsive-text-base","children":["One of the other things we wanted to test was how existing pretrained/instruct models performed when it was subjected to a tokenization scheme that was different to what it was originally trained on ",["$","strong",null,{"children":" without having to re-train or fine-tune it"}],". So, we took Llama3 8B Instruct and used the same code above to modify its tokenizer to perform R2L tokenization during inference, instead of re-training a new model with R2L data."]}],["$","p",null,{"className":"responsive-text-base","children":"One important thing to note when adding two numbers in a three-digit tokenization scheme: the result can sometimes produce more tokens than the input numbers. One such example is when we add 999 and 111, which individually only require a single token, but when added together produce 1110, which requires two tokens (1 and 110). We wanted to explore how much of a difference this makes when performing addition with both L2R and R2L tokenization on varying token lengths."}],["$","p",null,{"className":"responsive-text-base","children":"(Going forward, we will refer to additions that result in an extra token as \"carry\" additions, and those that do not as \"without carry\")"}],["$","p",null,{"className":"responsive-text-base","children":"We performed few-shot arithmetic tasks of varying digit lengths and carry settings with Llama3 8B Instruct. 
We did not find any significant performance difference for subtraction, multiplication, or division, so we only show the results for addition."}],["$","div",null,{"className":"visualization-container","children":["$","$L12",null,{}]}],["$","div",null,{"className":"responsive-text-base","children":[["$","p",null,{"className":"font-semibold mb-2","children":"A few things to note from the results:"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2","children":[["$","li",null,{"children":["For non-carry additions, digits that are multiples of 3 produce the exact same results, given that numbers like 528 491 have the same tokens regardless of tokenization direction",["$","$Lc",null,{"id":"same-tokens-footnote"}]]}],["$","li",null,{"children":"We see an interesting cyclic trend every three digits when comparing L2R and R2L carry additions. For multiples of three, L2R slightly outperforms R2L, but in the next digit, R2L performance shoots up and overtakes L2R, further widening the performance gap in the digit after that before dropping again in the next multiple of three."}],["$","li",null,{"children":"We found that the model's output tokens perfectly formatted numbers as R2L with just a few R2L examples, despite being trained on L2R formatted numbers. This is super important because it shows that these models aren't just \"memorizing\" patterns seen from its training data. 
When given an entirely new distribution of R2L number tokens, it was still able to reason and perform even better than the data it was originally trained on."}]]}]]}],["$","p",null,{"className":"responsive-text-base","children":["When averaging across all digit lengths and carry settings, there was a",["$","strong",null,{"children":" ~10% improvement in accuracy"}]," over the standard L2R tokenization used in Llama 3, just by swapping the tokenizer with the few lines of code above."]}],["$","div",null,{"className":"visualization-container","children":["$","$L13",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"We tried to perform a similar experiment with single-digit tokenization, but performance dropped off really quickly after a few digits. This makes intuitive sense since the shift in token distribution of the training data and single-digit tokenized data is way larger than the difference in token distribution between the training data and the R2L data."}],["$","h2",null,{"id":"conclusion","className":"responsive-text-2xl","children":"So, which tokenization method is best for math?"}],["$","ul",null,{"className":"list-disc pl-6 space-y-2 responsive-text-base","children":[["$","li",null,{"children":"πŸ”„ While Byte-Pair Encoding remains a popular tokenization method, there should be preprocessing steps to fix the inconsistencies and improve numerical reasoning."}],["$","li",null,{"children":["πŸ“ If you have to use a tokenizer that has a token for numbers up to 3 digits, make sure you tokenize your data R2L (",["$","a",null,{"href":"#code-snippet","className":"text-blue-600 hover:text-blue-800","children":"with the code snippet above"}],")"]}],["$","li",null,{"children":"πŸš€ If you already have a trained model where the data was tokenized L2R, you can get better math performance by running inference with R2L"}],["$","li",null,{"children":"πŸ“š Word-based problems have less apparent performance differences between tokenizers, but we 
found that single-digit and three-digit tokenizers outperform the BPE-based tokenizers."}],["$","li",null,{"children":["πŸ‘‘ ",["$","mark",null,{"children":"Most importantly, for arithmetic operations, single-digit tokenization has significantly better performance than other methods"}]]}]]}],["$","div",null,{"className":"visualization-container","children":["$","$L14",null,{}]}],["$","p",null,{"className":"responsive-text-base","children":"Our work shows that tokenization significantly impact arithmetic performance in language models. With careful selection, we can optimize tokenization strategies based on problem type, improving LLM performance on mathematical tasks. We hope to see more research in the ways that tokenization affects other aspects of mathematical reasoning πŸ€—."}],["$","$L15",null,{}],["$","$L16",null,{}],["$","$L17",null,{}]]}]]}]]}]}]}]]}],null,["$","$L18",null,{"children":"$L19"}]]}],{},null]},null],["$","$3","h",{"children":[null,["$","$3","ySh9eiEwCos5DMsz96t6i",{"children":[["$","$L1a",null,{"children":"$L1b"}],["$","$L1c",null,{"children":"$L1d"}],["$","meta",null,{"name":"next-size-adjust"}]]}]]}]]],"m":"$undefined","G":["$1e","$undefined"],"s":false,"S":true}
29
  1d:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
30
  1b:[["$","meta","0",{"charSet":"utf-8"}],["$","title","1",{"children":"From Digits to Decisions"}],["$","meta","2",{"name":"description","content":"How Tokenization Impacts Arithmetic in LLMs"}],["$","link","3",{"rel":"icon","href":"/favicon.ico","type":"image/x-icon","sizes":"16x16"}]]
31
  19:null