<?xml version="1.0" encoding="UTF-8"?><rss xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:atom="http://www.w3.org/2005/Atom" version="2.0" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:googleplay="http://www.google.com/schemas/play-podcasts/1.0"><channel><title><![CDATA[calebds.dev]]></title><description><![CDATA[Coding to learn, share, and excel in the age of chaos.]]></description><link>https://calebds.dev</link><image><url>https://substackcdn.com/image/fetch/$s_!IyfU!,w_256,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fdeb716c3-c03f-401b-b1e7-2c0ea4e10931_425x425.png</url><title>calebds.dev</title><link>https://calebds.dev</link></image><generator>Substack</generator><lastBuildDate>Wed, 15 Apr 2026 22:20:13 GMT</lastBuildDate><atom:link href="https://calebds.dev/feed" rel="self" type="application/rss+xml"/><copyright><![CDATA[Caleb Sotelo]]></copyright><language><![CDATA[en]]></language><webMaster><![CDATA[calebdsdev@substack.com]]></webMaster><itunes:owner><itunes:email><![CDATA[calebdsdev@substack.com]]></itunes:email><itunes:name><![CDATA[Caleb Sotelo]]></itunes:name></itunes:owner><itunes:author><![CDATA[Caleb Sotelo]]></itunes:author><googleplay:owner><![CDATA[calebdsdev@substack.com]]></googleplay:owner><googleplay:email><![CDATA[calebdsdev@substack.com]]></googleplay:email><googleplay:author><![CDATA[Caleb Sotelo]]></googleplay:author><itunes:block><![CDATA[Yes]]></itunes:block><item><title><![CDATA[Time for RAG: maple-leaf!]]></title><description><![CDATA[How I built an AI chatbot over my personal journals in a day.]]></description><link>https://calebds.dev/p/time-for-rag-maple-leaf</link><guid isPermaLink="false">https://calebds.dev/p/time-for-rag-maple-leaf</guid><dc:creator><![CDATA[Caleb Sotelo]]></dc:creator><pubDate>Sun, 01 Mar 2026 21:08:44 GMT</pubDate><enclosure 
url="https://substackcdn.com/image/fetch/$s_!oKHn!,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg" length="0" type="image/jpeg"/><content:encoded><![CDATA[<div class="captioned-image-container"><figure><a class="image-link image2 is-viewable-img" target="_blank" href="https://substackcdn.com/image/fetch/$s_!oKHn!,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg" data-component-name="Image2ToDOM"><div class="image2-inset"><picture><source type="image/webp" srcset="https://substackcdn.com/image/fetch/$s_!oKHn!,w_424,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 424w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_848,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 848w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_1272,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 1272w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_1456,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 1456w" sizes="100vw"><img src="https://substackcdn.com/image/fetch/$s_!oKHn!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg" width="1456" height="1092" 
data-attrs="{&quot;src&quot;:&quot;https://substack-post-media.s3.amazonaws.com/public/images/bc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg&quot;,&quot;srcNoWatermark&quot;:null,&quot;fullscreen&quot;:null,&quot;imageSize&quot;:null,&quot;height&quot;:1092,&quot;width&quot;:1456,&quot;resizeWidth&quot;:null,&quot;bytes&quot;:11389033,&quot;alt&quot;:null,&quot;title&quot;:null,&quot;type&quot;:&quot;image/jpeg&quot;,&quot;href&quot;:null,&quot;belowTheFold&quot;:false,&quot;topImage&quot;:true,&quot;internalRedirect&quot;:&quot;https://calebds.dev/i/189585951?img=https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg&quot;,&quot;isProcessing&quot;:false,&quot;align&quot;:null,&quot;offset&quot;:false}" class="sizing-normal" alt="" srcset="https://substackcdn.com/image/fetch/$s_!oKHn!,w_424,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 424w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_848,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 848w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_1272,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 1272w, https://substackcdn.com/image/fetch/$s_!oKHn!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2Fbc734a92-6608-4312-a659-fddcfda359f0_5712x4284.jpeg 1456w" sizes="100vw" fetchpriority="high"></picture><div class="image-link-expand"><div class="pencraft pc-display-flex pc-gap-8 pc-reset"><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container restack-image"><svg role="img" 
width="20" height="20" viewBox="0 0 20 20" fill="none" stroke-width="1.5" stroke="var(--color-fg-primary)" stroke-linecap="round" stroke-linejoin="round" xmlns="http://www.w3.org/2000/svg"><g><title></title><path d="M2.53001 7.81595C3.49179 4.73911 6.43281 2.5 9.91173 2.5C13.1684 2.5 15.9537 4.46214 17.0852 7.23684L17.6179 8.67647M17.6179 8.67647L18.5002 4.26471M17.6179 8.67647L13.6473 6.91176M17.4995 12.1841C16.5378 15.2609 13.5967 17.5 10.1178 17.5C6.86118 17.5 4.07589 15.5379 2.94432 12.7632L2.41165 11.3235M2.41165 11.3235L1.5293 15.7353M2.41165 11.3235L6.38224 13.0882"></path></g></svg></button><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container view-image"><svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-maximize2 lucide-maximize-2"><polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" x2="14" y1="3" y2="10"></line><line x1="3" x2="10" y1="21" y2="14"></line></svg></button></div></div></div></a><figcaption class="image-caption">Nanzen-ji temple in Kyoto, Japan</figcaption></figure></div><p>About six months ago, I realized I&#8217;d been a software engineer for fifteen years, and decided to take an intentional sabbatical &#8212; the first real break I&#8217;d had since college. I worked on jazz improv, visited with family all over California, traveled to Japan and Spain, and finished a draft of a sci-fi short story I&#8217;d had brewing for a couple years.</p><p>Now I&#8217;m preparing for a job search, and I wanted to shake the rust off. More specifically, I wanted to build something I&#8217;d only ever worked adjacent to: a <a href="https://en.wikipedia.org/wiki/Retrieval-augmented_generation">RAG system</a>. I&#8217;ve reviewed pull requests for RAG architectures, sat in on design sessions, and peeked at the literature. 
But I&#8217;d never actually built one. And in a post-LLM world where RAG feels like table stakes for anyone building AI products, I figured it was time. The result is <a href="https://github.com/calebds/maple-leaf">maple-leaf</a> &#8212; a local CLI that lets me query five years of personal journals using natural language. I named it after the <em>Maple Leaf Rag</em> by Scott Joplin.</p><p>This is a walk-through of how it got off the ground with Claude Code.</p><h2><strong>What is RAG? and why does it exist?</strong></h2><p>Foundation models like Claude or GPT know a lot &#8212; but their knowledge has a hard cutoff. Everything they know was baked in at training time. They know nothing about your life, your company&#8217;s internal docs, or anything that happened after training. RAG &#8212; Retrieval-Augmented Generation &#8212; is the standard pattern for bridging that gap.</p><p>The big picture:</p><div class="highlighted_code_block" data-attrs="{&quot;language&quot;:&quot;markdown&quot;,&quot;nodeId&quot;:null}" data-component-name="HighlightedCodeBlockToDOM"><pre class="shiki"><code class="language-markdown">   Your Data (documents, journals, notes)
         &#9474;
   [Embedding Model]   &#8592; converts text to vectors that
         &#9474;               capture semantic meaning
   [Vector Store]      &#8592; searchable by similarity
         &#9474;
&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;
         At query time:
&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;&#9472;
         &#9474;
   User Query
         &#9474;
   [1. Embed query]
   [2. Find similar chunks in vector store]
   [3. Retrieve top-K results]
   [4. Inject as context + call Foundation Model]
         &#9474;
         &#9660;
   Your Answer
         &#9474;
   (grounded in YOUR data, not just training knowledge)</code></pre></div><p>The key insight: instead of fine-tuning a model on your data &#8212; expensive, slow, and you&#8217;d have to redo it every time your data changes &#8212; you retrieve the right context at query time and hand it to the model as part of the prompt. The model doesn&#8217;t need to <em>know</em> your data. It just needs to <em>see</em> the relevant parts when you ask a question. Why does RAG exist? Because foundation models are extraordinarily capable reasoners. What they lack is context. RAG supplies it.</p><p>That&#8217;s the loop I wanted to understand by building.</p><div><hr></div><h2><strong>1. Starting with CLAUDE.md, not code</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/c322172">c322172</a></em></p><p>The first thing I did was spend a session talking through the architecture before writing a single line of code. I&#8217;ve started doing this on projects with Claude Code: front-load the architectural thinking, get it into a document, and treat that document as the source of truth throughout the build.</p><p>The initial sketch had a few things I wanted to change. Claude had proposed a dual-vendor setup &#8212; Anthropic for generation, OpenAI for embeddings. I ended up cutting that to a single vendor: local embeddings via ChromaDB&#8217;s default model (<code>all-MiniLM-L6-v2</code>, runs in-process, no API key), Anthropic for generation only.</p><p>The question of project structure was also interesting. Claude&#8217;s first proposal organized the engine into nested sub-packages: <code>engine/ingest/</code>, <code>engine/retrieval/</code>, <code>engine/generation/</code>, <code>engine/store/</code>. Four sub-packages, seven files. My instinct was that this was premature. When you&#8217;re wiring a first pass at a system, touching seven files plus four <code>__init__.py</code> files to get something working is friction you don&#8217;t need. 
I pushed for a flat engine:</p><div class="highlighted_code_block" data-attrs="{&quot;language&quot;:&quot;markdown&quot;,&quot;nodeId&quot;:&quot;bbd974e8-513f-4a22-891d-1f4e598650d5&quot;}" data-component-name="HighlightedCodeBlockToDOM"><pre class="shiki"><code class="language-markdown">engine/
&#9500;&#9472;&#9472; ingest.py
&#9500;&#9472;&#9472; store.py
&#9500;&#9472;&#9472; search.py
&#9492;&#9472;&#9472; generate.py</code></pre></div><p>Same architectural boundaries, less ceremony. Claude agreed and updated the doc, and we scaffolded from there.</p><div><hr></div><h2><strong>2. The Bear connector</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/4e34dfc">4e34dfc</a></em></p><p>My first data source is a Bear app export: five years of daily journal entries, ~1500 files, each a plain markdown file with a date-encoded filename and a tag line at the bottom. Parsing them was one area where Claude handled most of the algorithmic detail.</p><p>Bear&#8217;s export format is based on my own naming conventions. A file named <code>101 - Kyoto.md</code> means October 1st. The tag line <code>#journal #2025/october</code> gives you the year and month. So date reconstruction requires both pieces &#8212; the filename prefix gives month and day, the tag gives year and month &#8212; and you cross-reference to disambiguate:</p><div class="highlighted_code_block" data-attrs="{&quot;language&quot;:&quot;python&quot;,&quot;nodeId&quot;:&quot;e47f30cf-e018-4379-b59f-97a8e0fe08e4&quot;}" data-component-name="HighlightedCodeBlockToDOM"><pre class="shiki"><code class="language-python">def _parse_date(filename: str, tags: list[str]) -&gt; datetime:
    """Extract a date from the filename prefix and date tag.
    Filename gives month+day (e.g. "101" = 10/1, "1018" = 10/18).
    Date tag gives year+month (e.g. "#2025/october").
    """</code></pre></div><p>Claude wrote the initial implementation, then hit a real edge case: a file named <code>91 - Meysan.md</code> with a <code>#2025/september</code> tag. Is that day 91, or September 1st? The answer is the latter &#8212; the first digit <code>9</code> matches the tag month. There was also <code>920, 921 - LAXHND.md</code>, a multi-day entry (international flight). The parser handles it by splitting on the comma and taking the first date.</p><p>Before loading my full five-year export, I wanted a way to dry-run the parser and get a list of files that would fail &#8212; so I could go fix them in Bear before ingesting. Claude added a <code>validate</code> command to the CLI: walk the export directory, run the same parsing logic, and report every filename that throws an error rather than crashing on the first one. The first implementation dropped the validation logic directly into <code>cli.py</code>, which I pushed back on &#8212; the CLI is glue, not a place for data quality logic. We extracted it to <code>connectors/validate.py</code>, next to the connector it validates against. Then I noticed that <code>validate_bear_export</code> and <code>BearConnector.fetch_all</code> were doing the same thing: glob files, read content, call <code>_parse_tags</code>, call <code>_parse_date</code>. The right fix was to extract a <code>parse_file</code> function shared by both &#8212; so that validating a file and ingesting a file are provably the same code path. If <code>validate</code> passes, <code>ingest</code> won&#8217;t fail.</p><p>This commit also settled a testing convention question. Claude put tests in a top-level <code>tests/</code> directory &#8212; standard Python convention. I pushed back: I prefer tests co-located with the source they exercise. When you open <code>connectors/</code>, you should see <code>bear.py</code> and <code>test_bear.py</code> together. 
Claude walked me through why Python projects historically avoided this (packaging concerns &#8212; you don&#8217;t want test files shipped in your distribution), acknowledged the reasoning didn&#8217;t apply to a local CLI, and moved the tests. A pre-commit hook to run the full suite before every commit went in at the same time.</p><h2><strong>3. One less system</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/8abfedb">8abfedb</a></em></p><p>The original architecture had two stores: ChromaDB for vectors, SQLite for metadata. The rationale was pre-filtering &#8212; narrow candidates via SQL before running ANN search.</p><p>While wiring the query path, I asked directly: does ChromaDB support metadata filtering? It does, natively:</p><div class="highlighted_code_block" data-attrs="{&quot;language&quot;:&quot;python&quot;,&quot;nodeId&quot;:null}" data-component-name="HighlightedCodeBlockToDOM"><pre class="shiki"><code class="language-python">collection.query(
    query_texts=["What happened in Kyoto?"],
    where={"source": "bear"},
    n_results=10,
)</code></pre></div><p>So I argued for dropping SQLite. Claude pushed back gently &#8212; the case for SQLite isn&#8217;t the RAG query path, it&#8217;s document-level operations: counting documents by source, tracking sync state, full corpus listings. <code>COUNT</code>/<code>GROUP BY</code> queries, not vector searches. I kept that distinction and landed here: ChromaDB-only for now, SQLite added to future work for corpus admin tooling &#8212; the right call for a couple thousand documents on a laptop.</p><p>This is the kind of architectural conversation I find genuinely useful in this workflow. Claude had enough context to argue both sides clearly. I made the call.</p><h2><strong>4. The full loop</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/762c94a">762c94a</a>, <a href="https://github.com/calebds/maple-leaf/commit/74531d2">74531d2</a>, <a href="https://github.com/calebds/maple-leaf/commit/7ec04c5">7ec04c5</a></em></p><p>The ingestion pipeline ended up simple: one document, one chunk. Each Bear entry is a few paragraphs at most, well within embedding context limits. Start simple and measure &#8212; revisit chunking if retrieval quality degrades.</p><p>The TDD practitioner in me wanted to write a query test that finds nothing first: I like to build &#8220;outside-in&#8221; where possible. So we did: start with an empty collection, prove the empty path, then fill in ingestion knowing there&#8217;s a test to flip green.</p><p>A re-ingestion question came up while wiring: what happens if <code>maple-leaf ingest</code> runs twice on the same export? ChromaDB&#8217;s <code>add()</code> raises on duplicate IDs, so we switched to <code>upsert()</code> &#8212; one word change, but worth noting that I was the one thinking about the implications of how the tool would be used.</p><p>Ingestion seemed to be working, so I tried the query <em>what happened in Kyoto?</em> to sanity-check raw retrieval. 
The Kyoto entry came back as the top result, with related Japan trip entries following. Then we wired in generation &#8212; the part where the LLM actually gets called.</p><blockquote><p><em>Wow, it works! commit and push, will come back to this tomorrow. thanks</em></p></blockquote><p>That was me, approximately two hours into the session. Worth noting, because that reaction is real. There&#8217;s a moment when a working RAG loop stops being a diagram and becomes a thing that answers questions about your life.</p><h2><strong>5. First quality issue</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/7d798cd">7d798cd</a></em></p><p>The first real problem surfaced quickly:</p><div class="highlighted_code_block" data-attrs="{&quot;language&quot;:&quot;bash&quot;,&quot;nodeId&quot;:null}" data-component-name="HighlightedCodeBlockToDOM"><pre class="shiki"><code class="language-bash">maple-leaf query "what were my favorite hikes of 2025?"</code></pre></div><p>Claude responded that it didn&#8217;t have any entries from 2025. It did. The issue is that semantic search is date-blind &#8212; the embedding of &#8220;favorite hikes of 2025&#8221; doesn&#8217;t know that &#8220;2025&#8221; should function as a filter. It just finds hike-adjacent entries. Those happened to be from other years.</p><p><strong>This is one of the core limitations of naive RAG</strong> and why temporal filtering matters. The fix: <code>--from</code>/<code>--to</code> flags that pre-filter ChromaDB with <code>where</code> clauses on a numeric Unix timestamp field before ANN search runs. ChromaDB can&#8217;t range-query on ISO date strings &#8212; only on numeric types &#8212; so the timestamp field had to be added to the ingested metadata. 
Rather than requiring a full re-ingest, we wrote a migration to backfill <code>created_at_ts</code> from the existing <code>created_at</code> ISO string already stored in ChromaDB.</p><p>I also noticed that date parsing logic had accumulated in the CLI. I pushed it into <code>engine/search.py</code> &#8212; the CLI takes arguments and calls the engine, and that&#8217;s all it should do. This came up five or six times across the build, and every time it was the same refactor, one I had to initiate.</p><h2><strong>6. Second source, connector abstraction pays off</strong></h2><p><em>GitHub: <a href="https://github.com/calebds/maple-leaf/commit/90f089e">90f089e</a></em></p><p>The last major commit added a second data source: my dream logs. Also in Bear, also in markdown, but stored separately with title-only filenames and no date prefix. Different format, different connector, same <code>Document</code> output, same engine.</p><p>Adding a new source meant: write a <code>BearDreamsConnector</code>, point the CLI at it. The engine didn&#8217;t change. The query path didn&#8217;t change. The tests for the existing connector didn&#8217;t change. The <code>Document</code> contract held.</p><p>I also introduced a <code>Source</code> enum at this point &#8212; <code>Source.JOURNAL</code> and <code>Source.DREAM</code> &#8212; rather than raw strings. <code>--source journal</code> or <code>--source dreams</code> at query time. A single definition of what &#8220;source&#8221; means across the system: the CLI, the connectors, the system prompt.</p><div><hr></div><h2><strong>What&#8217;s ahead</strong></h2><p>There&#8217;s a reasonable future work list: NL time parsing (resolving &#8220;when I was in Japan&#8221; into date ranges via a pre-query Claude call), cross-encoder reranking, additional connectors, SQLite for corpus analytics. None of it is urgent. 
The current system retrieves well, and the architecture accommodates everything without major rework.</p><p><strong>The value of getting the high-level RAG loop right early is that every subsequent improvement is additive.</strong> Better chunking, better retrieval, smarter prompts &#8212; these all slot in cleanly when the basic pipeline is solid: both in code and in my head. I spent most of this 24-hour build getting that foundation right rather than chasing features. I&#8217;d make the same prioritization again.</p><p>And in case you were wondering, here&#8217;s what I did in Kyoto:</p><div class="captioned-image-container"><figure><a class="image-link image2 is-viewable-img" target="_blank" href="https://substackcdn.com/image/fetch/$s_!QzFR!,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png" data-component-name="Image2ToDOM"><div class="image2-inset"><picture><source type="image/webp" srcset="https://substackcdn.com/image/fetch/$s_!QzFR!,w_424,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 424w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_848,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 848w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_1272,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 1272w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_1456,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 1456w" sizes="100vw"><img 
src="https://substackcdn.com/image/fetch/$s_!QzFR!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png" width="1312" height="460" data-attrs="{&quot;src&quot;:&quot;https://substack-post-media.s3.amazonaws.com/public/images/3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png&quot;,&quot;srcNoWatermark&quot;:null,&quot;fullscreen&quot;:null,&quot;imageSize&quot;:null,&quot;height&quot;:460,&quot;width&quot;:1312,&quot;resizeWidth&quot;:null,&quot;bytes&quot;:561931,&quot;alt&quot;:null,&quot;title&quot;:null,&quot;type&quot;:&quot;image/png&quot;,&quot;href&quot;:null,&quot;belowTheFold&quot;:true,&quot;topImage&quot;:false,&quot;internalRedirect&quot;:&quot;https://calebds.dev/i/189585951?img=https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png&quot;,&quot;isProcessing&quot;:false,&quot;align&quot;:null,&quot;offset&quot;:false}" class="sizing-normal" alt="" srcset="https://substackcdn.com/image/fetch/$s_!QzFR!,w_424,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 424w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_848,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 848w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_1272,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 1272w, https://substackcdn.com/image/fetch/$s_!QzFR!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F3235663a-2484-4426-bc30-eea3c2bf87c7_1312x460.png 1456w" sizes="100vw" 
loading="lazy"></picture><div class="image-link-expand"><div class="pencraft pc-display-flex pc-gap-8 pc-reset"><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container restack-image"><svg role="img" width="20" height="20" viewBox="0 0 20 20" fill="none" stroke-width="1.5" stroke="var(--color-fg-primary)" stroke-linecap="round" stroke-linejoin="round" xmlns="http://www.w3.org/2000/svg"><g><title></title><path d="M2.53001 7.81595C3.49179 4.73911 6.43281 2.5 9.91173 2.5C13.1684 2.5 15.9537 4.46214 17.0852 7.23684L17.6179 8.67647M17.6179 8.67647L18.5002 4.26471M17.6179 8.67647L13.6473 6.91176M17.4995 12.1841C16.5378 15.2609 13.5967 17.5 10.1178 17.5C6.86118 17.5 4.07589 15.5379 2.94432 12.7632L2.41165 11.3235M2.41165 11.3235L1.5293 15.7353M2.41165 11.3235L6.38224 13.0882"></path></g></svg></button><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container view-image"><svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-maximize2 lucide-maximize-2"><polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" x2="14" y1="3" y2="10"></line><line x1="3" x2="10" y1="21" y2="14"></line></svg></button></div></div></div></a></figure></div><p>and here is some highly recommended ramen!</p><div class="captioned-image-container"><figure><a class="image-link image2 is-viewable-img" target="_blank" href="https://substackcdn.com/image/fetch/$s_!KFPd!,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg" data-component-name="Image2ToDOM"><div class="image2-inset"><picture><source type="image/webp" 
srcset="https://substackcdn.com/image/fetch/$s_!KFPd!,w_424,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 424w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_848,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 848w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_1272,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 1272w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_1456,c_limit,f_webp,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 1456w" sizes="100vw"><img src="https://substackcdn.com/image/fetch/$s_!KFPd!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg" width="1456" height="1941" data-attrs="{&quot;src&quot;:&quot;https://substack-post-media.s3.amazonaws.com/public/images/19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg&quot;,&quot;srcNoWatermark&quot;:null,&quot;fullscreen&quot;:null,&quot;imageSize&quot;:null,&quot;height&quot;:1941,&quot;width&quot;:1456,&quot;resizeWidth&quot;:null,&quot;bytes&quot;:5057678,&quot;alt&quot;:null,&quot;title&quot;:null,&quot;type&quot;:&quot;image/jpeg&quot;,&quot;href&quot;:null,&quot;belowTheFold&quot;:true,&quot;topImage&quot;:false,&quot;internalRedirect&quot;:&quot;https://calebds.dev/i/189585951?img=https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg&quot;,&quot;isProcessing&quot;:false,&quot;align&quot;:null,&quot;offset&quot;:false}" 
class="sizing-normal" alt="" srcset="https://substackcdn.com/image/fetch/$s_!KFPd!,w_424,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 424w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_848,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 848w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_1272,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 1272w, https://substackcdn.com/image/fetch/$s_!KFPd!,w_1456,c_limit,f_auto,q_auto:good,fl_progressive:steep/https%3A%2F%2Fsubstack-post-media.s3.amazonaws.com%2Fpublic%2Fimages%2F19ca8535-0ab6-44f4-9145-b6561973a354_5712x4284.jpeg 1456w" sizes="100vw" loading="lazy"></picture><div class="image-link-expand"><div class="pencraft pc-display-flex pc-gap-8 pc-reset"><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container restack-image"><svg role="img" width="20" height="20" viewBox="0 0 20 20" fill="none" stroke-width="1.5" stroke="var(--color-fg-primary)" stroke-linecap="round" stroke-linejoin="round" xmlns="http://www.w3.org/2000/svg"><g><title></title><path d="M2.53001 7.81595C3.49179 4.73911 6.43281 2.5 9.91173 2.5C13.1684 2.5 15.9537 4.46214 17.0852 7.23684L17.6179 8.67647M17.6179 8.67647L18.5002 4.26471M17.6179 8.67647L13.6473 6.91176M17.4995 12.1841C16.5378 15.2609 13.5967 17.5 10.1178 17.5C6.86118 17.5 4.07589 15.5379 2.94432 12.7632L2.41165 11.3235M2.41165 11.3235L1.5293 15.7353M2.41165 11.3235L6.38224 13.0882"></path></g></svg></button><button tabindex="0" type="button" class="pencraft pc-reset pencraft icon-container view-image"><svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" 
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-maximize2 lucide-maximize-2"><polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" x2="14" y1="3" y2="10"></line><line x1="3" x2="10" y1="21" y2="14"></line></svg></button></div></div></div></a></figure></div><p></p>]]></content:encoded></item></channel></rss>