<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: Karan Verma</title>
    <description>The latest articles on DEV Community by Karan Verma (@karanverma).</description>
    <link>https://dev.to/karanverma</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png</url>
      <title>DEV Community: Karan Verma</title>
      <link>https://dev.to/karanverma</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/karanverma"/>
    <language>en</language>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Mon, 08 Sep 2025 11:31:53 +0000</pubDate>
      <link>https://dev.to/karanverma/-d50</link>
      <guid>https://dev.to/karanverma/-d50</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-story__hidden-navigation-link"&gt;From Beginner to Pro: Docker + Terraform for Scalable AI Agents&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2455826" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;May 3 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" id="article-link-2455826"&gt;
          From Beginner to Pro: Docker + Terraform for Scalable AI Agents
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/terraform"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;terraform&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/aideployment"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;aideployment&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/mlops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;mlops&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>terraform</category>
      <category>aideployment</category>
      <category>mlops</category>
    </item>
    <item>
      <title>From Zero to Kubernetes: A Beginner's Guide to Orchestrating Docker Containers</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sat, 31 May 2025 12:30:28 +0000</pubDate>
      <link>https://dev.to/docker/from-zero-to-kubernetes-a-beginners-guide-to-orchestrating-docker-containers-leg</link>
      <guid>https://dev.to/docker/from-zero-to-kubernetes-a-beginners-guide-to-orchestrating-docker-containers-leg</guid>
      <description>&lt;p&gt;&lt;strong&gt;Introduction&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;If you've ever built or deployed applications using Docker, you've likely hit a point where running containers on your laptop just isn’t enough. You need scaling, automation, recovery, and networking across machines. Enter Kubernetes, the container orchestrator trusted by startups and tech giants alike. In this beginner-friendly guide, we’ll walk you through what Kubernetes is, why it matters, and how Docker developers can start leveraging its power.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;What is Kubernetes?&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Kubernetes (also called K8s) is an open-source platform that automates deploying, scaling, and managing containerized applications. While Docker helps package your app into a container, Kubernetes helps run and scale it across many machines.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Frwp2o04bgatzrrim2j8h.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Frwp2o04bgatzrrim2j8h.png" alt="arch" width="800" height="533"&gt;&lt;/a&gt;&lt;br&gt;
Kubernetes architecture explained: The Control Plane manages the cluster while Nodes run Pods, which host your Docker containers.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Why Use Kubernetes?&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;- Self-Healing:&lt;/strong&gt; Restarts failed containers automatically.&lt;br&gt;
&lt;strong&gt;- Scalability:&lt;/strong&gt; Scale apps up or down automatically with a single command.&lt;br&gt;
&lt;strong&gt;- Declarative Management:&lt;/strong&gt; Define your infrastructure and app needs using YAML files.&lt;br&gt;
&lt;strong&gt;- Portability:&lt;/strong&gt; Run anywhere from your laptop with Minikube to cloud providers like AWS, GCP, or Azure.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;How Kubernetes Works (for Docker Devs)&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Kubernetes works on a cluster model. A cluster has:&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;- Master Node (Control Plane):&lt;/strong&gt; Handles scheduling, scaling, and communication.&lt;br&gt;
&lt;strong&gt;- Worker Nodes:&lt;/strong&gt; Run your Docker containers inside Pods.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Pods and Deployments&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;A Pod is the smallest deployable unit in Kubernetes. It wraps your container(s) and runs on a node. You usually don’t run Pods directly; instead, you use Deployments to manage them.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fq6as56oqoss71o68qqcs.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fq6as56oqoss71o68qqcs.png" alt="Pod &amp;amp; Deployment Flow" width="800" height="800"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Exposing Your App with Services&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Pods can come and go. You need a stable way to expose them; that’s where Services come in. A Service routes traffic to the right Pods and load-balances across them.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fss43jo4u9cb4edxeslyg.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fss43jo4u9cb4edxeslyg.png" alt="Image description" width="800" height="800"&gt;&lt;/a&gt;&lt;br&gt;
Kubernetes Service: Traffic from users is routed through a Service to reach the right Pods, ensuring balanced and reliable access to your app.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step-by-Step: Try It Yourself with Minikube&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Let’s get hands-on!&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;1. Install Minikube &amp;amp; kubectl&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;brew install minikube
minikube start
kubectl get nodes
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;2. Create a Deployment YAML&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;apiVersion: apps/v1
kind: Deployment
metadata:
  name: my-web-app
spec:
  replicas: 2
  selector:
    matchLabels:
      app: web
  template:
    metadata:
      labels:
        app: web
    spec:
      containers:
      - name: nginx
        image: nginx:latest
        ports:
        - containerPort: 80
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;3. Deploy it to Kubernetes&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;kubectl apply -f deployment.yaml
kubectl get pods
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;4. Expose Your Deployment as a Service&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;kubectl expose deployment my-web-app --type=NodePort --port=80
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;5. Access Your App&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;minikube service my-web-app
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Bonus: Access a Pod Directly (Port Forwarding)&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;kubectl port-forward pod/my-web-app-xxxx 8080:80
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;📚 Further Reading&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Here are some trusted, beginner-friendly resources to deepen your Kubernetes knowledge, especially curated for developers coming from Docker:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://kubernetes.io/docs/" rel="noopener noreferrer"&gt;Kubernetes Official Documentation&lt;/a&gt;: The canonical source for Kubernetes knowledge, straight from the maintainers.&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://docs.docker.com/get-started/orchestration/" rel="noopener noreferrer"&gt;Docker + Kubernetes (Docker Docs)&lt;/a&gt;: Docker’s own guide on moving from Docker CLI to Kubernetes orchestration.  &lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://minikube.sigs.k8s.io/docs/start/" rel="noopener noreferrer"&gt;Minikube Official Docs&lt;/a&gt;: Run Kubernetes locally in minutes, perfect for testing and dev environments.&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://kubernetes.io/docs/reference/kubectl/cheatsheet/" rel="noopener noreferrer"&gt;kubectl Cheat Sheet&lt;/a&gt;: Bookmark this as your go-to for common Kubernetes CLI commands.&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://docs.digitalocean.com/products/kubernetes/getting-started/deploy-image-to-cluster/" rel="noopener noreferrer"&gt;Build and Deploy Your First Image on DigitalOcean Kubernetes&lt;/a&gt;: A hands-on tutorial that ties together Docker image creation and Kubernetes deployment.&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;&lt;a href="https://www.youtube.com/watch?v=X48VuDVv0do" rel="noopener noreferrer"&gt;Kubernetes for Beginners (YouTube - TechWorld with Nana)&lt;/a&gt;: A visual, practical walkthrough of key Kubernetes concepts is great for Docker users.&lt;/p&gt;&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Conclusion&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Kubernetes might seem complex at first, but if you’re already familiar with Docker, you’re well on your way to mastering it. In this guide, you took important first steps by deploying your app, scaling it, and exposing it with a service, all using tools on your own machine. With a bit of practice and curiosity, you’ll soon unlock the full power of Kubernetes to manage containers at scale, whether locally or in the cloud. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Keep experimenting, and enjoy the journey from zero to Kubernetes pro!&lt;/strong&gt;🚀&lt;/p&gt;

</description>
      <category>kubernetes</category>
      <category>docker</category>
      <category>devops</category>
      <category>cloudnative</category>
    </item>
    <item>
      <title>Docker MCP Catalog &amp; Toolkit: Building Smarter AI Agents with Ease</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Tue, 20 May 2025 11:43:38 +0000</pubDate>
      <link>https://dev.to/docker/docker-mcp-catalog-toolkit-building-smarter-ai-agents-with-ease-408c</link>
      <guid>https://dev.to/docker/docker-mcp-catalog-toolkit-building-smarter-ai-agents-with-ease-408c</guid>
      <description>&lt;p&gt;&lt;strong&gt;Introduction: What Is Docker MCP and Why It Matters&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;The rise of agent-based AI applications, powered by ChatGPT, Claude, and custom LLMs, has created a demand for modular, secure, and standardized integrations with real-world tools. Docker’s Model Context Protocol (MCP), along with its Catalog and Toolkit, addresses this need.&lt;/p&gt;

&lt;p&gt;Docker is positioning itself not just as a container platform but as the infrastructure backbone for intelligent agents. In this post, we’ll explore the MCP architecture, Catalog, and Toolkit, and demonstrate how to build your own MCP server.&lt;/p&gt;

&lt;h2&gt;
  
  
  Section 1: Understanding MCP: The Model Context Protocol
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;What it is:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;MCP is an &lt;strong&gt;open protocol&lt;/strong&gt; that allows AI clients (like agents) to call real-world services securely and predictably.&lt;/li&gt;
&lt;li&gt;It's designed for tool interoperability, secure credential management (handling API keys and tokens), and container-based execution.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Why it matters:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Without standards like MCP, agents rely on brittle APIs or unsafe plugins.&lt;/li&gt;
&lt;li&gt;Docker provides a secure, isolated runtime to host these services in containers.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Visual overview:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnl9fzxlbty6djtfhhuzn.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnl9fzxlbty6djtfhhuzn.png" alt="MCP Arch Diagram" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;How an AI client communicates with containerized services via MCP&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  Section 2: MCP Catalog: Prebuilt, Secure MCP Servers
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;What it includes:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;A growing library of 100+ Docker-verified MCP servers, including:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Stripe&lt;/li&gt;
&lt;li&gt;LangChain&lt;/li&gt;
&lt;li&gt;Elastic&lt;/li&gt;
&lt;li&gt;Pinecone&lt;/li&gt;
&lt;li&gt;Hugging Face&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Key features:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Each MCP server runs inside a container and includes:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;OpenAPI spec&lt;/li&gt;
&lt;li&gt;Secure default config&lt;/li&gt;
&lt;li&gt;Docker Desktop integration&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Why developers care:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Plug-and-play tools for AI agents.&lt;/li&gt;
&lt;li&gt;Consistent dev experience across services.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Visual overview:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fr1awaatmw44r57fm2as3.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fr1awaatmw44r57fm2as3.png" alt="MCP Catalog Diagram" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;MCP Catalog integration with Docker Desktop&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  Section 3: MCP Toolkit: Build Your Own Secure MCP Server
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Toolkit CLI Features:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;code&gt;mcp init&lt;/code&gt; → Scaffolds new MCP server&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;mcp run&lt;/code&gt; → Runs local dev version&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;mcp deploy&lt;/code&gt; → Deploy to Docker Desktop&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Security features:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Container isolation&lt;/li&gt;
&lt;li&gt;OAuth support for credentials&lt;/li&gt;
&lt;li&gt;Optional rate limiting and tracing&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Demo walkthrough:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;npm install -g @docker/mcp-toolkit
mcp init my-weather-api
cd my-weather-api
mcp run
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Visual walkthrough:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpflyy3al2z9h1nxzhw03.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpflyy3al2z9h1nxzhw03.png" alt="MCP Toolkit Diagram" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;MCP Toolkit Workflow: From CLI to Container&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  Section 4: Connecting MCP Servers to AI Clients
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Supported clients:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Claude (Anthropic)&lt;/li&gt;
&lt;li&gt;GPT Agents (OpenAI)&lt;/li&gt;
&lt;li&gt;Docker AI (beta)&lt;/li&gt;
&lt;li&gt;VS Code Extensions&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;How it works:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Agents call &lt;code&gt;/invoke&lt;/code&gt; endpoint defined in MCP spec.&lt;/li&gt;
&lt;li&gt;Secure token exchange handles identity.&lt;/li&gt;
&lt;li&gt;Response returned to model for reasoning/action.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Use case example:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;Claude uses a Docker MCP server to call a Stripe payment processing container during an e-commerce interaction.&lt;/em&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Visual flow:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqht7pop4qx9u0lakaito.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqht7pop4qx9u0lakaito.png" alt="Agent-to-API via Docker MCP" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;Shows how Claude securely calls a Stripe service via Docker MCP.&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  Section 5: Best Practices for MCP Server Developers
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Security:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Never use root containers&lt;/li&gt;
&lt;li&gt;Use &lt;code&gt;docker scan&lt;/code&gt; and &lt;code&gt;trivy&lt;/code&gt; for image vulnerability scanning&lt;/li&gt;
&lt;li&gt;Store secrets with Docker's secret manager (or Vault)&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Performance:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Keep containers lightweight (use Alpine or Distroless)&lt;/li&gt;
&lt;li&gt;Use streaming responses for LLM interaction&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Testing tips:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Use &lt;code&gt;Postman&lt;/code&gt; + &lt;code&gt;curl&lt;/code&gt; to test &lt;code&gt;/invoke&lt;/code&gt; endpoint&lt;/li&gt;
&lt;li&gt;Lint OpenAPI specs with &lt;code&gt;swagger-cli&lt;/code&gt;
&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  Section 6: The Future of MCP: What Comes Next?
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Predictions:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Docker AI Dashboard integration&lt;/li&gt;
&lt;li&gt;MCP orchestration (multiple services per agent)&lt;/li&gt;
&lt;li&gt;AI-native DevOps (agents building infra with MCP servers)&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Opportunities for devs:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Contribute to open MCP servers&lt;/li&gt;
&lt;li&gt;Submit to Docker Catalog&lt;/li&gt;
&lt;li&gt;Build agent tools for internal or public use&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Closing Thoughts&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Docker’s MCP Catalog and Toolkit are still in beta, but the path forward is clear: AI apps need real-world tool access, and Docker is building a secure, open ecosystem to power it.&lt;/p&gt;

&lt;p&gt;Whether you’re building agent frameworks or just experimenting with tool-using LLMs, now’s the perfect time to get involved.&lt;/p&gt;

&lt;p&gt;Got ideas for MCP servers you want to see? Or thinking about contributing your own? I’d love to hear from you!  😊&lt;/p&gt;

</description>
      <category>dockermcp</category>
      <category>aiagents</category>
      <category>containersecurity</category>
      <category>devopsai</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sun, 18 May 2025 10:46:20 +0000</pubDate>
      <link>https://dev.to/karanverma/-a2d</link>
      <guid>https://dev.to/karanverma/-a2d</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-story__hidden-navigation-link"&gt;From Beginner to Pro: Docker + Terraform for Scalable AI Agents&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2455826" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;May 3 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" id="article-link-2455826"&gt;
          From Beginner to Pro: Docker + Terraform for Scalable AI Agents
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/terraform"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;terraform&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/aideployment"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;aideployment&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/mlops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;mlops&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>terraform</category>
      <category>aideployment</category>
      <category>mlops</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sun, 18 May 2025 08:15:29 +0000</pubDate>
      <link>https://dev.to/karanverma/-4e6e</link>
      <guid>https://dev.to/karanverma/-4e6e</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/optimizing-docker-image-builds-for-speed-efficiency-17b" class="crayons-story__hidden-navigation-link"&gt;Optimizing Docker Image Builds for Speed &amp;amp; Efficiency&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2381489" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/optimizing-docker-image-builds-for-speed-efficiency-17b" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;Apr 4 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/optimizing-docker-image-builds-for-speed-efficiency-17b" id="article-link-2381489"&gt;
          Optimizing Docker Image Builds for Speed &amp;amp; Efficiency
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/devops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;devops&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/softwareoptimization"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;softwareoptimization&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/containerization"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;containerization&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/optimizing-docker-image-builds-for-speed-efficiency-17b" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/optimizing-docker-image-builds-for-speed-efficiency-17b#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            2 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>devops</category>
      <category>softwareoptimization</category>
      <category>containerization</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sat, 17 May 2025 04:38:29 +0000</pubDate>
      <link>https://dev.to/karanverma/-18ib</link>
      <guid>https://dev.to/karanverma/-18ib</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on" class="crayons-story__hidden-navigation-link"&gt;From Zero to GenAI Cluster: Scalable Local LLMs with Docker, Kubernetes, and GPU Scheduling&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2455529" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;May 3 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on" id="article-link-2455529"&gt;
          From Zero to GenAI Cluster: Scalable Local LLMs with Docker, Kubernetes, and GPU Scheduling
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/genai"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;genai&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/kubernetes"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;kubernetes&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/llm"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;llm&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;6&lt;span class="hidden s:inline"&gt; reactions&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>genai</category>
      <category>docker</category>
      <category>kubernetes</category>
      <category>llm</category>
    </item>
    <item>
      <title>From Beginner to Pro: Docker + Terraform for Scalable AI Agents</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sat, 03 May 2025 10:49:32 +0000</pubDate>
      <link>https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2</link>
      <guid>https://dev.to/docker/from-beginner-to-pro-deploying-scalable-ai-workloads-with-docker-terraform-41f2</guid>
      <description>&lt;p&gt;&lt;strong&gt;Introduction&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;As AI and machine learning workloads grow more complex, developers and DevOps engineers are looking for reliable, reproducible, and scalable ways to deploy them. While tools like Docker and Terraform are widely known, many developers haven’t yet fully unlocked their combined potential, especially when it comes to deploying AI agents or LLMs across cloud or hybrid environments.&lt;/p&gt;

&lt;p&gt;This guide walks you through the journey from Docker and Terraform basics to building scalable infrastructure for modern AI/ML systems.&lt;/p&gt;

&lt;p&gt;Whether you’re a beginner trying to get your first container up and running or an expert deploying multi-agent LLM setups with GPU-backed infrastructure, this article is for you.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fkfojs9wd8srqueomzc2m.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fkfojs9wd8srqueomzc2m.png" alt="docker terraform" width="800" height="800"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Docker 101: Containerizing Your First AI Model&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Let’s start with Docker. Containers make it easier to package and ship your applications. Here’s a quick example of containerizing a PyTorch-based inference model.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Dockerfile:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;FROM python:3.9-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
CMD ["python", "inference.py"]
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Build &amp;amp; Run:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;docker build -t ai-agent .
docker run -p 5000:5000 ai-agent
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;You now have a reproducible and portable AI model running in a container!&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Terraform 101: Your Infrastructure as Code&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Now let’s set up the infrastructure to run this container in the cloud using Terraform.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Basic Terraform Script:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;provider "aws" {
  region = "us-east-1"
}

resource "aws_instance" "agent" {
  ami           = "ami-0abcdef1234567890"  # Choose a GPU-compatible AMI
  instance_type = "g4dn.xlarge"

  provisioner "remote-exec" {
    inline = [
      "sudo docker run -d -p 5000:5000 ai-agent"
    ]
  }
}
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Deploy:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;terraform init
terraform apply
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Boom — your container is live on an EC2 instance!&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Integrating Docker + Terraform: Scalable AI Agent Setup&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Now, we combine both tools to:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Auto-provision compute with Terraform&lt;/li&gt;
&lt;li&gt;Pull and run your Docker images automatically&lt;/li&gt;
&lt;li&gt;Scale agents dynamically by changing Terraform variables&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;Example:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;variable "agent_count" {
  default = 3
}

resource "aws_instance" "agent" {
  count         = var.agent_count
  ami           = "ami-0abc123456"
  instance_type = "g4dn.xlarge"
  ...
}
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;This lets you spin up multiple Dockerized AI agents across your cloud fleet—perfect for inference APIs or retrieval-augmented generation (RAG) systems.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Advanced Use Case: AI Agents with Multi-GPU, CI/CD &amp;amp; Terraform&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Imagine this setup:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Each agent runs an OpenAI-compatible LLM locally (e.g., Mistral, Ollama, LLaMA.cpp)&lt;/li&gt;
&lt;li&gt;Terraform provisions GPU instances and networking&lt;/li&gt;
&lt;li&gt;Docker builds include prompt routers and memory systems&lt;/li&gt;
&lt;li&gt;GitHub Actions auto-triggers Terraform for deployments&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Benefits:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Reproducibility across dev, staging, and prod&lt;/li&gt;
&lt;li&gt;Cost savings via spot instances&lt;/li&gt;
&lt;li&gt;Seamless rollback via Terraform state&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;This is modern MLOps, containerized.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;☁️ Hybrid Multi-Cloud AI with Docker + Terraform&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;You can even expand this setup to support:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Azure or GCP compute targets&lt;/li&gt;
&lt;li&gt;Multi-region failover&lt;/li&gt;
&lt;li&gt;Local LLM agents in Docker Swarm clusters (home lab, edge)&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Pro Tip:&lt;/strong&gt; Use Terraform Cloud or Atlantis for remote state and team workflows.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Visual Overview: How Docker and Terraform Work Together to Deploy AI Agents&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fbfnbt92fso865h46di3p.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fbfnbt92fso865h46di3p.png" alt="arch docker and terraform" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;This diagram maps the full lifecycle: writing infrastructure-as-code, containerizing models, and deploying everything automatically.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Simulated Real-World Project: Structure, README &amp;amp; CLI&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;This structure outlines a robust setup designed for deploying and testing Docker + Terraform AI agents in hybrid cloud environments. It’s a scalable, reliable framework that can be leveraged for complex AI deployments.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;📁 Project Structure&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;.
├── Dockerfile
├── terraform/
│   ├── main.tf
│   ├── variables.tf
│   └── outputs.tf
├── cloud-init/
│   └── init.sh
├── ai-model/
│   ├── inference.py
│   └── requirements.txt
└── README.md
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Sample README.md (Private/Internal Repo Summary)&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Title:&lt;/strong&gt; Scalable AI Agent Deployment with Docker &amp;amp; Terraform&lt;/p&gt;

&lt;p&gt;This project sets up a fully Dockerized AI inference agent that is deployed via Terraform on GPU-enabled EC2 instances. It demonstrates:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Docker container for model inference (PyTorch/Transformers)&lt;/li&gt;
&lt;li&gt;Terraform to provision compute infra + networking&lt;/li&gt;
&lt;li&gt;Cloud-init for auto-starting containers post-launch&lt;/li&gt;
&lt;li&gt;Multi-agent scaling logic with variable interpolation&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Basic Usage:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;code&gt;terraform init&lt;br&gt;
terraform apply&lt;/code&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Run Docker Locally:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;docker build -t ai-agent .
docker run -p 5000:5000 ai-agent
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;CLI Output Snapshot&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Terraform:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;&amp;gt; terraform apply

Apply complete! Resources:
 - aws_instance.agent[0]
 - aws_security_group.main

Public IP: 34.201.12.77
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Docker:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;&amp;gt; docker ps

CONTAINER ID   IMAGE       COMMAND                STATUS       PORTS
ae34c2f1c11b   ai-agent    "python inference.py"  Up 2 mins    5000/tcp
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;⚙️ Note: This setup has been tested with both local GPUs and AWS EC2 g4dn instances. The Docker + Terraform pipeline helped me cut down deployment effort by over 60% and simplified environment consistency across dev and test runs.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Simulated Real-World Project: Structure, README &amp;amp; CLI&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;This structure outlines a robust setup designed for deploying and testing Docker + Terraform AI agents in hybrid cloud environments. It’s a scalable, reliable framework that can be leveraged for complex AI deployments.&lt;/p&gt;

&lt;p&gt;For more information on Docker, you can refer to the &lt;a href="https://docs.docker.com/" rel="noopener noreferrer"&gt;official Docker documentation&lt;/a&gt; and explore relevant open-source projects on &lt;a href="https://github.com/docker" rel="noopener noreferrer"&gt;Docker's GitHub&lt;/a&gt;. Additionally, for Terraform-related resources, check out the &lt;a href="https://www.terraform.io/docs/" rel="noopener noreferrer"&gt;official Terraform documentation&lt;/a&gt; and &lt;a href="https://github.com/hashicorp/terraform" rel="noopener noreferrer"&gt;Terraform GitHub&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Final Takeaways&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;✅ Docker simplifies packaging AI/ML models&lt;/li&gt;
&lt;li&gt;✅ Terraform provisions scalable infrastructure in minutes&lt;/li&gt;
&lt;li&gt;✅ Together, they form a powerful pattern for reliable AI deployment&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;Whether you’re running LLMs locally, deploying agents in the cloud, or scaling across multi-cloud environments, this stack is your launchpad.&lt;/p&gt;

&lt;p&gt;👋 Call to Action&lt;/p&gt;

&lt;p&gt;If this guide helped you, share it with your team or community!&lt;/p&gt;

&lt;p&gt;Thanks for reading. Happy hacking and may your containers always build clean! 🚀&lt;/p&gt;

</description>
      <category>docker</category>
      <category>terraform</category>
      <category>aideployment</category>
      <category>mlops</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sat, 03 May 2025 08:17:01 +0000</pubDate>
      <link>https://dev.to/karanverma/-4093</link>
      <guid>https://dev.to/karanverma/-4093</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/revolutionizing-devops-how-docker-genai-gordon-powers-ai-driven-container-management-5bid" class="crayons-story__hidden-navigation-link"&gt;What If Your Containers Managed Themselves? Meet Docker GenAI Gordon&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2332649" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/revolutionizing-devops-how-docker-genai-gordon-powers-ai-driven-container-management-5bid" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;Mar 14 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/revolutionizing-devops-how-docker-genai-gordon-powers-ai-driven-container-management-5bid" id="article-link-2332649"&gt;
          What If Your Containers Managed Themselves? Meet Docker GenAI Gordon
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/gordon"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;gordon&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/ai"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;ai&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/devops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;devops&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/revolutionizing-devops-how-docker-genai-gordon-powers-ai-driven-container-management-5bid" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/revolutionizing-devops-how-docker-genai-gordon-powers-ai-driven-container-management-5bid#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>gordon</category>
      <category>ai</category>
      <category>devops</category>
    </item>
    <item>
      <title>From Zero to GenAI Cluster: Scalable Local LLMs with Docker, Kubernetes, and GPU Scheduling</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sat, 03 May 2025 08:11:44 +0000</pubDate>
      <link>https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on</link>
      <guid>https://dev.to/docker/from-zero-to-genai-cluster-scalable-local-llms-with-docker-kubernetes-and-gpu-scheduling-47on</guid>
      <description>&lt;p&gt;A practical guide to deploying fast, private, and production-ready large language models with vLLM, Ollama, and Kubernetes-native orchestration. Build your own scalable GenAI cluster with Docker, Kubernetes, and GPU scheduling for a fully private, production-ready LLM setup.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Prerequisites&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Before we begin, ensure your system meets the following requirements:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;A Kubernetes cluster with &lt;strong&gt;GPU-enabled nodes&lt;/strong&gt; (e.g., via GKE, AKS, or bare-metal)&lt;/li&gt;
&lt;li&gt;The &lt;strong&gt;NVIDIA device plugin&lt;/strong&gt; installed on the cluster&lt;/li&gt;
&lt;li&gt;Helm CLI installed and configured&lt;/li&gt;
&lt;li&gt;Docker CLI and access to a GPU-compatible runtime (e.g., &lt;code&gt;nvidia-docker2&lt;/code&gt;)&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Introduction&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Local LLMs are no longer a research luxury — they’re a production need. But deploying them at scale, with GPU access, container orchestration, and real-time monitoring? That’s still murky territory for many.&lt;/p&gt;

&lt;p&gt;In this article, I’ll walk you through how I built a fully operational GenAI cluster using Docker, Kubernetes, and GPU scheduling. It serves powerful language models through inference servers like vLLM, Ollama, or HuggingFace TGI. We’ll make it observable with Prometheus and Grafana, and ready to scale when the real load hits.&lt;/p&gt;

&lt;p&gt;This isn’t just another tutorial. It’s a battle-tested, experience-backed &lt;strong&gt;blueprint for real-world AI infrastructure&lt;/strong&gt;, written for developers and DevOps engineers pushing the boundaries of what GenAI can do.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Why Local/Private LLMs Matter&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Many teams today are realizing that hosted APIs from providers like OpenAI and Anthropic, while convenient, come with serious trade-offs:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Cost grows fast&lt;/strong&gt; when usage scales&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Sensitive data&lt;/strong&gt; can't always be sent to third-party clouds&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Customization&lt;/strong&gt; is limited to what the API provider allows&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Latency&lt;/strong&gt; becomes a bottleneck in low-connectivity environments&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;Self-hosting LLMs means freedom, control, and flexibility. But only if you know how to do it &lt;strong&gt;right&lt;/strong&gt;.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;What We'll Build&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;We’ll deploy a production-grade Kubernetes cluster featuring:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;vLLM / Ollama / TGI&lt;/strong&gt; model server containers&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;GPU scheduling and node affinity&lt;/strong&gt;&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Ingress with HTTPS&lt;/strong&gt; via NGINX&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Autoscaling&lt;/strong&gt; using HPA or KEDA&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Prometheus + Grafana&lt;/strong&gt; for real-time insights&lt;/li&gt;
&lt;li&gt;Declarative infrastructure using Helm or plain YAML&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Architecture Overview&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Figure: High-level architecture of a scalable GenAI Cluster using Docker, Kubernetes, and GPU scheduling.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnip2b5o2rc75447cwwt0.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnip2b5o2rc75447cwwt0.png" alt="Arch Docker" width="800" height="1200"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;This modular, observable cluster gives you full control over your LLM infrastructure, without vendor lock-in.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 1: Dockerizing the Model Server&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Let’s start small: a single Docker container that wraps a model server like vLLM.&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;# Dockerfile.vllm
FROM nvidia/cuda:12.2.0-base-ubuntu20.04

RUN apt update &amp;amp;&amp;amp; apt install -y git python3 python3-pip

RUN pip install vllm torch transformers

WORKDIR /app
COPY start.sh ./
CMD ["bash", "start.sh"]
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;start.sh:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;#!/bin/bash
python3 -m vllm.entrypoints.openai.api_server --model facebook/opt-1.3b --port 8000
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Then, build your container:&lt;/p&gt;

&lt;p&gt;&lt;code&gt;docker build -f Dockerfile.vllm -t vllm-server:v0.1 .&lt;/code&gt;&lt;/p&gt;

&lt;p&gt;You can also use Ollama if you prefer pre-packaged models and a lower barrier to entry. vLLM is recommended for higher throughput and OpenAI-compatible APIs.&lt;/p&gt;

&lt;p&gt;This is your first step toward building a modular, GPU-ready inference system.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 2: Kubernetes Deployment with GPU Scheduling&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;apiVersion: apps/v1
kind: Deployment
metadata:
  name: vllm-deployment
spec:
  replicas: 1
  selector:
    matchLabels:
      app: vllm
  template:
    metadata:
      labels:
        app: vllm
    spec:
      containers:
        - name: vllm
          image: vllm-server:v0.1
          resources:
            limits:
              nvidia.com/gpu: 1
          ports:
            - containerPort: 8000
      nodeSelector:
        kubernetes.io/role: gpu
      tolerations:
        - key: "nvidia.com/gpu"
          operator: "Exists"
          effect: "NoSchedule"
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;And here’s the corresponding &lt;strong&gt;Service&lt;/strong&gt; definition:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;apiVersion: v1
kind: Service
metadata:
  name: vllm-service
spec:
  selector:
    app: vllm
  ports:
    - protocol: TCP
      port: 8000
      targetPort: 8000
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;This exposes your model server inside the cluster.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 3: Ingress and Load Balancing&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Install NGINX Ingress Controller:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm install nginx ingress-nginx/ingress-nginx
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Then configure ingress:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: vllm-ingress
spec:
  rules:
    - host: vllm.local
      http:
        paths:
        - path: /
          pathType: Prefix
          backend:
            service:
              name: vllm-service
              port:
                number: 8000
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Update your DNS or &lt;code&gt;/etc/hosts&lt;/code&gt; to route &lt;code&gt;vllm.local&lt;/code&gt; to your cluster.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 4: Autoscaling with KEDA (Optional)&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;helm repo add kedacore https://kedacore.github.io/charts
helm install keda kedacore/keda
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;With KEDA, you can scale your LLM pods based on GPU utilization, HTTP traffic, or even Kafka topic lag.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 5: Monitoring with Prometheus + Grafana&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Install full-stack observability:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;helm repo add prometheus-community https://prometheus-community.github.io/helm-charts
helm install monitoring prometheus-community/kube-prometheus-stack
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Expose a &lt;code&gt;/metrics&lt;/code&gt; endpoint from your container.&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;from prometheus_client import start_http_server, Summary
import time

REQUEST_TIME = Summary('request_processing_seconds', 'Time spent processing request')

@REQUEST_TIME.time()
def process_request():
    time.sleep(1)

if __name__ == '__main__':
    start_http_server(8001)
    while True:
        process_request()
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;Or use GPU exporters like &lt;code&gt;dcgm-exporter&lt;/code&gt;. Grafana will pull all this into beautiful dashboards.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Step 6: Optional Components&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Vector DB:&lt;/strong&gt; Qdrant, Weaviate, or Chroma&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Auth Gateway:&lt;/strong&gt; Add OAuth2 Proxy or Istio&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;LangServe or FastAPI:&lt;/strong&gt; Wrap your model with an API server or LangChain interface&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Persistent Volumes / Object Store:&lt;/strong&gt; Save fine-tuned models using PVCs or MinIO&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;Final Thoughts&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;This isn’t just code. It’s the story of how I learned to stitch together powerful AI infrastructure from open-source tools and make it reliable enough for real-world teams to trust.&lt;/p&gt;

&lt;p&gt;Docker gave me modularity. Kubernetes gave me orchestration. GPUs gave me the muscle.&lt;/p&gt;

&lt;p&gt;Put together, they gave me something every AI builder wants: freedom.&lt;/p&gt;

&lt;p&gt;If you're tired of vendor lock-in and ready to roll up your sleeves, this cluster is your launchpad.&lt;/p&gt;

&lt;p&gt;This is just the beginning. Start building your GenAI infrastructure today and take control of your AI stack. Share your progress, contribute to the community, and let’s push the boundaries of what’s possible together.&lt;/p&gt;

&lt;p&gt;See you at the edge! 🌍&lt;/p&gt;

</description>
      <category>genai</category>
      <category>docker</category>
      <category>kubernetes</category>
      <category>llm</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Tue, 29 Apr 2025 08:53:51 +0000</pubDate>
      <link>https://dev.to/karanverma/-35e</link>
      <guid>https://dev.to/karanverma/-35e</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/unlocking-the-future-with-docker-genai-gordon-a-game-changer-for-ai-workloads-84l" class="crayons-story__hidden-navigation-link"&gt;Unlocking the Future with Docker GenAI Gordon: A Game-Changer for AI Workloads&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2332434" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/unlocking-the-future-with-docker-genai-gordon-a-game-changer-for-ai-workloads-84l" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;Mar 14 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/unlocking-the-future-with-docker-genai-gordon-a-game-changer-for-ai-workloads-84l" id="article-link-2332434"&gt;
          Unlocking the Future with Docker GenAI Gordon: A Game-Changer for AI Workloads
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/devops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;devops&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/ai"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;ai&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/automation"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;automation&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/unlocking-the-future-with-docker-genai-gordon-a-game-changer-for-ai-workloads-84l" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/unlocking-the-future-with-docker-genai-gordon-a-game-changer-for-ai-workloads-84l#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>devops</category>
      <category>ai</category>
      <category>automation</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Mon, 28 Apr 2025 05:39:14 +0000</pubDate>
      <link>https://dev.to/karanverma/-4kgg</link>
      <guid>https://dev.to/karanverma/-4kgg</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/unlocking-seamless-machine-learning-deployment-with-docker-a-guide-to-essential-cicd-tools-4af2" class="crayons-story__hidden-navigation-link"&gt;Unlocking Seamless Machine Learning Deployment with Docker: A Guide to Essential CI/CD Tools&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2023961" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/unlocking-seamless-machine-learning-deployment-with-docker-a-guide-to-essential-cicd-tools-4af2" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;Oct 3 '24&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/unlocking-seamless-machine-learning-deployment-with-docker-a-guide-to-essential-cicd-tools-4af2" id="article-link-2023961"&gt;
          Unlocking Seamless Machine Learning Deployment with Docker: A Guide to Essential CI/CD Tools
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/machinelearning"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;machinelearning&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/devops"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;devops&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/cicd"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;cicd&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
            &lt;a href="https://dev.to/docker/unlocking-seamless-machine-learning-deployment-with-docker-a-guide-to-essential-cicd-tools-4af2#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              &lt;span class="hidden s:inline"&gt;Add Comment&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>docker</category>
      <category>machinelearning</category>
      <category>devops</category>
      <category>cicd</category>
    </item>
    <item>
      <title>[Boost]</title>
      <dc:creator>Karan Verma</dc:creator>
      <pubDate>Sun, 27 Apr 2025 15:30:07 +0000</pubDate>
      <link>https://dev.to/karanverma/-h7m</link>
      <guid>https://dev.to/karanverma/-h7m</guid>
      <description>&lt;div class="ltag__link--embedded"&gt;
  &lt;div class="crayons-story "&gt;
  &lt;a href="https://dev.to/docker/celebrating-dockers-12th-birthday-how-far-weve-come-whats-next-46fn" class="crayons-story__hidden-navigation-link"&gt;Celebrating Docker’s 12th Birthday: How Far We’ve Come &amp;amp; What’s Next! 🎉🐳&lt;/a&gt;


  &lt;div class="crayons-story__body crayons-story__body-full_post"&gt;
    &lt;div class="crayons-story__top"&gt;
      &lt;div class="crayons-story__meta"&gt;
        &lt;div class="crayons-story__author-pic"&gt;
          &lt;a class="crayons-logo crayons-logo--l" href="/docker"&gt;
            &lt;img alt="Docker logo" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Forganization%2Fprofile_image%2F3459%2F42b5911d-1b27-42a6-988a-a45d81aaaf7a.png" class="crayons-logo__image"&gt;
          &lt;/a&gt;

          &lt;a href="/karanverma" class="crayons-avatar  crayons-avatar--s absolute -right-2 -bottom-2 border-solid border-2 border-base-inverted  "&gt;
            &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" alt="karanverma profile" class="crayons-avatar__image"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
        &lt;div&gt;
          &lt;div&gt;
            &lt;a href="/karanverma" class="crayons-story__secondary fw-medium m:hidden"&gt;
              Karan Verma
            &lt;/a&gt;
            &lt;div class="profile-preview-card relative mb-4 s:mb-0 fw-medium hidden m:inline-block"&gt;
              
                Karan Verma
                
              
              &lt;div id="story-author-preview-content-2358142" class="profile-preview-card__content crayons-dropdown branded-7 p-4 pt-0"&gt;
                &lt;div class="gap-4 grid"&gt;
                  &lt;div class="-mt-4"&gt;
                    &lt;a href="/karanverma" class="flex"&gt;
                      &lt;span class="crayons-avatar crayons-avatar--xl mr-2 shrink-0"&gt;
                        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F22910%2F4747ba67-2c56-4ab9-9608-a3ec5a2d65ad.png" class="crayons-avatar__image" alt=""&gt;
                      &lt;/span&gt;
                      &lt;span class="crayons-link crayons-subtitle-2 mt-5"&gt;Karan Verma&lt;/span&gt;
                    &lt;/a&gt;
                  &lt;/div&gt;
                  &lt;div class="print-hidden"&gt;
                    
                      Follow
                    
                  &lt;/div&gt;
                  &lt;div class="author-preview-metadata-container"&gt;&lt;/div&gt;
                &lt;/div&gt;
              &lt;/div&gt;
            &lt;/div&gt;

            &lt;span&gt;
              &lt;span class="crayons-story__tertiary fw-normal"&gt; for &lt;/span&gt;&lt;a href="/docker" class="crayons-story__secondary fw-medium"&gt;Docker&lt;/a&gt;
            &lt;/span&gt;
          &lt;/div&gt;
          &lt;a href="https://dev.to/docker/celebrating-dockers-12th-birthday-how-far-weve-come-whats-next-46fn" class="crayons-story__tertiary fs-xs"&gt;&lt;time&gt;Mar 26 '25&lt;/time&gt;&lt;span class="time-ago-indicator-initial-placeholder"&gt;&lt;/span&gt;&lt;/a&gt;
        &lt;/div&gt;
      &lt;/div&gt;

    &lt;/div&gt;

    &lt;div class="crayons-story__indention"&gt;
      &lt;h2 class="crayons-story__title crayons-story__title-full_post"&gt;
        &lt;a href="https://dev.to/docker/celebrating-dockers-12th-birthday-how-far-weve-come-whats-next-46fn" id="article-link-2358142"&gt;
          Celebrating Docker’s 12th Birthday: How Far We’ve Come &amp;amp; What’s Next! 🎉🐳
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;div class="crayons-story__tags"&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/dockerbirthday"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;dockerbirthday&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/docker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;docker&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/dockercommunity"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;dockercommunity&lt;/a&gt;
            &lt;a class="crayons-tag  crayons-tag--monochrome " href="/t/happybirthdaydocker"&gt;&lt;span class="crayons-tag__prefix"&gt;#&lt;/span&gt;happybirthdaydocker&lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="crayons-story__bottom"&gt;
        &lt;div class="crayons-story__details"&gt;
          &lt;a href="https://dev.to/docker/celebrating-dockers-12th-birthday-how-far-weve-come-whats-next-46fn" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left"&gt;
            &lt;div class="multiple_reactions_aggregate"&gt;
              &lt;span class="multiple_reactions_icons_container"&gt;
                  &lt;span class="crayons_icon_container"&gt;
                    &lt;img src="https://assets.dev.to/assets/sparkle-heart-5f9bee3767e18deb1bb725290cb151c25234768a0e9a2bd39370c382d02920cf.svg" width="18" height="18"&gt;
                  &lt;/span&gt;
              &lt;/span&gt;
              &lt;span class="aggregate_reactions_counter"&gt;1&lt;span class="hidden s:inline"&gt; reaction&lt;/span&gt;&lt;/span&gt;
            &lt;/div&gt;
          &lt;/a&gt;
            &lt;a href="https://dev.to/docker/celebrating-dockers-12th-birthday-how-far-weve-come-whats-next-46fn#comments" class="crayons-btn crayons-btn--s crayons-btn--ghost crayons-btn--icon-left flex items-center"&gt;
              Comments


              2&lt;span class="hidden s:inline"&gt; comments&lt;/span&gt;
            &lt;/a&gt;
        &lt;/div&gt;
        &lt;div class="crayons-story__save"&gt;
          &lt;small class="crayons-story__tertiary fs-xs mr-2"&gt;
            4 min read
          &lt;/small&gt;
            
              &lt;span class="bm-initial"&gt;
                

              &lt;/span&gt;
              &lt;span class="bm-success"&gt;
                

              &lt;/span&gt;
            
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;

&lt;/div&gt;


</description>
      <category>dockerbirthday</category>
      <category>docker</category>
      <category>dockercommunity</category>
      <category>happybirthdaydocker</category>
    </item>
  </channel>
</rss>
