<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: Konstantinos Passadis | Azure MVP | MCT</title>
    <description>The latest articles on DEV Community by Konstantinos Passadis | Azure MVP | MCT (@passadis).</description>
    <link>https://dev.to/passadis</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F1893017%2F1098ceff-d0bd-4501-9cd8-e94da611a403.png</url>
      <title>DEV Community: Konstantinos Passadis | Azure MVP | MCT</title>
      <link>https://dev.to/passadis</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/passadis"/>
    <language>en</language>
    <item>
      <title>The Rise of vLLM in Modern Cloud Development: Revolutionizing AI Inference</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Sun, 01 Mar 2026 16:38:24 +0000</pubDate>
      <link>https://dev.to/passadis/the-rise-of-vllm-in-modern-cloud-development-revolutionizing-ai-inference-4b9j</link>
      <guid>https://dev.to/passadis/the-rise-of-vllm-in-modern-cloud-development-revolutionizing-ai-inference-4b9j</guid>
      <description>&lt;div class="crayons-card c-embed text-styles text-styles--secondary"&gt;
    &lt;div class="c-embed__content"&gt;
        &lt;div class="c-embed__cover"&gt;
          &lt;a href="https://www.linkedin.com/pulse/rise-vllm-modern-cloud-development-revolutionizing-ai-passadis-gjdsf" class="c-link align-middle" rel="noopener noreferrer"&gt;
            &lt;img alt="" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fmedia.licdn.com%2Fdms%2Fimage%2Fv2%2FD4D12AQHpYZBqK3eBvw%2Farticle-cover_image-shrink_720_1280%2FB4DZyp5iUEGQAM-%2F0%2F1772376967784%3Fe%3D2147483647%26v%3Dbeta%26t%3DmSv6aDFsVzPkLjpClHPEz5z0POJzvWMu24jyHh2W-gk" height="427" class="m-0" width="760"&gt;
          &lt;/a&gt;
        &lt;/div&gt;
      &lt;div class="c-embed__body"&gt;
        &lt;h2 class="fs-xl lh-tight"&gt;
          &lt;a href="https://www.linkedin.com/pulse/rise-vllm-modern-cloud-development-revolutionizing-ai-passadis-gjdsf" rel="noopener noreferrer" class="c-link"&gt;
            The Rise of vLLM in Modern Cloud Development: Revolutionizing AI Inference
          &lt;/a&gt;
        &lt;/h2&gt;
          &lt;p class="truncate-at-3"&gt;
            Introduction to the AI Cloud Bottleneck The emergence of Large Language Models (LLMs) has revolutionized cloud applications, from universal chatbots to automated programming assistants. However, hosting these models in the cloud is notoriously expensive due to the massive computational and memory re
          &lt;/p&gt;
        &lt;div class="color-secondary fs-s flex items-center"&gt;
            &lt;img alt="favicon" class="c-embed__favicon m-0 mr-2 radius-0" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fstatic.licdn.com%2Faero-v1%2Fsc%2Fh%2Fal2o9zrvru7aqj8e1x2rzsrca" width="64" height="64"&gt;
          linkedin.com
        &lt;/div&gt;
      &lt;/div&gt;
    &lt;/div&gt;
&lt;/div&gt;


</description>
      <category>ai</category>
      <category>cloud</category>
      <category>llm</category>
      <category>vllm</category>
    </item>
    <item>
      <title>Make your Applications smarter: Powerful Machine Learning demo</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Thu, 06 Nov 2025 23:32:36 +0000</pubDate>
      <link>https://dev.to/passadis/make-your-applications-smarter-powerful-machine-learning-demo-264i</link>
      <guid>https://dev.to/passadis/make-your-applications-smarter-powerful-machine-learning-demo-264i</guid>
      <description>&lt;div class="crayons-card c-embed text-styles text-styles--secondary"&gt;
    &lt;a href="https://www.cloudblogger.eu/post/powerful-ml-demo" rel="noopener noreferrer"&gt;
      cloudblogger.eu
    &lt;/a&gt;
&lt;/div&gt;


</description>
      <category>webdev</category>
      <category>ai</category>
      <category>azure</category>
      <category>machinelearning</category>
    </item>
    <item>
      <title>Orbit v0.4.5</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Thu, 06 Nov 2025 23:24:02 +0000</pubDate>
      <link>https://dev.to/passadis/orbit-v045-3m5b</link>
      <guid>https://dev.to/passadis/orbit-v045-3m5b</guid>
      <description>&lt;h2&gt;🚀 Orbit v0.4.5 - Next Gen Distributed VCS with Email-Based Namespaces&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fgithub.com%2Fuser-attachments%2Fassets%2F72f10322-3a33-4dd4-a9c2-0f8250d3c361" class="article-body-image-wrapper"&gt;&lt;img alt="orbitvcs00" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fgithub.com%2Fuser-attachments%2Fassets%2F72f10322-3a33-4dd4-a9c2-0f8250d3c361" width="420" height="367"&gt;&lt;/a&gt;&lt;/p&gt;




&lt;p&gt;&lt;strong&gt;Orbit&lt;/strong&gt; is a production-ready distributed version control system built on the revolutionary &lt;strong&gt;Virtual Object Store (VOS)&lt;/strong&gt; architecture with &lt;strong&gt;VNP (VOS Network Protocol)&lt;/strong&gt; for lightning-fast, SHA3-secured transactions. Features &lt;strong&gt;AI-powered natural language queries&lt;/strong&gt;, &lt;strong&gt;email-based namespace security&lt;/strong&gt;, &lt;strong&gt;self-service registration&lt;/strong&gt;, and &lt;strong&gt;GitHub-compatible clone workflows&lt;/strong&gt; with seamless distributed development and auto-repository creation.&lt;/p&gt;

&lt;h2&gt;
  
  
  🎯 Key Features (v0.4.5)
&lt;/h2&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;🤖 AI-Powered Queries&lt;/strong&gt; - Ask questions about your repository in natural language using Azure OpenAI&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;⚡ Revolutionary VOS Architecture&lt;/strong&gt; - Virtual Object Store with 40% faster operations than Git&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🌐 VNP Protocol&lt;/strong&gt; - Custom VOS Network Protocol with SHA3-256 secured transactions&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📧 Email-Based Namespaces&lt;/strong&gt; - &lt;a href="mailto:alice@company.com"&gt;alice@company.com&lt;/a&gt; gets alice/* access (collision-proof)&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔐 Self-Service Registration&lt;/strong&gt; - REST API user management with token authentication
&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🏗️ Auto-Repository Creation&lt;/strong&gt; - Repositories created automatically when accessed&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📥 GitHub-Like Clone Workflow&lt;/strong&gt; - &lt;code&gt;orb clone&lt;/code&gt; → &lt;code&gt;orb checkout&lt;/code&gt; → actual files&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔄 Complete Object Graph Sync&lt;/strong&gt; - Full commits, trees, files, and chunks synchronization&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;☁️ Azure Production Deployment&lt;/strong&gt; - Container Apps with persistent namespace storage&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  You want to test it end to end ? Contact me via passadis.github.io to discuss your use case and help you set up the Server!
&lt;/h2&gt;

&lt;h2&gt;
  
  
  🚀 Quick Start
&lt;/h2&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# 1. Register with email (self-service)&lt;/span&gt;
curl &lt;span class="nt"&gt;-X&lt;/span&gt; POST http://your-server.com:8081/admin/users &lt;span class="se"&gt;\&lt;/span&gt;
  &lt;span class="nt"&gt;-H&lt;/span&gt; &lt;span class="s2"&gt;"Content-Type: application/json"&lt;/span&gt; &lt;span class="se"&gt;\&lt;/span&gt;
  &lt;span class="nt"&gt;-d&lt;/span&gt; &lt;span class="s1"&gt;'{"username": "alice@company.com", "repositories": [], "permissions": {"read": true, "write": true, "admin": false}}'&lt;/span&gt;

&lt;span class="c"&gt;# 2. Set authentication token&lt;/span&gt;
&lt;span class="nb"&gt;export &lt;/span&gt;&lt;span class="nv"&gt;ORBIT_TOKEN&lt;/span&gt;&lt;span class="o"&gt;=&lt;/span&gt;&lt;span class="s2"&gt;"your-token-here"&lt;/span&gt;

&lt;span class="c"&gt;# 3. Clone any repository (auto-created if doesn't exist)&lt;/span&gt;
orb clone &lt;span class="s2"&gt;"orbits://your-server.com:8082/alice/my-project"&lt;/span&gt; my-project

&lt;span class="c"&gt;# 4. Checkout files and start working&lt;/span&gt;
&lt;span class="nb"&gt;cd &lt;/span&gt;my-project
orb checkout

&lt;span class="c"&gt;# 5. Ask AI about your repository! 🤖&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"What files are tracked in this repository?"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"What is the current branch and latest commit?"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"How many files were changed recently?"&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🤖 AI-Powered Queries (NEW!)
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Orbit now features built-in AI assistance powered by Azure OpenAI!&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Ask questions about your repository in natural language:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# Query repository state&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"What is the current branch?"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"How many files are tracked?"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"What's the latest commit message?"&lt;/span&gt;

&lt;span class="c"&gt;# Analyze changes&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"What files were modified recently?"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"Tell me about recent commits"&lt;/span&gt;
orb ai &lt;span class="s2"&gt;"Is the repository clean?"&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;Setup&lt;/strong&gt;: Create &lt;code&gt;.orbit-ai-config.json&lt;/code&gt; or use environment variables:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;code&gt;AZURE_OPENAI_ENDPOINT&lt;/code&gt; - Your Azure OpenAI endpoint&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;AZURE_OPENAI_API_KEY&lt;/code&gt; - Your API key&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;AZURE_OPENAI_DEPLOYMENT&lt;/code&gt; - Model deployment name&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;AZURE_OPENAI_API_VERSION&lt;/code&gt; - API version&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;The AI has full context of your repository including current branch, tracked files, commit history, and working directory status!&lt;/p&gt;

&lt;h2&gt;
  
  
  📧 Email-Based Security
&lt;/h2&gt;

&lt;p&gt;Access repositories based on your email namespace:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;code&gt;alice@company.com&lt;/code&gt; → can access &lt;code&gt;alice/*&lt;/code&gt; repositories&lt;/li&gt;
&lt;li&gt;
&lt;code&gt;bob@startup.io&lt;/code&gt; → can access &lt;code&gt;bob/*&lt;/code&gt; repositories
&lt;/li&gt;
&lt;li&gt;Automatic collision prevention and namespace isolation&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🏗️ Self-Service Repository Management
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;New in v0.4.5&lt;/strong&gt;: Repositories are created automatically when you access them:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# List your namespace repositories (authenticates automatically)&lt;/span&gt;
orb list-repos &lt;span class="s2"&gt;"orbits://your-server.com:8082"&lt;/span&gt;

&lt;span class="c"&gt;# Clone creates repository if it doesn't exist&lt;/span&gt;
orb clone &lt;span class="s2"&gt;"orbits://your-server.com:8082/alice/new-idea"&lt;/span&gt; new-idea

&lt;span class="c"&gt;# Your email determines namespace access:&lt;/span&gt;
&lt;span class="c"&gt;# alice@company.com can access alice/project1, alice/project2, etc.&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  ☁️ Azure Production Ready
&lt;/h2&gt;

&lt;p&gt;Deploy with complete namespace isolation and persistent storage:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# Server runs on port 8082 (VNP protocol)&lt;/span&gt;
&lt;span class="c"&gt;# Admin API runs on port 8081 (user management)&lt;/span&gt;
&lt;span class="c"&gt;# Each namespace gets isolated directory: /alice/, /bob/, etc.&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🔧 Command Reference
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Core Commands
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb init                           &lt;span class="c"&gt;# Initialize new repository&lt;/span&gt;
orb save &lt;span class="nt"&gt;-m&lt;/span&gt; &lt;span class="s2"&gt;"message"&lt;/span&gt;              &lt;span class="c"&gt;# Create commit with complete object graph&lt;/span&gt;
orb check                          &lt;span class="c"&gt;# Check working directory status&lt;/span&gt;
orb &lt;span class="nb"&gt;history&lt;/span&gt;                        &lt;span class="c"&gt;# Show commit history (DAG)&lt;/span&gt;
orb revert                         &lt;span class="c"&gt;# Revert files to their last committed state&lt;/span&gt;
orb fetch                          &lt;span class="c"&gt;# Fetch and convert a Git repository to Orbit format&lt;/span&gt;
orb checkout                       &lt;span class="c"&gt;# Checkout files from commits&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h3&gt;
  
  
  AI Commands &lt;em&gt;(NEW in v0.4.5)&lt;/em&gt; 🤖
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb ai &lt;span class="s2"&gt;"your question"&lt;/span&gt;             &lt;span class="c"&gt;# Ask AI about your repository using natural language&lt;/span&gt;
                                   &lt;span class="c"&gt;# Examples: "What's the current branch?"&lt;/span&gt;
                                   &lt;span class="c"&gt;#          "How many files are tracked?"&lt;/span&gt;
                                   &lt;span class="c"&gt;#          "What's the latest commit?"&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h3&gt;
  
  
  Distributed Commands &lt;em&gt;(v0.4.5)&lt;/em&gt;
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb list-repos &amp;lt;url&amp;gt;               &lt;span class="c"&gt;# List repositories in your namespace&lt;/span&gt;
orb clone &amp;lt;url/namespace/repo&amp;gt; &amp;lt;local-name&amp;gt;  &lt;span class="c"&gt;# Clone (auto-creates if needed)&lt;/span&gt;
orb &lt;span class="nb"&gt;sync&lt;/span&gt; &amp;lt;url&amp;gt;                     &lt;span class="c"&gt;# Synchronize with remote server&lt;/span&gt;
orb register                       &lt;span class="c"&gt;# Register a new user account on an Orbit server&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🏗️ Architecture
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Revolutionary VOS + VNP Architecture
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Virtual Object Store (VOS)&lt;/strong&gt; - 40% faster than Git with content-addressed storage&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;VNP Protocol&lt;/strong&gt; - Custom VOS Network Protocol with SHA3-256 secured transactions&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Post-Quantum Security&lt;/strong&gt; - SHA3-256 hashing for future-proof cryptographic security&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Email-Based Namespaces&lt;/strong&gt; - &lt;a href="mailto:alice@company.com"&gt;alice@company.com&lt;/a&gt; → alice/* access with collision prevention&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Auto-Repository Creation&lt;/strong&gt; - Repositories created on first access with namespace isolation&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;AI Integration&lt;/strong&gt; - Azure OpenAI GPT-4o for natural language repository queries&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  Production Deployment
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Azure Container Apps&lt;/strong&gt; - Dual-port deployment (8082 + 8081)&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Namespace Isolation&lt;/strong&gt; - Each user gets isolated directory&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;REST Admin API&lt;/strong&gt; - Self-service user registration&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;TLS Security&lt;/strong&gt; - End-to-end encrypted communication&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;AI-Powered CLI&lt;/strong&gt; - Built-in intelligent assistant for repository operations&lt;/li&gt;
&lt;/ul&gt;




&lt;h2&gt;
  
  
  📋 Version History
&lt;/h2&gt;

&lt;h3&gt;
  
  
  🚀 v0.4.5 - Email-Based Namespaces &amp;amp; AI-Powered Queries &lt;em&gt;(Current)&lt;/em&gt;
&lt;/h3&gt;

&lt;p&gt;&lt;strong&gt;Released:&lt;/strong&gt; November 2025&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;🤖 AI-Powered Queries&lt;/strong&gt; - Natural language repository queries using Azure OpenAI GPT-4o&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📧 Email-Based Namespace Security&lt;/strong&gt; - &lt;a href="mailto:alice@company.com"&gt;alice@company.com&lt;/a&gt; gets alice/* access automatically&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔐 Self-Service Registration&lt;/strong&gt; - REST API for user management without admin intervention&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🏗️ Auto-Repository Creation&lt;/strong&gt; - Repositories created when accessed (like GitHub)&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📥 Complete Clone Workflow&lt;/strong&gt; - &lt;code&gt;orb clone&lt;/code&gt; → &lt;code&gt;orb checkout&lt;/code&gt; → working files extracted&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔄 Object Graph Integrity&lt;/strong&gt; - Full commits, trees, files, chunks with proper routing&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;☁️ Production Azure Deployment&lt;/strong&gt; - Dual-port server with namespace isolation&lt;/li&gt;
&lt;/ul&gt;




&lt;p&gt;&lt;strong&gt;Orbit v0.4.5&lt;/strong&gt; - &lt;em&gt;AI-powered distributed VCS with email-based security and natural language queries.&lt;/em&gt; 🌟🤖&lt;/p&gt;

&lt;p&gt;&lt;em&gt;Built with ❤️ by K.Passadis in Rust for performance, security, and developer productivity.&lt;/em&gt;&lt;/p&gt;

</description>
      <category>vcs</category>
      <category>quantum</category>
      <category>ai</category>
      <category>tls</category>
    </item>
    <item>
      <title>Orbit v0.3.0 - Next-Generation Version Control System</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Tue, 14 Oct 2025 18:49:03 +0000</pubDate>
      <link>https://dev.to/passadis/orbit-v030-next-generation-version-control-system-1034</link>
      <guid>https://dev.to/passadis/orbit-v030-next-generation-version-control-system-1034</guid>
      <description>&lt;h2&gt;
  
  
  🚀 &lt;a href="https://github.com/passadis/orbit" rel="noopener noreferrer"&gt;Orbit - Next-Generation Version Control System&lt;/a&gt;
&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fgithub.com%2Fuser-attachments%2Fassets%2F72f10322-3a33-4dd4-a9c2-0f8250d3c361" class="article-body-image-wrapper"&gt;&lt;img alt="orbitvcs00" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fgithub.com%2Fuser-attachments%2Fassets%2F72f10322-3a33-4dd4-a9c2-0f8250d3c361" width="420" height="367"&gt;&lt;/a&gt;&lt;/p&gt;




&lt;p&gt;&lt;strong&gt;Orbit&lt;/strong&gt; is a performance-focused, post-quantum secure version control system built on a revolutionary &lt;strong&gt;Virtual Object Store (VOS)&lt;/strong&gt; architecture. Designed for the future of software development, Orbit delivers superior performance while maintaining cryptographic security against quantum computing threats.&lt;/p&gt;

&lt;h2&gt;
  
  
  ⚡ Key Performance Advantages
&lt;/h2&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;40% Faster Status Checks&lt;/strong&gt; - VOS Index optimization with metadata caching&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;1.75x-2.67x Overall Performance&lt;/strong&gt; - Benchmarked against Git with statistical validation&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Lightning-Fast Operations&lt;/strong&gt; - Selective re-hashing and intelligent caching&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Post-Quantum Security&lt;/strong&gt; - SHA3-256 (Keccak) cryptographic hashing throughout&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🛡️ Security &amp;amp; Architecture
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Post-Quantum Cryptography
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;SHA3-256 (Keccak)&lt;/strong&gt; hashing for all objects and commits&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Future-proof&lt;/strong&gt; against quantum computing attacks&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;NIST-approved&lt;/strong&gt; cryptographic standards&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  Virtual Object Store (VOS)
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Content-Defined Chunking&lt;/strong&gt; using FastCDC algorithm&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Global Deduplication&lt;/strong&gt; across repository history&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Efficient Storage&lt;/strong&gt; with intelligent object compression&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Metadata-Based Optimization&lt;/strong&gt; for instant status checks&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🎯 Revolutionary VOS Index
&lt;/h2&gt;

&lt;p&gt;Orbit's &lt;strong&gt;VOS Index&lt;/strong&gt; represents a breakthrough in version control efficiency:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Metadata Caching&lt;/strong&gt; - File attributes cached for instant comparison&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Selective Re-hashing&lt;/strong&gt; - Only modified files are processed&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Timestamp Intelligence&lt;/strong&gt; - Smart file change detection&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Zero-Copy Operations&lt;/strong&gt; - Minimal I/O for status checks&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;em&gt;This novel approach significantly outperforms traditional index mechanisms used by Git and Mercurial.&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  📦 Installation
&lt;/h2&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# Install from source (Rust required)&lt;/span&gt;
git clone https://github.com/your-org/orbit
&lt;span class="nb"&gt;cd &lt;/span&gt;orbit
cargo &lt;span class="nb"&gt;install&lt;/span&gt; &lt;span class="nt"&gt;--path&lt;/span&gt; &lt;span class="nb"&gt;.&lt;/span&gt;

&lt;span class="c"&gt;# Verify installation&lt;/span&gt;
orb &lt;span class="nt"&gt;--version&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🚀 Quick Start
&lt;/h2&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# Initialize a new repository&lt;/span&gt;
orb init

&lt;span class="c"&gt;# Check repository status (40% faster than git status)&lt;/span&gt;
orb status

&lt;span class="c"&gt;# Save changes with a commit&lt;/span&gt;
orb save &lt;span class="nt"&gt;-m&lt;/span&gt; &lt;span class="s2"&gt;"Initial commit with post-quantum security"&lt;/span&gt;

&lt;span class="c"&gt;# View commit history with DAG visualization&lt;/span&gt;
orb &lt;span class="nb"&gt;history&lt;/span&gt;

&lt;span class="c"&gt;# Revert files to last committed state&lt;/span&gt;
orb revert README.md
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🔄 Migrating from Git
&lt;/h2&gt;

&lt;p&gt;Orbit v0.3.0 makes Git migration seamless! Convert any Git repository to Orbit format with full history preservation:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;&lt;span class="c"&gt;# Migrate any Git repository (local or remote)&lt;/span&gt;
orb fetch https://github.com/user/repository.git

&lt;span class="c"&gt;# Specify custom target directory&lt;/span&gt;
orb fetch &lt;span class="nt"&gt;--target&lt;/span&gt; my-project https://github.com/user/repository.git

&lt;span class="c"&gt;# Navigate and use Orbit commands&lt;/span&gt;
&lt;span class="nb"&gt;cd &lt;/span&gt;repository
orb status    &lt;span class="c"&gt;# 40% faster than git status&lt;/span&gt;
orb &lt;span class="nb"&gt;history&lt;/span&gt;   &lt;span class="c"&gt;# View converted commit history&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;strong&gt;What gets preserved:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;✅ &lt;strong&gt;Full commit history&lt;/strong&gt; with SHA3-256 security upgrade&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Author information&lt;/strong&gt; and timestamps
&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Commit messages&lt;/strong&gt; and metadata&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;File contents&lt;/strong&gt; with content-defined chunking&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Directory structure&lt;/strong&gt; exactly as in Git&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;What gets upgraded:&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;🔐 &lt;strong&gt;Post-quantum security&lt;/strong&gt; with SHA3-256 hashing&lt;/li&gt;
&lt;li&gt;⚡ &lt;strong&gt;Performance improvements&lt;/strong&gt; with VOS Index optimization&lt;/li&gt;
&lt;li&gt;📦 &lt;strong&gt;Better deduplication&lt;/strong&gt; with FastCDC chunking&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  📊 Benchmarked Performance
&lt;/h2&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Operation&lt;/th&gt;
&lt;th&gt;Git&lt;/th&gt;
&lt;th&gt;Orbit v0.2&lt;/th&gt;
&lt;th&gt;Improvement&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;Status Check&lt;/td&gt;
&lt;td&gt;110.3ms ± 20.6ms&lt;/td&gt;
&lt;td&gt;63.1ms ± 25.6ms&lt;/td&gt;
&lt;td&gt;&lt;strong&gt;1.75x faster&lt;/strong&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Initial Commit&lt;/td&gt;
&lt;td&gt;1.694s ± 0.028s&lt;/td&gt;
&lt;td&gt;1.057s ± 0.199s&lt;/td&gt;
&lt;td&gt;&lt;strong&gt;1.60x faster&lt;/strong&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Repository Init&lt;/td&gt;
&lt;td&gt;~50ms&lt;/td&gt;
&lt;td&gt;~30ms&lt;/td&gt;
&lt;td&gt;&lt;strong&gt;1.67x faster&lt;/strong&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;&lt;em&gt;Benchmarks performed with hyperfine statistical analysis on realistic codebases&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  🔧 Command Reference
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Core Commands
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb init                    &lt;span class="c"&gt;# Initialize new repository&lt;/span&gt;
orb save &lt;span class="nt"&gt;-m&lt;/span&gt; &lt;span class="s2"&gt;"message"&lt;/span&gt;       &lt;span class="c"&gt;# Create commit with message&lt;/span&gt;
orb status                  &lt;span class="c"&gt;# Check working directory status&lt;/span&gt;
orb &lt;span class="nb"&gt;history&lt;/span&gt;                 &lt;span class="c"&gt;# Show commit history (DAG)&lt;/span&gt;
orb revert &lt;span class="o"&gt;[&lt;/span&gt;files...]       &lt;span class="c"&gt;# Revert files to HEAD state&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h3&gt;
  
  
  Information Commands
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb &lt;span class="nt"&gt;--help&lt;/span&gt;                  &lt;span class="c"&gt;# Comprehensive help system&lt;/span&gt;
orb &lt;span class="nt"&gt;--version&lt;/span&gt;               &lt;span class="c"&gt;# Show version information&lt;/span&gt;
orb &amp;lt;&lt;span class="nb"&gt;command&lt;/span&gt;&lt;span class="o"&gt;&amp;gt;&lt;/span&gt; &lt;span class="nt"&gt;--help&lt;/span&gt;        &lt;span class="c"&gt;# Command-specific help&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h3&gt;
  
  
  Advanced Features &lt;em&gt;(Coming Soon)&lt;/em&gt;
&lt;/h3&gt;



&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight shell"&gt;&lt;code&gt;orb &lt;span class="nb"&gt;sync&lt;/span&gt;                    &lt;span class="c"&gt;# Remote synchronization (v0.3+)&lt;/span&gt;
orb branch                  &lt;span class="c"&gt;# Branch management (v0.3+)&lt;/span&gt;
orb merge                   &lt;span class="c"&gt;# Intelligent merging (v0.3+)&lt;/span&gt;
&lt;span class="c"&gt;# Additional advanced features in development...&lt;/span&gt;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;h2&gt;
  
  
  🏗️ Technical Architecture
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Object Model
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Commits&lt;/strong&gt; - DAG nodes with SHA3-256 integrity&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Trees&lt;/strong&gt; - Directory structures with chunked content&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Blobs&lt;/strong&gt; - File data with content-defined chunking&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Index&lt;/strong&gt; - Metadata cache for performance optimization&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  Storage Engine
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Content Addressing&lt;/strong&gt; - All objects identified by SHA3-256 hash&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Deduplication&lt;/strong&gt; - Identical content stored only once globally&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Compression&lt;/strong&gt; - Efficient storage with modern algorithms&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Integrity&lt;/strong&gt; - Cryptographic verification of all data&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  Performance Optimizations
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;VOS Index Caching&lt;/strong&gt; - Metadata-based change detection&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Selective Processing&lt;/strong&gt; - Only modified files are re-processed&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Parallel Operations&lt;/strong&gt; - Multi-threaded where beneficial&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Zero-Copy I/O&lt;/strong&gt; - Minimal data movement for speed&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🔬 Innovation Highlights
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Novel VOS Index Implementation
&lt;/h3&gt;

&lt;p&gt;Orbit's VOS Index uses advanced metadata caching combined with selective re-hashing to achieve &lt;strong&gt;40% faster status checks&lt;/strong&gt; compared to traditional version control systems. This innovative approach caches file metadata and performs intelligent timestamp-based change detection, eliminating unnecessary hash computations.&lt;/p&gt;

&lt;h3&gt;
  
  
  Integrated Content-Defined Chunking
&lt;/h3&gt;

&lt;p&gt;The seamless integration of &lt;strong&gt;FastCDC&lt;/strong&gt; (Content-Defined Chunking) with &lt;strong&gt;SHA3-256&lt;/strong&gt; post-quantum cryptography within the VOS object model enables:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Global deduplication&lt;/strong&gt; across entire repository history&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Efficient storage&lt;/strong&gt; of large binary files&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Future-proof security&lt;/strong&gt; with quantum-resistant hashing&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Optimal performance&lt;/strong&gt; with intelligent chunking boundaries&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🛣️ Roadmap
&lt;/h2&gt;

&lt;h3&gt;
  
  
  v0.3 - Distributed Operations
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;Remote repository synchronization&lt;/li&gt;
&lt;li&gt;Branch management and merging&lt;/li&gt;
&lt;li&gt;Advanced conflict resolution&lt;/li&gt;
&lt;li&gt;Network protocols for collaboration&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  v0.4 - Enterprise Features
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;Access control and permissions&lt;/li&gt;
&lt;li&gt;Repository analytics and insights&lt;/li&gt;
&lt;li&gt;Advanced merge strategies&lt;/li&gt;
&lt;li&gt;Performance monitoring&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  v1.0 - Production Ready
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;Full Git compatibility layer&lt;/li&gt;
&lt;li&gt;Migration tools and utilities&lt;/li&gt;
&lt;li&gt;Enterprise deployment tools&lt;/li&gt;
&lt;li&gt;Comprehensive documentation&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  📈 Why Choose Orbit?
&lt;/h2&gt;

&lt;h3&gt;
  
  
  For Developers
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Faster Operations&lt;/strong&gt; - Spend less time waiting, more time coding&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Modern Architecture&lt;/strong&gt; - Built with current best practices&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Future-Proof&lt;/strong&gt; - Post-quantum cryptography ready&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Intuitive Commands&lt;/strong&gt; - Clean, discoverable interface&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  For Organizations
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Performance Gains&lt;/strong&gt; - Measurable productivity improvements&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Security Assurance&lt;/strong&gt; - Quantum-resistant cryptography&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Innovation&lt;/strong&gt; - Next-generation version control technology&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Reliability&lt;/strong&gt; - Rust-based implementation with memory safety&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  🤝 Contributing
&lt;/h2&gt;

&lt;p&gt;Orbit is under active development. We welcome contributions in:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Performance optimizations&lt;/li&gt;
&lt;li&gt;Security enhancements&lt;/li&gt;
&lt;li&gt;Feature development&lt;/li&gt;
&lt;li&gt;Documentation improvements&lt;/li&gt;
&lt;li&gt;Testing and validation&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  📄 License
&lt;/h2&gt;

&lt;p&gt;MIT License - See &lt;a href="https://dev.to/LICENSE"&gt;LICENSE&lt;/a&gt; file for details.&lt;/p&gt;

&lt;h2&gt;
  
  
  🔗 Links
&lt;/h2&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;Documentation&lt;/strong&gt;: [Coming Soon]&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Issues&lt;/strong&gt;: &lt;a href="https://github.com/your-org/orbit/issues" rel="noopener noreferrer"&gt;GitHub Issues&lt;/a&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Discussions&lt;/strong&gt;: &lt;a href="https://github.com/your-org/orbit/discussions" rel="noopener noreferrer"&gt;GitHub Discussions&lt;/a&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;Benchmarks&lt;/strong&gt;: Included in repository under &lt;code&gt;/benchmarks&lt;/code&gt;
&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  📋 Version History
&lt;/h2&gt;

&lt;h3&gt;
  
  
  🚀 v0.3.0 - Git Interoperability (Current)
&lt;/h3&gt;

&lt;p&gt;&lt;strong&gt;Released:&lt;/strong&gt; October 2025&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;🔄 Git Migration&lt;/strong&gt;: New &lt;code&gt;orb fetch&lt;/code&gt; command for seamless Git-to-Orbit conversion&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🌐 Repository Import&lt;/strong&gt;: Import any Git repository with full history preservation&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🧹 Smart Cleanup&lt;/strong&gt;: Windows-compatible file handling and cleanup&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;⚡ In-Place Conversion&lt;/strong&gt;: Efficient conversion process without temporary directories&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📊 Migration Stats&lt;/strong&gt;: Real-time progress indicators during conversion&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔒 Preserved Metadata&lt;/strong&gt;: Author information, timestamps, and commit messages maintained&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  🏗️ v0.2.0 - Foundation Release
&lt;/h3&gt;

&lt;p&gt;&lt;strong&gt;Released:&lt;/strong&gt; October 2025&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;🔐 Post-Quantum Security&lt;/strong&gt;: SHA3-256 (Keccak) cryptographic hashing&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;⚡ VOS Index&lt;/strong&gt;: 40% faster status checks with metadata optimization&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📦 FastCDC Chunking&lt;/strong&gt;: Content-defined chunking for deduplication&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🎯 Core Commands&lt;/strong&gt;: &lt;code&gt;init&lt;/code&gt;, &lt;code&gt;save&lt;/code&gt;, &lt;code&gt;status&lt;/code&gt;, &lt;code&gt;history&lt;/code&gt;, &lt;code&gt;revert&lt;/code&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📈 Performance Benchmarks&lt;/strong&gt;: Comprehensive performance testing suite&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🛡️ Data Integrity&lt;/strong&gt;: Tamper-proof commit signatures and verification&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  🌱 v0.1.0 - Initial Concept
&lt;/h3&gt;

&lt;p&gt;&lt;strong&gt;Released:&lt;/strong&gt; Early 2025&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;💡 Proof of Concept&lt;/strong&gt;: Basic version control functionality&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🏛️ DAG Architecture&lt;/strong&gt;: Directed Acyclic Graph for commit relationships&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📁 Virtual Object Store&lt;/strong&gt;: Foundation VOS implementation&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🦀 Rust Implementation&lt;/strong&gt;: Memory-safe systems programming foundation&lt;/li&gt;
&lt;/ul&gt;




&lt;h2&gt;
  
  
  🎯 What's Next?
&lt;/h2&gt;

&lt;h3&gt;
  
  
  🔮 v0.4.0 - Remote Collaboration (Planned)
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;☁️ Remote Repositories&lt;/strong&gt;: Push/pull with remote Orbit repositories&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🤝 Conflict Resolution&lt;/strong&gt;: Advanced merge algorithms&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🔄 Sync Protocol&lt;/strong&gt;: Efficient synchronization between repositories&lt;/li&gt;
&lt;/ul&gt;

&lt;h3&gt;
  
  
  🔮 v0.5.0 - Advanced Features (Planned)
&lt;/h3&gt;

&lt;ul&gt;
&lt;li&gt;
&lt;strong&gt;🌿 Branch Management&lt;/strong&gt;: Full branching and merging support&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;🏷️ Tagging System&lt;/strong&gt;: Release tagging and versioning&lt;/li&gt;
&lt;li&gt;
&lt;strong&gt;📊 Advanced Analytics&lt;/strong&gt;: Repository statistics and insights&lt;/li&gt;
&lt;/ul&gt;




&lt;p&gt;&lt;strong&gt;Orbit v0.3.0&lt;/strong&gt; - &lt;em&gt;Git migration made simple. The future of version control is here.&lt;/em&gt; 🌟&lt;/p&gt;

&lt;p&gt;&lt;em&gt;Built with ❤️ by &lt;a class="mentioned-user" href="https://dev.to/passadis"&gt;@passadis&lt;/a&gt; in Rust for performance, security, and developer productivity.&lt;/em&gt;&lt;/p&gt;

</description>
      <category>vcs</category>
      <category>orb</category>
      <category>rust</category>
      <category>opensource</category>
    </item>
    <item>
      <title>How to build your Custom AI Server with AKS and your custom Clients with Vite and NextJS – Part 1</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Mon, 31 Mar 2025 22:39:30 +0000</pubDate>
      <link>https://dev.to/passadis/how-to-built-your-custom-ai-server-with-aks-and-your-custom-clients-with-vite-and-nextjs-part-1-13gc</link>
      <guid>https://dev.to/passadis/how-to-built-your-custom-ai-server-with-aks-and-your-custom-clients-with-vite-and-nextjs-part-1-13gc</guid>
      <description>&lt;div class="crayons-card c-embed text-styles text-styles--secondary"&gt;
    &lt;a href="https://www.cloudblogger.eu/post/build-your-custom-ai-server" rel="noopener noreferrer"&gt;
      cloudblogger.eu
    &lt;/a&gt;
&lt;/div&gt;


</description>
      <category>ai</category>
      <category>vite</category>
      <category>nextjs</category>
      <category>cloud</category>
    </item>
    <item>
      <title>Azure AI Agent Service The brand new Azure AI Agent Service at your fingertips</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Mon, 31 Mar 2025 22:36:10 +0000</pubDate>
      <link>https://dev.to/passadis/azure-ai-agent-servicethe-brand-new-azure-ai-agent-service-at-your-fingertips-2eln</link>
      <guid>https://dev.to/passadis/azure-ai-agent-servicethe-brand-new-azure-ai-agent-service-at-your-fingertips-2eln</guid>
      <description>&lt;div class="crayons-card c-embed text-styles text-styles--secondary"&gt;
    &lt;div class="c-embed__content"&gt;
      &lt;div class="c-embed__body flex items-center justify-between"&gt;
        &lt;a href="https://www.cloudblogger.eu/post/the-brand-new-azure-ai-agent-service-in-your-fingertips" rel="noopener noreferrer" class="c-link fw-bold flex items-center"&gt;
          &lt;span class="mr-2"&gt;cloudblogger.eu&lt;/span&gt;
          

        &lt;/a&gt;
      &lt;/div&gt;
    &lt;/div&gt;
&lt;/div&gt;


</description>
      <category>azure</category>
      <category>ai</category>
      <category>microsoft</category>
      <category>cloud</category>
    </item>
    <item>
      <title>How to orchestrate your Agents with Azure AI Agent Service and Azure API Management</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Thu, 27 Feb 2025 00:04:37 +0000</pubDate>
      <link>https://dev.to/passadis/how-to-orchestrate-your-agents-with-azure-ai-agent-service-and-azure-api-management-502a</link>
      <guid>https://dev.to/passadis/how-to-orchestrate-your-agents-with-azure-ai-agent-service-and-azure-api-management-502a</guid>
      <description>&lt;div class="crayons-card c-embed text-styles text-styles--secondary"&gt;
      &lt;div class="c-embed__cover"&gt;
        &lt;a href="https://www.cloudblogger.eu/2025/02/15/the-brand-new-azure-ai-agent-service-in-your-fingertips/" class="c-link s:max-w-50 align-middle" rel="noopener noreferrer"&gt;
          &lt;img alt="" src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fwww.cloudblogger.eu%2Fwp-content%2Fuploads%2F2023%2F10%2Flogo1.jpg" height="113" class="m-0" width="401"&gt;
        &lt;/a&gt;
      &lt;/div&gt;
    &lt;div class="c-embed__body"&gt;
      &lt;h2 class="fs-xl lh-tight"&gt;
        &lt;a href="https://www.cloudblogger.eu/2025/02/15/the-brand-new-azure-ai-agent-service-in-your-fingertips/" rel="noopener noreferrer" class="c-link"&gt;
          The brand new Azure AI Agent Service at your fingertips - CloudBlogger@2025
        &lt;/a&gt;
      &lt;/h2&gt;
        &lt;p class="truncate-at-3"&gt;
          Azure AI Agent Service is the newest addition in Azure AI Foundry, making the process of creating Agents easier and fun ! Lets see that in action.
        &lt;/p&gt;
      &lt;div class="color-secondary fs-s flex items-center"&gt;
        cloudblogger.eu
      &lt;/div&gt;
    &lt;/div&gt;
&lt;/div&gt;


</description>
      <category>ai</category>
      <category>azure</category>
      <category>agenticai</category>
    </item>
    <item>
      <title>Azure Index AI &amp; MS Fabric</title>
      <dc:creator>Konstantinos Passadis | Azure MVP | MCT</dc:creator>
      <pubDate>Wed, 18 Sep 2024 19:26:02 +0000</pubDate>
      <link>https://dev.to/passadis/azure-index-ai-ms-fabric-2pcj</link>
      <guid>https://dev.to/passadis/azure-index-ai-ms-fabric-2pcj</guid>
      <description>&lt;h2&gt;
  
  
  How to create an AI Web App with Azure OpenAI, Azure AI Search with Vector Embeddings and Microsoft Fabric Pipelines
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Intro&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Welcome, visitor! Today, we embark on an exciting journey to build an AI Assistant and Recommendations bot with cutting-edge features, helping users decide which Book is best suitable for their preferences. Our bot will handle various interactions, such as, providing customized recommendations, and engaging in chat conversations. Additionally, users can register and log in to this Azure Cloud-native AI application. Microsoft Fabric will handle, automation and AI related tasks such as:&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;    Load and clean the books Dataset with triggered Pipelines and Notebooks&lt;/li&gt;
&lt;li&gt;    Transform the Dataset to JSON and making proper adjustments for Vector usability&lt;/li&gt;
&lt;li&gt;    Load the cleaned and transformed Dataset to Azure AI Search and configuring Vector and Semantic profiles&lt;/li&gt;
&lt;li&gt;    Create and save embeddings with Azure OpenAI to Azure AI Search&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;As you may have already guessed, our foundation lies in Microsoft Fabric, leveraging its powerful Python Notebooks, Pipelines, and Datalake toolsets. We’ll integrate these tools with a custom Identity Database and an AI Assistant. Our mission? To explore the core AI functionalities that set modern applications apart—think embeddings, semantic kernel, and vectors. As we navigate Microsoft Azure’s vast offerings, we’ll build our solution from scratch...&lt;br&gt;
&lt;strong&gt;Prerequisites for Workshop&lt;/strong&gt;&lt;br&gt;
Apart from this guide, everything will be shared through GitHub; nevertheless we need:&lt;/p&gt;

&lt;p&gt;Azure Subscription, access to Azure OpenAI with text-embeddings and chat-gpt deployments, Microsoft Fabric with a Pro license (trial is fine), patience and excitement!&lt;br&gt;
Infrastructure&lt;/p&gt;

&lt;p&gt;I do respect everyone’s time and I am going to point you to the GitHub repo that holds the whole implementation, along with Terraform automation. We will start with the SQL query that is running within Terraform. The query needs the following code:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;CREATE TABLE Users (
    UserId INT IDENTITY(1,1) PRIMARY KEY,
    FirstName NVARCHAR(50) NOT NULL,
    LastName NVARCHAR(50) NOT NULL,
    Username NVARCHAR(50) UNIQUE NOT NULL,
    PasswordHash NVARCHAR(255) NOT NULL,
    Age INT NOT NULL,
    photoUrl NVARCHAR(500) NOT NULL
);

-- Genres table
CREATE TABLE Genres (
    GenreId INT PRIMARY KEY IDENTITY(1,1),
    GenreName NVARCHAR(50)
);

-- UsersGenres join table
CREATE TABLE UsersGenres (
    UserId INT,
    GenreId INT,
    FOREIGN KEY (UserId) REFERENCES Users(UserId),
    FOREIGN KEY (GenreId) REFERENCES Genres(GenreId)
);
ALTER DATABASE usersdb01  
SET CHANGE_TRACKING = ON  
(CHANGE_RETENTION = 2 DAYS, AUTO_CLEANUP = ON)
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;We have enabled Change Tracking in case we want to trigger the Embeddings creation upon each change on the Database.&lt;/p&gt;

&lt;p&gt;You can see we are using a JOIN statement to handle users and genres since the various genres selected by the users will help the assistant to make recommendations. We are also enabling Change Tracking so we can trigger updates for the Vector once a change is made. Keep in mind you need the sqlcmd installed on your Workstation !&lt;/p&gt;

&lt;p&gt;Also you must add the OpenAI and AI Search Endpoint &amp;amp; API KEYs to Azure Key Vault with the names declared in the backend. You have to assign yourself the Key Vault Administrator role for that.&lt;br&gt;
Vector, Embeddings &amp;amp; Fabric Pipelines&lt;/p&gt;

&lt;p&gt;Yes you read it well! We are going to get a Books Dataset from Kaggle, clean it , transform it and upload it to AI Search, where we will create an index for the books. We will also create and store the embeddings using Vector Profile from AI Search. In a similar manner we will get the Users from SQL and upload them to AI Search users index, create the embeddings and save them as well. The real exciting stuff is that we will use Microsoft Fabric Pipelines and Notebooks for the books index and embeddings ! So it is important to have a Fabric Pro Trial License with the minimum capacity enabled.&lt;br&gt;
Books Dataset&lt;/p&gt;

&lt;p&gt;The ultimate purpose here is to achieve automation for the creation of embeddings for both Books and Users datasets, so on the Web App we can get recommendations based on preferences but also on actual queries we set to the AI Assistant. We will get a main books dataset as Delimited Text (CSV) and transform it to JSON with correct format so it can be uploaded to Azure AI Search index, utilizing the native AI Search vector profiles and Azure OpenAI for the embeddings. The Fabric Pipelines will be triggered on schedule and we will explore other possible ways.&lt;/p&gt;

&lt;p&gt;In Microsoft Fabric, Notebooks are an important tool as in most modern Data Platforms. The managed Spark Clusters allows us to create and execute powerful scripts in the form of Python Notebooks (PySpark), add them in a Pipeline and build solid Projects and Solutions. Microsoft Fabric provides the ability to pre install libraries and configure our Spark Compute within Environments, so our code will have all requirements in this managed environment. In our case we will install all required libraries and also pin the OpenAI version to pre 1.0.0 for this project. But let’s take it from the start. We need to access app.fabric.microsoft.com and create a new Workspace with a Trial Pro License. It should look like this and also has the diamond icon:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fjyd59hzu97nbbi7u3qwd.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fjyd59hzu97nbbi7u3qwd.PNG" alt="Image description" width="750" height="330"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Once we have our Workspace in place we can select it and from the left menu select New and create the Environment and later a Lakehouse.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fr6c5yja7rghto63e33yk.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fr6c5yja7rghto63e33yk.PNG" alt="Image description" width="398" height="466"&gt;&lt;/a&gt;&lt;br&gt;
The Environment settings that worked for me are the following, you can see that we just install Public Libraries:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F610hu04orgt89iuqo8ig.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F610hu04orgt89iuqo8ig.PNG" alt="Image description" width="800" height="354"&gt;&lt;/a&gt;&lt;br&gt;
Since all the code will be available on GitHub i prefer to explore the next task, create the Pipeline, which will contain the Notebooks. Select your Workspace icon on the left vertical menu, find the NEW+ drop-down menu and More Options until you find the Data Pipeline. You will be presented with the familiar Synapse\Data Factory dashboard (quite similar) where we can start inserting our activities. You have to create all Notebooks before hands just to keep everything in order. So based on the GitHub we will have 5 Notebooks ready. The Fabric API does not support yet firing pipelines, it will happen eventually, so can either schedule or work with Event Stream. The Reflex supports same Directory Azure Connections only ( We will have a look another time), but our Subscription is on another Tenant so yeah! Schedule it is !&lt;/p&gt;

&lt;p&gt;The Pipeline has the following activities:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fvj02tc93eutjf5d91n63.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fvj02tc93eutjf5d91n63.PNG" alt="Image description" width="800" height="218"&gt;&lt;/a&gt;&lt;br&gt;
Let’s shed some light !&lt;/p&gt;

&lt;p&gt;Once we have a Books Dataset (search Kaggle) we upload it to Azure Blob Storage. Then all you have to do is create a Copy Task from Fabric to get the file from Blob Storage into the Lakehouse. Pretty simple and straightforward.&lt;/p&gt;

&lt;p&gt;We assume that the Dataset is stored in Blob Storage Account so we get that CSV into the Lakehouse. First Notebook is cleaning the data with Python, remove nulls, remove non-English characters and so on. Since the activity stores it as part of a Folder-like structure with non-direct access we need a task to save it on our Lakehouse. We then transform to JSON, make the JSON a correct array set of records, again save it to Lakehouse and the last 2 Notebooks are creating the AI Search Index, uploading the JSON to AI Search, configure the AI Search with vector and semantic profiles and get all records to create embeddings from Azure OpenAI and store those back to AI Search. Due to the great number of Documents we apply rate-limit evasion (back-off) and you can be sure this will take almost 30 minutes to conclude for around 9500 records.&lt;br&gt;
Users Dataset&lt;/p&gt;

&lt;p&gt;Most of the workflow is similar for the users index and embeddings. The difference is that our users are stored and updated with new ones, in an Azure SQL Database. Since we utilize pipelines, Microsoft Fabric natively connects to Azure SQL and in fact our activity is a Copy Task but we have a query to bring SQL data.&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;    SELECT u.UserId, u.Age, STRING_AGG(g.GenreName, ',') AS Genres
    FROM Users u
    JOIN UsersGenres ug ON u.UserId = ug.UserId
    JOIN Genres g ON ug.GenreId = g.GenreId
    GROUP BY u.UserId, u.Age
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;em&gt;This SQL query is selecting data from three related tables: Users, UsersGenres, and Genres. Specifically, it’s returning a list of users (based on their UserId and Age) along with a comma-separated list of all the genres associated with each user. The STRING_AGG function is used to concatenate the GenreName into a single string, separated by commas. The JOIN operations are used to link the tables together based on common fields – in this case, the UserId in the Users and UsersGenres tables, and the GenreId in the UsersGenres and Genres tables. The GROUP BY clause is grouping the results by both UserId and Age, meaning that each row in the output will represent a unique combination of these two fields.&lt;/em&gt;&lt;br&gt;
So it is a simpler process after all, and due to the small number of users (I can only subscribe up to 5-6 imaginary accounts!), it is a quicker process.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F9v83ao0axhosf567hk1k.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F9v83ao0axhosf567hk1k.PNG" alt="Image description" width="790" height="363"&gt;&lt;/a&gt;&lt;br&gt;
So what have we done so far ? Well let’s break it down, shall we ?&lt;br&gt;
&lt;strong&gt;Process&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Created the main Infrastructure using Terraform – available on GitHub&lt;br&gt;
The Infra provides a Web UI where we register as users and select favorite book Genres, and can login into a Dashboard that we have access to an AI Assistant. The database used to store User’s info is Azure SQL. The Infrastructure consists also of Azure Key Vault, Azure Container Registry, Azure AI Search and Azure Web Apps. A separate Azure OpenAI is already in place.&lt;br&gt;
    The backend creates a Join Table to store UserId with Genres so later it will be easier to create personalized recommendations&lt;br&gt;
    We got a Books dataset with [id, Author, Title, Genres, Rating] fields and upload it to Azure Blob Storage&lt;br&gt;
    We activated Trial (or just have available) license for Microsoft Fabric capacity&lt;br&gt;
    We created Jupyter Notebooks to clean the source books dataset, transform it and store it as JSON&lt;br&gt;
    We created a Fabric Pipeline integrating these Notebooks and new ones that create a books-index in Azure AI Search, configure it with Vector and Semantic Profiles and uploaded all JSON records in it&lt;br&gt;
    The Pipeline continues with additional Notebooks that create embeddings with Azure OpenAI and store these embeddings back in Azure AI Search.&lt;br&gt;
    A new Pipeline has been deployed, that gets the Users data with a query that combines the Genres information with Users from the Azure SQL Database resource and stores it as JSON&lt;br&gt;
    The users Pipeline creates and configures a new users-index in Azure AI Search, configures Vector and Semantic profiles and creates embeddings, for all data, with Azure OpenAI and stores the embeddings back to the index.&lt;/p&gt;

&lt;p&gt;Now we are left with the Backend details and maybe some minor changes for the Frontend. As you will see the GitHub repo contains all required files to create a Docker Image, push it to Container Registry and create a Web App in Azure Web Apps. Use: [ docker build -t backend . ] and tag and push: &lt;em&gt;&lt;u&gt;[ docker tag backend {acrname}.azurecr.io/backend:v1 ] , [ docker push {acrname}.azurecr.io/backend:v1 ]&lt;/u&gt;&lt;/em&gt;. We will be able to see our new Repo on Azure Container Registry and deploy our new Web App :&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Femjvzmww1cg0sgdrpvdl.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Femjvzmww1cg0sgdrpvdl.jpg" alt="Image description" width="800" height="343"&gt;&lt;/a&gt;&lt;br&gt;
Don’t forget to add * in CORS settings for the backend Web App!&lt;/p&gt;

&lt;p&gt;The overall Architecture is like this:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F3xmt7vvnfbiebi0szdnb.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F3xmt7vvnfbiebi0szdnb.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;br&gt;
The only variable needed for the Backend Web App is the KeyVault name and the User Assigned Managed Identity ID. All access to other services (SQL, Storage Account, Ai Search, Azure OpenAI) is going through Key Vault Secrets.&lt;/p&gt;

&lt;p&gt;Let’s have a quick look on our Backend:&lt;br&gt;
&lt;/p&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import dotenv from 'dotenv';
import express from 'express';
import sql from 'mssql';
import bcrypt from 'bcrypt';
import jwt from 'jsonwebtoken';
import multer from 'multer';
import azureStorage from 'azure-storage';
import getStream from 'into-stream';
import cors from 'cors';
import { SecretClient } from "@azure/keyvault-secrets";
import { DefaultAzureCredential } from "@azure/identity";
import { OpenAIClient, AzureKeyCredential } from '@azure/openai';
import { SearchClient } from '@azure/search-documents';
import bodyParser from 'body-parser';

dotenv.config();

const app = express();
app.use(cors({ origin: '*' }));
app.use((req, res, next) =&amp;gt; {
    res.setHeader('X-Content-Type-Options', 'nosniff');
    next();
});

app.use(express.json());
// set up rate limiter: maximum of five requests per minute
var RateLimit = require('express-rate-limit');
var limiter = RateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100, // max 100 requests per windowMs
});

// apply rate limiter to all requests
app.use(limiter);

app.get('/:path', function(req, res) {
  let path = req.params.path;
  if (isValidPath(path))
    res.sendFile(path);
});
const vaultName = process.env.AZURE_KEY_VAULT_NAME;
const vaultUrl = `https://${vaultName}.vault.azure.net`;
const credential = new DefaultAzureCredential({
    managedIdentityClientId: process.env.MANAGED_IDENTITY_CLIENT_ID, // Use environment variable for managed identity client ID
});
const secretClient = new SecretClient(vaultUrl, credential);

async function getSecret(secretName) {
    const secret = await secretClient.getSecret(secretName);
    return secret.value;
}

const inMemoryStorage = multer.memoryStorage();
const uploadStrategy = multer({ storage: inMemoryStorage }).single('photo');

let sqlConfig;
let storageAccountName;
let azureStorageConnectionString;
let jwtSecret;
let searchEndpoint;
let searchApiKey;
let openaiEndpoint;
let openaiApiKey;

async function initializeApp() {
    sqlConfig = {
        user: await getSecret("sql-admin-username"),
        password: await getSecret("sql-admin-password"),
        database: await getSecret("sql-database-name"),
        server: await getSecret("sql-server-name"),
        options: {
            encrypt: true,
            trustServerCertificate: false
        }
    };

    storageAccountName = await getSecret("storage-account-name");
    azureStorageConnectionString = await getSecret("storage-account-connection-string");
    jwtSecret = await getSecret("jwt-secret");
    searchEndpoint = await getSecret("search-endpoint");
    searchApiKey = await getSecret("search-apikey");
    openaiEndpoint = await getSecret("openai-endpoint");
    openaiApiKey = await getSecret("openai-apikey");

    //console.log("SQL Config:", sqlConfig);
    // console.log("Storage Account Name:", storageAccountName);
    // console.log("Azure Storage Connection String:", azureStorageConnectionString);
    // console.log("JWT Secret:", jwtSecret);
    // console.log("Search Endpoint:", searchEndpoint);
    // console.log("Search API Key:", searchApiKey);
    // console.log("OpenAI Endpoint:", openaiEndpoint);
    // console.log("OpenAI API Key:", openaiApiKey);

    // Initialize OpenAI and Azure Search clients
    const openaiClient = new OpenAIClient(openaiEndpoint, new AzureKeyCredential(openaiApiKey));
    const userSearchClient = new SearchClient(searchEndpoint, 'users-index', new AzureKeyCredential(searchApiKey));
    const bookSearchClient = new SearchClient(searchEndpoint, 'books-index', new AzureKeyCredential(searchApiKey));

    // Start server
    const PORT = process.env.PORT || 3001;
    app.listen(PORT, () =&amp;gt; {
        console.log(`Server is running on port ${PORT}`);
    }).on('error', error =&amp;gt; {
        console.error("Error initializing application:", error);
    });
}
initializeApp().catch(error =&amp;gt; {
    console.error("Error initializing application:", error);
});

// Upload photo endpoint
app.post('/uploadphoto', uploadStrategy, (req, res) =&amp;gt; {
    if (!req.file) {
        return res.status(400).send('No file uploaded.');
    }

    const blobName = `userphotos/${Date.now()}_${req.file.originalname}`;
    const stream = getStream(req.file.buffer);
    const streamLength = req.file.buffer.length;
    const blobService = azureStorage.createBlobService(azureStorageConnectionString);

    blobService.createBlockBlobFromStream('pics', blobName, stream, streamLength, err =&amp;gt; {
        if (err) {
            console.error(err);
            res.status(500).send('Error uploading the file');
        } else {
            const photoUrl = `https://${storageAccountName}.blob.core.windows.net/pics/${blobName}`;
            res.status(200).send({ photoUrl });
        }
    });
});

// Register endpoint
app.post('/register', uploadStrategy, async (req, res) =&amp;gt; {
    const { firstName, lastName, username, password, age, emailAddress, genres } = req.body;
    if (!password) {
        return res.status(400).send({ message: 'Password is required' });
    }

    let photoUrl = '';
    if (req.file) {
        const blobName = `userphotos/${Date.now()}_${req.file.originalname}`;
        const stream = getStream(req.file.buffer);
        const streamLength = req.file.buffer.length;
        const blobService = azureStorage.createBlobService(azureStorageConnectionString);

        await new Promise((resolve, reject) =&amp;gt; {
            blobService.createBlockBlobFromStream('pics', blobName, stream, streamLength, err =&amp;gt; {
                if (err) {
                    console.error(err);
                    reject(err);
                } else {
                    photoUrl = `https://${storageAccountName}.blob.core.windows.net/pics/${blobName}`;
                    resolve();
                }
            });
        });
    }

    const hashedPassword = await bcrypt.hash(password, 10);

    try {
        let pool = await sql.connect(sqlConfig);
        let result = await pool.request()
            .input('username', sql.NVarChar, username)
            .input('password', sql.NVarChar, hashedPassword)
            .input('firstname', sql.NVarChar, firstName)
            .input('lastname', sql.NVarChar, lastName)
            .input('age', sql.Int, age)
            .input('emailAddress', sql.NVarChar, emailAddress)
            .input('photoUrl', sql.NVarChar, photoUrl)
            .query(`
                INSERT INTO Users 
                (Username, PasswordHash, FirstName, LastName, Age, EmailAddress, PhotoUrl) 
                VALUES 
                (@username, @password, @firstname, @lastname, @age, @emailAddress, @photoUrl);
                SELECT SCOPE_IDENTITY() AS UserId;
            `);

        const userId = result.recordset[0].UserId;

        if (genres &amp;amp;&amp;amp; genres.length &amp;gt; 0) {
            const genreNames = genres.split(','); // Assuming genres are sent as a comma-separated string
            for (const genreName of genreNames) {
                let genreResult = await pool.request()
                    .input('genreName', sql.NVarChar, genreName.trim())
                    .query(`
                        IF NOT EXISTS (SELECT 1 FROM Genres WHERE GenreName = @genreName)
                        BEGIN
                            INSERT INTO Genres (GenreName) VALUES (@genreName);
                        END
                        SELECT GenreId FROM Genres WHERE GenreName = @genreName;
                    `);

                const genreId = genreResult.recordset[0].GenreId;

                await pool.request()
                    .input('userId', sql.Int, userId)
                    .input('genreId', sql.Int, genreId)
                    .query('INSERT INTO UsersGenres (UserId, GenreId) VALUES (@userId, @genreId)');
            }
        }

        res.status(201).send({ message: 'User registered successfully' });
    } catch (error) {
        console.error(error);
        res.status(500).send({ message: 'Error registering user' });
    }
});

// Login endpoint
app.post('/login', async (req, res) =&amp;gt; {
    try {
        let pool = await sql.connect(sqlConfig);
        let result = await pool.request()
            .input('username', sql.NVarChar, req.body.username)
            .query('SELECT UserId, PasswordHash FROM Users WHERE Username = @username');

        if (result.recordset.length === 0) {
            return res.status(401).send({ message: 'Invalid username or password' });
        }

        const user = result.recordset[0];
        const validPassword = await bcrypt.compare(req.body.password, user.PasswordHash);

        if (!validPassword) {
            return res.status(401).send({ message: 'Invalid username or password' });
        }

        const token = jwt.sign({ UserId: user.UserId }, jwtSecret, { expiresIn: '1h' });
        res.send({ token: token, UserId: user.UserId });
    } catch (error) {
        console.error(error);
        res.status(500).send({ message: 'Error logging in' });
    }
});

// Get user data endpoint
app.get('/user/:UserId', async (req, res) =&amp;gt; {
    try {
        let pool = await sql.connect(sqlConfig);
        let result = await pool.request()
            .input('UserId', sql.Int, req.params.UserId)
            .query('SELECT Username, FirstName, LastName, Age, EmailAddress, PhotoUrl FROM Users WHERE UserId = @UserId');

        if (result.recordset.length === 0) {
            return res.status(404).send({ message: 'User not found' });
        }

        const user = result.recordset[0];
        res.send(user);
    } catch (error) {
        console.error(error);
        res.status(500).send({ message: 'Error fetching user data' });
    }
});

// AI Assistant endpoint for book questions and recommendations
app.post('/ai-assistant', async (req, res) =&amp;gt; {
    const { query, userId } = req.body;

    console.log('Received request body:', req.body);
    console.log('Extracted userId:', userId);

    try {
        if (!userId) {
            console.error('User ID is missing from the request.');
            return res.status(400).send({ message: 'User ID is required.' });
        }

        //console.log(`Received request for user ID: ${userId}`);

        // Retrieve user data
        let pool = await sql.connect(sqlConfig);
        let userResult = await pool.request()
            .input('UserId', sql.Int, userId)
            .query('SELECT * FROM Users WHERE UserId = @UserId');

        const user = userResult.recordset[0];

        if (!user) {
            console.error(`User with ID ${userId} not found.`);
            return res.status(404).send({ message: `User with ID ${userId} not found.` });
        }

        console.log(`User data: ${JSON.stringify(user)}`);

        if (query.toLowerCase().includes("recommendation")) {
            // Fetch user genres
            const userGenresResult = await pool.request()
                .input('UserId', sql.Int, userId)
                .query('SELECT GenreName FROM Genres g JOIN UsersGenres ug ON g.GenreId = ug.GenreId WHERE ug.UserId = @UserId');

            const userGenres = userGenresResult.recordset.map(record =&amp;gt; record.GenreName).join(' ');

            //console.log(`User genres: ${userGenres}`);

            // Fetch user embedding from search index
            const userSearchClient = new SearchClient(searchEndpoint, 'users-index', new AzureKeyCredential(searchApiKey));
            const userEmbeddingResult = await userSearchClient.getDocument(String(user.UserId));
            const userEmbedding = userEmbeddingResult.Embedding;

            //console.log(`User embedding result: ${JSON.stringify(userEmbeddingResult)}`);
            //console.log(`User embedding: ${userEmbedding}`);

            if (!userEmbedding || userEmbedding.length === 0) {
                console.error('User embedding not found.');
                return res.status(500).send({ message: 'User embedding not found.' });
            }

            // Search for recommendations
            const bookSearchClient = new SearchClient(searchEndpoint, 'books-index', new AzureKeyCredential(searchApiKey));
            const searchResponse = await bookSearchClient.search("*", {
                vectors: [{
                    value: userEmbedding,
                    fields: ["Embedding"],
                    kNearestNeighborsCount: 5
                }],
                includeTotalCount: true,
                select: ["Title", "Author"]
            });

            const recommendations = [];
            for await (const result of searchResponse.results) {
                recommendations.push({
                    title: result.document.Title,
                    author: result.document.Author,
                    score: result.score
                });
            }

            // Limit recommendations to top 5
            const topRecommendations = recommendations.slice(0, 5);

            return res.json({ response: "Here are some personalized recommendations for you:", recommendations: topRecommendations });
        } else {
            // General book query
            const openaiClient = new OpenAIClient(openaiEndpoint, new AzureKeyCredential(openaiApiKey));
            const deploymentId = "gpt";  // Replace with your deployment ID

            // Extract rating and genre from query
            const ratingMatch = query.match(/rating over (\d+(\.\d+)?)/);
            const genreMatch = query.match(/genre (\w+)/i);
            const rating = ratingMatch ? parseFloat(ratingMatch[1]) : null;
            const genre = genreMatch ? genreMatch[1] : null;

            if (rating &amp;amp;&amp;amp; genre) {
                // Search for books with the specified genre and rating
                const bookSearchClient = new SearchClient(searchEndpoint, 'books-index', new AzureKeyCredential(searchApiKey));
                const searchResponse = await bookSearchClient.search("*", {
                    filter: `Rating gt ${rating} and Genres/any(g: g eq '${genre}')`,
                    top: 5,
                    select: ["Title", "Author", "Rating"]
                });

                const books = [];
                for await (const result of searchResponse.results) {
                    books.push({
                        title: result.document.Title,
                        author: result.document.Author,
                        rating: result.document.Rating
                    });
                }

                const bookResponse = books.map(book =&amp;gt; `${book.title} by ${book.author} with rating ${book.rating}`).join('\n');
                return res.json({ response: `Here are 5 books with rating over ${rating} in ${genre} genre:\n${bookResponse}` });
            } else {
                // Handle general queries about books using OpenAI with streaming chat completions
                const events = await openaiClient.streamChatCompletions(
                    deploymentId,
                    [
                        { role: "system", content: "You are a helpful assistant that answers questions about books and provides personalized recommendations." },
                        { role: "user", content: query }
                    ],
                    { maxTokens: 350 }
                );

                let aiResponse = "";
                for await (const event of events) {
                    for (const choice of event.choices) {
                        aiResponse += choice.delta?.content || '';
                    }
                }

                return res.json({ response: aiResponse });
            }
        }
    } catch (error) {
        console.error('Error processing AI Assistant request:', error);
        return res.status(500).send({ message: 'Error processing your request.' });
    }
});
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;em&gt;As you can see, apart from the registration and login endpoints we have the ai-assistant endpoint. Users are able not only to get personalized recommendations when the word “recommendations” is in the chat, but also information on Genres and ratings, again when these words are in the chat request. They can also chat regularly with the Assistant about books and literature!&lt;/em&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fip5cry5xhwqapgn8bi6v.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fip5cry5xhwqapgn8bi6v.PNG" alt="Image description" width="800" height="305"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqmwd61nerv40zml2ykf4.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqmwd61nerv40zml2ykf4.PNG" alt="Image description" width="800" height="127"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F41hd5o7jwfa70jg9vwzi.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F41hd5o7jwfa70jg9vwzi.PNG" alt="Image description" width="800" height="172"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F830wjcdbt9eyf1a6b21o.PNG" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F830wjcdbt9eyf1a6b21o.PNG" alt="Image description" width="800" height="254"&gt;&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;Conclusion&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;We just built our own Web AI Assistant with an enhanced recommendation engine, utilizing a number of Azure and Microsoft services. It is important to prepare well ahead for such a project, load yourself with patience, and be prepared to make mistakes and learn! It took me 15 Docker images for the backend to reach basic functionality! But hey, I did it for everyone, so you can just grab it and enjoy it — even make it better! Thank you for staying up to this point!&lt;br&gt;
Code and Notebooks : &lt;a href="https://github.com/passadis/ai-assistant" rel="noopener noreferrer"&gt;GitHub&lt;/a&gt;&lt;br&gt;
References&lt;/p&gt;

&lt;p&gt;&lt;a href="http://Azure%20AI%20Search%20client%20library%20for%20JavaScript" rel="noopener noreferrer"&gt;    Azure SDK for JavaScript&lt;/a&gt;&lt;br&gt;
&lt;a href="https://learn.microsoft.com/en-us/azure/search/" rel="noopener noreferrer"&gt;    Azure AI Search&lt;/a&gt;&lt;br&gt;
&lt;a href="https://learn.microsoft.com/en-us/azure/search/vector-search-how-to-create-index?tabs=config-2023-11-01%2Crest-2023-11-01%2Cpush%2Cportal-check-index" rel="noopener noreferrer"&gt;    Create a Vector Index&lt;/a&gt;&lt;br&gt;
&lt;a href="https://learn.microsoft.com/en-us/azure/search/vector-search-how-to-generate-embeddings" rel="noopener noreferrer"&gt;    Generate Embeddings&lt;/a&gt;&lt;br&gt;
&lt;a href="https://learn.microsoft.com/en-us/fabric/cicd/deployment-pipelines/intro-to-deployment-pipelines" rel="noopener noreferrer"&gt;    Fabric: Introduction to deployment pipelines&lt;/a&gt;&lt;br&gt;
&lt;a href="https://learn.microsoft.com/en-us/fabric/data-engineering/author-execute-notebook" rel="noopener noreferrer"&gt;    Develop, execute, and manage Microsoft Fabric notebooks&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;&lt;a href="https://www.cloudblogger.eu/2024/07/23/azure-ai-search-nativity-in-microsoft-fabric/" rel="noopener noreferrer"&gt;This blog post is initially published on CloudBlogger &lt;/a&gt;&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fctermiv5xdh7qyde2zn5.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media.dev.to/cdn-cgi/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fctermiv5xdh7qyde2zn5.jpg" alt="Image description" width="401" height="113"&gt;&lt;/a&gt;&lt;/p&gt;

</description>
      <category>ai</category>
      <category>azure</category>
      <category>vectordatabase</category>
    </item>
  </channel>
</rss>
