<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: Aswin Vijayakumar</title>
    <description>The latest articles on DEV Community by Aswin Vijayakumar (@aswinvk28).</description>
    <link>https://dev.to/aswinvk28</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F784019%2Fdffb6ab7-2b5c-4225-958e-77ebaa16a92c.jpeg</url>
      <title>DEV Community: Aswin Vijayakumar</title>
      <link>https://dev.to/aswinvk28</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/aswinvk28"/>
    <language>en</language>
    <item>
      <title>EquipAny with KlinterAI - Creative Art Published through the Hackathon</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Sun, 30 Apr 2023 22:58:16 +0000</pubDate>
      <link>https://dev.to/aswinvk28/equipany-with-klinterai-creative-art-published-through-the-hackathon-2pg2</link>
      <guid>https://dev.to/aswinvk28/equipany-with-klinterai-creative-art-published-through-the-hackathon-2pg2</guid>
      <description>&lt;h1&gt;&lt;u&gt;&lt;strong&gt;Observability By Parts&lt;/strong&gt;&lt;/u&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Finivvfig2u1onvmfmuju.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Finivvfig2u1onvmfmuju.png" alt="Image description" width="800" height="262"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fp3sgfqvt2fgq7u3pgp84.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fp3sgfqvt2fgq7u3pgp84.png" alt="Image description" width="800" height="960"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;h1&gt;&lt;u&gt;&lt;strong&gt;Object Detection&lt;/strong&gt;&lt;/u&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpkurlw4oo4vmymaubxnp.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpkurlw4oo4vmymaubxnp.png" alt="Image description" width="400" height="227"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Funyclhykv5915abszcf2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Funyclhykv5915abszcf2.png" alt="Image description" width="399" height="226"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fg4icio5aet90d7pxs0j7.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fg4icio5aet90d7pxs0j7.png" alt="Image description" width="400" height="376"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;&lt;u&gt;&lt;strong&gt;Traceability&lt;/strong&gt;&lt;/u&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fax4eyh7f4fbecc0kj6pd.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fax4eyh7f4fbecc0kj6pd.png" alt="Image description" width="800" height="541"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;&lt;u&gt;&lt;strong&gt;Risk Assessment&lt;/strong&gt;&lt;/u&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ffcdc9jw9xpt79hll4apk.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ffcdc9jw9xpt79hll4apk.png" alt="Image description" width="800" height="514"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmaebnpvn3fkgrrm2evv4.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmaebnpvn3fkgrrm2evv4.png" alt="Image description" width="800" height="502"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;&lt;u&gt;&lt;strong&gt;Activity Recognition&lt;/strong&gt;&lt;/u&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ffgjh0lif9yhng8tidwgz.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ffgjh0lif9yhng8tidwgz.png" alt="Image description" width="800" height="533"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;&lt;strong&gt;&lt;u&gt;EquipAny in Operation to Identify Risks&lt;/u&gt;&lt;/strong&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fsiu2g40io9hkj16off3p.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fsiu2g40io9hkj16off3p.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Flbnkbzi4k74jevo9by7h.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Flbnkbzi4k74jevo9by7h.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fsppnka1dfmbfxwzjyfca.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fsppnka1dfmbfxwzjyfca.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fdltutn4ughrmc0vdz515.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fdltutn4ughrmc0vdz515.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F03d4b3mx9h3x5ny2zlxg.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F03d4b3mx9h3x5ny2zlxg.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fz7q74xjpbp9pqgstsylc.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fz7q74xjpbp9pqgstsylc.png" alt="Image description" width="398" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;h1&gt;&lt;strong&gt;&lt;u&gt;Metadata Transfer&lt;/u&gt;&lt;/strong&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyu1jjht25fi848kklhry.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyu1jjht25fi848kklhry.png" alt="Image description" width="800" height="479"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;h1&gt;&lt;strong&gt;&lt;u&gt;Reviews&lt;/u&gt;&lt;/strong&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fs1pb4fnn8w4tqlirmglx.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fs1pb4fnn8w4tqlirmglx.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F7sv3g92k06wo750hr7r1.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F7sv3g92k06wo750hr7r1.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fkub4lfubsdjqav4me8lv.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fkub4lfubsdjqav4me8lv.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpodknzle8ouqz7mfqc07.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fpodknzle8ouqz7mfqc07.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;&lt;strong&gt;&lt;u&gt;Visualization&lt;/u&gt;&lt;/strong&gt;&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmrxsasuo2uiiamd3rfmw.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmrxsasuo2uiiamd3rfmw.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fh125fk4wpx1qd0zf2e0p.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fh125fk4wpx1qd0zf2e0p.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F6pwlq6cl7bxwu33ntmnq.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F6pwlq6cl7bxwu33ntmnq.png" alt="Image description" width="800" height="436"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmkneudbtea7b07ko6vht.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fmkneudbtea7b07ko6vht.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fvcc0voeu8qm29xnal7k2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fvcc0voeu8qm29xnal7k2.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;&lt;u&gt;Scenes in an Animation&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhoviu5kxb12jw6lkcfv2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhoviu5kxb12jw6lkcfv2.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Foern22fpfd7kpd7eqjyd.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Foern22fpfd7kpd7eqjyd.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqbkhmtywot9wmmfsfgpv.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fqbkhmtywot9wmmfsfgpv.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fjzea1mcnzjwv504xomks.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fjzea1mcnzjwv504xomks.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt; &lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Aswin Vijayakumar&lt;/strong&gt;&lt;/p&gt;

</description>
      <category>observability</category>
      <category>ai</category>
      <category>machinelearning</category>
      <category>mobileapp</category>
    </item>
    <item>
      <title>The Concept Developed for Mobile App — EquipAny.</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Fri, 28 Apr 2023 22:05:46 +0000</pubDate>
      <link>https://dev.to/aswinvk28/the-concept-developed-for-mobile-app-equipany-45gc</link>
      <guid>https://dev.to/aswinvk28/the-concept-developed-for-mobile-app-equipany-45gc</guid>
      <description>&lt;p&gt;&lt;strong&gt;Activity Recognition is a key part of the KlinterAI side of EquipAny Mobile App.&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Recording a Risk through a Risk Register, involves integrating Traceability into the App, through Object Detection, Anomaly Detection, and moreover Activity Recognition Methods.&lt;/p&gt;

&lt;p&gt;As part of the &lt;strong&gt;SAS Hackathon&lt;/strong&gt;, we developed EquipAny as part of the KlinterAI project.&lt;/p&gt;

&lt;p&gt;Our Team at &lt;strong&gt;KlinterAI&lt;/strong&gt;, developed a &lt;strong&gt;Risk Methodology&lt;/strong&gt; mapping the &lt;strong&gt;Incidents&lt;/strong&gt; and &lt;strong&gt;Risks&lt;/strong&gt; at a &lt;strong&gt;Construction Site&lt;/strong&gt;.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyd4qx7z3z0e5fn2dpynx.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyd4qx7z3z0e5fn2dpynx.png" alt="Image description" width="800" height="503"&gt;&lt;/a&gt;&lt;br&gt;
EquipAny — Impact and Likelihood, Risk Assessment, for 7 Cases&lt;/p&gt;

&lt;p&gt;Along the Impact and Likelihood are 7 of the 9 Cases: From “Work Stoppages Causing Productivity Losses” to “Falling Material(s)”.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ftzht7q7lbestbfkiu40l.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ftzht7q7lbestbfkiu40l.png" alt="Image description" width="483" height="241"&gt;&lt;/a&gt;&lt;br&gt;
EquipAny — 9 Cases of EquipAny Risk Register&lt;/p&gt;

&lt;p&gt;The Issues are Categorized as Potential Issue(s), Potential Risk(s), Incident(s) and Environmental Incident(s) based on:&lt;/p&gt;

&lt;p&gt;“Machine Learning Algorithms” and By “Applying Artificial Intelligence” Methods.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fffdl6yom4pzefv3vg5xv.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fffdl6yom4pzefv3vg5xv.png" alt="Image description" width="800" height="514"&gt;&lt;/a&gt;&lt;br&gt;
EquipAny — “Unexpected Delays” — “Major Incidents”&lt;/p&gt;

&lt;h1&gt;
  
  
  Motivation — For Construction Piling Foundations
&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F2sqo2ypfn22t73svokjq.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F2sqo2ypfn22t73svokjq.png" alt="Image description" width="720" height="453"&gt;&lt;/a&gt;&lt;br&gt;
Motivation — Land Subsidence in Central Valley of California&lt;/p&gt;

&lt;p&gt;In the Central Valley of California, there is Land Subsidence due to a Lack of Ground Water. Ground Water Recharge needs to be in place for the Soil. There are Cultivated Lands for such purposes, seen as Patches in the image above.&lt;/p&gt;

&lt;p&gt;Compliance Reasons&lt;br&gt;
According to &lt;a href="https://www.legislation.gov.uk/ukpga/2008/27/section/39" rel="noopener noreferrer"&gt;Climate Change Act 2008&lt;/a&gt;, The &lt;a href="https://www.legislation.gov.uk/ukpga/2008/27/section/39" rel="noopener noreferrer"&gt;General Ancillary Powers&lt;/a&gt;, are provided to the Committee to enter into contracts within, possibly Construction Site(s), where there could be an Oil Leakage for example.&lt;/p&gt;

&lt;p&gt;Such General Ancillary Powers, can publish certain results of activities carried out by the Committee or others.&lt;/p&gt;

&lt;p&gt;Publishing Oil Leakage, into the Social Media, using a Mobile Screenshot, would just damage the reputation of the Construction Site(s) and the Related &lt;a href="https://github.com/aswinvk28/ai-in-business/blob/main/Metrics/data_governance.md#data-metrics" rel="noopener noreferrer"&gt;Provenance&lt;/a&gt;.&lt;/p&gt;

&lt;h1&gt;
  
  
  A Business Case — about using Ancillary Services with KlinterAI
&lt;/h1&gt;

&lt;p&gt;According to the &lt;a href="https://youtu.be/zgOInLHlKc8" rel="noopener noreferrer"&gt;Pitch Video&lt;/a&gt;, &lt;a href="https://communities.sas.com/t5/SAS-Hacker-s-Hub/KlinterAI-Integrating-Observability-in-the-Construction-Industry/ta-p/863451" rel="noopener noreferrer"&gt;KlinterAI&lt;/a&gt;, the AI Solution, would provide Technical Support and Data Stewardship Support using AI and non-AI Solutions, which could be integrated with the mobile apps. Please see these screenshots, extracted from an Animated Video.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F78jbeyi109rga7fhp7zd.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F78jbeyi109rga7fhp7zd.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fo6pa5uexxcvd9m1p9vp2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fo6pa5uexxcvd9m1p9vp2.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;br&gt;
KlinterAI — Construction Corp. and Oil Leakage&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fc1ob1d35atbe7jhsnrwx.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fc1ob1d35atbe7jhsnrwx.png" alt="Image description" width="640" height="360"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F39p3m97fxx4hqwnem2ot.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F39p3m97fxx4hqwnem2ot.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;br&gt;
KlinterAI — Daily Report, Dispute and Legal Claim&lt;/p&gt;

&lt;p&gt;More Business Cases, applicable to Risk Identification, are presented in: &lt;a href="https://medium.com/klinterai/statistics-related-to-6-business-case-s-of-equipany-report-incidents-and-identify-risks-at-4b757a4d2e08" rel="noopener noreferrer"&gt;Statistics Related to 6 Business Case(s)&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The Risk Methodology, Risk Assessment and a List of Incidents and Risks are provided in: &lt;a href="https://medium.com/klinterai/equipany-risk-assessment-and-activity-recognition-d2c11a577ad7" rel="noopener noreferrer"&gt;EquipAny — Risk Assessment and Activity Recognition&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The Business Case(s) are searchable within my YouTube Channel:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://www.youtube.com/@virtualcourtroom/search?query=business-case" rel="noopener noreferrer"&gt;https://www.youtube.com/@virtualcourtroom/search?query=business-case&lt;/a&gt;&lt;/p&gt;

&lt;h1&gt;
  
  
  Reviews on Glide Prototype — Low-Fidelity
&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhcw7sqmtqhlyqvdesfgh.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhcw7sqmtqhlyqvdesfgh.png" alt="Image description" width="800" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhtzjswetkm8mtd7l6pah.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fhtzjswetkm8mtd7l6pah.png" alt="Image description" width="800" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F0yytanv8o27dqtccahjo.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F0yytanv8o27dqtccahjo.png" alt="Image description" width="800" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fb4wlj2vi2bogmcf4zna7.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fb4wlj2vi2bogmcf4zna7.png" alt="Image description" width="800" height="400"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Review 3 and Review 4–8 and 7 respectively&lt;/strong&gt;&lt;/p&gt;

&lt;h1&gt;
  
  
  More Review(s)
&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F8tnurt0ynvo33ftzz1b2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F8tnurt0ynvo33ftzz1b2.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F46cesf7kp07hp26289u2.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F46cesf7kp07hp26289u2.png" alt="Image description" width="800" height="450"&gt;&lt;/a&gt;&lt;br&gt;
Mockup(s) of Glide App at &lt;a href="https://EquipAny.Glide.Page" rel="noopener noreferrer"&gt;https://equipany.glide.page/&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Team Member(s) at SAS Hackathon for KlinterAI&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Frzje4p8okign0er90zp0.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Frzje4p8okign0er90zp0.png" alt="Image description" width="800" height="749"&gt;&lt;/a&gt;&lt;br&gt;
★ Members — Aswin, Luis, Marcus and Afeef KK — With the SAS Hackathon ★&lt;/p&gt;

&lt;p&gt;Please contact us; our Google Group(s) email address is: &lt;a href="https://groups.google.com/u/2/a/klinterai.com/g/project-discussiondiscussion" rel="noopener noreferrer"&gt;project-discussiondiscussion@klinterai.com&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F8o0a52cqf5pr0sgxzu7l.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F8o0a52cqf5pr0sgxzu7l.png" alt="Image description" width="800" height="582"&gt;&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;KlinterAI — Logo !&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fatwr5y6yu9eennoefykj.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fatwr5y6yu9eennoefykj.png" alt="Image description" width="720" height="120"&gt;&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;EquipAny — Logo !&lt;/strong&gt;&lt;/p&gt;

</description>
      <category>activityrecognition</category>
      <category>construction</category>
      <category>landsubsidence</category>
      <category>riskmethodology</category>
    </item>
    <item>
      <title>Statistics related to 6 Business Case(s) of EquipAny - Report Incidents and Identify Risks at Workplace</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Sun, 16 Apr 2023 09:09:29 +0000</pubDate>
      <link>https://dev.to/aswinvk28/statistics-related-to-6-business-cases-of-equipany-report-incidents-and-identify-risks-at-workplace-16cp</link>
      <guid>https://dev.to/aswinvk28/statistics-related-to-6-business-cases-of-equipany-report-incidents-and-identify-risks-at-workplace-16cp</guid>
      <description>&lt;h1&gt;
  
  
  &lt;strong&gt;Fatal Injuries due to Industry&lt;/strong&gt;
&lt;/h1&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fp7edf2dv1sk3rvypd4s4.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fp7edf2dv1sk3rvypd4s4.png" alt="Image description" width="800" height="594"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Statistics of Issues due to Mobile Phone Usage&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fq0lwmfj0koainvi1t8hb.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fq0lwmfj0koainvi1t8hb.png" alt="Image description" width="797" height="217"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Falls Lead in Workplace Fatalities&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fddc9m4tzlhgfb9p1h1wl.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fddc9m4tzlhgfb9p1h1wl.png" alt="Image description" width="800" height="758"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;FPS and AGS Data Format, are 1991 Metadata Information&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;DCPG and DCPT are used together to record dynamic cone penetrometer (DCP) tests where data is recorded as penetration per blow. If&lt;br&gt;
data is recorded as blows over an interval, data shall be recorded in DPRG and DPRB&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F3s48rze78coatchu0oj4.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F3s48rze78coatchu0oj4.png" alt="Image description" width="702" height="305"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Data Format and Information Exchange&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fgdk9q61iy2ern7fxm4d1.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fgdk9q61iy2ern7fxm4d1.png" alt="Image description" width="800" height="797"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Labour Quotient Productivity is an increase, but Multi-Factor Productivity is dipping&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F6cujowe8w858phszsrzv.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F6cujowe8w858phszsrzv.png" alt="Image description" width="700" height="549"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Monitoring Construction Activities on a small Building Project with an Open Space&lt;/strong&gt;
&lt;/h2&gt;

&lt;h3&gt;
  
  
  Object Detected from a Timelapse Video, of CCTV Camera Pros
&lt;/h3&gt;

&lt;p&gt;&lt;a href="https://youtu.be/zUiXM8oOjwg" rel="noopener noreferrer"&gt;Object Detected from a Timelapse Video, of CCTV Camera Pros&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyptah8af2z1tk3os6fgx.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fyptah8af2z1tk3os6fgx.png" alt="Image description" width="800" height="332"&gt;&lt;/a&gt;&lt;/p&gt;

</description>
      <category>observability</category>
      <category>construction</category>
      <category>software</category>
      <category>visualization</category>
    </item>
    <item>
      <title>KlinterAI Ancillary Services</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Sun, 12 Mar 2023 02:55:38 +0000</pubDate>
      <link>https://dev.to/aswinvk28/klinterai-ancillary-services-lbk</link>
      <guid>https://dev.to/aswinvk28/klinterai-ancillary-services-lbk</guid>
      <description>&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F7x2w2hntxw492etijqg4.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F7x2w2hntxw492etijqg4.png" alt="Image description" width="800" height="465"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Business Cases of KlinterAI&lt;/strong&gt;
&lt;/h2&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Labour &amp;amp; Contract POs&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;A Punch List is created on a monthly or bi-monthly basis by a Site Inspector, which requires manual identification over a Quality Inspection. Our database is used to produce Safety incidents detected over a Computer Vision Depth-Camera into the Punch Lists. A Daily Report is sent to the Client weekly or monthly, consisting of the Number and Types of Piles, and its measures. These would be generated from our AI Platform, KlinterAI, that supports counting of Number of Piles, recording the type of Activities involving any Repairs or Maintenance, and using the BIM to obtain the Types of Piles, Measures into the Daily Report. We are researchers of the FPS (Federation of Piling Specialists) and AGS Data Format because we need to improve the Security not only for Open Sites, but also for Computer Vision and IoT related tasks, in the Cloud too.  &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Costing and Classification Codes&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;To integrate Costing into BIM, there are Costing sheets, Bill of Materials (BOM), Labour &amp;amp; Contract Purchase Orders integrated into the BIM. These are overall costs affecting the Construction Project, and will only convey the Productivity in terms of a Quotient. To report on the Multi-Factor Productivity, which is low in the UK from 1972 onwards, it requires us to understand whether we are actually prioritizing the Costing based on Materials, Labour, Maintenance, and Repairs. How do we efficiently report the Classification Code against an Activity that Revit has developed for a Construction Site work? &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Environmental Incident&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;Consider you have an oil leakage, and you want to use an Ancillary Equipment for mitigating the risk, such as an Oil Absorbing Carpet. You also would like to approach ancillary processes such as the Regulatory Authorities and Media, then it would be best to use a Platform that would enable you to reduce the reputation damage caused due to a delayed site work and improve your branding. &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Resolving Disputes and Conflict Resolution&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;How would you approach, if you had a legal claim or litigation against your corporation for an oil leakage, for whom the client is not satisfied with what you produced or communicated towards a Daily Report? &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;WiFi Crack and Cyber Security Threat&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;Consider an individual or a hacker group, wants to collect data on machinery and IoT devices installed, by cracking the WiFi network. What would you do to integrate a Private &amp;amp; Secure Data Transfer?&lt;/p&gt;

</description>
      <category>legal</category>
      <category>ancillary</category>
      <category>dispute</category>
      <category>privatesecuredataexchange</category>
    </item>
    <item>
      <title>KlinterAI Costing Labour Contract POs</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Sun, 12 Mar 2023 02:45:32 +0000</pubDate>
      <link>https://dev.to/aswinvk28/klinterai-costing-labour-contract-pos-42kb</link>
      <guid>https://dev.to/aswinvk28/klinterai-costing-labour-contract-pos-42kb</guid>
      <description>&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F0qyhiej4z44yk8w2kfct.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F0qyhiej4z44yk8w2kfct.png" alt="Image description" width="800" height="161"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Business Cases of KlinterAI&lt;/strong&gt;
&lt;/h2&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Labour &amp;amp; Contract POs&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;A Punch List is created on a monthly or bi-monthly basis by a Site Inspector, which requires manual identification over a Quality Inspection. Our database is used to produce Safety incidents detected over a Computer Vision Depth-Camera into the Punch Lists. A Daily Report is sent to the Client weekly or monthly, consisting of the Number and Types of Piles, and its measures. These would be generated from our AI Platform, KlinterAI, that supports counting of Number of Piles, recording the type of Activities involving any Repairs or Maintenance, and using the BIM to obtain the Types of Piles, Measures into the Daily Report. We are researchers of the FPS (Federation of Piling Specialists) and AGS Data Format because we need to improve the Security not only for Open Sites, but also for Computer Vision and IoT related tasks, in the Cloud too.  &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Costing and Classification Codes&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;To integrate Costing into BIM, there are Costing sheets, Bill of Materials (BOM), Labour &amp;amp; Contract Purchase Orders integrated into the BIM. These are overall costs affecting the Construction Project, and will only convey the Productivity in terms of a Quotient. To report on the Multi-Factor Productivity, which is low in the UK from 1972 onwards, it requires us to understand whether we are actually prioritizing the Costing based on Materials, Labour, Maintenance, and Repairs. How do we efficiently report the Classification Code against an Activity that Revit has developed for a Construction Site work? &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Environmental Incident&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;Consider you have an oil leakage, and you want to use an Ancillary Equipment for mitigating the risk, such as an Oil Absorbing Carpet. You also would like to approach ancillary processes such as the Regulatory Authorities and Media, then it would be best to use a Platform that would enable you to reduce the reputation damage caused due to a delayed site work and improve your branding. &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;Resolving Disputes and Conflict Resolution&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;How would you approach, if you had a legal claim or litigation against your corporation for an oil leakage, for whom the client is not satisfied with what you produced or communicated towards a Daily Report? &lt;/p&gt;

&lt;h3&gt;
  
  
  &lt;strong&gt;WiFi Crack and Cyber Security Threat&lt;/strong&gt;
&lt;/h3&gt;

&lt;p&gt;Consider an individual or a hacker group, wants to collect data on machinery and IoT devices installed, by cracking the WiFi network. What would you do to integrate a Private &amp;amp; Secure Data Transfer?&lt;/p&gt;

</description>
      <category>classificationcodes</category>
      <category>activityrecognition</category>
      <category>ganttchart</category>
      <category>foreman</category>
    </item>
    <item>
      <title>Improving Safety in the Construction Industry through Observability</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Thu, 05 Jan 2023 08:59:56 +0000</pubDate>
      <link>https://dev.to/aswinvk28/improving-safety-in-the-construction-industry-through-observability-4l8o</link>
      <guid>https://dev.to/aswinvk28/improving-safety-in-the-construction-industry-through-observability-4l8o</guid>
      <description>&lt;h2&gt;
  
  
  Introduction
&lt;/h2&gt;

&lt;p&gt;&lt;strong&gt;Observability&lt;/strong&gt; in the Construction Industry is a new technique to improve the &lt;strong&gt;Productivity&lt;/strong&gt;. In a Construction Site, especially Foundations Site, there will be many machines operating over a period of duration. The Construction Scheduling separates the Activities Recognized from Frames into a Project Gantt Chart. Such a Project Gantt Chart is integrated into BIM 4D, with autonomy. &lt;/p&gt;

&lt;h2&gt;
  
  
  Improving Safety
&lt;/h2&gt;

&lt;p&gt;Construction Site Safety depends on people working at the site, the Quality of Machines imported into the Site, Rules laid out by Construction Project Managers, and the Design or a BIM Model of the Project. Safe use of phones within the designated areas in the Site, an awareness of health and safety risks at work, reporting any risks in the site, Safe and efficient materials storage and so on. Materials Storage involves Flammable materials, Storage areas, Pedestrian Routes, Storage at height, Tidiness and Deliveries such as planning deliveries on site to keep the amount of material minimum on the site. &lt;/p&gt;

&lt;h2&gt;
  
  
  Productivity from 1972 onwards
&lt;/h2&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fefe2ne14x1kc207vloz6.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fefe2ne14x1kc207vloz6.png" alt="Image description" width="700" height="549"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;From 1972 onwards, there is no improvement in the Multi-Factor Productivity in the Construction Sector. This indicates the energy, purchased services, land and facilities management. The greatest improvement is in the Market Sector output per hour. Market sector MFP and Construction output per hour are almost the same. &lt;/p&gt;

&lt;h2&gt;
  
  
  Observability by Parts
&lt;/h2&gt;

&lt;p&gt;Observability by Parts is a method to selectively detect the Objects and Equipments, and recognize the activities to isolate a particular section of the work. By isolating a particular area of work, better safety can be introduced in the Site and working conditions because Enclosures can be sealed as well as governed using an Integrated Computer Vision platform. &lt;/p&gt;

&lt;h3&gt;
  
  
  Full-view of the object detection frame using Computer Vision
&lt;/h3&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fzz6rdr72n5n3p0qt65p5.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fzz6rdr72n5n3p0qt65p5.png" alt="Full-view" width="800" height="523"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;By Detecting all objects, the objects and Equipments are recorded into the Software.&lt;/em&gt;&lt;/p&gt;

&lt;h3&gt;
  
  
  Partial-view of the object detection frame using Computer Vision
&lt;/h3&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fuorv6ba1f6lgp5ik8nmb.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fuorv6ba1f6lgp5ik8nmb.png" alt="Partial-view" width="800" height="524"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;em&gt;By Detecting some of the objects, only those objects which want to get exposed will be recorded into the Software.&lt;/em&gt;&lt;/p&gt;

&lt;h2&gt;
  
  
  Other Measures to Improve Safety
&lt;/h2&gt;

&lt;ul&gt;
&lt;li&gt;Increases Safety due to Execution of Intermediary Activities&lt;/li&gt;
&lt;li&gt;More holistic identification and tracking of objects&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  Risk mitigation in the Construction Site through Observability
&lt;/h2&gt;

&lt;ul&gt;
&lt;li&gt;Involving Costing while recognizing the Activities, and integrating with BIM 5D&lt;/li&gt;
&lt;li&gt;Efficiency and Risk reduction due to a Traceability on Material POs, Contract POs and Supplier POs&lt;/li&gt;
&lt;/ul&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Conclusion&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;Integrating &lt;strong&gt;Observability&lt;/strong&gt; for producing Construction Scheduling Gantt Chart, will require data collection of activities, and definition of well-defined Metrics. &lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;&lt;p&gt;Observability can be used for a &lt;strong&gt;Quality Inspection&lt;/strong&gt; to generate Punch Lists in Construction Site for recording balancing works and deficiencies. &lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Another use of integrating Observability is to generate &lt;strong&gt;Daily Report&lt;/strong&gt; which the Foreman or the Project Manager is responsible for&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;A &lt;strong&gt;Project Field Observation&lt;/strong&gt;, involving the screenshots of deficiencies will be added by the project person&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;A &lt;strong&gt;Demand Inspection&lt;/strong&gt; of the Construction site involving an activity list recognized and the deficiencies detected using Scheduled Walks&lt;/p&gt;&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;Thanks for reading the article&lt;/p&gt;

&lt;p&gt;This is based off of a LinkedIn article:&lt;br&gt;
&lt;a href="https://www.linkedin.com/pulse/integrating-observability-construction-sector-aswin-vijayakumar/" rel="noopener noreferrer"&gt;https://www.linkedin.com/pulse/integrating-observability-construction-sector-aswin-vijayakumar/&lt;/a&gt;&lt;/p&gt;

</description>
      <category>observability</category>
      <category>scheduling</category>
      <category>gantt</category>
      <category>react</category>
    </item>
    <item>
      <title>Telemetry on software defined networking</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Sat, 05 Nov 2022 13:05:20 +0000</pubDate>
      <link>https://dev.to/aswinvk28/telemetry-on-software-defined-networking-53po</link>
      <guid>https://dev.to/aswinvk28/telemetry-on-software-defined-networking-53po</guid>
      <description>&lt;h2&gt;
  
  
  &lt;strong&gt;WiFi 7&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;WiFi 7 is coming up with detection and human sensing. This creates an enormous number of opportunities that work with OpenTelemetry. I had an opportunity to listen to a talk on OpenTelemetry, which involves solving observability through logging, metrics and synthetic checks. &lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;Human in the loop&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;I have observed human-in-the-loop scenarios where certain images are classified into desired classes. This involves a high dimensional data drift which may be matured into high dimensional concept drift, using automation. DevOps will also remain prominent in such a scenario. &lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;strong&gt;OpenTelemetry&lt;/strong&gt;
&lt;/h2&gt;

&lt;p&gt;If WiFi 7 improves its functionality, then security becomes stronger because it is illegal to tamper with the networking equipment. OpenTelemetry will be widely used by businesses to achieve 99.99% uptime in observability.&lt;/p&gt;

</description>
      <category>sdn</category>
      <category>networking</category>
      <category>openflow</category>
      <category>opensdn</category>
    </item>
    <item>
      <title>More Detections integrated into cameras, these days to improve Observability</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Fri, 04 Nov 2022 12:57:35 +0000</pubDate>
      <link>https://dev.to/aswinvk28/drift-detection-integrated-into-cameras-these-days-lgh</link>
      <guid>https://dev.to/aswinvk28/drift-detection-integrated-into-cameras-these-days-lgh</guid>
      <description>&lt;p&gt;&lt;strong&gt;## Introduction&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Well, recently I have found that anomaly detection is used in airports using cameras, and it is also related to theft detection using cameras. I guess these anomaly detection techniques are quite close to drift detection techniques. Since there is novelty detection (involving human workflow) and anomaly detection (involving ML workflow), these cameras need such code, especially oneAPI is the necessary route.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Licensing&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;I have observed that OpenVINO removed its licensing in 2020 - 2021. oneAPI takes a similar approach, and it will be accessible to developers who can actually install these cameras in sites. This is quite Open. I would like to tag this article, &lt;/p&gt;
&lt;div class="ltag__link"&gt;
  &lt;a href="/sramkrishna" class="ltag__link__link"&gt;
    &lt;div class="ltag__link__pic"&gt;
      &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F443951%2Ff2f5b217-fd68-415a-a70c-31e7a6abed54.jpeg" alt="sramkrishna"&gt;
    &lt;/div&gt;
  &lt;/a&gt;
  &lt;a href="/sramkrishna/oneapi-and-importance-of-open-platforms-l70" class="ltag__link__link"&gt;
    &lt;div class="ltag__link__content"&gt;
      &lt;h2&gt;oneAPI and importance of open platforms&lt;/h2&gt;
      &lt;h3&gt;Sriram Ramkrishna ・ Nov 3 '22&lt;/h3&gt;
      &lt;div class="ltag__link__taglist"&gt;
        &lt;span class="ltag__link__tag"&gt;#beginners&lt;/span&gt;
        &lt;span class="ltag__link__tag"&gt;#hpc&lt;/span&gt;
        &lt;span class="ltag__link__tag"&gt;#aiml&lt;/span&gt;
        &lt;span class="ltag__link__tag"&gt;#opensource&lt;/span&gt;
      &lt;/div&gt;
    &lt;/div&gt;
  &lt;/a&gt;
&lt;/div&gt;
 

&lt;p&gt;&lt;strong&gt;Some concerns&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;The stakeholders are concerned about privacy issues whenever a camera is installed on site. They tend to follow their human workflows and prioritize that if AI is not available. A closed solution that works with the industries is not only expensive, but also faces issues with usage of GPUs just because their licensing is not following Open standards. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;What do companies think?&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;The companies think they can just carry on with monitoring instead of integrating Observability into the sectors, which is essential in this age where AI chips and Quantum computing are popular. The MoD has recently entered into a contract with Orca Computing to buy Quantum Computer. The advancement is huge as of now, and there are opportunities in AI to integrate observability into industries and to employ Data Scientists in various fields. &lt;/p&gt;

</description>
      <category>machinelearning</category>
      <category>drift</category>
      <category>oneapi</category>
      <category>licensing</category>
    </item>
    <item>
      <title>Observability vs Monitoring</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Thu, 03 Nov 2022 13:54:22 +0000</pubDate>
      <link>https://dev.to/aswinvk28/observability-vs-monitoring-1eim</link>
      <guid>https://dev.to/aswinvk28/observability-vs-monitoring-1eim</guid>
      <description>&lt;h2&gt;
  
  
  &lt;em&gt;&lt;strong&gt;Observability vs Monitoring&lt;/strong&gt;&lt;/em&gt;
&lt;/h2&gt;

&lt;p&gt;Observability is making sure the product is managed with a better integration of results with the behavior of the system such that there is 99.99% uptime. Monitoring involves constant overseeing of events involving multiple workflows that may slow down a process creating overheads in the holistic process of management. Observability can impact the understanding of results such that they may not be fully complete due to issues in detection of events. Customer happiness and customer satisfaction is dependent on the visibility of the system. Through monitoring the human workflows are monitored which as well supersedes a process where a quality inspection for these workflows is assigned such that monitoring of some human workflows are eliminated. Through observability we will be able to automate some human workflows through a qualitative process of data analytics and automation to achieve a matured system of evaluating the processes. &lt;/p&gt;

&lt;p&gt;Through data visualization, logging, tracing, and data analytics, one can achieve the constant overseeing of events. Such processes are testable which gives the client to evaluate a job in better ways than existing methods. &lt;/p&gt;

&lt;p&gt;APIs are necessary in such processes that support observability. Such APIs will be delivered as services which allow the customer to load balance them. &lt;/p&gt;

&lt;p&gt;3 concerns that the author in (BICALHO, 2019) addresses are: (1) Synthetic checks, (2) Metrics, (3) Logging policies&lt;/p&gt;

&lt;h2&gt;
  
  
  &lt;em&gt;&lt;strong&gt;References&lt;/strong&gt;&lt;/em&gt;
&lt;/h2&gt;

&lt;p&gt;BICALHO, A. (2019). Achieving 99.99% uptime - a tale of Observability. Retrieved from &lt;a href="http://www.autodesk.com:" rel="noopener noreferrer"&gt;www.autodesk.com:&lt;/a&gt; &lt;a href="https://www.autodesk.com/autodesk-university/class/Achieving-9999-uptime-tale-Observability-2019#video" rel="noopener noreferrer"&gt;https://www.autodesk.com/autodesk-university/class/Achieving-9999-uptime-tale-Observability-2019#video&lt;/a&gt;&lt;/p&gt;

</description>
      <category>monitoring</category>
      <category>observability</category>
      <category>uptime</category>
      <category>service</category>
    </item>
    <item>
      <title>Improving Accuracy Through Age Drifting Scenarios of Faces</title>
      <dc:creator>Aswin Vijayakumar</dc:creator>
      <pubDate>Thu, 03 Nov 2022 10:13:45 +0000</pubDate>
      <link>https://dev.to/aswinvk28/improving-accuracy-through-age-drifting-scenarios-of-faces-1ad8</link>
      <guid>https://dev.to/aswinvk28/improving-accuracy-through-age-drifting-scenarios-of-faces-1ad8</guid>
      <description>&lt;p&gt;&lt;strong&gt;Abstract&lt;/strong&gt;&lt;br&gt;
The paper addresses the problem of age drifting in face recognition. In this study, face recognition is addressed using Machine Learning models by identification with hyperparameters and models in binary format saved into database. Face recognition performed through face classification is applied over FaceNet and a bandwidth-limited neural network. The bandwidth-limited neural network accepts clustered faces through Face clustering which improves the accuracy. A Canny edge detector applied on face classification models improves the accuracy further. Age drifting is addressed by the bandwidth-limited neural network over grouping the faces by age. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;1.    Introduction&lt;/strong&gt;&lt;br&gt;
Face recognition faces with privacy issues due to which the regulatory bodies object them when they are deployed in sectors such as retail, payment or shops. The paper addresses the method of face recognition by face classification from embeddings. In order to improve the accuracy, one has to address the age drifting scenarios of faces. The age acts as a distribution and can be extracted from a face image and is also expected to be varying across age groups. The lower the age it is likely that the machine learning algorithm used for face classification faces difficulty to learn the new face. This is because lower age groups will contain more changes on facial characteristics and the face similarities within the faces of lower age groups are closer together. The algorithm learns the age distribution thereby increasing the overall accuracy of recognition. &lt;br&gt;
Usually, face recognition is conducted using existing images of people directly, such that a high accuracy is obtained. The popular face recognition deep neural network models, FaceNet and ArcFace, achieve state-of-the-art results on any face image. The obtained embeddings are compared against the embeddings from other stored images of the same person and of other persons to find the identity behind the given face image. The method to perform face classification on the embeddings generated after face recognition involves comparing the results with stored hyperparameters and machine learning models stored in binary format. &lt;br&gt;
With Machine Learning and Deep Learning, automation is feasible, enabling recognition of faces that have already been registered in the system. The datasets used in the project include age information, making them suitable for performing statistical analysis to explain the drift in a drift-understanding context. Concept drift is detected by a change in the decision boundary, the data, or both. The project addresses such a drawback while designing the Deep Neural Network (DNN) that produces an improved accuracy using a Voting Classifier compared to that of a FaceNet model for the same evaluation dataset. The study develops a Voting Classifier pipeline that processes face embeddings from two separate deep learning models’ pipelines involving a pre-trained FaceNet model and a custom model based on Convolutional Variational Autoencoders (CVAEs). &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;2.    Related Work&lt;/strong&gt;&lt;br&gt;
The L2 distances of face embeddings obtained from Deep Learning neural networks are used to derive matching and non-matching scores of faces as per the paper [1]. This method is used for verification of an identity. The matching Euclidean distances are normalised by the non-matching mean and non-matching standard deviation to obtain the matching score, and vice versa. This separates the non-matching and matching scores into two histograms. The drawback of this method is that privacy is lost due to direct access to images. The network, ArcFace, introduced in the paper [2] and trained with a categorical cross entropy loss, performs face classification using face similarity analysis on a refined probe set. The paper achieves a classification accuracy of 97.91% on the refined probe set. ArcFace trains on the AgeDB-30 dataset, Youtube Faces (YTF), the MegaFace challenge and the FaceScrub probe set. The network developed by Google, FaceNet, has about 22K parameters with an embedding vector size of 128. FaceNet performs face classification and verification using similar face similarity analysis techniques. The pose, illumination and expression invariant measure is explored by [3], which uses a method of similarity scores described in [4]. A statistical test is used for the identification of similar and non-similar images. This is based on ranking of similarity scores, where the sum of ranks of the first 100 images indicates similar images. In a similarity between two sets of images, the order is determined by ranks of vectors in both sets. In another paper, [5], the method of classification via clustering is explained. The paper states that there is an improvement of accuracy due to clustering of unlabelled faces performed along with classification using Deep Neural Networks. Three methods are stated by the paper: (1) Controlled disjoint, (2) Controlled overlap, (3) Semi-controlled. These form clustering techniques where the drawbacks of clustering are addressed in each method. 
The paper, [6], produces a learned representation of face images using a VAE (Variational Autoencoder) useful for face attribute prediction. Using a linear interpolation of the latent vector, the face images generated by the VAE are drawn from a distribution, interpolating from a source image to a target image. The paper, [7], describes a Canny edge detector applied to face recognition algorithms using PCA (Principal Components Analysis). This method improves the accuracy of inference on images. This paper applies the same concept to face embeddings, because privacy is enhanced by separating the complexities of Big Semantic Data Storage and Indexing from image data. This is proposed as the solution where there are several Machine Learning models for data engineering, instead of processing images directly for face recognition only. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;3.    Methodology&lt;/strong&gt;&lt;br&gt;
This section addresses the topics of bandwidth-limited neural network and canny edge detector applied on images. In training the embeddings from a neural network, a Voting Classifier is used with a schematic diagram, as shown below. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Faj3rgq0fbh39335aysqa.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Faj3rgq0fbh39335aysqa.png" alt="Image description" width="665" height="454"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Figure 1. Schematic Diagram of Voting Classifier, with soft voting incorporated&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;3.1.  Bandwidth-Limited Neural Network&lt;/strong&gt;&lt;br&gt;
A bandwidth limited neural network is an autoencoder that performs feature extraction by exposing the embeddings in their latent distribution. Since the target labels are attached to the variational autoencoder, in the latent distribution, they require some degree of clustering before classification to improve the accuracy score. The bandwidth on a neural network implies that the network accepts only a limited number of identities per inference. If a group of images is compared using Euclidean distances (L2 distances), then one M x N Euclidean distance matrix will perform a comparison of up to a limited number of identities within a neighborhood of faces. Such comparisons can occur between and across the neighborhoods through parallel processing. This is to ensure the resulting embeddings do not lose precision. The bandwidth is explained by the formula:&lt;/p&gt;

&lt;p&gt;Bandwidth = (No. of faces (N)) / (Time (T))&lt;br&gt;
&lt;strong&gt;Equation 1. Bandwidth as number of faces processed per unit time&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fbpot4o19tkyztdp6puv7.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fbpot4o19tkyztdp6puv7.png" alt="Image description" width="420" height="69"&gt;&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;Figure 2. Training by batch size where the identity and age are sequentially ordered&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Face groupings are obtained through variety of techniques such as DBSCAN, KMeans, Fast-HAC, GCN, GCN-iter2 as mentioned in [5]. Such face groupings are added to the bandwidth limited neural network to improve the accuracy from the results obtained by randomized target vectors to that of ordered target vectors. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;3.2   Canny edge detector&lt;/strong&gt;&lt;br&gt;
Once a convolutional neural network is applied on an image through a neural network, its layer information demonstrates edge-like structures. This is due to a variety of filters being applied while training the images. A Canny edge detector is one such technique to improve the sharpness of the edges within every layer of a neural network. If an autoencoder is trained on faces, then the edges on the faces are improved and, thereby, identification of faces will be better. The factor in the equation is taken to be 1000, chosen such that the best results are obtained.&lt;/p&gt;

&lt;p&gt;Edge Gradient (G) = √(G_x² + G_y²)&lt;/p&gt;

&lt;p&gt;Angle (θ) = tan⁻¹(G_y / G_x)&lt;/p&gt;

&lt;p&gt;Initial Accuracy (Image + factor × Edge) → Improved Accuracy&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Equation 2. Set of equations for applying Canny Edge Detector&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fb04vdrgywebl62zserkm.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fb04vdrgywebl62zserkm.png" alt="Image description" width="468" height="244"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fl8r5cat7ysj9vc4ln0if.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fl8r5cat7ysj9vc4ln0if.png" alt="Image description" width="467" height="243"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Figure 3 Canny edge detector images improving inference output of images&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;4.    Results&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;4.1.  Bandwidth-Limited Neural Network&lt;/strong&gt;&lt;br&gt;
An accuracy improvement from 88.49% to 94% is obtained using such a network which trains the images by ordering by identity and age and training by randomized age values. The results are shown in the below table:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ff5krejws7pyy5riqmjb3.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Ff5krejws7pyy5riqmjb3.png" alt="Image description" width="800" height="461"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another testing scenario was introduced to measure how effectively age-drifting was incorporated by the new model. This revealed interesting results on age-groups (younger and elder) by training younger and training elder. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fycrfdcwwi5y9fxsqexni.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fycrfdcwwi5y9fxsqexni.png" alt="Image description" width="800" height="434"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The above results have been obtained through an ordered set of random vectors. Another scenario is to obtain the results without applying face grouping, such as a clustering algorithm, before sending the target vectors. This method involves applying &lt;strong&gt;np.random.randint&lt;/strong&gt;(0,435), where 435, passed as the second parameter, represents the number of identities during the neural network inference. When trained and tested with randomized target vectors, the following results in model metrics were obtained. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnlbg9r177lsg0u8n5idj.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fnlbg9r177lsg0u8n5idj.png" alt="Image description" width="800" height="288"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;4.2.  Canny edge detector&lt;/strong&gt;&lt;br&gt;
The results given below show us the difference between a FaceNet network and a bandwidth-limited neural network, which is taken to be a Convolutional Variational Autoencoder (CVAE). The Canny detector is applied on all four files: train_younger (facenet), train_younger (cvae), train_elder (facenet) and train_elder (cvae). Out of 122 false negatives registered for randomized target vectors, 25 true positives were registered after applying the Canny prediction. The results given below show the improvement of accuracy through a Canny edge detector. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F60czab4qxxhlbbl9vu7d.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2F60czab4qxxhlbbl9vu7d.png" alt="Image description" width="800" height="230"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;5.    Discussion&lt;/strong&gt;&lt;br&gt;
A bandwidth limited neural network is useful for recognizing faces within a neighbourhood. Usage of multiple models of these types enables identification of faces from metadata using parallel computing. A huge accuracy improvement suggests such models ensure identification is performed on the embeddings instead of on the images directly. By retraining the model with ordered age, the problem of age drifting is solved to some extent. The rest of the images, which have inherent noise in them, show drift in their target labels. The bandwidth neural network can be fine-tuned using the clustering algorithms listed in the Section 3 Methodology. It is found to show a variation from 93.57% to 97.06% for the training elder scenario, as opposed to 90.68% to 95.38% accuracy in the training younger scenario. The Canny edge detector is found to be useful for applying a small improvement of accuracy to the existing network, based on an image filter that filters out drifted images. It is observed that FaceNet responds to the Canny edge detector better than the CVAE. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;6.    Conclusion&lt;/strong&gt;&lt;br&gt;
Addressing age drifting has improved the face recognition system accuracy. A bandwidth limited neural network can be incorporated into organizations or neighborhoods that seek to consider a service-oriented face recognition system which is limited by the number of faces it can recognize. This would make regulations on face recognition stricter, enabling individuals to control their privacy. A face recognition system with this approach contributes towards mobility enhancement that impacts hours of operations, reducing wait times and predicting more customer demand for transportation. &lt;/p&gt;

&lt;p&gt;&lt;strong&gt;7.    References&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;[1]     P. Wang, Q. Ji and J. L. Wayman, "Modeling and Predicting Face Recognition System Performance Based on Analysis of Similarity Scores," IEEE Journals &amp;amp; Magazine, 2007. &lt;/p&gt;

&lt;p&gt;[2]     J. Deng, J. Guo, N. Xue and a. S. Zafeiriou, "ArcFace: Additive Angular Margin Loss for Deep Face Recognition," CVPR Open Access, p. 10, 2019. &lt;/p&gt;

&lt;p&gt;[3]     F. Schroff, T. Treibitz, D. Kriegman and a. S. Belongie, "Pose, illumination and expression invariant pairwise face-similarity measure via Doppelganger list comparison," 2011 International Conference on Computer Vision, p. pp. 2494–2501, 2011. &lt;/p&gt;

&lt;p&gt;[4]     L. Wolf, T. Hassner, Taigman and a. Y., "Similarity Scores Based on Background Samples," Computer Vision – ACCV, Vols. vol. 5995, H. Zha, R. Taniguchi, and S. Maybank, Eds. Berlin, Heidelberg: Springer Berlin Heidelberg, 2010, pp. 88–97, 2009. &lt;/p&gt;

&lt;p&gt;[5]     A. RoyChowdhury, X. Yu, K. Sohn, E. Learned-Miller and M. Chandraker, "Improving Face Recognition by Clustering Unlabeled Faces in the Wild," arXiv, 2020. &lt;/p&gt;

&lt;p&gt;[6]     X. Hou, L. Shen, K. Sun and G. Qiu, "Deep Feature Consistent Variational Autoencoder," IEEE Winter Conference on Applications of Computer Vision, 2017. &lt;/p&gt;

&lt;p&gt;[7]     R. Ullah, H. Hayat, A. A. Siddiqui, U. A. Siddiqui, J. Khan, F. Ullah, S. Hassan, L. Hasan, W. Albattah, M. Islam and a. G. M. Karami, "A Real-Time Framework for Human Face Detection and Recognition in CCTV Images," Hybrid Approaches for Image and Video Processing, 2022. &lt;/p&gt;

&lt;p&gt;[8]     F. Schroff, D. Kalenichenko and a. J. Philbin, "FaceNet: A Unified Embedding for Face Recognition and Clustering," IEEE Conference on Computer Vision and Pattern Recognition (CVPR), 2015.&lt;/p&gt;

&lt;p&gt;[9]     S. Dorodnicov, A. Grunnet-Jepsen, A. Puzhevich and D. Piro, "Open-Source Ethernet Networking for Intel® RealSense™ Depth Cameras," 2021. [Online]. Available: &lt;a href="https://dev.intelrealsense.com/docs/open-source-ethernet-networking-for-intel-realsense-depth-cameras" rel="noopener noreferrer"&gt;https://dev.intelrealsense.com/docs/open-source-ethernet-networking-for-intel-realsense-depth-cameras&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;[10]    S. A. Rizvi, P. J. Phillips and H. Moon, "The FERET Verification Testing Protocol for Face Recognition Algorithms," NISTIR 6281, 1998. &lt;/p&gt;

</description>
      <category>machinelearning</category>
      <category>research</category>
      <category>python</category>
      <category>datascience</category>
    </item>
  </channel>
</rss>
