<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: sainathsurender</title>
    <description>The latest articles on DEV Community by sainathsurender (@sainathsurender).</description>
    <link>https://dev.to/sainathsurender</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F70625%2F14b58a86-0aa9-44ef-9cd0-87f9f766439a.jpeg</url>
      <title>DEV Community: sainathsurender</title>
      <link>https://dev.to/sainathsurender</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/sainathsurender"/>
    <language>en</language>
    <item>
      <title>What is the best way to analyze huge chunks of data in Oracle DB</title>
      <dc:creator>sainathsurender</dc:creator>
      <pubDate>Tue, 26 Oct 2021 06:57:42 +0000</pubDate>
      <link>https://dev.to/sainathsurender/what-is-the-best-way-to-analyze-huge-chunks-of-data-in-oracle-db-4fcp</link>
      <guid>https://dev.to/sainathsurender/what-is-the-best-way-to-analyze-huge-chunks-of-data-in-oracle-db-4fcp</guid>
      <description>&lt;p&gt;We have a scenario where we need to analyze 2 lakh rows per day and based on that perform insert and update into other tables respectively. Problem is that Oracle Jobs with Parallel switched on (completely optimized) is not able to process at a faster pace and was thinking if Windows service would do that job. Wanted to know what is the right architecture for processing data from a flat table in the fastest way possible.&lt;/p&gt;

</description>
    </item>
  </channel>
</rss>
