<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: Armaan Khan</title>
    <description>The latest articles on DEV Community by Armaan Khan (@armaankhan8270).</description>
    <link>https://dev.to/armaankhan8270</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F2932368%2F40ccc2fe-3f94-447b-8ddb-b459fd4a95a6.jpg</url>
      <title>DEV Community: Armaan Khan</title>
      <link>https://dev.to/armaankhan8270</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/armaankhan8270"/>
    <language>en</language>
    <item>
      <title>query 360</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Thu, 21 Aug 2025 05:44:03 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/query-360-2hdb</link>
      <guid>https://dev.to/armaankhan8270/query-360-2hdb</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- Check if counts match
SELECT 
    w.WAREHOUSE_NAME,
    w.SPILLAGE_QUERIES_COUNT AS WAREHOUSE_SPILLAGE,
    COALESCE(q.QUERY_SPILLAGE, 0) AS DRILLDOWN_SPILLAGE,
    w.SPILLAGE_QUERIES_COUNT - COALESCE(q.QUERY_SPILLAGE, 0) AS DIFFERENCE
FROM (
    SELECT WAREHOUSE_NAME, SUM(SPILLAGE_QUERIES_COUNT) AS SPILLAGE_QUERIES_COUNT
    FROM POLARGOVERN_TEST.PUBLIC.ALL_WH_KPI_HISTORY_DATA
    WHERE KPI_DATE BETWEEN '2025-08-13' AND '2025-08-19'
    GROUP BY WAREHOUSE_NAME
) w
LEFT JOIN (
    SELECT WAREHOUSE_NAME, SUM(SPILLAGE_QUERIES_COUNT) AS QUERY_SPILLAGE
    FROM POLARSLED_DB.DEMO_UPLOAD.query_360_table  
    WHERE QUERY_DATE BETWEEN '2025-08-13' AND '2025-08-19'
    GROUP BY WAREHOUSE_NAME
) q ON w.WAREHOUSE_NAME = q.WAREHOUSE_NAME
ORDER BY ABS(DIFFERENCE) DESC;




&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;





&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;# 🔥 FIXED: Use the warehouse procedure table for all warehouse data
SQL_ALL_WAREHOUSES = """
SELECT
    WAREHOUSE_NAME,
    MAX(SIZE) AS SIZE,
    MAX(AUTO_SUSPEND) AS AUTO_SUSPEND,
    MIN(MIN_CLUSTER_COUNT) AS MIN_CLUSTER_COUNT,
    MAX(MAX_CLUSTER_COUNT) AS MAX_CLUSTER_COUNT,
    MAX(SCALING_POLICY) AS SCALING_POLICY,
    SUM(QUERIES_LT_1S) AS QUERIES_LT_1S,
    SUM(QUERIES_1_5S) AS QUERIES_1_5S,
    SUM(QUERIES_6_60S) AS QUERIES_6_60S,
    SUM(QUERIES_61_300S) AS QUERIES_61_300S,
    SUM(QUERIES_GT_300S) AS QUERIES_GT_300S,
    SUM(UTIL_0_20_COUNT) AS UTIL_0_20_COUNT,
    SUM(UTIL_20_40_COUNT) AS UTIL_20_40_COUNT,
    SUM(UTIL_40_60_COUNT) AS UTIL_40_60_COUNT,
    SUM(UTIL_60_80_COUNT) AS UTIL_60_80_COUNT,
    SUM(UTIL_80_100_COUNT) AS UTIL_80_100_COUNT,
    SUM(QUEUED_QUERIES_COUNT) AS QUEUED_QUERIES_COUNT,
    SUM(SPILLAGE_QUERIES_COUNT) AS SPILLAGE_QUERIES_COUNT,
    SUM(BLOCKED_TXN_COUNT) AS BLOCKED_TXN_COUNT,
    SUM(FAILED_QUERIES_COUNT) AS FAILED_QUERIES_COUNT,
    SUM(INCIDENT_QUERIES_COUNT) AS INCIDENT_QUERIES_COUNT,
    SUM(CREDITS_USED) AS CREDITS_USED
FROM POLARGOVERN_TEST.PUBLIC.ALL_WH_KPI_HISTORY_DATA
WHERE KPI_DATE &amp;gt;= TO_DATE('2025-08-13')
  AND KPI_DATE &amp;lt;= TO_DATE('2025-08-19')
GROUP BY WAREHOUSE_NAME
ORDER BY WAREHOUSE_NAME
"""

# FIXED: Warehouse KPI drill-down - Now correctly filters and groups
SQL_WAREHOUSE_KPI_DRILL = """
SELECT 
    USER_NAME,
    WAREHOUSE_NAME,
    COUNT(*) AS QUERY_COUNT,
    MIN(START_TIME) AS FIRST_QUERY_TIME,
    MAX(START_TIME) AS LAST_QUERY_TIME,
    AVG(TOTAL_ELAPSED_TIME_SECONDS) AS AVG_DURATION_SECONDS,
    SUM(CREDITS_USED) AS TOTAL_CREDITS,
    COUNT(CASE WHEN EXECUTION_STATUS = 'SUCCESS' THEN 1 END) AS SUCCESSFUL_QUERIES,
    COUNT(CASE WHEN EXECUTION_STATUS IN ('FAILED', 'CANCELLED') THEN 1 END) AS FAILED_QUERIES
FROM POLARSLED_DB.DEMO_UPLOAD.query_360_table
WHERE WAREHOUSE_NAME = %(warehouse_name)s
  AND QUERY_DATE &amp;gt;= TO_DATE('2025-08-13')
  AND QUERY_DATE &amp;lt;= TO_DATE('2025-08-19')
  AND {kpi_column} = 1  -- This filters to only queries that match the KPI
GROUP BY USER_NAME, WAREHOUSE_NAME
ORDER BY QUERY_COUNT DESC
LIMIT 1000
"""

# User KPI drill-down - Returns individual queries as expected
SQL_USER_KPI_DRILL = """
SELECT *
FROM POLARSLED_DB.DEMO_UPLOAD.query_360_table
WHERE USER_NAME = %(username)s
  AND QUERY_DATE &amp;gt;= TO_DATE('2025-08-13')
  AND QUERY_DATE &amp;lt;= TO_DATE('2025-08-19')
  AND {kpi_column} = 1
ORDER BY START_TIME DESC
LIMIT 1000
"""

# User-Warehouse KPI drill-down - Returns individual queries for specific user in warehouse
SQL_USER_WAREHOUSE_KPI_DRILL = """
SELECT *
FROM POLARSLED_DB.DEMO_UPLOAD.query_360_table
WHERE USER_NAME = %(username)s
  AND WAREHOUSE_NAME = %(warehouse_name)s
  AND QUERY_DATE &amp;gt;= TO_DATE('2025-08-13')
  AND QUERY_DATE &amp;lt;= TO_DATE('2025-08-19')
  AND {kpi_column} = 1
ORDER BY START_TIME DESC
LIMIT 1000
"""

SQL_GET_QUERY_DETAILS = """
SELECT *
FROM POLARSLED_DB.DEMO_UPLOAD.query_360_table
WHERE QUERY_ID = %(query_id)s
"""
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;





&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;CREATE OR REPLACE TABLE query_360_table
CLUSTER BY (WAREHOUSE_NAME, DATE(START_TIME), EXECUTION_STATUS)
AS
WITH
qh_base AS (
  SELECT
    q.*,
    ROW_NUMBER() OVER (PARTITION BY q.QUERY_ID ORDER BY q.START_TIME DESC) AS rn
  FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
  WHERE START_TIME &amp;gt;= TO_TIMESTAMP('2025-08-13')
    AND START_TIME &amp;lt;= TO_TIMESTAMP('2025-08-19')
    AND q.WAREHOUSE_NAME IS NOT NULL
),
qh AS (
  SELECT *
  FROM qh_base
  WHERE rn = 1
)

SELECT
    -- 🔥 CORE DETAILS - Only what's needed for drill-down
    qh.QUERY_ID,
    qh.QUERY_HASH,
    qh.SESSION_ID,
    qh.TRANSACTION_ID,
    qh.QUERY_TAG,
    qh.USER_NAME,
    qh.ROLE_NAME,
    qh.ROLE_TYPE,
    qh.USER_TYPE,

    -- TIME DIMENSIONS
    qh.START_TIME,
    qh.END_TIME,
    DATE(qh.START_TIME) AS QUERY_DATE,
    HOUR(qh.START_TIME) AS QUERY_HOUR,
    DAYOFWEEKISO(qh.START_TIME) AS DAY_OF_WEEK,
    DAYNAME(qh.START_TIME) AS DAY_NAME,

    -- 🔥 WAREHOUSE INFO - Only ID and NAME (as requested)
    qh.WAREHOUSE_ID,
    qh.WAREHOUSE_NAME,
    qh.WAREHOUSE_SIZE,
    qh.WAREHOUSE_TYPE,
    qh.CLUSTER_NUMBER,

    -- PERFORMANCE TIMES (SECONDS)
    ROUND(COALESCE(qh.TOTAL_ELAPSED_TIME,0) / 1000, 3) AS TOTAL_ELAPSED_TIME_SECONDS,
    ROUND(COALESCE(qh.EXECUTION_TIME,0) / 1000, 3) AS EXECUTION_TIME_SECONDS,
    ROUND(COALESCE(qh.COMPILATION_TIME,0) / 1000, 3) AS COMPILATION_TIME_SECONDS,
    ROUND(COALESCE(qh.QUEUED_PROVISIONING_TIME,0) / 1000, 3) AS QUEUED_PROVISIONING_TIME_SECONDS,
    ROUND(COALESCE(qh.QUEUED_REPAIR_TIME,0) / 1000, 3) AS QUEUED_REPAIR_TIME_SECONDS,
    ROUND(COALESCE(qh.QUEUED_OVERLOAD_TIME,0) / 1000, 3) AS QUEUED_OVERLOAD_TIME_SECONDS,

    -- 🔥 DURATION BUCKETS - EXACT MATCH with your warehouse procedure
    CASE WHEN COALESCE(qh.TOTAL_ELAPSED_TIME,0) &amp;lt; 1000 THEN 1 ELSE 0 END AS QUERIES_LT_1S,
    CASE WHEN COALESCE(qh.TOTAL_ELAPSED_TIME,0) BETWEEN 1000 AND 5000 THEN 1 ELSE 0 END AS QUERIES_1_5S,
    CASE WHEN COALESCE(qh.TOTAL_ELAPSED_TIME,0) BETWEEN 5001 AND 60000 THEN 1 ELSE 0 END AS QUERIES_6_60S,
    CASE WHEN COALESCE(qh.TOTAL_ELAPSED_TIME,0) BETWEEN 60001 AND 300000 THEN 1 ELSE 0 END AS QUERIES_61_300S,
    CASE WHEN COALESCE(qh.TOTAL_ELAPSED_TIME,0) &amp;gt; 300000 THEN 1 ELSE 0 END AS QUERIES_GT_300S,

    -- 🔥 KPI FLAGS - EXACT MATCH with your warehouse procedure logic

    -- 1) Queued queries - EXACT MATCH with procedure
    CASE WHEN (
        COALESCE(qh.QUEUED_OVERLOAD_TIME,0) &amp;gt; 0 
        OR COALESCE(qh.QUEUED_PROVISIONING_TIME,0) &amp;gt; 0 
        OR COALESCE(qh.QUEUED_REPAIR_TIME,0) &amp;gt; 0
    ) THEN 1 ELSE 0 END AS QUEUED_QUERIES_COUNT,

    -- 2) Spillage queries - EXACT MATCH with procedure
    CASE WHEN (
        COALESCE(qh.BYTES_SPILLED_TO_LOCAL_STORAGE,0) &amp;gt; 0 
        OR COALESCE(qh.BYTES_SPILLED_TO_REMOTE_STORAGE,0) &amp;gt; 0
    ) THEN 1 ELSE 0 END AS SPILLAGE_QUERIES_COUNT,

    -- 3) Blocked transactions - EXACT MATCH with procedure
    CASE WHEN qh.TRANSACTION_BLOCKED_TIME IS NOT NULL THEN 1 ELSE 0 END AS BLOCKED_TXN_COUNT,

    -- 4) Failed queries - EXACT MATCH with procedure
    CASE WHEN qh.ERROR_CODE IS NOT NULL THEN 1 ELSE 0 END AS FAILED_QUERIES_COUNT,

    -- 5) Incident queries - EXACT MATCH with procedure
    CASE WHEN (
        qh.EXECUTION_STATUS = 'INCIDENT' 
        OR qh.QUERY_TAG ILIKE '%incident%'
    ) THEN 1 ELSE 0 END AS INCIDENT_QUERIES_COUNT,

    -- 🔥 Additional KPI flags for drill-down (not used in warehouse aggregation)
    CASE WHEN (
        COALESCE(qh.BYTES_SPILLED_TO_LOCAL_STORAGE,0) &amp;gt; 0 
        OR COALESCE(qh.BYTES_SPILLED_TO_REMOTE_STORAGE,0) &amp;gt; 0
    ) THEN 1 ELSE 0 END AS SPILLED_QUERIES,

    CASE WHEN (
        qh.WAREHOUSE_SIZE IN ('Medium','Large','X-Large','2X-Large','3X-Large','4X-Large','5X-Large','6X-Large')
        AND COALESCE(qh.PERCENTAGE_SCANNED_FROM_CACHE,0) &amp;gt; 80
        AND COALESCE(qh.TOTAL_ELAPSED_TIME,0) / 1000 &amp;gt; 30
    ) THEN 1 ELSE 0 END AS OVER_PROVISIONED_QUERIES,

    CASE WHEN (
        HOUR(qh.START_TIME) BETWEEN 9 AND 17
        AND DAYOFWEEKISO(qh.START_TIME) BETWEEN 1 AND 5
        AND COALESCE(qh.TOTAL_ELAPSED_TIME,0) / 1000 &amp;gt; 300
    ) THEN 1 ELSE 0 END AS PEAK_HOUR_LONG_RUNNING_QUERIES,

    CASE WHEN REGEXP_LIKE(qh.QUERY_TEXT, 'SELECT\\s*\\*', 'i') THEN 1 ELSE 0 END AS SELECT_STAR_QUERIES,

    CASE WHEN (
        COALESCE(qh.PARTITIONS_SCANNED,0) = COALESCE(qh.PARTITIONS_TOTAL,0)
        AND COALESCE(qh.PARTITIONS_TOTAL,0) &amp;gt; 1
        AND COALESCE(qh.BYTES_SCANNED,0) &amp;gt; POWER(1024,3)
    ) THEN 1 ELSE 0 END AS UNPARTITIONED_SCAN_QUERIES,

    CASE WHEN COUNT(*) OVER (PARTITION BY qh.QUERY_HASH) &amp;gt; 1 AND qh.QUERY_HASH IS NOT NULL THEN 1 ELSE 0 END AS REPEATED_QUERIES,

    CASE WHEN (
        (LENGTH(qh.QUERY_TEXT) - LENGTH(REPLACE(UPPER(qh.QUERY_TEXT), 'JOIN', ''))) / 4 &amp;gt; 3
        OR UPPER(qh.QUERY_TEXT) LIKE '%WINDOW%'
        OR UPPER(qh.QUERY_TEXT) LIKE '%WITH%'
    ) THEN 1 ELSE 0 END AS COMPLEX_JOIN_QUERIES,

    CASE WHEN (
        COALESCE(qh.ROWS_PRODUCED,0) = 0
        AND UPPER(qh.QUERY_TEXT) LIKE 'SELECT%'
        AND COALESCE(qh.TOTAL_ELAPSED_TIME,0) / 1000 &amp;gt; 5
    ) THEN 1 ELSE 0 END AS ZERO_RESULT_QUERIES,

    CASE WHEN COALESCE(qh.COMPILATION_TIME,0) / 1000 &amp;gt; 10 THEN 1 ELSE 0 END AS HIGH_COMPILE_QUERIES,

    -- Credits calculation for individual queries
    CASE 
        WHEN COALESCE(qh.EXECUTION_TIME,0) = 0 THEN 0 
        ELSE ROUND(
            (qh.EXECUTION_TIME::FLOAT / 3600000) *
            CASE qh.WAREHOUSE_SIZE
                WHEN 'X-Small' THEN 1
                WHEN 'Small' THEN 2
                WHEN 'Medium' THEN 4
                WHEN 'Large' THEN 8
                WHEN 'X-Large' THEN 16
                WHEN '2X-Large' THEN 32
                WHEN '3X-Large' THEN 64
                WHEN '4X-Large' THEN 128
                WHEN '5X-Large' THEN 256
                WHEN '6X-Large' THEN 512
                ELSE 1
            END
        , 6)
    END AS CREDITS_USED,

    -- Essential columns for drill-down details
    qh.BYTES_SCANNED,
    qh.PERCENTAGE_SCANNED_FROM_CACHE,
    qh.ROWS_PRODUCED,
    qh.PARTITIONS_SCANNED,
    qh.PARTITIONS_TOTAL,
    COALESCE(qh.BYTES_SPILLED_TO_LOCAL_STORAGE, 0) AS BYTES_SPILLED_LOCAL,
    COALESCE(qh.BYTES_SPILLED_TO_REMOTE_STORAGE, 0) AS BYTES_SPILLED_REMOTE,
    qh.EXECUTION_STATUS,
    qh.ERROR_CODE,
    qh.ERROR_MESSAGE,
    LEFT(qh.QUERY_TEXT, 2000) AS QUERY_TEXT_SAMPLE,
    qh.DATABASE_NAME,
    qh.SCHEMA_NAME,
    qh.QUERY_TYPE

FROM qh;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>sample data</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Fri, 08 Aug 2025 11:39:11 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/sampel-data-3g82</link>
      <guid>https://dev.to/armaankhan8270/sampel-data-3g82</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;{
  "QUERY_HISTORY_SUMMARY": {
    "QUERY_ID": "01b12345-0000-0000-0001-abc123456789",
    "QUERY_HASH": "a1b2c3d4e5f6g7h8i9j0",
    "QUERY_PARAMETERIZED_HASH": "x1y2z3w4q5r6t7u8v9w0",
    "QUERY_TEXT_PREVIEW": "SELECT * FROM sales WHERE date = '2025-08-07'...",
    "QUERY_TYPE": "SELECT",
    "QUERY_TAG": "sales_report",
    "START_TIME": "2025-08-07T10:00:00Z",
    "END_TIME": "2025-08-07T10:00:15Z",
    "TOTAL_ELAPSED_TIME": 15000,
    "COMPILATION_TIME": 2000,
    "EXECUTION_TIME": 13000,
    "USER_NAME": "john.doe",
    "USER_TYPE": "HUMAN",
    "ROLE_NAME": "ANALYST",
    "ROLE_TYPE": "USER",
    "SESSION_ID": "1234567890",
    "WAREHOUSE_ID": "WH123",
    "WAREHOUSE_NAME": "ANALYTICS_WH",
    "WAREHOUSE_SIZE": "MEDIUM",
    "WAREHOUSE_TYPE": "STANDARD",
    "CLUSTER_NUMBER": 1,
    "DATABASE_ID": "DB123",
    "DATABASE_NAME": "SALES_DB",
    "SCHEMA_ID": "SCH123",
    "SCHEMA_NAME": "PUBLIC",
    "USER_DATABASE_NAME": "SALES_DB",
    "USER_SCHEMA_NAME": "PUBLIC",
    "EXECUTION_STATUS": "SUCCESS",
    "ERROR_CODE": null,
    "ERROR_MESSAGE_PREVIEW": null,
    "BYTES_SCANNED": 104857600,
    "PERCENTAGE_SCANNED_FROM_CACHE": 80,
    "BYTES_WRITTEN": 5242880,
    "ROWS_PRODUCED": 1000,
    "ROWS_INSERTED": 0,
    "ROWS_UPDATED": 0,
    "ROWS_DELETED": 0,
    "CREDITS_USED_CLOUD_SERVICES": 0.5,
    "BYTES_SPILLED_TO_LOCAL_STORAGE": 0,
    "BYTES_SPILLED_TO_REMOTE_STORAGE": 0,
    "PARTITIONS_SCANNED": 10,
    "PARTITIONS_TOTAL": 100,
    "QUEUED_PROVISIONING_TIME": 0,
    "QUEUED_REPAIR_TIME": 0,
    "QUEUED_OVERLOAD_TIME": 0,
    "TRANSACTION_BLOCKED_TIME": 0,
    "DURATION_BUCKET": "10-20 seconds",
    "CREDIT_BUCKET": "40-60 cents",
    "SPILL_STATUS": "NO_SPILL",
    "QUEUE_STATUS": "NOT_QUEUED",
    "ANALYSIS_TIMESTAMP": "2025-08-08T05:07:00Z",
    "ANALYSIS_DATE": "2025-08-07"
  },
  "QUERY_DETAILS_COMPLETE": {
    "QUERY_ID": "01b12345-0000-0000-0001-abc123456789",
    "QUERY_TEXT": "SELECT * FROM sales WHERE date = '2025-08-07' ORDER BY revenue DESC LIMIT 1000",
    "QUERY_HASH": "a1b2c3d4e5f6g7h8i9j0",
    "QUERY_HASH_VERSION": 2,
    "QUERY_PARAMETERIZED_HASH": "x1y2z3w4q5r6t7u8v9w0",
    "QUERY_PARAMETERIZED_HASH_VERSION": 2,
    "QUERY_TYPE": "SELECT",
    "QUERY_TAG": "sales_report",
    "START_TIME": "2025-08-07T10:00:00Z",
    "END_TIME": "2025-08-07T10:00:15Z",
    "TOTAL_ELAPSED_TIME": 15000,
    "COMPILATION_TIME": 2000,
    "EXECUTION_TIME": 13000,
    "QUEUED_PROVISIONING_TIME": 0,
    "QUEUED_REPAIR_TIME": 0,
    "QUEUED_OVERLOAD_TIME": 0,
    "TRANSACTION_BLOCKED_TIME": 0,
    "CHILD_QUERIES_WAIT_TIME": 0,
    "QUERY_RETRY_TIME": 0,
    "QUERY_RETRY_CAUSE": null,
    "FAULT_HANDLING_TIME": 0,
    "LIST_EXTERNAL_FILES_TIME": 0,
    "USER_NAME": "john.doe",
    "USER_TYPE": "HUMAN",
    "ROLE_NAME": "ANALYST",
    "ROLE_TYPE": "USER",
    "SECONDARY_ROLE_STATS": null,
    "SESSION_ID": "1234567890",
    "WAREHOUSE_ID": "WH123",
    "WAREHOUSE_NAME": "ANALYTICS_WH",
    "WAREHOUSE_SIZE": "MEDIUM",
    "WAREHOUSE_TYPE": "STANDARD",
    "CLUSTER_NUMBER": 1,
    "QUERY_LOAD_PERCENT": 10,
    "DATABASE_ID": "DB123",
    "DATABASE_NAME": "SALES_DB",
    "SCHEMA_ID": "SCH123",
    "SCHEMA_NAME": "PUBLIC",
    "USER_DATABASE_ID": "DB123",
    "USER_DATABASE_NAME": "SALES_DB",
    "USER_SCHEMA_ID": "SCH123",
    "USER_SCHEMA_NAME": "PUBLIC",
    "EXECUTION_STATUS": "SUCCESS",
    "ERROR_CODE": null,
    "ERROR_MESSAGE": null,
    "IS_CLIENT_GENERATED_STATEMENT": false,
    "BYTES_SCANNED": 104857600,
    "PERCENTAGE_SCANNED_FROM_CACHE": 80,
    "BYTES_WRITTEN": 5242880,
    "BYTES_WRITTEN_TO_RESULT": 5242880,
    "BYTES_READ_FROM_RESULT": 5242880,
    "ROWS_PRODUCED": 1000,
    "ROWS_WRITTEN_TO_RESULT": 1000,
    "ROWS_INSERTED": 0,
    "ROWS_UPDATED": 0,
    "ROWS_DELETED": 0,
    "ROWS_UNLOADED": 0,
    "BYTES_DELETED": 0,
    "PARTITIONS_SCANNED": 10,
    "PARTITIONS_TOTAL": 100,
    "PARTITION_SCAN_PERCENTAGE": 10,
    "BYTES_SPILLED_TO_LOCAL_STORAGE": 0,
    "BYTES_SPILLED_TO_REMOTE_STORAGE": 0,
    "BYTES_SENT_OVER_THE_NETWORK": 5242880,
    "CREDITS_USED_CLOUD_SERVICES": 0.5,
    "OUTBOUND_DATA_TRANSFER_CLOUD": null,
    "OUTBOUND_DATA_TRANSFER_REGION": null,
    "OUTBOUND_DATA_TRANSFER_BYTES": 0,
    "INBOUND_DATA_TRANSFER_CLOUD": null,
    "INBOUND_DATA_TRANSFER_REGION": null,
    "INBOUND_DATA_TRANSFER_BYTES": 0,
    "EXTERNAL_FUNCTION_TOTAL_INVOCATIONS": 0,
    "EXTERNAL_FUNCTION_TOTAL_SENT_ROWS": 0,
    "EXTERNAL_FUNCTION_TOTAL_RECEIVED_ROWS": 0,
    "EXTERNAL_FUNCTION_TOTAL_SENT_BYTES": 0,
    "EXTERNAL_FUNCTION_TOTAL_RECEIVED_BYTES": 0,
    "QUERY_ACCELERATION_BYTES_SCANNED": 0,
    "QUERY_ACCELERATION_PARTITIONS_SCANNED": 0,
    "QUERY_ACCELERATION_UPPER_LIMIT_SCALE_FACTOR": 0,
    "TRANSACTION_ID": null,
    "RELEASE_VERSION": "7.0.0",
    "COMPILATION_TIME_PERCENTAGE": 13.33,
    "EXECUTION_TIME_PERCENTAGE": 86.67,
    "ROWS_PER_MB_SCANNED": 10,
    "PERFORMANCE_CATEGORY": "FAST",
    "CACHE_EFFICIENCY": "MEDIUM_CACHE_HIT",
    "SPILL_CLASSIFICATION": "NO_SPILL",
    "ANALYSIS_TIMESTAMP": "2025-08-08T05:07:00Z",
    "ANALYSIS_DATE": "2025-08-07"
  },
  "WAREHOUSE_ANALYTICS_DASHBOARD_with_queries": {
    "WAREHOUSE_ID": "WH123",
    "WAREHOUSE_NAME": "ANALYTICS_WH",
    "WAREHOUSE_SIZE": "MEDIUM",
    "WAREHOUSE_TYPE": "STANDARD",
    "CLUSTER_COUNT": 1,
    "SUSPEND_POLICY": null,
    "MIN_CLUSTER_COUNT": null,
    "MAX_CLUSTER_COUNT": null,
    "QUERIES_1_10_SEC": 50,
    "QUERIES_10_20_SEC": 20,
    "QUERIES_20_60_SEC": 10,
    "QUERIES_1_3_MIN": 5,
    "QUERIES_3_5_MIN": 2,
    "QUERIES_5_PLUS_MIN": 1,
    "QUEUED_1_2_MIN": 3,
    "QUEUED_2_5_MIN": 1,
    "QUEUED_5_10_MIN": 0,
    "QUEUED_10_20_MIN": 0,
    "QUEUED_20_PLUS_MIN": 0,
    "QUERIES_SPILLED_LOCAL": 2,
    "QUERIES_SPILLED_REMOTE": 1,
    "TOTAL_BYTES_SPILLED_LOCAL": 1048576,
    "TOTAL_BYTES_SPILLED_REMOTE": 524288,
    "FAILED_QUERIES": 5,
    "SUCCESSFUL_QUERIES": 80,
    "RUNNING_QUERIES": 3,
    "QUERIES_0_20_CENTS": 40,
    "QUERIES_20_40_CENTS": 20,
    "QUERIES_40_60_CENTS": 10,
    "QUERIES_60_80_CENTS": 5,
    "QUERIES_80_100_CENTS": 3,
    "QUERIES_100_PLUS_CENTS": 2,
    "QUERY_IDS": {
      "1-10_sec_ids": ["01b12345-0000-0000-0001-abc123456789", "01b12345-0000-0000-0001-pqr123456789"],
      "10-20_sec_ids": ["01b12345-0000-0000-0002-def123456789"],
      "20-60_sec_ids": [],
      "1-3_min_ids": [],
      "3-5_min_ids": [],
      "5_plus_min_ids": [],
      "queued_1-2_min_ids": ["01b12345-0000-0000-0003-ghi123456789"],
      "queued_2-5_min_ids": [],
      "queued_5-10_min_ids": [],
      "queued_10-20_min_ids": [],
      "queued_20_plus_min_ids": [],
      "spilled_local_ids": ["01b12345-0000-0000-0004-jkl123456789"],
      "spilled_remote_ids": [],
      "failed_queries_ids": ["01b12345-0000-0000-0005-mno123456789"],
      "successful_queries_ids": ["01b12345-0000-0000-0001-abc123456789", "01b12345-0000-0000-0002-def123456789"],
      "credit_0-20_cents_ids": ["01b12345-0000-0000-0001-abc123456789"],
      "credit_20-40_cents_ids": [],
      "credit_40-60_cents_ids": ["01b12345-0000-0000-0002-def123456789"],
      "credit_60-80_cents_ids": [],
      "credit_80-100_cents_ids": [],
      "credit_100_plus_cents_ids": []
    },
    "TOTAL_QUERIES": 88,
    "TOTAL_CREDITS_USED": 25.5,
    "TOTAL_COMPUTE_CREDITS": 20.0,
    "TOTAL_CLOUD_SERVICES_CREDITS": 5.5,
    "ANALYSIS_TIMESTAMP": "2025-08-08T05:07:00Z",
    "ANALYSIS_DATE": "2025-08-07"
  },
  "user_query_performance_report": {
    "user_name": "john.doe",
    "total_queries": 100,
    "warehouses_used": 2,
    "databases_accessed": 3,
    "total_credits": 30.5,
    "avg_execution_time_ms": 15000,
    "avg_bytes_per_row": 104857.6,
    "total_data_scanned_gb": 10.5,
    "failure_cancellation_rate_pct": 5.0,
    "spilled_queries": 3,
    "over_provisioned_queries": 2,
    "peak_hour_long_running_queries": 1,
    "select_star_queries": 5,
    "unpartitioned_scan_queries": 2,
    "repeated_queries": 10,
    "complex_join_queries": 4,
    "zero_result_queries": 3,
    "high_compile_queries": 2,
    "untagged_queries": 15,
    "unlimited_order_by_queries": 1,
    "large_group_by_queries": 2,
    "slow_queries": 5,
    "expensive_distinct_queries": 1,
    "inefficient_like_queries": 2,
    "no_results_with_scan_queries": 3,
    "cartesian_join_queries": 0,
    "high_compile_ratio_queries": 2,
    "weighted_score": 45.5,
    "cost_status": "Normal",
    "recommendations": [
      "Optimize memory usage or increase warehouse size.",
      "Specify columns instead of SELECT *."
    ],
    "query_samples": {
      "spilled": [
        {
          "query_id": "01b12345-0000-0000-0004-jkl123456789",
          "query_text": "SELECT * FROM large_table JOIN huge_table",
          "execution_time_ms": 300000,
          "bytes_scanned": 1073741824,
          "bytes_spilled_to_local_storage": 1048576,
          "bytes_spilled_to_remote_storage": 0,
          "warehouse_size": "LARGE",
          "start_time": "2025-08-07T10:00:00Z"
        }
      ],
      "slow_query": [
        {
          "query_id": "01b12345-0000-0000-0001-abc123456789",
          "query_text": "SELECT * FROM sales WHERE date = '2025-08-07' ORDER BY revenue DESC",
          "execution_time_ms": 15000,
          "bytes_scanned": 104857600,
          "warehouse_size": "MEDIUM",
          "start_time": "2025-08-07T10:00:00Z"
        }
      ],
      "over_provisioned": [],
      "peak_hour_long_running": [],
      "select_star": [
        {
          "query_id": "01b12345-0000-0000-0001-abc123456789",
          "query_text": "SELECT * FROM sales WHERE date = '2025-08-07' ORDER BY revenue DESC",
          "execution_time_ms": 15000,
          "bytes_scanned": 104857600,
          "warehouse_size": "MEDIUM",
          "start_time": "2025-08-07T10:00:00Z"
        }
      ],
      "unpartitioned_scan": [],
      "failed_cancelled": [],
      "zero_result_query": [],
      "high_compile_time": [],
      "untagged_query": [],
      "unlimited_order_by": [],
      "large_group_by": [],
      "expensive_distinct": [],
      "inefficient_like": [],
      "no_results_with_scan": [],
      "cartesian_join": [],
      "high_compile_ratio": []
    }
  }
}
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>ppp</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Fri, 08 Aug 2025 06:43:17 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/ppp-4k67</link>
      <guid>https://dev.to/armaankhan8270/ppp-4k67</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;I also have a React app, already fully set up with Tailwind CSS and lucide-react for icons (feel free to use anything). Give me a JSX file — a React page that shows all 4 tables, with no errors. Design the tables so they look good and are interactive, with search, filter, and sorting options. In the warehouse table, do not show the query-IDs column, and likewise in the user table do not show the sample-queries and recommendations columns — those columns will be used for the drill-down. In the warehouse page, when the user clicks on a query-count column, we pass: (1) the warehouse ID or name to uniquely identify it, (2) the column name, which identifies the type of query selected, and (3) the query IDs. Then in Python, create a function get_users_with_query_count_by_warehouse: it takes the query-table data plus the array of query IDs, filters the query summary details to those IDs, groups by user, and returns a DataFrame with user name, query count, and a query-ID array. Add one more function that takes query IDs and returns those IDs with their query details, which we use to generate a table showing query ID and preview text, with a button to view full query details. When the user clicks "view details", run another function that takes the ID and extracts all the details of that particular query; based on that, show a details page with everything we have for the query. All of these functions should be in Python with correct names, and in React we just call them and navigate. Design it so we have the first 5 tables for everything, then the warehouse table with drill-down, and the same for users.
For the user table, do not show the sample-query and recommendation columns — use them for the drill-down. For example, when someone clicks a particular user's query count, take the name of that column (i.e., the query type), extract all the queries of that type from the sample-query column, and show a table of the queries that user has run, with preview text and a "view details" button that shows the full details. I want seamless, best-quality code — the best of all time, in short.
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>react</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Thu, 07 Aug 2025 07:19:06 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/react-4k56</link>
      <guid>https://dev.to/armaankhan8270/react-4k56</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import React, { useState, useEffect } from 'react';
import { BarChart3, Database, Users, Clock, AlertCircle, CheckCircle, Activity, ArrowLeft, Eye, TrendingUp, Server } from 'lucide-react';

const API_BASE_URL = 'http://localhost:5000';

const WarehouseAnalyticsDashboard = () =&amp;gt; {
  const [currentView, setCurrentView] = useState('warehouses');
  const [warehouses, setWarehouses] = useState([]);
  const [selectedWarehouse, setSelectedWarehouse] = useState(null);
  const [selectedMetric, setSelectedMetric] = useState(null);
  const [selectedUser, setSelectedUser] = useState(null);
  const [queriesByMetric, setQueriesByMetric] = useState({});
  const [userQueries, setUserQueries] = useState([]);
  const [queryDetails, setQueryDetails] = useState(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState(null);

  // Fetch warehouses data
  useEffect(() =&amp;gt; {
    fetchWarehouses();
  }, []);

  const fetchWarehouses = async () =&amp;gt; {
    try {
      setLoading(true);
      const response = await fetch(`${API_BASE_URL}/warehouses`);
      const data = await response.json();

      if (data.status === 'success') {
        setWarehouses(data.data);
        setError(null);
      } else {
        setError(data.error || 'Failed to fetch warehouses');
      }
    } catch (err) {
      setError('Failed to connect to backend. Make sure Flask server is running.');
    } finally {
      setLoading(false);
    }
  };

  const fetchQueriesByMetric = async (warehouseId, metric) =&amp;gt; {
    try {
      setLoading(true);
      const response = await fetch(`${API_BASE_URL}/queries/by-warehouse/${warehouseId}/${metric}`);
      const data = await response.json();

      if (data.status === 'success') {
        setQueriesByMetric(data);
        setSelectedWarehouse(warehouseId);
        setSelectedMetric(metric);
        setCurrentView('queries-by-metric');
      }
    } catch (err) {
      setError('Failed to fetch queries');
    } finally {
      setLoading(false);
    }
  };

  const fetchUserQueries = async (warehouseId, metric, userName) =&amp;gt; {
    try {
      setLoading(true);
      const response = await fetch(`${API_BASE_URL}/queries/by-user/${warehouseId}/${metric}/${encodeURIComponent(userName)}`);
      const data = await response.json();

      if (data.status === 'success') {
        setUserQueries(data.data);
        setSelectedUser(userName);
        setCurrentView('user-queries');
      }
    } catch (err) {
      setError('Failed to fetch user queries');
    } finally {
      setLoading(false);
    }
  };

  const fetchQueryDetails = async (queryId) =&amp;gt; {
    try {
      setLoading(true);
      const response = await fetch(`${API_BASE_URL}/query/details/${queryId}`);
      const data = await response.json();

      if (data.status === 'success') {
        setQueryDetails(data.data);
        setCurrentView('query-details');
      }
    } catch (err) {
      setError('Failed to fetch query details');
    } finally {
      setLoading(false);
    }
  };

  const formatNumber = (num) =&amp;gt; {
    if (num === null || num === undefined) return '0';
    return num.toLocaleString();
  };

  const formatDuration = (ms) =&amp;gt; {
    if (!ms) return 'N/A';
    const seconds = Math.floor(ms / 1000);
    const minutes = Math.floor(seconds / 60);
    const hours = Math.floor(minutes / 60);

    if (hours &amp;gt; 0) return `${hours}h ${minutes % 60}m`;
    if (minutes &amp;gt; 0) return `${minutes}m ${seconds % 60}s`;
    return `${seconds}s`;
  };

  const getMetricColor = (metric) =&amp;gt; {
    const colors = {
      'queries-1-10-sec': 'text-green-600 bg-green-50',
      'queries-10-20-sec': 'text-blue-600 bg-blue-50',
      'queries-20-60-sec': 'text-yellow-600 bg-yellow-50',
      'queries-1-3-min': 'text-orange-600 bg-orange-50',
      'queries-3-5-min': 'text-red-600 bg-red-50',
      'queries-5-plus-min': 'text-red-800 bg-red-100',
      'failed-queries': 'text-red-600 bg-red-50',
      'successful-queries': 'text-green-600 bg-green-50',
      'queries-spilled-local': 'text-purple-600 bg-purple-50',
      'queries-spilled-remote': 'text-purple-800 bg-purple-100'
    };
    return colors[metric] || 'text-gray-600 bg-gray-50';
  };

  const getStatusIcon = (status) =&amp;gt; {
    switch (status) {
      case 'SUCCESS': return &amp;lt;CheckCircle className="w-4 h-4 text-green-500" /&amp;gt;;
      case 'FAIL': return &amp;lt;AlertCircle className="w-4 h-4 text-red-500" /&amp;gt;;
      case 'RUNNING': return &amp;lt;Activity className="w-4 h-4 text-blue-500" /&amp;gt;;
      default: return &amp;lt;Clock className="w-4 h-4 text-gray-500" /&amp;gt;;
    }
  };

  if (loading) {
    return (
      &amp;lt;div className="min-h-screen bg-gray-50 flex items-center justify-center"&amp;gt;
        &amp;lt;div className="flex flex-col items-center space-y-4"&amp;gt;
          &amp;lt;div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600"&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;p className="text-gray-600"&amp;gt;Loading analytics data...&amp;lt;/p&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;
    );
  }

  if (error) {
    return (
      &amp;lt;div className="min-h-screen bg-gray-50 flex items-center justify-center"&amp;gt;
        &amp;lt;div className="bg-white p-8 rounded-lg shadow-md max-w-md w-full"&amp;gt;
          &amp;lt;AlertCircle className="w-12 h-12 text-red-500 mx-auto mb-4" /&amp;gt;
          &amp;lt;h2 className="text-xl font-semibold text-gray-900 text-center mb-2"&amp;gt;Connection Error&amp;lt;/h2&amp;gt;
          &amp;lt;p className="text-gray-600 text-center mb-4"&amp;gt;{error}&amp;lt;/p&amp;gt;
          &amp;lt;button 
            onClick={fetchWarehouses}
            className="w-full bg-blue-600 text-white py-2 px-4 rounded-lg hover:bg-blue-700 transition-colors"
          &amp;gt;
            Retry
          &amp;lt;/button&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;
    );
  }

  return (
    &amp;lt;div className="min-h-screen bg-gray-50"&amp;gt;
      {/* Header */}
      &amp;lt;header className="bg-white shadow-sm border-b border-gray-200"&amp;gt;
        &amp;lt;div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8"&amp;gt;
          &amp;lt;div className="flex items-center justify-between h-16"&amp;gt;
            &amp;lt;div className="flex items-center space-x-4"&amp;gt;
              {currentView !== 'warehouses' &amp;amp;&amp;amp; (
                &amp;lt;button
                  onClick={() =&amp;gt; {
                    if (currentView === 'query-details') setCurrentView('user-queries');
                    else if (currentView === 'user-queries') setCurrentView('queries-by-metric');
                    else if (currentView === 'queries-by-metric') setCurrentView('warehouses');
                  }}
                  className="p-2 text-gray-600 hover:text-gray-900 hover:bg-gray-100 rounded-lg transition-colors"
                &amp;gt;
                  &amp;lt;ArrowLeft className="w-5 h-5" /&amp;gt;
                &amp;lt;/button&amp;gt;
              )}
              &amp;lt;div className="flex items-center space-x-3"&amp;gt;
                &amp;lt;Database className="w-8 h-8 text-blue-600" /&amp;gt;
                &amp;lt;h1 className="text-2xl font-bold text-gray-900"&amp;gt;Warehouse Analytics&amp;lt;/h1&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;
            &amp;lt;div className="flex items-center space-x-2 text-sm text-gray-500"&amp;gt;
              &amp;lt;Server className="w-4 h-4" /&amp;gt;
              &amp;lt;span&amp;gt;{warehouses.length} Warehouses&amp;lt;/span&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/header&amp;gt;

      &amp;lt;main className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8"&amp;gt;
        {currentView === 'warehouses' &amp;amp;&amp;amp; (
          &amp;lt;WarehousesView 
            warehouses={warehouses} 
            onMetricClick={fetchQueriesByMetric}
          /&amp;gt;
        )}

        {currentView === 'queries-by-metric' &amp;amp;&amp;amp; (
          &amp;lt;QueriesByMetricView 
            data={queriesByMetric}
            onUserClick={fetchUserQueries}
          /&amp;gt;
        )}

        {currentView === 'user-queries' &amp;amp;&amp;amp; (
          &amp;lt;UserQueriesView 
            queries={userQueries}
            userName={selectedUser}
            onQueryClick={fetchQueryDetails}
          /&amp;gt;
        )}

        {currentView === 'query-details' &amp;amp;&amp;amp; (
          &amp;lt;QueryDetailsView queryDetails={queryDetails} /&amp;gt;
        )}
      &amp;lt;/main&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Warehouses View Component
const WarehousesView = ({ warehouses, onMetricClick }) =&amp;gt; {
  const formatNumber = (num) =&amp;gt; num ? num.toLocaleString() : '0';

  const getClickableMetrics = (warehouse) =&amp;gt; [
    { key: 'QUERIES_1_10_SEC', label: '1-10 sec', value: warehouse.QUERIES_1_10_SEC, apiKey: '1-10-seconds' },
    { key: 'QUERIES_10_20_SEC', label: '10-20 sec', value: warehouse.QUERIES_10_20_SEC, apiKey: '10-20-seconds' },
    { key: 'QUERIES_20_60_SEC', label: '20-60 sec', value: warehouse.QUERIES_20_60_SEC, apiKey: '20-60-seconds' },
    { key: 'QUERIES_1_3_MIN', label: '1-3 min', value: warehouse.QUERIES_1_3_MIN, apiKey: '1-3-minutes' },
    { key: 'QUERIES_3_5_MIN', label: '3-5 min', value: warehouse.QUERIES_3_5_MIN, apiKey: '3-5-minutes' },
    { key: 'QUERIES_5_PLUS_MIN', label: '5+ min', value: warehouse.QUERIES_5_PLUS_MIN, apiKey: '5-plus-minutes' },
    { key: 'FAILED_QUERIES', label: 'Failed', value: warehouse.FAILED_QUERIES, apiKey: 'failed-queries' },
    { key: 'SUCCESSFUL_QUERIES', label: 'Success', value: warehouse.SUCCESSFUL_QUERIES, apiKey: 'successful-queries' },
    { key: 'QUERIES_SPILLED_LOCAL', label: 'Spilled Local', value: warehouse.QUERIES_SPILLED_LOCAL, apiKey: 'spilled-local' },
    { key: 'QUERIES_SPILLED_REMOTE', label: 'Spilled Remote', value: warehouse.QUERIES_SPILLED_REMOTE, apiKey: 'spilled-remote' }
  ];

  return (
    &amp;lt;div className="space-y-6"&amp;gt;
      &amp;lt;div className="flex items-center justify-between"&amp;gt;
        &amp;lt;h2 className="text-3xl font-bold text-gray-900"&amp;gt;Data Warehouses&amp;lt;/h2&amp;gt;
        &amp;lt;div className="text-sm text-gray-500"&amp;gt;Click on any metric to drill down&amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="grid gap-6"&amp;gt;
        {warehouses.map((warehouse) =&amp;gt; (
          &amp;lt;div key={warehouse.WAREHOUSE_ID} className="bg-white rounded-lg shadow-md border border-gray-200 overflow-hidden"&amp;gt;
            &amp;lt;div className="px-6 py-4 bg-gray-50 border-b border-gray-200"&amp;gt;
              &amp;lt;div className="flex items-center justify-between"&amp;gt;
                &amp;lt;div&amp;gt;
                  &amp;lt;h3 className="text-xl font-semibold text-gray-900"&amp;gt;{warehouse.WAREHOUSE_NAME}&amp;lt;/h3&amp;gt;
                  &amp;lt;p className="text-sm text-gray-600"&amp;gt;ID: {warehouse.WAREHOUSE_ID}&amp;lt;/p&amp;gt;
                &amp;lt;/div&amp;gt;
                &amp;lt;div className="text-right"&amp;gt;
                  &amp;lt;div className="text-2xl font-bold text-blue-600"&amp;gt;{formatNumber(warehouse.TOTAL_QUERIES)}&amp;lt;/div&amp;gt;
                  &amp;lt;div className="text-sm text-gray-500"&amp;gt;Total Queries&amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;

            &amp;lt;div className="p-6"&amp;gt;
              &amp;lt;div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-5 gap-4"&amp;gt;
                {getClickableMetrics(warehouse).map((metric) =&amp;gt; (
                  &amp;lt;div
                    key={metric.key}
                    onClick={() =&amp;gt; metric.value &amp;gt; 0 &amp;amp;&amp;amp; onMetricClick(warehouse.WAREHOUSE_ID, metric.apiKey)}
                    className={`p-4 rounded-lg border transition-all duration-200 ${
                      metric.value &amp;gt; 0 
                        ? 'cursor-pointer hover:shadow-md hover:scale-105 border-blue-200 bg-blue-50' 
                        : 'border-gray-200 bg-gray-50'
                    }`}
                  &amp;gt;
                    &amp;lt;div className="text-2xl font-bold text-gray-900"&amp;gt;{formatNumber(metric.value)}&amp;lt;/div&amp;gt;
                    &amp;lt;div className="text-sm text-gray-600 mt-1"&amp;gt;{metric.label}&amp;lt;/div&amp;gt;
                    {metric.value &amp;gt; 0 &amp;amp;&amp;amp; (
                      &amp;lt;div className="text-xs text-blue-600 mt-2 flex items-center"&amp;gt;
                        &amp;lt;TrendingUp className="w-3 h-3 mr-1" /&amp;gt;
                        Click to explore
                      &amp;lt;/div&amp;gt;
                    )}
                  &amp;lt;/div&amp;gt;
                ))}
              &amp;lt;/div&amp;gt;

              &amp;lt;div className="mt-6 pt-4 border-t border-gray-200"&amp;gt;
                &amp;lt;div className="grid grid-cols-1 md:grid-cols-3 gap-4 text-sm"&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-gray-500"&amp;gt;Size:&amp;lt;/span&amp;gt; 
                    &amp;lt;span className="ml-2 font-medium"&amp;gt;{warehouse.WAREHOUSE_SIZE || 'N/A'}&amp;lt;/span&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-gray-500"&amp;gt;Type:&amp;lt;/span&amp;gt; 
                    &amp;lt;span className="ml-2 font-medium"&amp;gt;{warehouse.WAREHOUSE_TYPE || 'N/A'}&amp;lt;/span&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-gray-500"&amp;gt;Credits Used:&amp;lt;/span&amp;gt; 
                    &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatNumber(warehouse.TOTAL_CREDITS_USED)}&amp;lt;/span&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        ))}
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Queries by Metric View Component
const QueriesByMetricView = ({ data, onUserClick }) =&amp;gt; {
  const formatNumber = (num) =&amp;gt; num ? num.toLocaleString() : '0';

  return (
    &amp;lt;div className="space-y-6"&amp;gt;
      &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
        &amp;lt;h2 className="text-2xl font-bold text-gray-900 mb-4"&amp;gt;
          Queries by {data.metric_type?.replace(/-/g, ' ')} - Warehouse {data.warehouse_id}
        &amp;lt;/h2&amp;gt;
        &amp;lt;div className="grid grid-cols-1 md:grid-cols-3 gap-4 text-sm bg-gray-50 p-4 rounded-lg"&amp;gt;
          &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Total Queries:&amp;lt;/span&amp;gt; &amp;lt;span className="font-semibold ml-2"&amp;gt;{formatNumber(data.count)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Users:&amp;lt;/span&amp;gt; &amp;lt;span className="font-semibold ml-2"&amp;gt;{formatNumber(data.users)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Metric:&amp;lt;/span&amp;gt; &amp;lt;span className="font-semibold ml-2 capitalize"&amp;gt;{data.metric_type?.replace(/-/g, ' ')}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="grid gap-4"&amp;gt;
        {Object.entries(data.data || {}).map(([userName, queries]) =&amp;gt; (
          &amp;lt;div
            key={userName}
            onClick={() =&amp;gt; onUserClick(data.warehouse_id, data.metric_type, userName)}
            className="bg-white rounded-lg shadow-md p-6 cursor-pointer hover:shadow-lg hover:bg-gray-50 transition-all duration-200 border border-gray-200 hover:border-blue-300"
          &amp;gt;
            &amp;lt;div className="flex items-center justify-between"&amp;gt;
              &amp;lt;div className="flex items-center space-x-3"&amp;gt;
                &amp;lt;Users className="w-6 h-6 text-blue-600" /&amp;gt;
                &amp;lt;div&amp;gt;
                  &amp;lt;h3 className="text-lg font-semibold text-gray-900"&amp;gt;{userName}&amp;lt;/h3&amp;gt;
                  &amp;lt;p className="text-sm text-gray-600"&amp;gt;{queries.length} queries in this category&amp;lt;/p&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
              &amp;lt;div className="text-right"&amp;gt;
                &amp;lt;div className="text-2xl font-bold text-blue-600"&amp;gt;{queries.length}&amp;lt;/div&amp;gt;
                &amp;lt;div className="text-xs text-blue-600 flex items-center justify-end mt-1"&amp;gt;
                  &amp;lt;Eye className="w-3 h-3 mr-1" /&amp;gt;
                  View Details
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        ))}
      &amp;lt;/div&amp;gt;

      {Object.keys(data.data || {}).length === 0 &amp;amp;&amp;amp; (
        &amp;lt;div className="text-center py-12 bg-white rounded-lg shadow-md"&amp;gt;
          &amp;lt;AlertCircle className="w-12 h-12 text-gray-400 mx-auto mb-4" /&amp;gt;
          &amp;lt;h3 className="text-lg font-medium text-gray-900 mb-2"&amp;gt;No queries found&amp;lt;/h3&amp;gt;
          &amp;lt;p className="text-gray-600"&amp;gt;No queries match this metric for the selected warehouse.&amp;lt;/p&amp;gt;
        &amp;lt;/div&amp;gt;
      )}
    &amp;lt;/div&amp;gt;
  );
};

// User Queries View Component
const UserQueriesView = ({ queries, userName, onQueryClick }) =&amp;gt; {
  const formatDuration = (ms) =&amp;gt; {
    if (!ms) return 'N/A';
    const seconds = Math.floor(ms / 1000);
    const minutes = Math.floor(seconds / 60);
    return minutes &amp;gt; 0 ? `${minutes}m ${seconds % 60}s` : `${seconds}s`;
  };

  const getStatusIcon = (status) =&amp;gt; {
    switch (status) {
      case 'SUCCESS': return &amp;lt;CheckCircle className="w-4 h-4 text-green-500" /&amp;gt;;
      case 'FAIL': return &amp;lt;AlertCircle className="w-4 h-4 text-red-500" /&amp;gt;;
      case 'RUNNING': return &amp;lt;Activity className="w-4 h-4 text-blue-500" /&amp;gt;;
      default: return &amp;lt;Clock className="w-4 h-4 text-gray-500" /&amp;gt;;
    }
  };

  return (
    &amp;lt;div className="space-y-6"&amp;gt;
      &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
        &amp;lt;h2 className="text-2xl font-bold text-gray-900 mb-4"&amp;gt;Queries for {userName}&amp;lt;/h2&amp;gt;
        &amp;lt;p className="text-gray-600"&amp;gt;{queries.length} queries found&amp;lt;/p&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="bg-white rounded-lg shadow-md overflow-hidden"&amp;gt;
        &amp;lt;div className="overflow-x-auto"&amp;gt;
          &amp;lt;table className="w-full"&amp;gt;
            &amp;lt;thead className="bg-gray-50"&amp;gt;
              &amp;lt;tr&amp;gt;
                &amp;lt;th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"&amp;gt;Query&amp;lt;/th&amp;gt;
                &amp;lt;th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"&amp;gt;Status&amp;lt;/th&amp;gt;
                &amp;lt;th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"&amp;gt;Duration&amp;lt;/th&amp;gt;
                &amp;lt;th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"&amp;gt;Start Time&amp;lt;/th&amp;gt;
                &amp;lt;th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"&amp;gt;Actions&amp;lt;/th&amp;gt;
              &amp;lt;/tr&amp;gt;
            &amp;lt;/thead&amp;gt;
            &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
              {queries.map((query, index) =&amp;gt; (
                &amp;lt;tr key={query.QUERY_ID} className="hover:bg-gray-50"&amp;gt;
                  &amp;lt;td className="px-6 py-4"&amp;gt;
                    &amp;lt;div className="text-sm font-medium text-gray-900 truncate max-w-xs"&amp;gt;
                      {query.QUERY_TEXT_PREVIEW || query.QUERY_TYPE || 'N/A'}
                    &amp;lt;/div&amp;gt;
                    &amp;lt;div className="text-xs text-gray-500"&amp;gt;ID: {query.QUERY_ID}&amp;lt;/div&amp;gt;
                  &amp;lt;/td&amp;gt;
                  &amp;lt;td className="px-6 py-4"&amp;gt;
                    &amp;lt;div className="flex items-center space-x-2"&amp;gt;
                      {getStatusIcon(query.EXECUTION_STATUS)}
                      &amp;lt;span className="text-sm text-gray-900"&amp;gt;{query.EXECUTION_STATUS}&amp;lt;/span&amp;gt;
                    &amp;lt;/div&amp;gt;
                  &amp;lt;/td&amp;gt;
                  &amp;lt;td className="px-6 py-4 text-sm text-gray-900"&amp;gt;
                    {formatDuration(query.TOTAL_ELAPSED_TIME)}
                  &amp;lt;/td&amp;gt;
                  &amp;lt;td className="px-6 py-4 text-sm text-gray-900"&amp;gt;
                    {query.START_TIME ? new Date(query.START_TIME).toLocaleString() : 'N/A'}
                  &amp;lt;/td&amp;gt;
                  &amp;lt;td className="px-6 py-4"&amp;gt;
                    &amp;lt;button
                      onClick={() =&amp;gt; onQueryClick(query.QUERY_ID)}
                      className="inline-flex items-center px-3 py-1 text-xs font-medium text-blue-600 bg-blue-100 rounded-full hover:bg-blue-200 transition-colors"
                    &amp;gt;
                      &amp;lt;Eye className="w-3 h-3 mr-1" /&amp;gt;
                      Details
                    &amp;lt;/button&amp;gt;
                  &amp;lt;/td&amp;gt;
                &amp;lt;/tr&amp;gt;
              ))}
            &amp;lt;/tbody&amp;gt;
          &amp;lt;/table&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Query Details View Component
const QueryDetailsView = ({ queryDetails }) =&amp;gt; {
  const formatDuration = (ms) =&amp;gt; {
    if (!ms) return 'N/A';
    const seconds = Math.floor(ms / 1000);
    const minutes = Math.floor(seconds / 60);
    return minutes &amp;gt; 0 ? `${minutes}m ${seconds % 60}s` : `${seconds}s`;
  };

  const formatBytes = (bytes) =&amp;gt; {
    if (!bytes) return 'N/A';
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    let size = bytes;
    let unitIndex = 0;

    while (size &amp;gt;= 1024 &amp;amp;&amp;amp; unitIndex &amp;lt; units.length - 1) {
      size /= 1024;
      unitIndex++;
    }

    return `${size.toFixed(2)} ${units[unitIndex]}`;
  };

  const formatNumber = (num) =&amp;gt; num ? num.toLocaleString() : 'N/A';

  return (
    &amp;lt;div className="space-y-6"&amp;gt;
      &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
        &amp;lt;h2 className="text-2xl font-bold text-gray-900 mb-4"&amp;gt;Query Details&amp;lt;/h2&amp;gt;
        &amp;lt;div className="text-sm text-gray-600 bg-gray-50 p-3 rounded-lg"&amp;gt;
          &amp;lt;strong&amp;gt;Query ID:&amp;lt;/strong&amp;gt; {queryDetails.QUERY_ID}
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="grid grid-cols-1 lg:grid-cols-2 gap-6"&amp;gt;
        {/* Basic Information */}
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-gray-900 mb-4 flex items-center"&amp;gt;
            &amp;lt;BarChart3 className="w-5 h-5 mr-2 text-blue-600" /&amp;gt;
            Basic Information
          &amp;lt;/h3&amp;gt;
          &amp;lt;div className="space-y-3"&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Query Type:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.QUERY_TYPE}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Status:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.EXECUTION_STATUS}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;User:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.USER_NAME}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Role:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.ROLE_NAME}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Database:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.DATABASE_NAME}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Schema:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.SCHEMA_NAME}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Performance Metrics */}
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-gray-900 mb-4 flex items-center"&amp;gt;
            &amp;lt;TrendingUp className="w-5 h-5 mr-2 text-green-600" /&amp;gt;
            Performance Metrics
          &amp;lt;/h3&amp;gt;
          &amp;lt;div className="space-y-3"&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Total Duration:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatDuration(queryDetails.TOTAL_ELAPSED_TIME)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Execution Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatDuration(queryDetails.EXECUTION_TIME)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Compilation Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatDuration(queryDetails.COMPILATION_TIME)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Rows Produced:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatNumber(queryDetails.ROWS_PRODUCED)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Bytes Scanned:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatBytes(queryDetails.BYTES_SCANNED)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Cache Hit %:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.PERCENTAGE_SCANNED_FROM_CACHE}%&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Warehouse Information */}
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-gray-900 mb-4 flex items-center"&amp;gt;
            &amp;lt;Server className="w-5 h-5 mr-2 text-purple-600" /&amp;gt;
            Warehouse Information
          &amp;lt;/h3&amp;gt;
          &amp;lt;div className="space-y-3"&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Warehouse:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.WAREHOUSE_NAME}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Size:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.WAREHOUSE_SIZE}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Type:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.WAREHOUSE_TYPE}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Credits Used:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.CREDITS_USED_CLOUD_SERVICES}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Local Spill:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatBytes(queryDetails.BYTES_SPILLED_TO_LOCAL_STORAGE)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Remote Spill:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatBytes(queryDetails.BYTES_SPILLED_TO_REMOTE_STORAGE)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Timing Details */}
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-gray-900 mb-4 flex items-center"&amp;gt;
            &amp;lt;Clock className="w-5 h-5 mr-2 text-orange-600" /&amp;gt;
            Timing Details
          &amp;lt;/h3&amp;gt;
          &amp;lt;div className="space-y-3"&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Start Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.START_TIME ? new Date(queryDetails.START_TIME).toLocaleString() : 'N/A'}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;End Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.END_TIME ? new Date(queryDetails.END_TIME).toLocaleString() : 'N/A'}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Queue Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatDuration(queryDetails.QUEUED_PROVISIONING_TIME)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Blocked Time:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{formatDuration(queryDetails.TRANSACTION_BLOCKED_TIME)}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Performance:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.PERFORMANCE_CATEGORY}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Cache Efficiency:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium"&amp;gt;{queryDetails.CACHE_EFFICIENCY}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      {/* Query Text */}
      {queryDetails.QUERY_TEXT &amp;amp;&amp;amp; (
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-gray-900 mb-4"&amp;gt;Query Text&amp;lt;/h3&amp;gt;
          &amp;lt;div className="bg-gray-900 text-gray-100 p-4 rounded-lg overflow-x-auto"&amp;gt;
            &amp;lt;pre className="text-sm whitespace-pre-wrap"&amp;gt;{queryDetails.QUERY_TEXT}&amp;lt;/pre&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )}

      {/* Error Information */}
      {queryDetails.ERROR_MESSAGE &amp;amp;&amp;amp; (
        &amp;lt;div className="bg-white rounded-lg shadow-md p-6"&amp;gt;
          &amp;lt;h3 className="text-lg font-semibold text-red-600 mb-4 flex items-center"&amp;gt;
            &amp;lt;AlertCircle className="w-5 h-5 mr-2" /&amp;gt;
            Error Information
          &amp;lt;/h3&amp;gt;
          &amp;lt;div className="space-y-3"&amp;gt;
            &amp;lt;div&amp;gt;&amp;lt;span className="text-gray-500"&amp;gt;Error Code:&amp;lt;/span&amp;gt; &amp;lt;span className="ml-2 font-medium text-red-600"&amp;gt;{queryDetails.ERROR_CODE}&amp;lt;/span&amp;gt;&amp;lt;/div&amp;gt;
            &amp;lt;div className="bg-red-50 p-4 rounded-lg"&amp;gt;
              &amp;lt;span className="text-red-800"&amp;gt;{queryDetails.ERROR_MESSAGE}&amp;lt;/span&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )}
    &amp;lt;/div&amp;gt;
  );
};

export default WarehouseAnalyticsDashboard;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>best</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Thu, 07 Aug 2025 07:18:29 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/best-2glf</link>
      <guid>https://dev.to/armaankhan8270/best-2glf</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;from flask import Flask, jsonify, request
from flask_cors import CORS
import snowflake.connector
import pandas as pd
import json
import os
from datetime import datetime
import logging
from typing import Dict, List, Optional, Any
import uuid

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)
CORS(app)

class SnowflakeDataManager:
    """Manages Snowflake connections and data caching"""

    def __init__(self):
        self.cache_dir = "cache"
        self.ensure_cache_dir()

    def ensure_cache_dir(self):
        """Create cache directory if it doesn't exist"""
        if not os.path.exists(self.cache_dir):
            os.makedirs(self.cache_dir)

    def execute_query(self, cursor, query: str) -&amp;gt; pd.DataFrame:
        """Execute query and return DataFrame"""
        try:
            cursor.execute(query)
            results = cursor.fetchall()
            columns = [desc[0] for desc in cursor.description]
            df = pd.DataFrame(results, columns=columns)
            logger.info(f"Query executed successfully. Rows returned: {len(df)}")
            return df
        except Exception as e:
            logger.error(f"Error executing query: {str(e)}")
            raise

    def save_to_cache(self, data: pd.DataFrame, filename: str):
        """Save DataFrame to multiple formats for caching"""
        base_path = os.path.join(self.cache_dir, filename)

        try:
            # Save as CSV
            data.to_csv(f"{base_path}.csv", index=False)

            # Save as JSON
            data.to_json(f"{base_path}.json", orient='records', date_format='iso')

            # Save as Parquet (most efficient)
            data.to_parquet(f"{base_path}.parquet", index=False)

            logger.info(f"Data cached successfully: {filename}")
        except Exception as e:
            logger.error(f"Error saving cache: {str(e)}")
            raise

    def load_from_cache(self, filename: str, format_type: str = 'parquet') -&amp;gt; Optional[pd.DataFrame]:
        """Load DataFrame from cache"""
        cache_path = os.path.join(self.cache_dir, f"{filename}.{format_type}")

        try:
            if os.path.exists(cache_path):
                if format_type == 'parquet':
                    return pd.read_parquet(cache_path)
                elif format_type == 'json':
                    return pd.read_json(cache_path)
                elif format_type == 'csv':
                    return pd.read_csv(cache_path)
            return None
        except Exception as e:
            logger.error(f"Error loading cache: {str(e)}")
            return None

    def is_cache_valid(self, filename: str, hours: int = 24) -&amp;gt; bool:
        """Check if cache is still valid based on timestamp"""
        cache_path = os.path.join(self.cache_dir, f"{filename}.parquet")

        if not os.path.exists(cache_path):
            return False

        file_time = datetime.fromtimestamp(os.path.getmtime(cache_path))
        time_diff = datetime.now() - file_time

        return time_diff.total_seconds() &amp;lt; (hours * 3600)

# Global data manager instance
data_manager = SnowflakeDataManager()

# Global variables to store data
warehouse_data = None
query_summary_data = None
query_details_data = None

@app.route('/health', methods=['GET'])
def health_check():
    """Health check endpoint"""
    return jsonify({
        'status': 'healthy',
        'timestamp': datetime.now().isoformat(),
        'cache_status': {
            'warehouse': warehouse_data is not None,
            'query_summary': query_summary_data is not None,
            'query_details': query_details_data is not None
        }
    })

@app.route('/initialize', methods=['POST'])
def initialize_data():
    """Initialize data from Snowflake using provided cursor"""
    global warehouse_data, query_summary_data, query_details_data

    try:
        # Get cursor from request (you'll pass this from your application)
        cursor = request.json.get('cursor')

        if not cursor:
            return jsonify({'error': 'Snowflake cursor required'}), 400

        # Check cache first
        if (data_manager.is_cache_valid('warehouse_analytics') and 
            data_manager.is_cache_valid('query_history_summary') and 
            data_manager.is_cache_valid('query_details_complete')):

            warehouse_data = data_manager.load_from_cache('warehouse_analytics')
            query_summary_data = data_manager.load_from_cache('query_history_summary')
            query_details_data = data_manager.load_from_cache('query_details_complete')

            logger.info("Data loaded from cache")
            return jsonify({
                'status': 'success',
                'source': 'cache',
                'warehouse_rows': len(warehouse_data),
                'query_summary_rows': len(query_summary_data),
                'query_details_rows': len(query_details_data)
            })

        # Execute queries to get fresh data
        logger.info("Fetching fresh data from Snowflake...")

        # Query 1: Warehouse Analytics Dashboard
        warehouse_query = "SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD_with_queries"
        warehouse_data = data_manager.execute_query(cursor, warehouse_query)
        data_manager.save_to_cache(warehouse_data, 'warehouse_analytics')

        # Query 2: Query History Summary
        summary_query = "SELECT * FROM QUERY_HISTORY_SUMMARY"
        query_summary_data = data_manager.execute_query(cursor, summary_query)
        data_manager.save_to_cache(query_summary_data, 'query_history_summary')

        # Query 3: Query Details Complete
        details_query = "SELECT * FROM QUERY_DETAILS_COMPLETE"
        query_details_data = data_manager.execute_query(cursor, details_query)
        data_manager.save_to_cache(query_details_data, 'query_details_complete')

        logger.info("All data fetched and cached successfully")

        return jsonify({
            'status': 'success',
            'source': 'database',
            'warehouse_rows': len(warehouse_data),
            'query_summary_rows': len(query_summary_data),
            'query_details_rows': len(query_details_data)
        })

    except Exception as e:
        logger.error(f"Error initializing data: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/initialize-with-cursor', methods=['POST'])
def initialize_with_direct_cursor():
    """Initialize data when you have direct access to cursor object"""
    global warehouse_data, query_summary_data, query_details_data

    # This endpoint expects you to call it directly from Python with cursor
    # You would use this in your Python script that has the Snowflake cursor

    return jsonify({'message': 'Use initialize_data_direct function instead'})

def initialize_data_direct(cursor):
    """Direct initialization function - call this from your Python script"""
    global warehouse_data, query_summary_data, query_details_data

    try:
        # Check cache first
        if (data_manager.is_cache_valid('warehouse_analytics') and 
            data_manager.is_cache_valid('query_history_summary') and 
            data_manager.is_cache_valid('query_details_complete')):

            warehouse_data = data_manager.load_from_cache('warehouse_analytics')
            query_summary_data = data_manager.load_from_cache('query_history_summary')
            query_details_data = data_manager.load_from_cache('query_details_complete')

            logger.info("Data loaded from cache")
            return True

        # Execute queries to get fresh data
        logger.info("Fetching fresh data from Snowflake...")

        # Query 1: Warehouse Analytics Dashboard
        warehouse_query = "SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD_with_queries"
        warehouse_data = data_manager.execute_query(cursor, warehouse_query)
        data_manager.save_to_cache(warehouse_data, 'warehouse_analytics')

        # Query 2: Query History Summary
        summary_query = "SELECT * FROM QUERY_HISTORY_SUMMARY"
        query_summary_data = data_manager.execute_query(cursor, summary_query)
        data_manager.save_to_cache(query_summary_data, 'query_history_summary')

        # Query 3: Query Details Complete
        details_query = "SELECT * FROM QUERY_DETAILS_COMPLETE"
        query_details_data = data_manager.execute_query(cursor, details_query)
        data_manager.save_to_cache(query_details_data, 'query_details_complete')

        logger.info("All data fetched and cached successfully")
        return True

    except Exception as e:
        logger.error(f"Error initializing data: {str(e)}")
        return False

@app.route('/warehouses', methods=['GET'])
def get_warehouses():
    """Get all warehouse data"""
    global warehouse_data

    if warehouse_data is None:
        return jsonify({'error': 'Data not initialized. Call /initialize first'}), 400

    try:
        # Convert to dict and handle JSON serialization
        result = []
        for _, row in warehouse_data.iterrows():
            row_dict = row.to_dict()

            # Handle QUERY_IDS column (JSON object)
            if 'QUERY_IDS' in row_dict:
                if pd.isna(row_dict['QUERY_IDS']):
                    row_dict['QUERY_IDS'] = {}
                elif isinstance(row_dict['QUERY_IDS'], str):
                    try:
                        row_dict['QUERY_IDS'] = json.loads(row_dict['QUERY_IDS'])
                    except:
                        row_dict['QUERY_IDS'] = {}

            # Handle datetime objects
            for key, value in row_dict.items():
                if pd.isna(value):
                    row_dict[key] = None
                elif isinstance(value, pd.Timestamp):
                    row_dict[key] = value.isoformat()
                elif isinstance(value, (pd.Int64Dtype, pd.Float64Dtype)):
                    row_dict[key] = None if pd.isna(value) else value

            result.append(row_dict)

        return jsonify({
            'status': 'success',
            'count': len(result),
            'data': result
        })

    except Exception as e:
        logger.error(f"Error getting warehouses: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/queries/by-warehouse/&amp;lt;warehouse_id&amp;gt;/&amp;lt;metric_type&amp;gt;', methods=['GET'])
def get_queries_by_warehouse_metric(warehouse_id, metric_type):
    """Get queries for specific warehouse and metric type"""
    global warehouse_data, query_summary_data

    if warehouse_data is None or query_summary_data is None:
        return jsonify({'error': 'Data not initialized. Call /initialize first'}), 400

    try:
        # Find the warehouse
        warehouse_row = warehouse_data[warehouse_data['WAREHOUSE_ID'] == warehouse_id]
        if warehouse_row.empty:
            return jsonify({'error': 'Warehouse not found'}), 404

        # Get query IDs for the metric type
        query_ids_json = warehouse_row.iloc[0]['QUERY_IDS']
        if isinstance(query_ids_json, str):
            query_ids_data = json.loads(query_ids_json)
        else:
            query_ids_data = query_ids_json if query_ids_json else {}

        # Map metric types to QUERY_IDS keys
        metric_mapping = {
            '1-10-seconds': '1-10_sec_ids',
            '10-20-seconds': '10-20_sec_ids',
            '20-60-seconds': '20-60_sec_ids',
            '1-3-minutes': '1-3_min_ids',
            '3-5-minutes': '3-5_min_ids',
            '5-plus-minutes': '5_plus_min_ids',
            'queued-1-2-minutes': 'queued_1-2_min_ids',
            'queued-2-5-minutes': 'queued_2-5_min_ids',
            'queued-5-10-minutes': 'queued_5-10_min_ids',
            'queued-10-20-minutes': 'queued_10-20_min_ids',
            'queued-20-plus-minutes': 'queued_20_plus_min_ids',
            'spilled-local': 'spilled_local_ids',
            'spilled-remote': 'spilled_remote_ids',
            'failed-queries': 'failed_queries_ids',
            'successful-queries': 'successful_queries_ids',
            'running-queries': 'running_queries_ids'
        }

        query_ids_key = metric_mapping.get(metric_type)
        if not query_ids_key:
            return jsonify({'error': 'Invalid metric type'}), 400

        query_ids = query_ids_data.get(query_ids_key, [])
        # Filter out null values
        query_ids = [qid for qid in query_ids if qid is not None]

        if not query_ids:
            return jsonify({
                'status': 'success',
                'warehouse_id': warehouse_id,
                'metric_type': metric_type,
                'count': 0,
                'data': []
            })

        # Get queries from summary data
        filtered_queries = query_summary_data[query_summary_data['QUERY_ID'].isin(query_ids)]

        # Group by user
        grouped_data = {}
        for _, query in filtered_queries.iterrows():
            user_name = query['USER_NAME']
            if user_name not in grouped_data:
                grouped_data[user_name] = []

            query_dict = query.to_dict()
            # Handle datetime and NaN values
            for key, value in query_dict.items():
                if pd.isna(value):
                    query_dict[key] = None
                elif isinstance(value, pd.Timestamp):
                    query_dict[key] = value.isoformat()

            grouped_data[user_name].append(query_dict)

        return jsonify({
            'status': 'success',
            'warehouse_id': warehouse_id,
            'metric_type': metric_type,
            'count': len(filtered_queries),
            'users': len(grouped_data),
            'data': grouped_data
        })

    except Exception as e:
        logger.error(f"Error getting queries by warehouse metric: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/queries/by-user/&amp;lt;warehouse_id&amp;gt;/&amp;lt;metric_type&amp;gt;/&amp;lt;user_name&amp;gt;', methods=['GET'])
def get_queries_by_user(warehouse_id, metric_type, user_name):
    """Get queries for specific user in warehouse metric"""
    global warehouse_data, query_summary_data

    if warehouse_data is None or query_summary_data is None:
        return jsonify({'error': 'Data not initialized. Call /initialize first'}), 400

    try:
        # Get queries for warehouse and metric first
        response_data = get_queries_by_warehouse_metric(warehouse_id, metric_type)
        if response_data.status_code != 200:
            return response_data

        response_json = response_data.get_json()
        user_queries = response_json['data'].get(user_name, [])

        return jsonify({
            'status': 'success',
            'warehouse_id': warehouse_id,
            'metric_type': metric_type,
            'user_name': user_name,
            'count': len(user_queries),
            'data': user_queries
        })

    except Exception as e:
        logger.error(f"Error getting queries by user: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/query/details/&amp;lt;query_id&amp;gt;', methods=['GET'])
def get_query_details(query_id):
    """Get detailed information for a specific query"""
    global query_details_data

    if query_details_data is None:
        return jsonify({'error': 'Data not initialized. Call /initialize first'}), 400

    try:
        # Find the specific query
        query_detail = query_details_data[query_details_data['QUERY_ID'] == query_id]

        if query_detail.empty:
            return jsonify({'error': 'Query not found'}), 404

        # Convert to dict
        result = query_detail.iloc[0].to_dict()

        # Handle datetime and NaN values
        for key, value in result.items():
            if pd.isna(value):
                result[key] = None
            elif isinstance(value, pd.Timestamp):
                result[key] = value.isoformat()

        return jsonify({
            'status': 'success',
            'query_id': query_id,
            'data': result
        })

    except Exception as e:
        logger.error(f"Error getting query details: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/queries/summary', methods=['GET'])
def get_query_summary():
    """Get query summary with optional filters"""
    global query_summary_data

    if query_summary_data is None:
        return jsonify({'error': 'Data not initialized. Call /initialize first'}), 400

    try:
        # Get query parameters for filtering
        warehouse_id = request.args.get('warehouse_id')
        user_name = request.args.get('user_name')
        execution_status = request.args.get('execution_status')
        duration_bucket = request.args.get('duration_bucket')

        filtered_data = query_summary_data.copy()

        # Apply filters
        if warehouse_id:
            filtered_data = filtered_data[filtered_data['WAREHOUSE_ID'] == warehouse_id]

        if user_name:
            filtered_data = filtered_data[filtered_data['USER_NAME'] == user_name]

        if execution_status:
            filtered_data = filtered_data[filtered_data['EXECUTION_STATUS'] == execution_status]

        if duration_bucket:
            filtered_data = filtered_data[filtered_data['DURATION_BUCKET'] == duration_bucket]

        # Convert to list of dicts
        result = []
        for _, row in filtered_data.iterrows():
            row_dict = row.to_dict()

            # Handle datetime and NaN values
            for key, value in row_dict.items():
                if pd.isna(value):
                    row_dict[key] = None
                elif isinstance(value, pd.Timestamp):
                    row_dict[key] = value.isoformat()

            result.append(row_dict)

        return jsonify({
            'status': 'success',
            'count': len(result),
            'filters': {
                'warehouse_id': warehouse_id,
                'user_name': user_name,
                'execution_status': execution_status,
                'duration_bucket': duration_bucket
            },
            'data': result
        })

    except Exception as e:
        logger.error(f"Error getting query summary: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/cache/refresh', methods=['POST'])
def refresh_cache():
    """Force refresh cache (requires cursor to be provided)"""
    try:
        cursor = request.json.get('cursor')
        if not cursor:
            return jsonify({'error': 'Snowflake cursor required'}), 400

        # Force refresh by calling initialize without cache check
        global warehouse_data, query_summary_data, query_details_data

        # Execute queries to get fresh data
        logger.info("Force refreshing data from Snowflake...")

        warehouse_query = "SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD_with_queries"
        warehouse_data = data_manager.execute_query(cursor, warehouse_query)
        data_manager.save_to_cache(warehouse_data, 'warehouse_analytics')

        summary_query = "SELECT * FROM QUERY_HISTORY_SUMMARY"
        query_summary_data = data_manager.execute_query(cursor, summary_query)
        data_manager.save_to_cache(query_summary_data, 'query_history_summary')

        details_query = "SELECT * FROM QUERY_DETAILS_COMPLETE"
        query_details_data = data_manager.execute_query(cursor, details_query)
        data_manager.save_to_cache(query_details_data, 'query_details_complete')

        logger.info("Cache refreshed successfully")

        return jsonify({
            'status': 'success',
            'message': 'Cache refreshed successfully',
            'warehouse_rows': len(warehouse_data),
            'query_summary_rows': len(query_summary_data),
            'query_details_rows': len(query_details_data)
        })

    except Exception as e:
        logger.error(f"Error refreshing cache: {str(e)}")
        return jsonify({'error': str(e)}), 500

def refresh_cache_direct(cursor):
    """Direct cache refresh function - call this from your Python script"""
    global warehouse_data, query_summary_data, query_details_data

    try:
        logger.info("Force refreshing data from Snowflake...")

        warehouse_query = "SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD_with_queries"
        warehouse_data = data_manager.execute_query(cursor, warehouse_query)
        data_manager.save_to_cache(warehouse_data, 'warehouse_analytics')

        summary_query = "SELECT * FROM QUERY_HISTORY_SUMMARY"
        query_summary_data = data_manager.execute_query(cursor, summary_query)
        data_manager.save_to_cache(query_summary_data, 'query_history_summary')

        details_query = "SELECT * FROM QUERY_DETAILS_COMPLETE"
        query_details_data = data_manager.execute_query(cursor, details_query)
        data_manager.save_to_cache(query_details_data, 'query_details_complete')

        logger.info("Cache refreshed successfully")
        return True

    except Exception as e:
        logger.error(f"Error refreshing cache: {str(e)}")
        return False

if __name__ == '__main__':
    # Load cached data on startup
    try:
        warehouse_data = data_manager.load_from_cache('warehouse_analytics')
        query_summary_data = data_manager.load_from_cache('query_history_summary')
        query_details_data = data_manager.load_from_cache('query_details_complete')

        if warehouse_data is not None:
            logger.info("Cached data loaded on startup")
        else:
            logger.info("No cached data found. Call /initialize to load data.")

    except Exception as e:
        logger.error(f"Error loading cached data on startup: {str(e)}")

    app.run(debug=True, host='0.0.0.0', port=5000)

# Usage example for your Python script:
"""
# In your main Python script where you have the Snowflake cursor:

import requests
from your_flask_app import initialize_data_direct, refresh_cache_direct

# Assuming you have a Snowflake cursor
cursor = your_snowflake_connection.cursor()

# Initialize data
if initialize_data_direct(cursor):
    print("Data initialized successfully")

    # Start the Flask app in a separate thread or process
    # Then your React app can call the API endpoints

    # To refresh data later:
    # refresh_cache_direct(cursor)
else:
    print("Failed to initialize data")
"""
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;





&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;# usage_script.py
"""
Usage script showing how to integrate the Flask backend with your Snowflake cursor
"""

import snowflake.connector
import threading
import time
from flask_app import app, initialize_data_direct, refresh_cache_direct

def run_flask_app():
    """Run Flask app in a separate thread"""
    app.run(debug=False, host='0.0.0.0', port=5000, use_reloader=False)

def main():
    """Main function to demonstrate usage"""

    # Your Snowflake connection parameters
    snowflake_config = {
        'user': 'your_username',
        'password': 'your_password',
        'account': 'your_account',
        'warehouse': 'your_warehouse',
        'database': 'your_database',
        'schema': 'your_schema'
    }

    try:
        # Establish Snowflake connection
        print("Connecting to Snowflake...")
        conn = snowflake.connector.connect(**snowflake_config)
        cursor = conn.cursor()
        print("Connected successfully!")

        # Initialize data using direct function
        print("Initializing data from Snowflake tables...")
        if initialize_data_direct(cursor):
            print("✅ Data initialized successfully!")
            print("Cache files created in './cache/' directory")

            # Start Flask app in background thread
            print("Starting Flask API server...")
            flask_thread = threading.Thread(target=run_flask_app)
            flask_thread.daemon = True
            flask_thread.start()

            # Wait a moment for Flask to start
            time.sleep(2)
            print("🚀 Flask API server running on http://localhost:5000")

            print("\n" + "="*50)
            print("Available API Endpoints:")
            print("="*50)
            print("GET  /health                           - Health check")
            print("GET  /warehouses                      - Get all warehouses")
            print("GET  /queries/by-warehouse/&amp;lt;id&amp;gt;/&amp;lt;metric&amp;gt; - Get queries by warehouse metric")
            print("GET  /queries/by-user/&amp;lt;wh&amp;gt;/&amp;lt;metric&amp;gt;/&amp;lt;user&amp;gt; - Get queries by user")
            print("GET  /query/details/&amp;lt;query_id&amp;gt;        - Get detailed query info")
            print("GET  /queries/summary                 - Get query summary with filters")
            print("="*50)

            print("\nExample URLs:")
            print("- http://localhost:5000/health")
            print("- http://localhost:5000/warehouses")
            print("- http://localhost:5000/queries/by-warehouse/WH123/1-10-seconds")
            print("- http://localhost:5000/query/details/QUERY123")
            print("\n💡 Your React app can now call these endpoints!")

            # Keep the script running
            print("\nPress Ctrl+C to stop...")
            try:
                while True:
                    time.sleep(1)
            except KeyboardInterrupt:
                print("\n👋 Shutting down...")

        else:
            print("❌ Failed to initialize data")

    except Exception as e:
        print(f"❌ Error: {str(e)}")

    finally:
        # Close connections
        if 'cursor' in locals():
            cursor.close()
        if 'conn' in locals():
            conn.close()
        print("Database connections closed")

if __name__ == "__main__":
    main()

# Alternative: If you want to refresh data periodically
def setup_periodic_refresh(cursor, interval_hours=6):
    """Setup periodic data refresh"""
    def refresh_job():
        while True:
            time.sleep(interval_hours * 3600)  # Convert hours to seconds
            print(f"Refreshing data... (every {interval_hours} hours)")
            if refresh_cache_direct(cursor):
                print("✅ Data refreshed successfully")
            else:
                print("❌ Data refresh failed")

    refresh_thread = threading.Thread(target=refresh_job)
    refresh_thread.daemon = True
    refresh_thread.start()
    print(f"📅 Periodic refresh scheduled every {interval_hours} hours")

# Quick test function
def test_api_endpoints():
    """Test function to verify API endpoints"""
    import requests
    import json

    base_url = "http://localhost:5000"

    try:
        # Test health endpoint
        response = requests.get(f"{base_url}/health")
        print("Health Check:", response.json())

        # Test warehouses endpoint
        response = requests.get(f"{base_url}/warehouses")
        data = response.json()
        print(f"Warehouses: Found {data['count']} warehouses")

        if data['count'] &amp;gt; 0:
            # Get first warehouse for testing
            first_warehouse = data['data'][0]
            warehouse_id = first_warehouse['WAREHOUSE_ID']

            # Test queries by warehouse metric
            response = requests.get(f"{base_url}/queries/by-warehouse/{warehouse_id}/1-10-seconds")
            queries_data = response.json()
            print(f"Queries for {warehouse_id} (1-10 seconds): {queries_data['count']} queries")

    except requests.exceptions.RequestException as e:
        print(f"API test failed: {e}")
        print("Make sure the Flask server is running")

# Example for testing without starting the full server
def quick_test():
    """Quick test without starting server"""
    # Your cursor initialization here
    cursor = None  # Replace with your actual cursor

    if cursor and initialize_data_direct(cursor):
        print("Data loaded successfully!")
        print("You can now start the Flask server separately")
    else:
        print("Failed to load data")
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>test1</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Thu, 07 Aug 2025 05:58:11 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/test1-3ep8</link>
      <guid>https://dev.to/armaankhan8270/test1-3ep8</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;from flask import Flask, jsonify, request
from flask_cors import CORS
import json
import os
from datetime import datetime, timedelta
import logging
from typing import Dict, List, Any, Optional
import pickle
from functools import wraps
import hashlib

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)
CORS(app)

# Configuration
app.config['SECRET_KEY'] = 'your-secret-key-here'
CACHE_DIR = 'cache'
CACHE_EXPIRY_HOURS = 6

# Ensure cache directory exists
os.makedirs(CACHE_DIR, exist_ok=True)

class SnowflakeAnalytics:
    def __init__(self, cursor):
        self.cursor = cursor

    def _get_cache_key(self, query: str, params: Dict = None) -&amp;gt; str:
        """Generate cache key from query and parameters"""
        cache_data = f"{query}_{params if params else ''}"
        return hashlib.md5(cache_data.encode()).hexdigest()

    def _get_cached_data(self, cache_key: str) -&amp;gt; Optional[Any]:
        """Get cached data if it exists and hasn't expired"""
        cache_file = os.path.join(CACHE_DIR, f"{cache_key}.pkl")
        if not os.path.exists(cache_file):
            return None

        try:
            with open(cache_file, 'rb') as f:
                cached_data = pickle.load(f)

            # Check if cache has expired
            if datetime.now() - cached_data['timestamp'] &amp;gt; timedelta(hours=CACHE_EXPIRY_HOURS):
                os.remove(cache_file)
                return None

            return cached_data['data']
        except Exception as e:
            logger.error(f"Error reading cache: {e}")
            return None

    def _save_to_cache(self, cache_key: str, data: Any):
        """Save data to cache"""
        cache_file = os.path.join(CACHE_DIR, f"{cache_key}.pkl")
        try:
            with open(cache_file, 'wb') as f:
                pickle.dump({
                    'data': data,
                    'timestamp': datetime.now()
                }, f)
        except Exception as e:
            logger.error(f"Error saving to cache: {e}")

    def _execute_query_with_cache(self, query: str, params: Dict = None) -&amp;gt; List[Dict]:
        """Execute query with caching"""
        cache_key = self._get_cache_key(query, params)

        # Try to get from cache first
        cached_data = self._get_cached_data(cache_key)
        if cached_data:
            logger.info(f"Using cached data for query hash: {cache_key[:8]}")
            return cached_data

        # Execute query
        logger.info(f"Executing query: {query[:100]}...")
        self.cursor.execute(query)
        columns = [desc[0] for desc in self.cursor.description]
        rows = self.cursor.fetchall()

        # Convert to list of dictionaries
        data = [dict(zip(columns, row)) for row in rows]

        # Cache the results
        self._save_to_cache(cache_key, data)

        return data

    def get_warehouse_summary(self) -&amp;gt; List[Dict]:
        """Get warehouse analytics dashboard data"""
        query = "SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD_with_queries ORDER BY TOTAL_QUERIES DESC"
        return self._execute_query_with_cache(query)

    def get_query_summary(self, limit: int = 1000) -&amp;gt; List[Dict]:
        """Get query history summary"""
        query = f"SELECT * FROM QUERY_HISTORY_SUMMARY ORDER BY START_TIME DESC LIMIT {limit}"
        return self._execute_query_with_cache(query)

    def get_query_details(self, query_id: str) -&amp;gt; Dict:
        """Get detailed information about a specific query"""
        query = f"SELECT * FROM QUERY_DETAILS_COMPLETE WHERE QUERY_ID = '{query_id}'"
        result = self._execute_query_with_cache(query)
        return result[0] if result else {}

    def get_user_performance_report(self) -&amp;gt; List[Dict]:
        """Get user query performance report"""
        query = "SELECT * FROM user_query_performance_report ORDER BY weighted_score DESC"
        return self._execute_query_with_cache(query)

    def get_warehouse_queries_by_user(self, warehouse_id: str, query_type: str) -&amp;gt; List[Dict]:
        """Get queries for a specific warehouse grouped by user"""
        # Extract query IDs from the warehouse data first
        warehouse_data = self.get_warehouse_summary()
        target_warehouse = next((w for w in warehouse_data if str(w['WAREHOUSE_ID']) == warehouse_id), None)

        if not target_warehouse or 'QUERY_IDS' not in target_warehouse:
            return []

        query_ids = target_warehouse['QUERY_IDS'].get(query_type, [])
        if not query_ids:
            return []

        # Clean up query IDs (remove None values)
        clean_query_ids = [qid for qid in query_ids if qid is not None]
        if not clean_query_ids:
            return []

        # Get query details for these IDs
        ids_str = "','".join(clean_query_ids)
        query = f"""
        SELECT 
            USER_NAME,
            COUNT(*) as query_count,
            ARRAY_AGG(QUERY_ID) as query_ids,
            AVG(TOTAL_ELAPSED_TIME) as avg_execution_time,
            SUM(BYTES_SCANNED) as total_bytes_scanned
        FROM QUERY_HISTORY_SUMMARY 
        WHERE QUERY_ID IN ('{ids_str}')
        GROUP BY USER_NAME
        ORDER BY query_count DESC
        """
        return self._execute_query_with_cache(query, {'warehouse_id': warehouse_id, 'query_type': query_type})

    def get_user_queries(self, user_name: str, query_ids: List[str] = None) -&amp;gt; List[Dict]:
        """Get all queries for a specific user"""
        if query_ids:
            ids_str = "','".join(query_ids)
            query = f"""
            SELECT * FROM QUERY_HISTORY_SUMMARY 
            WHERE USER_NAME = '{user_name}' AND QUERY_ID IN ('{ids_str}')
            ORDER BY START_TIME DESC
            """
        else:
            query = f"""
            SELECT * FROM QUERY_HISTORY_SUMMARY 
            WHERE USER_NAME = '{user_name}'
            ORDER BY START_TIME DESC
            LIMIT 1000
            """
        return self._execute_query_with_cache(query, {'user_name': user_name})

    def get_user_sample_queries(self, user_name: str, query_type: str) -&amp;gt; List[Dict]:
        """Get sample queries for a specific user and query type"""
        user_report = self.get_user_performance_report()
        target_user = next((u for u in user_report if u['USER_NAME'] == user_name), None)

        if not target_user or 'QUERY_SAMPLES' not in target_user:
            return []

        sample_queries = target_user['QUERY_SAMPLES'].get(query_type, [])
        return sample_queries if sample_queries else []

# Global analytics instance (will be initialized with cursor)
analytics = None

def init_analytics(cursor):
    """Initialize the analytics instance with Snowflake cursor"""
    global analytics
    analytics = SnowflakeAnalytics(cursor)

def require_analytics(f):
    """Decorator to ensure analytics is initialized"""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if analytics is None:
            return jsonify({'error': 'Analytics not initialized'}), 500
        return f(*args, **kwargs)
    return decorated_function

# API Routes
@app.route('/api/warehouse-summary')
@require_analytics
def get_warehouse_summary():
    """Get warehouse analytics summary"""
    try:
        data = analytics.get_warehouse_summary()
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching warehouse summary: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/query-summary')
@require_analytics
def get_query_summary():
    """Get query history summary"""
    try:
        limit = request.args.get('limit', 1000, type=int)
        data = analytics.get_query_summary(limit)
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching query summary: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/query-details/&amp;lt;query_id&amp;gt;')
@require_analytics
def get_query_details(query_id):
    """Get detailed information about a specific query"""
    try:
        data = analytics.get_query_details(query_id)
        return jsonify({'data': data})
    except Exception as e:
        logger.error(f"Error fetching query details: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/user-performance')
@require_analytics
def get_user_performance():
    """Get user performance report"""
    try:
        data = analytics.get_user_performance_report()
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching user performance: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/warehouse/&amp;lt;warehouse_id&amp;gt;/queries/&amp;lt;query_type&amp;gt;/users')
@require_analytics
def get_warehouse_queries_by_user(warehouse_id, query_type):
    """Get queries for a warehouse grouped by user"""
    try:
        data = analytics.get_warehouse_queries_by_user(warehouse_id, query_type)
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching warehouse queries by user: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/user/&amp;lt;user_name&amp;gt;/queries')
@require_analytics
def get_user_queries(user_name):
    """Get all queries for a specific user"""
    try:
        query_ids = request.args.getlist('query_ids')
        data = analytics.get_user_queries(user_name, query_ids if query_ids else None)
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching user queries: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/user/&amp;lt;user_name&amp;gt;/sample-queries/&amp;lt;query_type&amp;gt;')
@require_analytics
def get_user_sample_queries(user_name, query_type):
    """Get sample queries for a specific user and query type"""
    try:
        data = analytics.get_user_sample_queries(user_name, query_type)
        return jsonify({'data': data, 'count': len(data)})
    except Exception as e:
        logger.error(f"Error fetching user sample queries: {e}")
        return jsonify({'error': str(e)}), 500

@app.route('/api/health')
def health_check():
    """Health check endpoint"""
    return jsonify({'status': 'healthy', 'timestamp': datetime.now().isoformat()})

@app.errorhandler(404)
def not_found(error):
    return jsonify({'error': 'Endpoint not found'}), 404

@app.errorhandler(500)
def internal_error(error):
    return jsonify({'error': 'Internal server error'}), 500

if __name__ == '__main__':
    # For development - in production, you'll initialize with actual cursor
    print("Flask app ready. Initialize with cursor using init_analytics(cursor)")
    app.run(debug=True, host='0.0.0.0', port=5000)
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;





&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import React, { useState, useEffect, useCallback, useMemo } from 'react';
import {
  ChevronDownIcon,
  ChevronRightIcon,
  EyeIcon,
  UserIcon,
  ServerIcon,
  ClockIcon,
  CpuChipIcon,
  ExclamationTriangleIcon,
  DocumentMagnifyingGlassIcon
} from '@heroicons/react/24/outline';

// API Configuration
const API_BASE_URL = process.env.REACT_APP_API_URL || 'http://localhost:5000';

// API Service
class ApiService {
  static async get(endpoint) {
    try {
      const response = await fetch(`${API_BASE_URL}/api${endpoint}`);
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      return await response.json();
    } catch (error) {
      console.error('API Error:', error);
      throw error;
    }
  }
}

// Utility Functions
const formatBytes = (bytes) =&amp;gt; {
  if (!bytes) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
};

const formatDuration = (ms) =&amp;gt; {
  if (!ms) return '0ms';
  if (ms &amp;lt; 1000) return `${ms}ms`;
  if (ms &amp;lt; 60000) return `${(ms / 1000).toFixed(1)}s`;
  return `${(ms / 60000).toFixed(1)}m`;
};

const formatNumber = (num) =&amp;gt; {
  if (!num) return '0';
  return num.toLocaleString();
};

// Loading Spinner Component
const LoadingSpinner = () =&amp;gt; (
  &amp;lt;div className="flex justify-center items-center p-8"&amp;gt;
    &amp;lt;div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600"&amp;gt;&amp;lt;/div&amp;gt;
  &amp;lt;/div&amp;gt;
);

// Table Components
const TableHeader = ({ children, sortable = false, onSort, sortDirection }) =&amp;gt; (
  &amp;lt;th 
    className={`px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider ${
      sortable ? 'cursor-pointer hover:bg-gray-100' : ''
    }`}
    onClick={sortable ? onSort : undefined}
  &amp;gt;
    &amp;lt;div className="flex items-center space-x-1"&amp;gt;
      &amp;lt;span&amp;gt;{children}&amp;lt;/span&amp;gt;
      {sortable &amp;amp;&amp;amp; (
        &amp;lt;ChevronDownIcon 
          className={`w-4 h-4 transform transition-transform ${
            sortDirection === 'desc' ? 'rotate-180' : ''
          }`} 
        /&amp;gt;
      )}
    &amp;lt;/div&amp;gt;
  &amp;lt;/th&amp;gt;
);

const TableCell = ({ children, className = '' }) =&amp;gt; (
  &amp;lt;td className={`px-4 py-3 text-sm text-gray-900 ${className}`}&amp;gt;
    {children}
  &amp;lt;/td&amp;gt;
);

// Badge Component
const Badge = ({ variant, children, className = '' }) =&amp;gt; {
  const variants = {
    success: 'bg-green-100 text-green-800',
    warning: 'bg-yellow-100 text-yellow-800',
    error: 'bg-red-100 text-red-800',
    info: 'bg-blue-100 text-blue-800',
    default: 'bg-gray-100 text-gray-800'
  };

  return (
    &amp;lt;span className={`inline-flex px-2 py-1 text-xs font-medium rounded-full ${variants[variant] || variants.default} ${className}`}&amp;gt;
      {children}
    &amp;lt;/span&amp;gt;
  );
};

// Button Component
const Button = ({ 
  children, 
  variant = 'primary', 
  size = 'md', 
  onClick, 
  disabled = false, 
  icon: Icon,
  className = ''
}) =&amp;gt; {
  const variants = {
    primary: 'bg-blue-600 hover:bg-blue-700 text-white',
    secondary: 'bg-gray-600 hover:bg-gray-700 text-white',
    outline: 'border border-gray-300 hover:bg-gray-50 text-gray-700'
  };

  const sizes = {
    sm: 'px-2 py-1 text-xs',
    md: 'px-3 py-2 text-sm',
    lg: 'px-4 py-2 text-base'
  };

  return (
    &amp;lt;button
      onClick={onClick}
      disabled={disabled}
      className={`
        inline-flex items-center space-x-2 font-medium rounded-md
        ${variants[variant]} ${sizes[size]}
        ${disabled ? 'opacity-50 cursor-not-allowed' : 'transition-colors'}
        ${className}
      `}
    &amp;gt;
      {Icon &amp;amp;&amp;amp; &amp;lt;Icon className="w-4 h-4" /&amp;gt;}
      &amp;lt;span&amp;gt;{children}&amp;lt;/span&amp;gt;
    &amp;lt;/button&amp;gt;
  );
};

// Warehouse Analytics Table Component
const WarehouseTable = ({ onDrillDown }) =&amp;gt; {
  const [data, setData] = useState([]);
  const [loading, setLoading] = useState(true);
  const [sortConfig, setSortConfig] = useState({ key: 'TOTAL_QUERIES', direction: 'desc' });
  const [expandedRows, setExpandedRows] = useState(new Set());

  useEffect(() =&amp;gt; {
    const fetchData = async () =&amp;gt; {
      try {
        const response = await ApiService.get('/warehouse-summary');
        setData(response.data);
      } catch (error) {
        console.error('Failed to fetch warehouse data:', error);
      } finally {
        setLoading(false);
      }
    };
    fetchData();
  }, []);

  const handleSort = useCallback((key) =&amp;gt; {
    setSortConfig(prev =&amp;gt; ({
      key,
      direction: prev.key === key &amp;amp;&amp;amp; prev.direction === 'desc' ? 'asc' : 'desc'
    }));
  }, []);

  const sortedData = useMemo(() =&amp;gt; {
    return [...data].sort((a, b) =&amp;gt; {
      const aVal = a[sortConfig.key] || 0;
      const bVal = b[sortConfig.key] || 0;
      return sortConfig.direction === 'desc' ? bVal - aVal : aVal - bVal;
    });
  }, [data, sortConfig]);

  const toggleRowExpansion = (warehouseId) =&amp;gt; {
    setExpandedRows(prev =&amp;gt; {
      const newSet = new Set(prev);
      if (newSet.has(warehouseId)) {
        newSet.delete(warehouseId);
      } else {
        newSet.add(warehouseId);
      }
      return newSet;
    });
  };

  const getQueryTypeButtons = (warehouse) =&amp;gt; {
    const queryTypes = [
      { key: '1-10_sec_ids', label: '1-10s', value: warehouse.QUERIES_1_10_SEC },
      { key: '10-20_sec_ids', label: '10-20s', value: warehouse.QUERIES_10_20_SEC },
      { key: '20-60_sec_ids', label: '20-60s', value: warehouse.QUERIES_20_60_SEC },
      { key: '1-3_min_ids', label: '1-3m', value: warehouse.QUERIES_1_3_MIN },
      { key: '3-5_min_ids', label: '3-5m', value: warehouse.QUERIES_3_5_MIN },
      { key: '5_plus_min_ids', label: '5m+', value: warehouse.QUERIES_5_PLUS_MIN },
      { key: 'spilled_local_ids', label: 'Spilled Local', value: warehouse.QUERIES_SPILLED_LOCAL },
      { key: 'spilled_remote_ids', label: 'Spilled Remote', value: warehouse.QUERIES_SPILLED_REMOTE },
      { key: 'failed_queries_ids', label: 'Failed', value: warehouse.FAILED_QUERIES },
    ];

    return queryTypes.filter(type =&amp;gt; type.value &amp;gt; 0);
  };

  if (loading) return &amp;lt;LoadingSpinner /&amp;gt;;

  return (
    &amp;lt;div className="bg-white shadow-lg rounded-lg overflow-hidden"&amp;gt;
      &amp;lt;div className="px-6 py-4 border-b border-gray-200"&amp;gt;
        &amp;lt;h2 className="text-xl font-semibold text-gray-800 flex items-center space-x-2"&amp;gt;
          &amp;lt;ServerIcon className="w-6 h-6" /&amp;gt;
          &amp;lt;span&amp;gt;Warehouse Analytics&amp;lt;/span&amp;gt;
        &amp;lt;/h2&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="overflow-x-auto"&amp;gt;
        &amp;lt;table className="min-w-full divide-y divide-gray-200"&amp;gt;
          &amp;lt;thead className="bg-gray-50"&amp;gt;
            &amp;lt;tr&amp;gt;
              &amp;lt;th className="px-4 py-3 text-left"&amp;gt;&amp;lt;/th&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('WAREHOUSE_NAME')}
                sortDirection={sortConfig.key === 'WAREHOUSE_NAME' ? sortConfig.direction : null}
              &amp;gt;
                Warehouse Name
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Size&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('TOTAL_QUERIES')}
                sortDirection={sortConfig.key === 'TOTAL_QUERIES' ? sortConfig.direction : null}
              &amp;gt;
                Total Queries
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('TOTAL_CREDITS_USED')}
                sortDirection={sortConfig.key === 'TOTAL_CREDITS_USED' ? sortConfig.direction : null}
              &amp;gt;
                Credits Used
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Failed&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Spilled&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Actions&amp;lt;/TableHeader&amp;gt;
            &amp;lt;/tr&amp;gt;
          &amp;lt;/thead&amp;gt;
          &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
            {sortedData.map((warehouse) =&amp;gt; (
              &amp;lt;React.Fragment key={warehouse.WAREHOUSE_ID}&amp;gt;
                &amp;lt;tr className="hover:bg-gray-50"&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    &amp;lt;button
                      onClick={() =&amp;gt; toggleRowExpansion(warehouse.WAREHOUSE_ID)}
                      className="p-1 hover:bg-gray-200 rounded"
                    &amp;gt;
                      {expandedRows.has(warehouse.WAREHOUSE_ID) ? (
                        &amp;lt;ChevronDownIcon className="w-4 h-4" /&amp;gt;
                      ) : (
                        &amp;lt;ChevronRightIcon className="w-4 h-4" /&amp;gt;
                      )}
                    &amp;lt;/button&amp;gt;
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell className="font-medium"&amp;gt;
                    {warehouse.WAREHOUSE_NAME}
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    &amp;lt;Badge variant="info"&amp;gt;{warehouse.WAREHOUSE_SIZE}&amp;lt;/Badge&amp;gt;
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell className="font-semibold text-blue-600"&amp;gt;
                    {formatNumber(warehouse.TOTAL_QUERIES)}
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    {warehouse.TOTAL_CREDITS_USED ? warehouse.TOTAL_CREDITS_USED.toFixed(2) : '0.00'}
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    {warehouse.FAILED_QUERIES &amp;gt; 0 ? (
                      &amp;lt;Badge variant="error"&amp;gt;{formatNumber(warehouse.FAILED_QUERIES)}&amp;lt;/Badge&amp;gt;
                    ) : (
                      &amp;lt;Badge variant="success"&amp;gt;0&amp;lt;/Badge&amp;gt;
                    )}
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    {(warehouse.QUERIES_SPILLED_LOCAL + warehouse.QUERIES_SPILLED_REMOTE) &amp;gt; 0 ? (
                      &amp;lt;Badge variant="warning"&amp;gt;
                        {formatNumber(warehouse.QUERIES_SPILLED_LOCAL + warehouse.QUERIES_SPILLED_REMOTE)}
                      &amp;lt;/Badge&amp;gt;
                    ) : (
                      &amp;lt;Badge variant="success"&amp;gt;0&amp;lt;/Badge&amp;gt;
                    )}
                  &amp;lt;/TableCell&amp;gt;
                  &amp;lt;TableCell&amp;gt;
                    &amp;lt;Button
                      size="sm"
                      variant="outline"
                      icon={EyeIcon}
                      onClick={() =&amp;gt; onDrillDown('warehouse-details', { warehouse })}
                    &amp;gt;
                      View Details
                    &amp;lt;/Button&amp;gt;
                  &amp;lt;/TableCell&amp;gt;
                &amp;lt;/tr&amp;gt;

                {expandedRows.has(warehouse.WAREHOUSE_ID) &amp;amp;&amp;amp; (
                  &amp;lt;tr&amp;gt;
                    &amp;lt;td colSpan="8" className="px-4 py-3 bg-gray-50"&amp;gt;
                      &amp;lt;div className="grid grid-cols-2 md:grid-cols-4 lg:grid-cols-6 gap-2"&amp;gt;
                        {getQueryTypeButtons(warehouse).map((type) =&amp;gt; (
                          &amp;lt;Button
                            key={type.key}
                            size="sm"
                            variant="outline"
                            onClick={() =&amp;gt; onDrillDown('warehouse-queries', {
                              warehouseId: warehouse.WAREHOUSE_ID,
                              warehouseName: warehouse.WAREHOUSE_NAME,
                              queryType: type.key,
                              queryLabel: type.label
                            })}
                            className="justify-center"
                          &amp;gt;
                            {type.label}: {formatNumber(type.value)}
                          &amp;lt;/Button&amp;gt;
                        ))}
                      &amp;lt;/div&amp;gt;
                    &amp;lt;/td&amp;gt;
                  &amp;lt;/tr&amp;gt;
                )}
              &amp;lt;/React.Fragment&amp;gt;
            ))}
          &amp;lt;/tbody&amp;gt;
        &amp;lt;/table&amp;gt;
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// User Performance Table Component
const UserPerformanceTable = ({ onDrillDown }) =&amp;gt; {
  const [data, setData] = useState([]);
  const [loading, setLoading] = useState(true);
  const [sortConfig, setSortConfig] = useState({ key: 'weighted_score', direction: 'desc' });

  useEffect(() =&amp;gt; {
    const fetchData = async () =&amp;gt; {
      try {
        const response = await ApiService.get('/user-performance');
        setData(response.data);
      } catch (error) {
        console.error('Failed to fetch user performance data:', error);
      } finally {
        setLoading(false);
      }
    };
    fetchData();
  }, []);

  const handleSort = useCallback((key) =&amp;gt; {
    setSortConfig(prev =&amp;gt; ({
      key,
      direction: prev.key === key &amp;amp;&amp;amp; prev.direction === 'desc' ? 'asc' : 'desc'
    }));
  }, []);

  const sortedData = useMemo(() =&amp;gt; {
    return [...data].sort((a, b) =&amp;gt; {
      const aVal = a[sortConfig.key] || 0;
      const bVal = b[sortConfig.key] || 0;
      return sortConfig.direction === 'desc' ? bVal - aVal : aVal - bVal;
    });
  }, [data, sortConfig]);

  const getScoreVariant = (score) =&amp;gt; {
    if (score &amp;gt;= 100) return 'error';
    if (score &amp;gt;= 50) return 'warning';
    return 'success';
  };

  if (loading) return &amp;lt;LoadingSpinner /&amp;gt;;

  return (
    &amp;lt;div className="bg-white shadow-lg rounded-lg overflow-hidden"&amp;gt;
      &amp;lt;div className="px-6 py-4 border-b border-gray-200"&amp;gt;
        &amp;lt;h2 className="text-xl font-semibold text-gray-800 flex items-center space-x-2"&amp;gt;
          &amp;lt;UserIcon className="w-6 h-6" /&amp;gt;
          &amp;lt;span&amp;gt;User Performance Report&amp;lt;/span&amp;gt;
        &amp;lt;/h2&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="overflow-x-auto"&amp;gt;
        &amp;lt;table className="min-w-full divide-y divide-gray-200"&amp;gt;
          &amp;lt;thead className="bg-gray-50"&amp;gt;
            &amp;lt;tr&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('user_name')}
                sortDirection={sortConfig.key === 'user_name' ? sortConfig.direction : null}
              &amp;gt;
                User Name
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('total_queries')}
                sortDirection={sortConfig.key === 'total_queries' ? sortConfig.direction : null}
              &amp;gt;
                Total Queries
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('total_credits')}
                sortDirection={sortConfig.key === 'total_credits' ? sortConfig.direction : null}
              &amp;gt;
                Credits Used
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Spilled Queries&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Failed Queries&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Performance Issues&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader 
                sortable 
                onSort={() =&amp;gt; handleSort('weighted_score')}
                sortDirection={sortConfig.key === 'weighted_score' ? sortConfig.direction : null}
              &amp;gt;
                Score
              &amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Actions&amp;lt;/TableHeader&amp;gt;
            &amp;lt;/tr&amp;gt;
          &amp;lt;/thead&amp;gt;
          &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
            {sortedData.map((user) =&amp;gt; (
              &amp;lt;tr key={user.user_name} className="hover:bg-gray-50"&amp;gt;
                &amp;lt;TableCell className="font-medium"&amp;gt;
                  {user.user_name}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell className="font-semibold text-blue-600"&amp;gt;
                  {formatNumber(user.total_queries)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {user.total_credits ? user.total_credits.toFixed(2) : '0.00'}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;button
                    onClick={() =&amp;gt; onDrillDown('user-sample-queries', {
                      userName: user.user_name,
                      queryType: 'spilled',
                      queryLabel: 'Spilled Queries'
                    })}
                    className="text-blue-600 hover:text-blue-800 underline"
                  &amp;gt;
                    {formatNumber(user.spilled_queries)}
                  &amp;lt;/button&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;span className={`font-medium ${user.failure_cancellation_rate_pct &amp;gt; 10 ? 'text-red-600' : 'text-green-600'}`}&amp;gt;
                    {user.failure_cancellation_rate_pct ? user.failure_cancellation_rate_pct.toFixed(1) : '0.0'}%
                  &amp;lt;/span&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;div className="flex flex-wrap gap-1"&amp;gt;
                    {user.over_provisioned_queries &amp;gt; 0 &amp;amp;&amp;amp; (
                      &amp;lt;button
                        onClick={() =&amp;gt; onDrillDown('user-sample-queries', {
                          userName: user.user_name,
                          queryType: 'over_provisioned',
                          queryLabel: 'Over Provisioned Queries'
                        })}
                        className="text-xs bg-yellow-100 text-yellow-800 px-2 py-1 rounded hover:bg-yellow-200"
                      &amp;gt;
                        Over Provisioned: {user.over_provisioned_queries}
                      &amp;lt;/button&amp;gt;
                    )}
                    {user.select_star_queries &amp;gt; 0 &amp;amp;&amp;amp; (
                      &amp;lt;button
                        onClick={() =&amp;gt; onDrillDown('user-sample-queries', {
                          userName: user.user_name,
                          queryType: 'select_star',
                          queryLabel: 'SELECT * Queries'
                        })}
                        className="text-xs bg-orange-100 text-orange-800 px-2 py-1 rounded hover:bg-orange-200"
                      &amp;gt;
                        SELECT *: {user.select_star_queries}
                      &amp;lt;/button&amp;gt;
                    )}
                    {user.slow_queries &amp;gt; 0 &amp;amp;&amp;amp; (
                      &amp;lt;button
                        onClick={() =&amp;gt; onDrillDown('user-sample-queries', {
                          userName: user.user_name,
                          queryType: 'slow_query',
                          queryLabel: 'Slow Queries'
                        })}
                        className="text-xs bg-red-100 text-red-800 px-2 py-1 rounded hover:bg-red-200"
                      &amp;gt;
                        Slow: {user.slow_queries}
                      &amp;lt;/button&amp;gt;
                    )}
                  &amp;lt;/div&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Badge variant={getScoreVariant(user.weighted_score)}&amp;gt;
                    {user.weighted_score ? user.weighted_score.toFixed(1) : '0.0'}
                  &amp;lt;/Badge&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Button
                    size="sm"
                    variant="outline"
                    icon={EyeIcon}
                    onClick={() =&amp;gt; onDrillDown('user-details', { user })}
                  &amp;gt;
                    View Details
                  &amp;lt;/Button&amp;gt;
                &amp;lt;/TableCell&amp;gt;
              &amp;lt;/tr&amp;gt;
            ))}
          &amp;lt;/tbody&amp;gt;
        &amp;lt;/table&amp;gt;
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Query Details Modal Component
const QueryDetailsModal = ({ queryId, isOpen, onClose }) =&amp;gt; {
  const [data, setData] = useState(null);
  const [loading, setLoading] = useState(false);

  useEffect(() =&amp;gt; {
    if (isOpen &amp;amp;&amp;amp; queryId) {
      setLoading(true);
      ApiService.get(`/query-details/${queryId}`)
        .then(response =&amp;gt; setData(response.data))
        .catch(error =&amp;gt; console.error('Failed to fetch query details:', error))
        .finally(() =&amp;gt; setLoading(false));
    }
  }, [isOpen, queryId]);

  if (!isOpen) return null;

  return (
    &amp;lt;div className="fixed inset-0 bg-gray-600 bg-opacity-50 overflow-y-auto h-full w-full z-50"&amp;gt;
      &amp;lt;div className="relative top-20 mx-auto p-5 border w-11/12 max-w-4xl shadow-lg rounded-md bg-white"&amp;gt;
        &amp;lt;div className="mt-3"&amp;gt;
          &amp;lt;div className="flex items-center justify-between mb-4"&amp;gt;
            &amp;lt;h3 className="text-lg font-medium text-gray-900"&amp;gt;Query Details&amp;lt;/h3&amp;gt;
            &amp;lt;button
              onClick={onClose}
              className="text-gray-400 hover:text-gray-600"
            &amp;gt;
              &amp;lt;svg className="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24"&amp;gt;
                &amp;lt;path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2" d="M6 18L18 6M6 6l12 12" /&amp;gt;
              &amp;lt;/svg&amp;gt;
            &amp;lt;/button&amp;gt;
          &amp;lt;/div&amp;gt;

          {loading ? (
            &amp;lt;LoadingSpinner /&amp;gt;
          ) : data ? (
            &amp;lt;div className="space-y-6"&amp;gt;
              {/* Query Information */}
              &amp;lt;div className="bg-gray-50 p-4 rounded-lg"&amp;gt;
                &amp;lt;h4 className="font-medium text-gray-900 mb-3"&amp;gt;Query Information&amp;lt;/h4&amp;gt;
                &amp;lt;div className="grid grid-cols-1 md:grid-cols-2 gap-4"&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Query ID:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900 font-mono"&amp;gt;{data.QUERY_ID}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;User:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{data.USER_NAME}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Warehouse:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{data.WAREHOUSE_NAME} ({data.WAREHOUSE_SIZE})&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Database:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{data.DATABASE_NAME}.{data.SCHEMA_NAME}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;

              {/* Performance Metrics */}
              &amp;lt;div className="bg-gray-50 p-4 rounded-lg"&amp;gt;
                &amp;lt;h4 className="font-medium text-gray-900 mb-3"&amp;gt;Performance Metrics&amp;lt;/h4&amp;gt;
                &amp;lt;div className="grid grid-cols-1 md:grid-cols-3 gap-4"&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Total Duration:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{formatDuration(data.TOTAL_ELAPSED_TIME)}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Compilation Time:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{formatDuration(data.COMPILATION_TIME)}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Execution Time:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{formatDuration(data.EXECUTION_TIME)}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Bytes Scanned:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{formatBytes(data.BYTES_SCANNED)}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Rows Produced:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{formatNumber(data.ROWS_PRODUCED)}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-gray-500"&amp;gt;Credits Used:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-gray-900"&amp;gt;{data.CREDITS_USED_CLOUD_SERVICES?.toFixed(4) || '0.0000'}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;

              {/* Query Text */}
              &amp;lt;div className="bg-gray-50 p-4 rounded-lg"&amp;gt;
                &amp;lt;h4 className="font-medium text-gray-900 mb-3"&amp;gt;Query Text&amp;lt;/h4&amp;gt;
                &amp;lt;pre className="text-sm text-gray-900 bg-white p-3 rounded border overflow-auto max-h-64 font-mono"&amp;gt;
                  {data.QUERY_TEXT}
                &amp;lt;/pre&amp;gt;
              &amp;lt;/div&amp;gt;

              {/* Spilling Information */}
              {(data.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 || data.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0) &amp;amp;&amp;amp; (
                &amp;lt;div className="bg-yellow-50 p-4 rounded-lg border border-yellow-200"&amp;gt;
                  &amp;lt;h4 className="font-medium text-yellow-900 mb-3 flex items-center"&amp;gt;
                    &amp;lt;ExclamationTriangleIcon className="w-5 h-5 mr-2" /&amp;gt;
                    Memory Spilling Detected
                  &amp;lt;/h4&amp;gt;
                  &amp;lt;div className="grid grid-cols-1 md:grid-cols-2 gap-4"&amp;gt;
                    &amp;lt;div&amp;gt;
                      &amp;lt;span className="text-sm font-medium text-yellow-800"&amp;gt;Local Storage:&amp;lt;/span&amp;gt;
                      &amp;lt;p className="text-sm text-yellow-900"&amp;gt;{formatBytes(data.BYTES_SPILLED_TO_LOCAL_STORAGE)}&amp;lt;/p&amp;gt;
                    &amp;lt;/div&amp;gt;
                    &amp;lt;div&amp;gt;
                      &amp;lt;span className="text-sm font-medium text-yellow-800"&amp;gt;Remote Storage:&amp;lt;/span&amp;gt;
                      &amp;lt;p className="text-sm text-yellow-900"&amp;gt;{formatBytes(data.BYTES_SPILLED_TO_REMOTE_STORAGE)}&amp;lt;/p&amp;gt;
                    &amp;lt;/div&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              )}

              {/* Error Information */}
              {data.ERROR_CODE &amp;amp;&amp;amp; (
                &amp;lt;div className="bg-red-50 p-4 rounded-lg border border-red-200"&amp;gt;
                  &amp;lt;h4 className="font-medium text-red-900 mb-3"&amp;gt;Error Information&amp;lt;/h4&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;span className="text-sm font-medium text-red-800"&amp;gt;Error Code:&amp;lt;/span&amp;gt;
                    &amp;lt;p className="text-sm text-red-900"&amp;gt;{data.ERROR_CODE}&amp;lt;/p&amp;gt;
                  &amp;lt;/div&amp;gt;
                  {data.ERROR_MESSAGE &amp;amp;&amp;amp; (
                    &amp;lt;div className="mt-2"&amp;gt;
                      &amp;lt;span className="text-sm font-medium text-red-800"&amp;gt;Error Message:&amp;lt;/span&amp;gt;
                      &amp;lt;p className="text-sm text-red-900"&amp;gt;{data.ERROR_MESSAGE}&amp;lt;/p&amp;gt;
                    &amp;lt;/div&amp;gt;
                  )}
                &amp;lt;/div&amp;gt;
              )}
            &amp;lt;/div&amp;gt;
          ) : (
            &amp;lt;div className="text-center py-8"&amp;gt;
              &amp;lt;p className="text-gray-500"&amp;gt;No query details found&amp;lt;/p&amp;gt;
            &amp;lt;/div&amp;gt;
          )}

          &amp;lt;div className="flex justify-end mt-6"&amp;gt;
            &amp;lt;Button onClick={onClose} variant="secondary"&amp;gt;
              Close
            &amp;lt;/Button&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Warehouse Query Users Component
const WarehouseQueryUsers = ({ warehouseId, warehouseName, queryType, queryLabel, onDrillDown, onBack }) =&amp;gt; {
  const [data, setData] = useState([]);
  const [loading, setLoading] = useState(true);

  useEffect(() =&amp;gt; {
    const fetchData = async () =&amp;gt; {
      try {
        const response = await ApiService.get(`/warehouse/${warehouseId}/queries/${queryType}/users`);
        setData(response.data);
      } catch (error) {
        console.error('Failed to fetch warehouse query users:', error);
      } finally {
        setLoading(false);
      }
    };
    fetchData();
  }, [warehouseId, queryType]);

  if (loading) return &amp;lt;LoadingSpinner /&amp;gt;;

  return (
    &amp;lt;div className="bg-white shadow-lg rounded-lg overflow-hidden"&amp;gt;
      &amp;lt;div className="px-6 py-4 border-b border-gray-200"&amp;gt;
        &amp;lt;div className="flex items-center justify-between"&amp;gt;
          &amp;lt;h2 className="text-xl font-semibold text-gray-800 flex items-center space-x-2"&amp;gt;
            &amp;lt;ServerIcon className="w-6 h-6" /&amp;gt;
            &amp;lt;span&amp;gt;{warehouseName} - {queryLabel} by User&amp;lt;/span&amp;gt;
          &amp;lt;/h2&amp;gt;
          &amp;lt;Button onClick={onBack} variant="outline"&amp;gt;
            Back to Warehouses
          &amp;lt;/Button&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="overflow-x-auto"&amp;gt;
        &amp;lt;table className="min-w-full divide-y divide-gray-200"&amp;gt;
          &amp;lt;thead className="bg-gray-50"&amp;gt;
            &amp;lt;tr&amp;gt;
              &amp;lt;TableHeader&amp;gt;User Name&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Query Count&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Avg Execution Time&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Total Bytes Scanned&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Actions&amp;lt;/TableHeader&amp;gt;
            &amp;lt;/tr&amp;gt;
          &amp;lt;/thead&amp;gt;
          &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
            {data.map((user) =&amp;gt; (
              &amp;lt;tr key={user.USER_NAME} className="hover:bg-gray-50"&amp;gt;
                &amp;lt;TableCell className="font-medium"&amp;gt;
                  {user.USER_NAME}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell className="font-semibold text-blue-600"&amp;gt;
                  {formatNumber(user.QUERY_COUNT)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {formatDuration(user.AVG_EXECUTION_TIME)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {formatBytes(user.TOTAL_BYTES_SCANNED)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Button
                    size="sm"
                    variant="outline"
                    icon={UserIcon}
                    onClick={() =&amp;gt; onDrillDown('user-queries', {
                      userName: user.USER_NAME,
                      queryIds: user.QUERY_IDS
                    })}
                  &amp;gt;
                    View Queries
                  &amp;lt;/Button&amp;gt;
                &amp;lt;/TableCell&amp;gt;
              &amp;lt;/tr&amp;gt;
            ))}
          &amp;lt;/tbody&amp;gt;
        &amp;lt;/table&amp;gt;
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// User Queries Component
const UserQueriesTable = ({ userName, queryIds, onDrillDown, onBack }) =&amp;gt; {
  const [data, setData] = useState([]);
  const [loading, setLoading] = useState(true);
  const [selectedQuery, setSelectedQuery] = useState(null);

  useEffect(() =&amp;gt; {
    const fetchData = async () =&amp;gt; {
      try {
        const params = queryIds ? `?${queryIds.map(id =&amp;gt; `query_ids=${id}`).join('&amp;amp;')}` : '';
        const response = await ApiService.get(`/user/${encodeURIComponent(userName)}/queries${params}`);
        setData(response.data);
      } catch (error) {
        console.error('Failed to fetch user queries:', error);
      } finally {
        setLoading(false);
      }
    };
    fetchData();
  }, [userName, queryIds]);

  if (loading) return &amp;lt;LoadingSpinner /&amp;gt;;

  return (
    &amp;lt;div className="bg-white shadow-lg rounded-lg overflow-hidden"&amp;gt;
      &amp;lt;div className="px-6 py-4 border-b border-gray-200"&amp;gt;
        &amp;lt;div className="flex items-center justify-between"&amp;gt;
          &amp;lt;h2 className="text-xl font-semibold text-gray-800 flex items-center space-x-2"&amp;gt;
            &amp;lt;UserIcon className="w-6 h-6" /&amp;gt;
            &amp;lt;span&amp;gt;Queries by {userName}&amp;lt;/span&amp;gt;
          &amp;lt;/h2&amp;gt;
          &amp;lt;Button onClick={onBack} variant="outline"&amp;gt;
            Back
          &amp;lt;/Button&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="overflow-x-auto"&amp;gt;
        &amp;lt;table className="min-w-full divide-y divide-gray-200"&amp;gt;
          &amp;lt;thead className="bg-gray-50"&amp;gt;
            &amp;lt;tr&amp;gt;
              &amp;lt;TableHeader&amp;gt;Query ID&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Start Time&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Duration&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Status&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Warehouse&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Bytes Scanned&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Actions&amp;lt;/TableHeader&amp;gt;
            &amp;lt;/tr&amp;gt;
          &amp;lt;/thead&amp;gt;
          &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
            {data.map((query) =&amp;gt; (
              &amp;lt;tr key={query.QUERY_ID} className="hover:bg-gray-50"&amp;gt;
                &amp;lt;TableCell className="font-mono text-xs"&amp;gt;
                  {query.QUERY_ID.substring(0, 16)}...
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {new Date(query.START_TIME).toLocaleString()}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Badge variant={query.DURATION_BUCKET === '5+ minutes' ? 'warning' : 'default'}&amp;gt;
                    {query.DURATION_BUCKET}
                  &amp;lt;/Badge&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Badge variant={query.EXECUTION_STATUS === 'SUCCESS' ? 'success' : 'error'}&amp;gt;
                    {query.EXECUTION_STATUS}
                  &amp;lt;/Badge&amp;gt;
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {query.WAREHOUSE_NAME}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {formatBytes(query.BYTES_SCANNED)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  &amp;lt;Button
                    size="sm"
                    variant="outline"
                    icon={DocumentMagnifyingGlassIcon}
                    onClick={() =&amp;gt; setSelectedQuery(query.QUERY_ID)}
                  &amp;gt;
                    View Details
                  &amp;lt;/Button&amp;gt;
                &amp;lt;/TableCell&amp;gt;
              &amp;lt;/tr&amp;gt;
            ))}
          &amp;lt;/tbody&amp;gt;
        &amp;lt;/table&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;QueryDetailsModal
        queryId={selectedQuery}
        isOpen={!!selectedQuery}
        onClose={() =&amp;gt; setSelectedQuery(null)}
      /&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// User Sample Queries Component
const UserSampleQueriesTable = ({ userName, queryType, queryLabel, onBack }) =&amp;gt; {
  const [data, setData] = useState([]);
  const [loading, setLoading] = useState(true);
  const [selectedQuery, setSelectedQuery] = useState(null);

  useEffect(() =&amp;gt; {
    const fetchData = async () =&amp;gt; {
      try {
        const response = await ApiService.get(`/user/${encodeURIComponent(userName)}/sample-queries/${queryType}`);
        setData(response.data);
      } catch (error) {
        console.error('Failed to fetch user sample queries:', error);
      } finally {
        setLoading(false);
      }
    };
    fetchData();
  }, [userName, queryType]);

  if (loading) return &amp;lt;LoadingSpinner /&amp;gt;;

  return (
    &amp;lt;div className="bg-white shadow-lg rounded-lg overflow-hidden"&amp;gt;
      &amp;lt;div className="px-6 py-4 border-b border-gray-200"&amp;gt;
        &amp;lt;div className="flex items-center justify-between"&amp;gt;
          &amp;lt;h2 className="text-xl font-semibold text-gray-800 flex items-center space-x-2"&amp;gt;
            &amp;lt;UserIcon className="w-6 h-6" /&amp;gt;
            &amp;lt;span&amp;gt;{userName} - {queryLabel}&amp;lt;/span&amp;gt;
          &amp;lt;/h2&amp;gt;
          &amp;lt;Button onClick={onBack} variant="outline"&amp;gt;
            Back
          &amp;lt;/Button&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;div className="overflow-x-auto"&amp;gt;
        &amp;lt;table className="min-w-full divide-y divide-gray-200"&amp;gt;
          &amp;lt;thead className="bg-gray-50"&amp;gt;
            &amp;lt;tr&amp;gt;
              &amp;lt;TableHeader&amp;gt;Query ID&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Start Time&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Duration&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Bytes Scanned&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Query Preview&amp;lt;/TableHeader&amp;gt;
              &amp;lt;TableHeader&amp;gt;Actions&amp;lt;/TableHeader&amp;gt;
            &amp;lt;/tr&amp;gt;
          &amp;lt;/thead&amp;gt;
          &amp;lt;tbody className="bg-white divide-y divide-gray-200"&amp;gt;
            {data.map((query, index) =&amp;gt; (
              &amp;lt;tr key={query.query_id || index} className="hover:bg-gray-50"&amp;gt;
                &amp;lt;TableCell className="font-mono text-xs"&amp;gt;
                  {query.query_id ? `${query.query_id.substring(0, 16)}...` : 'N/A'}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {query.start_time ? new Date(query.start_time).toLocaleString() : 'N/A'}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {formatDuration(query.execution_time_ms)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {formatBytes(query.bytes_scanned)}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell className="max-w-xs truncate"&amp;gt;
                  {query.query_text ? query.query_text.substring(0, 50) + '...' : 'N/A'}
                &amp;lt;/TableCell&amp;gt;
                &amp;lt;TableCell&amp;gt;
                  {query.query_id &amp;amp;&amp;amp; (
                    &amp;lt;Button
                      size="sm"
                      variant="outline"
                      icon={DocumentMagnifyingGlassIcon}
                      onClick={() =&amp;gt; setSelectedQuery(query.query_id)}
                    &amp;gt;
                      View Details
                    &amp;lt;/Button&amp;gt;
                  )}
                &amp;lt;/TableCell&amp;gt;
              &amp;lt;/tr&amp;gt;
            ))}
          &amp;lt;/tbody&amp;gt;
        &amp;lt;/table&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;QueryDetailsModal
        queryId={selectedQuery}
        isOpen={!!selectedQuery}
        onClose={() =&amp;gt; setSelectedQuery(null)}
      /&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

// Main Application Component
const App = () =&amp;gt; {
  const [currentView, setCurrentView] = useState('dashboard');
  const [viewData, setViewData] = useState({});
  const [navigationHistory, setNavigationHistory] = useState([]);

  const handleDrillDown = (view, data) =&amp;gt; {
    setNavigationHistory(prev =&amp;gt; [...prev, { view: currentView, data: viewData }]);
    setCurrentView(view);
    setViewData(data);
  };

  const handleBack = () =&amp;gt; {
    const lastView = navigationHistory[navigationHistory.length - 1];
    if (lastView) {
      setCurrentView(lastView.view);
      setViewData(lastView.data);
      setNavigationHistory(prev =&amp;gt; prev.slice(0, -1));
    }
  };

  const renderCurrentView = () =&amp;gt; {
    switch (currentView) {
      case 'dashboard':
        return (
          &amp;lt;div className="space-y-8"&amp;gt;
            &amp;lt;WarehouseTable onDrillDown={handleDrillDown} /&amp;gt;
            &amp;lt;UserPerformanceTable onDrillDown={handleDrillDown} /&amp;gt;
          &amp;lt;/div&amp;gt;
        );

      case 'warehouse-queries':
        return (
          &amp;lt;WarehouseQueryUsers
            warehouseId={viewData.warehouseId}
            warehouseName={viewData.warehouseName}
            queryType={viewData.queryType}
            queryLabel={viewData.queryLabel}
            onDrillDown={handleDrillDown}
            onBack={handleBack}
          /&amp;gt;
        );

      case 'user-queries':
        return (
          &amp;lt;UserQueriesTable
            userName={viewData.userName}
            queryIds={viewData.queryIds}
            onDrillDown={handleDrillDown}
            onBack={handleBack}
          /&amp;gt;
        );

      case 'user-sample-queries':
        return (
          &amp;lt;UserSampleQueriesTable
            userName={viewData.userName}
            queryType={viewData.queryType}
            queryLabel={viewData.queryLabel}
            onBack={handleBack}
          /&amp;gt;
        );

      default:
        return (
          &amp;lt;div className="text-center py-8"&amp;gt;
            &amp;lt;p className="text-gray-500"&amp;gt;View not found&amp;lt;/p&amp;gt;
            &amp;lt;Button onClick={() =&amp;gt; setCurrentView('dashboard')} className="mt-4"&amp;gt;
              Return to Dashboard
            &amp;lt;/Button&amp;gt;
          &amp;lt;/div&amp;gt;
        );
    }
  };

  return (
    &amp;lt;div className="min-h-screen bg-gray-100"&amp;gt;
      &amp;lt;div className="bg-white shadow"&amp;gt;
        &amp;lt;div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8"&amp;gt;
          &amp;lt;div className="flex justify-between items-center py-6"&amp;gt;
            &amp;lt;div className="flex items-center space-x-4"&amp;gt;
              &amp;lt;h1 className="text-3xl font-bold text-gray-900"&amp;gt;
                Snowflake Analytics Dashboard
              &amp;lt;/h1&amp;gt;
              {currentView !== 'dashboard' &amp;amp;&amp;amp; (
                &amp;lt;Button onClick={() =&amp;gt; setCurrentView('dashboard')} variant="outline"&amp;gt;
                  Dashboard
                &amp;lt;/Button&amp;gt;
              )}
            &amp;lt;/div&amp;gt;
            &amp;lt;div className="flex items-center space-x-2 text-sm text-gray-500"&amp;gt;
              &amp;lt;ClockIcon className="w-4 h-4" /&amp;gt;
              &amp;lt;span&amp;gt;Last updated: {new Date().toLocaleString()}&amp;lt;/span&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      &amp;lt;/div&amp;gt;

      &amp;lt;main className="max-w-7xl mx-auto py-6 sm:px-6 lg:px-8"&amp;gt;
        &amp;lt;div className="px-4 py-6 sm:px-0"&amp;gt;
          {renderCurrentView()}
        &amp;lt;/div&amp;gt;
      &amp;lt;/main&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

export default App;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>Table design</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Wed, 06 Aug 2025 03:09:36 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/table-design-19h6</link>
      <guid>https://dev.to/armaankhan8270/table-design-19h6</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import React, { useState, useMemo, useCallback, useEffect } from 'react';
import { 
  Search, 
  ChevronUp, 
  ChevronDown, 
  Filter, 
  Eye, 
  Edit, 
  Trash2, 
  User, 
  Mail, 
  Phone, 
  Settings,
  Download,
  Plus,
  X,
  Check,
  Star,
  ArrowUpDown,
  MoreHorizontal,
  Calendar,
  Building,
  Shield,
  Activity,
  TrendingUp,
  Zap
} from 'lucide-react';

const PremiumTable = () =&amp;gt; {
  // Enhanced sample data with more fields
  const [data] = useState([
    { 
      id: 1, 
      name: 'Alexandra Chen', 
      email: 'alex.chen@company.com', 
      role: 'Senior Engineer', 
      status: 'Active', 
      age: 28, 
      department: 'Engineering',
      salary: 125000,
      joinDate: '2023-01-15',
      performance: 95,
      projects: 12,
      avatar: 'AC',
      phone: '+1 (555) 123-4567',
      location: 'San Francisco, CA'
    },
    { 
      id: 2, 
      name: 'Marcus Rodriguez', 
      email: 'marcus.r@company.com', 
      role: 'Product Manager', 
      status: 'Active', 
      age: 32, 
      department: 'Product',
      salary: 135000,
      joinDate: '2022-08-20',
      performance: 88,
      projects: 8,
      avatar: 'MR',
      phone: '+1 (555) 987-6543',
      location: 'New York, NY'
    },
    { 
      id: 3, 
      name: 'Sarah Kim', 
      email: 'sarah.kim@company.com', 
      role: 'Design Lead', 
      status: 'Active', 
      age: 29, 
      department: 'Design',
      salary: 118000,
      joinDate: '2023-03-10',
      performance: 92,
      projects: 15,
      avatar: 'SK',
      phone: '+1 (555) 456-7890',
      location: 'Austin, TX'
    },
    { 
      id: 4, 
      name: 'David Thompson', 
      email: 'david.t@company.com', 
      role: 'Data Scientist', 
      status: 'Inactive', 
      age: 35, 
      department: 'Analytics',
      salary: 140000,
      joinDate: '2021-11-05',
      performance: 78,
      projects: 6,
      avatar: 'DT',
      phone: '+1 (555) 321-0987',
      location: 'Seattle, WA'
    },
    { 
      id: 5, 
      name: 'Emily Watson', 
      email: 'emily.watson@company.com', 
      role: 'Marketing Director', 
      status: 'Active', 
      age: 38, 
      department: 'Marketing',
      salary: 145000,
      joinDate: '2020-06-12',
      performance: 91,
      projects: 20,
      avatar: 'EW',
      phone: '+1 (555) 654-3210',
      location: 'Los Angeles, CA'
    },
    { 
      id: 6, 
      name: 'James Wilson', 
      email: 'james.wilson@company.com', 
      role: 'DevOps Engineer', 
      status: 'Active', 
      age: 31, 
      department: 'Engineering',
      salary: 128000,
      joinDate: '2022-12-01',
      performance: 89,
      projects: 9,
      avatar: 'JW',
      phone: '+1 (555) 789-0123',
      location: 'Denver, CO'
    }
  ]);

  const [searchTerm, setSearchTerm] = useState('');
  const [sortConfig, setSortConfig] = useState({ key: 'performance', direction: 'desc' });
  const [selectedRows, setSelectedRows] = useState(new Set());
  const [viewMode, setViewMode] = useState('table'); // table, grid, compact
  const [showAdvancedFilters, setShowAdvancedFilters] = useState(false);
  const [animateRows, setAnimateRows] = useState(false);

  const [filters, setFilters] = useState({
    role: '',
    status: '',
    department: '',
    salaryMin: '',
    salaryMax: '',
    performanceMin: 0,
    joinDateFrom: '',
    joinDateTo: ''
  });

  useEffect(() =&amp;gt; {
    setAnimateRows(true);
    const timer = setTimeout(() =&amp;gt; setAnimateRows(false), 300);
    return () =&amp;gt; clearTimeout(timer);
  }, [sortConfig, filters, searchTerm]);

  // Enhanced column configuration
  const columns = [
    {
      key: 'select',
      header: '',
      width: '60px',
      render: (item) =&amp;gt; (
        &amp;lt;div className="flex items-center justify-center"&amp;gt;
          &amp;lt;input
            type="checkbox"
            checked={selectedRows.has(item.id)}
            onChange={(e) =&amp;gt; handleRowSelect(item.id, e.target.checked)}
            className="w-4 h-4 text-indigo-600 bg-gray-100 border-2 border-gray-300 rounded-md focus:ring-indigo-500 focus:ring-2 transition-all duration-200"
          /&amp;gt;
        &amp;lt;/div&amp;gt;
      ),
      sortable: false
    },
    {
      key: 'name',
      header: 'Employee',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="flex items-center space-x-4 group cursor-pointer"&amp;gt;
          &amp;lt;div className="relative"&amp;gt;
            &amp;lt;div className="w-12 h-12 bg-gradient-to-br from-indigo-400 to-purple-600 rounded-xl flex items-center justify-center text-white font-bold text-sm shadow-lg group-hover:shadow-xl transition-all duration-300 group-hover:scale-105"&amp;gt;
              {item.avatar}
            &amp;lt;/div&amp;gt;
            &amp;lt;div className={`absolute -bottom-1 -right-1 w-4 h-4 rounded-full border-2 border-white ${
              item.status === 'Active' ? 'bg-emerald-400' : 'bg-gray-400'
            }`}&amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
          &amp;lt;div className="flex-1"&amp;gt;
            &amp;lt;div className="font-semibold text-gray-900 group-hover:text-indigo-600 transition-colors duration-200"&amp;gt;
              {item.name}
            &amp;lt;/div&amp;gt;
            &amp;lt;div className="text-sm text-gray-500 flex items-center space-x-2"&amp;gt;
              &amp;lt;span&amp;gt;{item.location}&amp;lt;/span&amp;gt;
              &amp;lt;span&amp;gt;•&amp;lt;/span&amp;gt;
              &amp;lt;span&amp;gt;ID: {item.id}&amp;lt;/span&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      ),
      onClick: (item) =&amp;gt; handleEmployeeClick(item)
    },
    {
      key: 'role',
      header: 'Role &amp;amp; Department',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="space-y-1"&amp;gt;
          &amp;lt;div className="font-medium text-gray-900"&amp;gt;{item.role}&amp;lt;/div&amp;gt;
          &amp;lt;div className="flex items-center space-x-1"&amp;gt;
            &amp;lt;Building className="w-3 h-3 text-gray-400" /&amp;gt;
            &amp;lt;span className="text-sm text-gray-600"&amp;gt;{item.department}&amp;lt;/span&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'performance',
      header: 'Performance',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="space-y-2"&amp;gt;
          &amp;lt;div className="flex items-center justify-between"&amp;gt;
            &amp;lt;span className="text-sm font-medium text-gray-900"&amp;gt;{item.performance}%&amp;lt;/span&amp;gt;
            &amp;lt;div className="flex items-center space-x-1"&amp;gt;
              {item.performance &amp;gt;= 90 ? (
                &amp;lt;Star className="w-4 h-4 text-yellow-500 fill-current" /&amp;gt;
              ) : item.performance &amp;gt;= 80 ? (
                &amp;lt;TrendingUp className="w-4 h-4 text-green-500" /&amp;gt;
              ) : (
                &amp;lt;Activity className="w-4 h-4 text-orange-500" /&amp;gt;
              )}
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
          &amp;lt;div className="w-full bg-gray-200 rounded-full h-2 overflow-hidden"&amp;gt;
            &amp;lt;div 
              className={`h-2 rounded-full transition-all duration-500 ${
                item.performance &amp;gt;= 90 ? 'bg-gradient-to-r from-green-400 to-emerald-500' :
                item.performance &amp;gt;= 80 ? 'bg-gradient-to-r from-blue-400 to-indigo-500' :
                item.performance &amp;gt;= 70 ? 'bg-gradient-to-r from-yellow-400 to-orange-500' :
                'bg-gradient-to-r from-red-400 to-pink-500'
              }`}
              style={{ width: `${item.performance}%` }}
            &amp;gt;&amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'salary',
      header: 'Compensation',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="space-y-1"&amp;gt;
          &amp;lt;div className="font-bold text-gray-900"&amp;gt;
            ${item.salary.toLocaleString()}
          &amp;lt;/div&amp;gt;
          &amp;lt;div className="text-xs text-gray-500"&amp;gt;Annual&amp;lt;/div&amp;gt;
          &amp;lt;div className="text-xs text-gray-400"&amp;gt;{item.projects} projects&amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'contact',
      header: 'Contact',
      sortable: false,
      render: (item) =&amp;gt; (
        &amp;lt;div className="space-y-2"&amp;gt;
          &amp;lt;div 
            className="flex items-center space-x-2 cursor-pointer text-indigo-600 hover:text-indigo-800 transition-colors"
            onClick={(e) =&amp;gt; {e.stopPropagation(); handleEmailClick(item);}}
          &amp;gt;
            &amp;lt;Mail className="w-4 h-4" /&amp;gt;
            &amp;lt;span className="text-sm truncate max-w-32"&amp;gt;{item.email}&amp;lt;/span&amp;gt;
          &amp;lt;/div&amp;gt;
          &amp;lt;div 
            className="flex items-center space-x-2 cursor-pointer text-gray-600 hover:text-gray-800 transition-colors"
            onClick={(e) =&amp;gt; {e.stopPropagation(); handlePhoneClick(item);}}
          &amp;gt;
            &amp;lt;Phone className="w-4 h-4" /&amp;gt;
            &amp;lt;span className="text-sm"&amp;gt;{item.phone}&amp;lt;/span&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'joinDate',
      header: 'Join Date',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="space-y-1"&amp;gt;
          &amp;lt;div className="flex items-center space-x-2"&amp;gt;
            &amp;lt;Calendar className="w-4 h-4 text-gray-400" /&amp;gt;
            &amp;lt;span className="text-sm text-gray-900"&amp;gt;
              {new Date(item.joinDate).toLocaleDateString()}
            &amp;lt;/span&amp;gt;
          &amp;lt;/div&amp;gt;
          &amp;lt;div className="text-xs text-gray-500"&amp;gt;
            {Math.floor((new Date() - new Date(item.joinDate)) / (1000 * 60 * 60 * 24 * 365))} years
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'status',
      header: 'Status',
      sortable: true,
      render: (item) =&amp;gt; (
        &amp;lt;div className="flex flex-col items-center space-y-2"&amp;gt;
          &amp;lt;span className={`px-3 py-1 text-xs font-bold rounded-full shadow-sm ${
            item.status === 'Active' 
              ? 'bg-emerald-100 text-emerald-800 border border-emerald-200' 
              : 'bg-gray-100 text-gray-800 border border-gray-200'
          }`}&amp;gt;
            {item.status}
          &amp;lt;/span&amp;gt;
          {item.status === 'Active' &amp;amp;&amp;amp; (
            &amp;lt;Zap className="w-4 h-4 text-emerald-500 animate-pulse" /&amp;gt;
          )}
        &amp;lt;/div&amp;gt;
      )
    },
    {
      key: 'actions',
      header: 'Actions',
      width: '140px',
      render: (item) =&amp;gt; (
        &amp;lt;div className="flex items-center space-x-1"&amp;gt;
          &amp;lt;button
            onClick={(e) =&amp;gt; {e.stopPropagation(); handleView(item);}}
            className="p-2 text-blue-600 hover:text-blue-800 hover:bg-blue-50 rounded-lg transition-all duration-200 group"
            title="View Details"
          &amp;gt;
            &amp;lt;Eye className="w-4 h-4 group-hover:scale-110 transition-transform" /&amp;gt;
          &amp;lt;/button&amp;gt;
          &amp;lt;button
            onClick={(e) =&amp;gt; {e.stopPropagation(); handleEdit(item);}}
            className="p-2 text-emerald-600 hover:text-emerald-800 hover:bg-emerald-50 rounded-lg transition-all duration-200 group"
            title="Edit Employee"
          &amp;gt;
            &amp;lt;Edit className="w-4 h-4 group-hover:scale-110 transition-transform" /&amp;gt;
          &amp;lt;/button&amp;gt;
          &amp;lt;button
            onClick={(e) =&amp;gt; {e.stopPropagation(); handleDelete(item);}}
            className="p-2 text-red-600 hover:text-red-800 hover:bg-red-50 rounded-lg transition-all duration-200 group"
            title="Delete Employee"
          &amp;gt;
            &amp;lt;Trash2 className="w-4 h-4 group-hover:scale-110 transition-transform" /&amp;gt;
          &amp;lt;/button&amp;gt;
          &amp;lt;div className="relative group"&amp;gt;
            &amp;lt;button className="p-2 text-gray-600 hover:text-gray-800 hover:bg-gray-50 rounded-lg transition-all duration-200"&amp;gt;
              &amp;lt;MoreHorizontal className="w-4 h-4" /&amp;gt;
            &amp;lt;/button&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;
      ),
      sortable: false
    }
  ];

  // Enhanced click handlers
  const handleEmployeeClick = (item) =&amp;gt; {
    alert(`👤 Employee Profile: ${item.name}\n📧 ${item.email}\n🏢 ${item.role} at ${item.department}\n📊 Performance: ${item.performance}%`);
  };

  const handleEmailClick = (item) =&amp;gt; {
    window.open(`mailto:${item.email}?subject=Hello ${item.name}`, '_blank');
  };

  const handlePhoneClick = (item) =&amp;gt; {
    navigator.clipboard.writeText(item.phone);
    alert(`📋 Phone number copied: ${item.phone}`);
  };

  const handleView = (item) =&amp;gt; {
    alert(`👁️ Viewing detailed profile for ${item.name}`);
  };

  const handleEdit = (item) =&amp;gt; {
    alert(`✏️ Opening edit form for ${item.name}`);
  };

  const handleDelete = (item) =&amp;gt; {
    if (window.confirm(`🗑️ Are you sure you want to delete ${item.name}?`)) {
      alert(`❌ ${item.name} has been deleted`);
    }
  };

  // Enhanced selection handlers
  const handleRowSelect = (id, checked) =&amp;gt; {
    const newSelected = new Set(selectedRows);
    if (checked) {
      newSelected.add(id);
    } else {
      newSelected.delete(id);
    }
    setSelectedRows(newSelected);
  };

  const handleSelectAll = (checked) =&amp;gt; {
    if (checked) {
      setSelectedRows(new Set(filteredAndSortedData.map(item =&amp;gt; item.id)));
    } else {
      setSelectedRows(new Set());
    }
  };

  // Enhanced sorting with animation
  const handleSort = useCallback((key) =&amp;gt; {
    setSortConfig(prevConfig =&amp;gt; ({
      key,
      direction: prevConfig.key === key &amp;amp;&amp;amp; prevConfig.direction === 'asc' ? 'desc' : 'asc'
    }));
  }, []);

  // Advanced filtering and sorting
  const filteredAndSortedData = useMemo(() =&amp;gt; {
    let result = [...data];

    // Global search
    if (searchTerm) {
      result = result.filter(item =&amp;gt;
        Object.values(item).some(value =&amp;gt;
          value.toString().toLowerCase().includes(searchTerm.toLowerCase())
        )
      );
    }

    // Advanced filters
    if (filters.role) {
      result = result.filter(item =&amp;gt; 
        item.role.toLowerCase().includes(filters.role.toLowerCase())
      );
    }

    if (filters.status) {
      result = result.filter(item =&amp;gt; item.status === filters.status);
    }

    if (filters.department) {
      result = result.filter(item =&amp;gt; item.department === filters.department);
    }

    if (filters.salaryMin) {
      result = result.filter(item =&amp;gt; item.salary &amp;gt;= parseInt(filters.salaryMin));
    }

    if (filters.salaryMax) {
      result = result.filter(item =&amp;gt; item.salary &amp;lt;= parseInt(filters.salaryMax));
    }

    if (filters.performanceMin &amp;gt; 0) {
      result = result.filter(item =&amp;gt; item.performance &amp;gt;= filters.performanceMin);
    }

    if (filters.joinDateFrom) {
      result = result.filter(item =&amp;gt; new Date(item.joinDate) &amp;gt;= new Date(filters.joinDateFrom));
    }

    if (filters.joinDateTo) {
      result = result.filter(item =&amp;gt; new Date(item.joinDate) &amp;lt;= new Date(filters.joinDateTo));
    }

    // Enhanced sorting
    if (sortConfig.key) {
      result.sort((a, b) =&amp;gt; {
        let aValue = a[sortConfig.key];
        let bValue = b[sortConfig.key];

        if (sortConfig.key === 'joinDate') {
          aValue = new Date(aValue);
          bValue = new Date(bValue);
        }

        if (aValue &amp;lt; bValue) return sortConfig.direction === 'asc' ? -1 : 1;
        if (aValue &amp;gt; bValue) return sortConfig.direction === 'asc' ? 1 : -1;
        return 0;
      });
    }

    return result;
  }, [data, searchTerm, filters, sortConfig]);

  const getUniqueValues = (key) =&amp;gt; {
    return [...new Set(data.map(item =&amp;gt; item[key]))].sort();
  };

  const clearFilters = () =&amp;gt; {
    setFilters({
      role: '',
      status: '',
      department: '',
      salaryMin: '',
      salaryMax: '',
      performanceMin: 0,
      joinDateFrom: '',
      joinDateTo: ''
    });
    setSearchTerm('');
  };

  return (
    &amp;lt;div className="min-h-screen bg-gradient-to-br from-slate-50 via-blue-50 to-indigo-50 p-6"&amp;gt;
      &amp;lt;div className="max-w-7xl mx-auto"&amp;gt;
        {/* Premium Header */}
        &amp;lt;div className="bg-white rounded-2xl shadow-xl border border-gray-100 mb-6 overflow-hidden"&amp;gt;
          &amp;lt;div className="bg-gradient-to-r from-indigo-600 via-purple-600 to-indigo-800 px-8 py-6"&amp;gt;
            &amp;lt;div className="flex items-center justify-between"&amp;gt;
              &amp;lt;div&amp;gt;
                &amp;lt;h1 className="text-3xl font-bold text-white mb-2"&amp;gt;Employee Directory&amp;lt;/h1&amp;gt;
                &amp;lt;p className="text-indigo-200"&amp;gt;
                  {filteredAndSortedData.length} of {data.length} employees
                  {selectedRows.size &amp;gt; 0 &amp;amp;&amp;amp; (
                    &amp;lt;span className="ml-2 px-3 py-1 bg-white/20 rounded-full text-sm"&amp;gt;
                      {selectedRows.size} selected
                    &amp;lt;/span&amp;gt;
                  )}
                &amp;lt;/p&amp;gt;
              &amp;lt;/div&amp;gt;
              &amp;lt;div className="flex items-center space-x-4"&amp;gt;
                &amp;lt;button className="px-6 py-3 bg-white/20 hover:bg-white/30 text-white rounded-xl font-semibold transition-all duration-200 flex items-center space-x-2"&amp;gt;
                  &amp;lt;Plus className="w-5 h-5" /&amp;gt;
                  &amp;lt;span&amp;gt;Add Employee&amp;lt;/span&amp;gt;
                &amp;lt;/button&amp;gt;
                &amp;lt;button className="p-3 bg-white/20 hover:bg-white/30 text-white rounded-xl transition-all duration-200"&amp;gt;
                  &amp;lt;Settings className="w-5 h-5" /&amp;gt;
                &amp;lt;/button&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;

          {/* Enhanced Controls */}
          &amp;lt;div className="p-6 bg-gradient-to-r from-gray-50 to-white"&amp;gt;
            &amp;lt;div className="flex flex-wrap gap-4 items-center justify-between mb-4"&amp;gt;
              {/* Search with enhanced styling */}
              &amp;lt;div className="relative flex-1 max-w-md"&amp;gt;
                &amp;lt;Search className="absolute left-4 top-1/2 transform -translate-y-1/2 text-gray-400 w-5 h-5" /&amp;gt;
                &amp;lt;input
                  type="text"
                  placeholder="Search employees, roles, departments..."
                  value={searchTerm}
                  onChange={(e) =&amp;gt; setSearchTerm(e.target.value)}
                  className="w-full pl-12 pr-4 py-3 bg-white border-2 border-gray-200 rounded-xl focus:ring-4 focus:ring-indigo-100 focus:border-indigo-500 transition-all duration-200 shadow-sm"
                /&amp;gt;
              &amp;lt;/div&amp;gt;

              {/* Filter Controls */}
              &amp;lt;div className="flex items-center space-x-3"&amp;gt;
                &amp;lt;button
                  onClick={() =&amp;gt; setShowAdvancedFilters(!showAdvancedFilters)}
                  className={`px-4 py-2 rounded-lg font-medium transition-all duration-200 flex items-center space-x-2 ${
                    showAdvancedFilters 
                      ? 'bg-indigo-100 text-indigo-700 border-2 border-indigo-200' 
                      : 'bg-gray-100 text-gray-700 border-2 border-gray-200 hover:bg-gray-200'
                  }`}
                &amp;gt;
                  &amp;lt;Filter className="w-4 h-4" /&amp;gt;
                  &amp;lt;span&amp;gt;Advanced Filters&amp;lt;/span&amp;gt;
                &amp;lt;/button&amp;gt;

                {selectedRows.size &amp;gt; 0 &amp;amp;&amp;amp; (
                  &amp;lt;div className="flex space-x-2 animate-in slide-in-from-right duration-300"&amp;gt;
                    &amp;lt;button className="px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-white rounded-lg font-medium transition-all duration-200 flex items-center space-x-2"&amp;gt;
                      &amp;lt;Download className="w-4 h-4" /&amp;gt;
                      &amp;lt;span&amp;gt;Export&amp;lt;/span&amp;gt;
                    &amp;lt;/button&amp;gt;
                    &amp;lt;button className="px-4 py-2 bg-red-600 hover:bg-red-700 text-white rounded-lg font-medium transition-all duration-200"&amp;gt;
                      &amp;lt;Trash2 className="w-4 h-4" /&amp;gt;
                    &amp;lt;/button&amp;gt;
                  &amp;lt;/div&amp;gt;
                )}
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;

            {/* Advanced Filters Panel */}
            {showAdvancedFilters &amp;amp;&amp;amp; (
              &amp;lt;div className="bg-white rounded-xl border-2 border-gray-100 p-6 animate-in slide-in-from-top duration-300"&amp;gt;
                &amp;lt;div className="flex items-center justify-between mb-4"&amp;gt;
                  &amp;lt;h3 className="text-lg font-semibold text-gray-900"&amp;gt;Advanced Filters&amp;lt;/h3&amp;gt;
                  &amp;lt;button
                    onClick={clearFilters}
                    className="text-sm text-indigo-600 hover:text-indigo-800 font-medium"
                  &amp;gt;
                    Clear All
                  &amp;lt;/button&amp;gt;
                &amp;lt;/div&amp;gt;

                &amp;lt;div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4"&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Role&amp;lt;/label&amp;gt;
                    &amp;lt;select
                      value={filters.role}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, role: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    &amp;gt;
                      &amp;lt;option value=""&amp;gt;All Roles&amp;lt;/option&amp;gt;
                      {getUniqueValues('role').map(role =&amp;gt; (
                        &amp;lt;option key={role} value={role}&amp;gt;{role}&amp;lt;/option&amp;gt;
                      ))}
                    &amp;lt;/select&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Department&amp;lt;/label&amp;gt;
                    &amp;lt;select
                      value={filters.department}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, department: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    &amp;gt;
                      &amp;lt;option value=""&amp;gt;All Departments&amp;lt;/option&amp;gt;
                      {getUniqueValues('department').map(dept =&amp;gt; (
                        &amp;lt;option key={dept} value={dept}&amp;gt;{dept}&amp;lt;/option&amp;gt;
                      ))}
                    &amp;lt;/select&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Status&amp;lt;/label&amp;gt;
                    &amp;lt;select
                      value={filters.status}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, status: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    &amp;gt;
                      &amp;lt;option value=""&amp;gt;All Status&amp;lt;/option&amp;gt;
                      {getUniqueValues('status').map(status =&amp;gt; (
                        &amp;lt;option key={status} value={status}&amp;gt;{status}&amp;lt;/option&amp;gt;
                      ))}
                    &amp;lt;/select&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Min Performance&amp;lt;/label&amp;gt;
                    &amp;lt;input
                      type="range"
                      min="0"
                      max="100"
                      value={filters.performanceMin}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, performanceMin: parseInt(e.target.value) }))}
                      className="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer"
                    /&amp;gt;
                    &amp;lt;div className="text-sm text-gray-600 mt-1"&amp;gt;{filters.performanceMin}%+&amp;lt;/div&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;

                &amp;lt;div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4 mt-4"&amp;gt;
                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Min Salary&amp;lt;/label&amp;gt;
                    &amp;lt;input
                      type="number"
                      placeholder="50000"
                      value={filters.salaryMin}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, salaryMin: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    /&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Max Salary&amp;lt;/label&amp;gt;
                    &amp;lt;input
                      type="number"
                      placeholder="200000"
                      value={filters.salaryMax}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, salaryMax: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    /&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Join Date From&amp;lt;/label&amp;gt;
                    &amp;lt;input
                      type="date"
                      value={filters.joinDateFrom}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, joinDateFrom: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    /&amp;gt;
                  &amp;lt;/div&amp;gt;

                  &amp;lt;div&amp;gt;
                    &amp;lt;label className="block text-sm font-medium text-gray-700 mb-2"&amp;gt;Join Date To&amp;lt;/label&amp;gt;
                    &amp;lt;input
                      type="date"
                      value={filters.joinDateTo}
                      onChange={(e) =&amp;gt; setFilters(prev =&amp;gt; ({ ...prev, joinDateTo: e.target.value }))}
                      className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500"
                    /&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
            )}
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Premium Table */}
        &amp;lt;div className="bg-white rounded-2xl shadow-xl border border-gray-100 overflow-hidden"&amp;gt;
          &amp;lt;div className="overflow-x-auto"&amp;gt;
            &amp;lt;table className="w-full"&amp;gt;
              &amp;lt;thead&amp;gt;
                &amp;lt;tr className="bg-gradient-to-r from-gray-50 to-gray-100 border-b border-gray-200"&amp;gt;
                  &amp;lt;th className="px-6 py-4"&amp;gt;
                    &amp;lt;input
                      type="checkbox"
                      checked={selectedRows.size === filteredAndSortedData.length &amp;amp;&amp;amp; filteredAndSortedData.length &amp;gt; 0}
                      onChange={(e) =&amp;gt; handleSelectAll(e.target.checked)}
                      className="w-4 h-4 text-indigo-600 bg-gray-100 border-2 border-gray-300 rounded-md focus:ring-indigo-500 focus:ring-2"
                    /&amp;gt;
                  &amp;lt;/th&amp;gt;
                  {columns.slice(1).map((column) =&amp;gt; (
                    &amp;lt;th
                      key={column.key}
                      className={`px-6 py-4 text-left text-sm font-bold text-gray-700 uppercase tracking-wider ${
                        column.sortable ? 'cursor-pointer hover:bg-gray-200 transition-colors duration-200 select-none' : ''
                      }`}
                      style={{ width: column.width }}
                      onClick={() =&amp;gt; column.sortable &amp;amp;&amp;amp; handleSort(column.key)}
                    &amp;gt;
                      &amp;lt;div className="flex items-center space-x-2 group"&amp;gt;
                        &amp;lt;span className="group-hover:text-indigo-600 transition-colors"&amp;gt;{column.header}&amp;lt;/span&amp;gt;
                        {column.sortable &amp;amp;&amp;amp; (
                          &amp;lt;div className="flex flex-col opacity-50 group-hover:opacity-100 transition-opacity"&amp;gt;
                            {sortConfig.key === column.key ? (
                              sortConfig.direction === 'asc' ? (
                                &amp;lt;ChevronUp className="w-4 h-4 text-indigo-600" /&amp;gt;
                              ) : (
                                &amp;lt;ChevronDown className="w-4 h-4 text-indigo-600" /&amp;gt;
                              )
                            ) : (
                              &amp;lt;ArrowUpDown className="w-4 h-4 text-gray-400 group-hover:text-indigo-500" /&amp;gt;
                            )}
                          &amp;lt;/div&amp;gt;
                        )}
                      &amp;lt;/div&amp;gt;
                    &amp;lt;/th&amp;gt;
                  ))}
                &amp;lt;/tr&amp;gt;
              &amp;lt;/thead&amp;gt;
              &amp;lt;tbody className="divide-y divide-gray-100"&amp;gt;
                {filteredAndSortedData.map((item, index) =&amp;gt; (
                  &amp;lt;tr 
                    key={item.id} 
                    className={`
                      hover:bg-gradient-to-r hover:from-indigo-50 hover:to-purple-50 
                      transition-all duration-300 cursor-pointer group
                      ${selectedRows.has(item.id) ? 'bg-indigo-50 border-l-4 border-indigo-500' : ''}
                      ${animateRows ? 'animate-pulse' : ''}
                    `}
                    style={{
                      animationDelay: animateRows ? `${index * 50}ms` : '0ms'
                    }}
                  &amp;gt;
                    {columns.map((column) =&amp;gt; (
                      &amp;lt;td
                        key={column.key}
                        className={`px-6 py-6 whitespace-nowrap transition-all duration-200 ${
                          column.onClick ? 'cursor-pointer' : ''
                        }`}
                        onClick={() =&amp;gt; column.onClick &amp;amp;&amp;amp; column.onClick(item)}
                      &amp;gt;
                        {column.render ? column.render(item) : item[column.key]}
                      &amp;lt;/td&amp;gt;
                    ))}
                  &amp;lt;/tr&amp;gt;
                ))}
              &amp;lt;/tbody&amp;gt;
            &amp;lt;/table&amp;gt;

            {/* Empty State */}
            {filteredAndSortedData.length === 0 &amp;amp;&amp;amp; (
              &amp;lt;div className="text-center py-16"&amp;gt;
                &amp;lt;div className="w-24 h-24 mx-auto mb-4 bg-gradient-to-br from-gray-100 to-gray-200 rounded-full flex items-center justify-center"&amp;gt;
                  &amp;lt;Search className="w-12 h-12 text-gray-400" /&amp;gt;
                &amp;lt;/div&amp;gt;
                &amp;lt;h3 className="text-xl font-semibold text-gray-900 mb-2"&amp;gt;No employees found&amp;lt;/h3&amp;gt;
                &amp;lt;p className="text-gray-500 mb-6"&amp;gt;Try adjusting your search criteria or filters&amp;lt;/p&amp;gt;
                &amp;lt;button
                  onClick={clearFilters}
                  className="px-6 py-3 bg-indigo-600 hover:bg-indigo-700 text-white rounded-xl font-semibold transition-all duration-200"
                &amp;gt;
                  Clear all filters
                &amp;lt;/button&amp;gt;
              &amp;lt;/div&amp;gt;
            )}
          &amp;lt;/div&amp;gt;

          {/* Premium Footer with Analytics */}
          &amp;lt;div className="bg-gradient-to-r from-gray-50 to-white border-t border-gray-200 px-8 py-6"&amp;gt;
            &amp;lt;div className="flex flex-wrap items-center justify-between gap-4"&amp;gt;
              &amp;lt;div className="flex items-center space-x-8"&amp;gt;
                &amp;lt;div className="text-sm text-gray-600"&amp;gt;
                  &amp;lt;span className="font-semibold text-gray-900"&amp;gt;
                    {filteredAndSortedData.length}
                  &amp;lt;/span&amp;gt; of &amp;lt;span className="font-semibold"&amp;gt;{data.length}&amp;lt;/span&amp;gt; employees
                &amp;lt;/div&amp;gt;

                &amp;lt;div className="flex items-center space-x-6 text-sm text-gray-600"&amp;gt;
                  &amp;lt;div className="flex items-center space-x-2"&amp;gt;
                    &amp;lt;div className="w-3 h-3 bg-emerald-400 rounded-full"&amp;gt;&amp;lt;/div&amp;gt;
                    &amp;lt;span&amp;gt;Active: {data.filter(item =&amp;gt; item.status === 'Active').length}&amp;lt;/span&amp;gt;
                  &amp;lt;/div&amp;gt;
                  &amp;lt;div className="flex items-center space-x-2"&amp;gt;
                    &amp;lt;div className="w-3 h-3 bg-gray-400 rounded-full"&amp;gt;&amp;lt;/div&amp;gt;
                    &amp;lt;span&amp;gt;Inactive: {data.filter(item =&amp;gt; item.status === 'Inactive').length}&amp;lt;/span&amp;gt;
                  &amp;lt;/div&amp;gt;
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;

              &amp;lt;div className="flex items-center space-x-6"&amp;gt;
                &amp;lt;div className="text-sm text-gray-600"&amp;gt;
                  &amp;lt;span className="font-semibold"&amp;gt;Avg Performance:&amp;lt;/span&amp;gt; 
                  &amp;lt;span className="ml-1 text-indigo-600 font-bold"&amp;gt;
                    {Math.round(data.reduce((sum, item) =&amp;gt; sum + item.performance, 0) / data.length)}%
                  &amp;lt;/span&amp;gt;
                &amp;lt;/div&amp;gt;

                &amp;lt;div className="text-sm text-gray-600"&amp;gt;
                  &amp;lt;span className="font-semibold"&amp;gt;Total Projects:&amp;lt;/span&amp;gt; 
                  &amp;lt;span className="ml-1 text-purple-600 font-bold"&amp;gt;
                    {data.reduce((sum, item) =&amp;gt; sum + item.projects, 0)}
                  &amp;lt;/span&amp;gt;
                &amp;lt;/div&amp;gt;

                &amp;lt;div className="text-xs text-gray-500 bg-gray-100 px-3 py-2 rounded-full"&amp;gt;
                  Built with React &amp;amp; Premium UI
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Floating Action Button */}
        &amp;lt;div className="fixed bottom-8 right-8"&amp;gt;
          &amp;lt;button className="w-16 h-16 bg-gradient-to-r from-indigo-600 to-purple-600 hover:from-indigo-700 hover:to-purple-700 text-white rounded-full shadow-2xl hover:shadow-3xl transition-all duration-300 flex items-center justify-center group hover:scale-110"&amp;gt;
            &amp;lt;Plus className="w-8 h-8 group-hover:rotate-90 transition-transform duration-300" /&amp;gt;
          &amp;lt;/button&amp;gt;
        &amp;lt;/div&amp;gt;

        {/* Selection Info Toast */}
        {selectedRows.size &amp;gt; 0 &amp;amp;&amp;amp; (
          &amp;lt;div className="fixed bottom-8 left-8 bg-white rounded-2xl shadow-2xl border border-gray-200 p-6 animate-in slide-in-from-left duration-300"&amp;gt;
            &amp;lt;div className="flex items-center space-x-4"&amp;gt;
              &amp;lt;div className="w-12 h-12 bg-indigo-100 rounded-xl flex items-center justify-center"&amp;gt;
                &amp;lt;Check className="w-6 h-6 text-indigo-600" /&amp;gt;
              &amp;lt;/div&amp;gt;
              &amp;lt;div&amp;gt;
                &amp;lt;div className="font-semibold text-gray-900"&amp;gt;
                  {selectedRows.size} employee{selectedRows.size &amp;gt; 1 ? 's' : ''} selected
                &amp;lt;/div&amp;gt;
                &amp;lt;div className="text-sm text-gray-500"&amp;gt;
                  Ready for bulk actions
                &amp;lt;/div&amp;gt;
              &amp;lt;/div&amp;gt;
              &amp;lt;button
                onClick={() =&amp;gt; setSelectedRows(new Set())}
                className="p-2 text-gray-400 hover:text-gray-600 transition-colors"
              &amp;gt;
                &amp;lt;X className="w-5 h-5" /&amp;gt;
              &amp;lt;/button&amp;gt;
            &amp;lt;/div&amp;gt;
          &amp;lt;/div&amp;gt;
        )}
      &amp;lt;/div&amp;gt;
    &amp;lt;/div&amp;gt;
  );
};

export default PremiumTable;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>DATABASES AND TABLES</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Tue, 05 Aug 2025 18:13:58 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/databs-and-tables-jgh</link>
      <guid>https://dev.to/armaankhan8270/databs-and-tables-jgh</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- =====================================================
-- TABLE 1: USERS_ANALYTICS_SUMMARY
-- Comprehensive user activity and access analytics
-- =====================================================

CREATE OR REPLACE TABLE USERS_ANALYTICS_SUMMARY AS
WITH user_query_stats AS (
    SELECT 
        q.USER_NAME,
        COUNT(*) as TOTAL_QUERIES,
        COUNT(CASE WHEN q.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
        COUNT(CASE WHEN q.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
        COUNT(CASE WHEN q.EXECUTION_STATUS = 'RUNNING' THEN 1 END) as RUNNING_QUERIES,
        SUM(q.TOTAL_ELAPSED_TIME) as TOTAL_EXECUTION_TIME,
        AVG(q.TOTAL_ELAPSED_TIME) as AVG_EXECUTION_TIME,
        SUM(q.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CREDITS_USED,
        SUM(q.BYTES_SCANNED) as TOTAL_BYTES_SCANNED,
        SUM(q.ROWS_PRODUCED) as TOTAL_ROWS_PRODUCED,
        COUNT(CASE WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 OR q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 1 END) as SPILLED_QUERIES,
        COUNT(DISTINCT q.WAREHOUSE_NAME) as WAREHOUSES_USED,
        COUNT(DISTINCT q.DATABASE_NAME) as DATABASES_ACCESSED,
        COUNT(DISTINCT q.SCHEMA_NAME) as SCHEMAS_ACCESSED,
        MIN(q.START_TIME) as FIRST_QUERY_TIME,
        MAX(q.START_TIME) as LAST_QUERY_TIME
    FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
    WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
        AND q.USER_NAME IS NOT NULL
    GROUP BY q.USER_NAME
),

user_login_stats AS (
    SELECT 
        l.USER_NAME,
        COUNT(*) as TOTAL_LOGIN_ATTEMPTS,
        COUNT(CASE WHEN l.IS_SUCCESS = 'YES' THEN 1 END) as SUCCESSFUL_LOGINS,
        COUNT(CASE WHEN l.IS_SUCCESS = 'NO' THEN 1 END) as FAILED_LOGINS,
        COUNT(DISTINCT l.CLIENT_IP) as UNIQUE_IPS,
        COUNT(DISTINCT l.REPORTED_CLIENT_TYPE) as CLIENT_TYPES_USED,
        MIN(l.EVENT_TIMESTAMP) as FIRST_LOGIN_ATTEMPT,
        MAX(l.EVENT_TIMESTAMP) as LAST_LOGIN_ATTEMPT
    FROM SNOWFLAKE.ACCOUNT_USAGE.LOGIN_HISTORY l
    WHERE l.EVENT_TIMESTAMP &amp;gt;= CURRENT_DATE - 1
        AND l.USER_NAME IS NOT NULL
    GROUP BY l.USER_NAME
)

SELECT 
    -- User Information
    u.USER_ID,
    u.NAME as USER_NAME,
    u.LOGIN_NAME,
    u.DISPLAY_NAME,
    u.FIRST_NAME,
    u.LAST_NAME,
    u.EMAIL,
    u.TYPE as USER_TYPE,
    u.CREATED_ON,
    u.DELETED_ON,
    u.DISABLED,
    u.SNOWFLAKE_LOCK,

    -- Authentication Details
    u.HAS_PASSWORD,
    u.MUST_CHANGE_PASSWORD,
    u.HAS_MFA,
    u.BYPASS_MFA_UNTIL,
    u.HAS_RSA_PUBLIC_KEY,
    u.PASSWORD_LAST_SET_TIME,
    u.LAST_SUCCESS_LOGIN,
    u.EXPIRES_AT,
    u.LOCKED_UNTIL_TIME,

    -- Default Settings
    u.DEFAULT_WAREHOUSE,
    u.DEFAULT_NAMESPACE,
    u.DEFAULT_ROLE,
    u.DEFAULT_SECONDARY_ROLE,
    u.OWNER,

    -- Database Context (for database users)
    u.DATABASE_NAME as USER_DATABASE,
    u.SCHEMA_NAME as USER_SCHEMA,

    -- Query Activity (Last 24 hours)
    COALESCE(qs.TOTAL_QUERIES, 0) as QUERIES_LAST_24H,
    COALESCE(qs.SUCCESSFUL_QUERIES, 0) as SUCCESSFUL_QUERIES_24H,
    COALESCE(qs.FAILED_QUERIES, 0) as FAILED_QUERIES_24H,
    COALESCE(qs.RUNNING_QUERIES, 0) as RUNNING_QUERIES_24H,
    COALESCE(qs.TOTAL_EXECUTION_TIME, 0) as TOTAL_EXEC_TIME_24H,
    COALESCE(qs.AVG_EXECUTION_TIME, 0) as AVG_EXEC_TIME_24H,
    COALESCE(qs.TOTAL_CREDITS_USED, 0) as CREDITS_USED_24H,
    COALESCE(qs.TOTAL_BYTES_SCANNED, 0) as BYTES_SCANNED_24H,
    COALESCE(qs.TOTAL_ROWS_PRODUCED, 0) as ROWS_PRODUCED_24H,
    COALESCE(qs.SPILLED_QUERIES, 0) as SPILLED_QUERIES_24H,
    COALESCE(qs.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H,
    COALESCE(qs.DATABASES_ACCESSED, 0) as DATABASES_ACCESSED_24H,
    COALESCE(qs.SCHEMAS_ACCESSED, 0) as SCHEMAS_ACCESSED_24H,
    qs.FIRST_QUERY_TIME as FIRST_QUERY_24H,
    qs.LAST_QUERY_TIME as LAST_QUERY_24H,

    -- Login Activity (Last 24 hours)
    COALESCE(ls.TOTAL_LOGIN_ATTEMPTS, 0) as LOGIN_ATTEMPTS_24H,
    COALESCE(ls.SUCCESSFUL_LOGINS, 0) as SUCCESSFUL_LOGINS_24H,
    COALESCE(ls.FAILED_LOGINS, 0) as FAILED_LOGINS_24H,
    COALESCE(ls.UNIQUE_IPS, 0) as UNIQUE_IPS_24H,
    COALESCE(ls.CLIENT_TYPES_USED, 0) as CLIENT_TYPES_24H,
    ls.FIRST_LOGIN_ATTEMPT as FIRST_LOGIN_24H,
    ls.LAST_LOGIN_ATTEMPT as LAST_LOGIN_24H,

    -- Activity Classifications
    CASE 
        WHEN COALESCE(qs.TOTAL_QUERIES, 0) = 0 THEN 'INACTIVE'
        WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 10 THEN 'LOW_ACTIVITY'
        WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 100 THEN 'MEDIUM_ACTIVITY'
        WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 500 THEN 'HIGH_ACTIVITY'
        ELSE 'VERY_HIGH_ACTIVITY'
    END as ACTIVITY_LEVEL,

    CASE 
        WHEN COALESCE(qs.FAILED_QUERIES, 0) = 0 THEN 'NO_FAILURES'
        WHEN COALESCE(qs.FAILED_QUERIES, 0) &amp;lt;= 5 THEN 'LOW_FAILURES'
        WHEN COALESCE(qs.FAILED_QUERIES, 0) &amp;lt;= 20 THEN 'MEDIUM_FAILURES'
        ELSE 'HIGH_FAILURES'
    END as FAILURE_RATE,

    CASE 
        WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) = 0 THEN 'NO_COST'
        WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) &amp;lt;= 1 THEN 'LOW_COST'
        WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) &amp;lt;= 10 THEN 'MEDIUM_COST'
        ELSE 'HIGH_COST'
    END as COST_CATEGORY,

    -- Security Indicators
    CASE 
        WHEN u.HAS_MFA = TRUE THEN 'MFA_ENABLED'
        ELSE 'MFA_DISABLED'
    END as MFA_STATUS,

    CASE 
        WHEN u.DISABLED = TRUE OR u.SNOWFLAKE_LOCK = TRUE THEN 'LOCKED'
        WHEN u.EXPIRES_AT IS NOT NULL AND u.EXPIRES_AT &amp;lt; CURRENT_TIMESTAMP THEN 'EXPIRED'
        ELSE 'ACTIVE'
    END as ACCOUNT_STATUS,

    -- Analysis metadata
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE

FROM SNOWFLAKE.ACCOUNT_USAGE.USERS u
LEFT JOIN user_query_stats qs ON u.NAME = qs.USER_NAME
LEFT JOIN user_login_stats ls ON u.NAME = ls.USER_NAME
WHERE u.DELETED_ON IS NULL
ORDER BY COALESCE(qs.TOTAL_QUERIES, 0) DESC, u.NAME;

-- =====================================================
-- TABLE 2: DATABASE_ANALYTICS_SUMMARY
-- Comprehensive database usage and storage analytics
-- =====================================================

CREATE OR REPLACE TABLE DATABASE_ANALYTICS_SUMMARY AS
WITH database_query_stats AS (
    SELECT 
        q.DATABASE_NAME,
        COUNT(*) as TOTAL_QUERIES,
        COUNT(CASE WHEN q.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
        COUNT(CASE WHEN q.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
        COUNT(DISTINCT q.USER_NAME) as UNIQUE_USERS,
        COUNT(DISTINCT q.WAREHOUSE_NAME) as WAREHOUSES_USED,
        COUNT(DISTINCT q.SCHEMA_NAME) as SCHEMAS_ACCESSED,
        SUM(q.TOTAL_ELAPSED_TIME) as TOTAL_EXECUTION_TIME,
        AVG(q.TOTAL_ELAPSED_TIME) as AVG_EXECUTION_TIME,
        SUM(q.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CREDITS_USED,
        SUM(q.BYTES_SCANNED) as TOTAL_BYTES_SCANNED,
        SUM(q.ROWS_PRODUCED) as TOTAL_ROWS_PRODUCED,
        MIN(q.START_TIME) as FIRST_QUERY_TIME,
        MAX(q.START_TIME) as LAST_QUERY_TIME
    FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
    WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
        AND q.DATABASE_NAME IS NOT NULL
    GROUP BY q.DATABASE_NAME
),

database_storage_stats AS (
    SELECT 
        ds.DATABASE_NAME,
        AVG(ds.AVERAGE_DATABASE_BYTES) as AVG_DATABASE_BYTES,
        AVG(ds.AVERAGE_FAILSAFE_BYTES) as AVG_FAILSAFE_BYTES,
        AVG(ds.AVERAGE_HYBRID_TABLE_STORAGE_BYTES) as AVG_HYBRID_STORAGE_BYTES,
        MAX(ds.USAGE_DATE) as LATEST_STORAGE_DATE
    FROM SNOWFLAKE.ACCOUNT_USAGE.DATABASE_STORAGE_USAGE_HISTORY ds
    WHERE ds.USAGE_DATE &amp;gt;= CURRENT_DATE - 7  -- Last 7 days for storage trends
        AND ds.DELETED IS NULL
    GROUP BY ds.DATABASE_NAME
),

database_table_counts AS (
    SELECT 
        t.TABLE_CATALOG as DATABASE_NAME,
        COUNT(*) as TOTAL_TABLES,
        COUNT(CASE WHEN t.TABLE_TYPE = 'BASE TABLE' THEN 1 END) as BASE_TABLES,
        COUNT(CASE WHEN t.TABLE_TYPE = 'VIEW' THEN 1 END) as VIEWS,
        COUNT(CASE WHEN t.IS_TRANSIENT = 'YES' THEN 1 END) as TRANSIENT_TABLES,
        COUNT(CASE WHEN t.IS_ICEBERG = 'YES' THEN 1 END) as ICEBERG_TABLES,
        COUNT(CASE WHEN t.IS_HYBRID = 'YES' THEN 1 END) as HYBRID_TABLES,
        SUM(t.ROW_COUNT) as TOTAL_ROWS,
        SUM(t.BYTES) as TOTAL_TABLE_BYTES,
        COUNT(DISTINCT t.TABLE_SCHEMA) as UNIQUE_SCHEMAS
    FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES t
    WHERE t.DELETED IS NULL
    GROUP BY t.TABLE_CATALOG
)

SELECT 
    -- Database Information (Note: Database metadata might be limited in ACCOUNT_USAGE)
    dq.DATABASE_NAME,

    -- Table and Schema Statistics
    COALESCE(dt.TOTAL_TABLES, 0) as TOTAL_TABLES,
    COALESCE(dt.BASE_TABLES, 0) as BASE_TABLES,
    COALESCE(dt.VIEWS, 0) as TOTAL_VIEWS,
    COALESCE(dt.TRANSIENT_TABLES, 0) as TRANSIENT_TABLES,
    COALESCE(dt.ICEBERG_TABLES, 0) as ICEBERG_TABLES,
    COALESCE(dt.HYBRID_TABLES, 0) as HYBRID_TABLES,
    COALESCE(dt.UNIQUE_SCHEMAS, 0) as TOTAL_SCHEMAS,
    COALESCE(dt.TOTAL_ROWS, 0) as TOTAL_ROWS,
    COALESCE(dt.TOTAL_TABLE_BYTES, 0) as TOTAL_TABLE_BYTES,

    -- Storage Statistics
    COALESCE(ds.AVG_DATABASE_BYTES, 0) as AVG_DATABASE_BYTES,
    COALESCE(ds.AVG_FAILSAFE_BYTES, 0) as AVG_FAILSAFE_BYTES,
    COALESCE(ds.AVG_HYBRID_STORAGE_BYTES, 0) as AVG_HYBRID_STORAGE_BYTES,
    ds.LATEST_STORAGE_DATE,

    -- Query Activity (Last 24 hours)
    COALESCE(dq.TOTAL_QUERIES, 0) as QUERIES_LAST_24H,
    COALESCE(dq.SUCCESSFUL_QUERIES, 0) as SUCCESSFUL_QUERIES_24H,
    COALESCE(dq.FAILED_QUERIES, 0) as FAILED_QUERIES_24H,
    COALESCE(dq.UNIQUE_USERS, 0) as UNIQUE_USERS_24H,
    COALESCE(dq.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H,
    COALESCE(dq.SCHEMAS_ACCESSED, 0) as SCHEMAS_ACCESSED_24H,
    COALESCE(dq.TOTAL_EXECUTION_TIME, 0) as TOTAL_EXEC_TIME_24H,
    COALESCE(dq.AVG_EXECUTION_TIME, 0) as AVG_EXEC_TIME_24H,
    COALESCE(dq.TOTAL_CREDITS_USED, 0) as CREDITS_USED_24H,
    COALESCE(dq.TOTAL_BYTES_SCANNED, 0) as BYTES_SCANNED_24H,
    COALESCE(dq.TOTAL_ROWS_PRODUCED, 0) as ROWS_PRODUCED_24H,
    dq.FIRST_QUERY_TIME as FIRST_QUERY_24H,
    dq.LAST_QUERY_TIME as LAST_QUERY_24H,

    -- Size Classifications
    CASE 
        WHEN COALESCE(dt.TOTAL_TABLES, 0) = 0 THEN 'EMPTY'
        WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 10 THEN 'SMALL'
        WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 100 THEN 'MEDIUM'
        WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 1000 THEN 'LARGE'
        ELSE 'VERY_LARGE'
    END as DATABASE_SIZE_CATEGORY,

    CASE 
        WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) = 0 THEN 'NO_STORAGE'
        WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 1073741824 THEN 'SMALL_STORAGE' -- 1GB
        WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 107374182400 THEN 'MEDIUM_STORAGE' -- 100GB
        WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 1099511627776 THEN 'LARGE_STORAGE' -- 1TB
        ELSE 'VERY_LARGE_STORAGE'
    END as STORAGE_SIZE_CATEGORY,

    CASE 
        WHEN COALESCE(dq.TOTAL_QUERIES, 0) = 0 THEN 'INACTIVE'
        WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 100 THEN 'LOW_USAGE'
        WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 1000 THEN 'MEDIUM_USAGE'
        WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 10000 THEN 'HIGH_USAGE'
        ELSE 'VERY_HIGH_USAGE'
    END as USAGE_LEVEL,

    -- Analysis metadata
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE

FROM database_query_stats dq
FULL OUTER JOIN database_storage_stats ds ON dq.DATABASE_NAME = ds.DATABASE_NAME
FULL OUTER JOIN database_table_counts dt ON COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME) = dt.DATABASE_NAME
WHERE COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME, dt.DATABASE_NAME) IS NOT NULL
ORDER BY COALESCE(dq.TOTAL_QUERIES, 0) DESC, COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME, dt.DATABASE_NAME);

-- =====================================================
-- TABLE 3: TABLES_ANALYTICS_SUMMARY  
-- Comprehensive table usage and metadata analytics
-- =====================================================

CREATE OR REPLACE TABLE TABLES_ANALYTICS_SUMMARY AS
WITH table_query_stats AS (
    SELECT 
        ao.REFERENCED_DATABASE,
        ao.REFERENCED_SCHEMA,
        ao.REFERENCED_OBJECT_NAME as TABLE_NAME,
        COUNT(DISTINCT ao.QUERY_ID) as QUERIES_ACCESSING_TABLE,
        COUNT(DISTINCT qh.USER_NAME) as UNIQUE_USERS_ACCESSING,
        COUNT(DISTINCT qh.WAREHOUSE_NAME) as WAREHOUSES_USED,
        MIN(qh.START_TIME) as FIRST_ACCESS_TIME,
        MAX(qh.START_TIME) as LAST_ACCESS_TIME
    FROM SNOWFLAKE.ACCOUNT_USAGE.ACCESS_HISTORY ao
    JOIN SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY qh ON ao.QUERY_ID = qh.QUERY_ID
    WHERE ao.QUERY_START_TIME &amp;gt;= CURRENT_DATE - 1
        AND ao.REFERENCED_OBJECT_DOMAIN = 'Table'
        AND qh.START_TIME &amp;gt;= CURRENT_DATE - 1
    GROUP BY ao.REFERENCED_DATABASE, ao.REFERENCED_SCHEMA, ao.REFERENCED_OBJECT_NAME
)

SELECT 
    -- Table Identity
    t.TABLE_ID,
    t.TABLE_NAME,
    t.TABLE_SCHEMA_ID,
    t.TABLE_SCHEMA,
    t.TABLE_CATALOG_ID,
    t.TABLE_CATALOG as DATABASE_NAME,
    t.TABLE_OWNER,
    t.OWNER_ROLE_TYPE,

    -- Table Properties
    t.TABLE_TYPE,
    t.IS_TRANSIENT,
    t.IS_ICEBERG,
    t.IS_DYNAMIC,
    t.IS_HYBRID,
    t.IS_INSERTABLE_INTO,
    t.IS_TYPED,
    t.CLUSTERING_KEY,
    t.AUTO_CLUSTERING_ON,

    -- Storage and Size
    t.ROW_COUNT,
    t.BYTES,
    t.RETENTION_TIME,
    CASE 
        WHEN t.BYTES &amp;gt; 0 AND t.ROW_COUNT &amp;gt; 0 THEN 
            ROUND(t.BYTES::FLOAT / t.ROW_COUNT::FLOAT, 2)
        ELSE 0 
    END as AVG_BYTES_PER_ROW,

    -- Timestamps
    t.CREATED,
    t.LAST_ALTERED,
    t.LAST_DDL,
    t.LAST_DDL_BY,
    t.DELETED,

    -- Usage Statistics (Last 24 hours)
    COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) as QUERIES_LAST_24H,
    COALESCE(tq.UNIQUE_USERS_ACCESSING, 0) as UNIQUE_USERS_24H,
    COALESCE(tq.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H,
    tq.FIRST_ACCESS_TIME as FIRST_ACCESS_24H,
    tq.LAST_ACCESS_TIME as LAST_ACCESS_24H,

    -- Column Information (from COLUMNS table)
    (SELECT COUNT(*) 
     FROM SNOWFLAKE.ACCOUNT_USAGE.COLUMNS c 
     WHERE c.TABLE_ID = t.TABLE_ID AND c.DELETED IS NULL) as TOTAL_COLUMNS,

    -- Size Classifications
    CASE 
        WHEN t.ROW_COUNT = 0 THEN 'EMPTY'
        WHEN t.ROW_COUNT &amp;lt;= 1000 THEN 'TINY'
        WHEN t.ROW_COUNT &amp;lt;= 100000 THEN 'SMALL'
        WHEN t.ROW_COUNT &amp;lt;= 10000000 THEN 'MEDIUM'
        WHEN t.ROW_COUNT &amp;lt;= 1000000000 THEN 'LARGE'
        ELSE 'VERY_LARGE'
    END as SIZE_CATEGORY,

    CASE 
        WHEN t.BYTES = 0 THEN 'NO_STORAGE'
        WHEN t.BYTES &amp;lt;= 1048576 THEN 'TINY_STORAGE' -- 1MB
        WHEN t.BYTES &amp;lt;= 104857600 THEN 'SMALL_STORAGE' -- 100MB
        WHEN t.BYTES &amp;lt;= 1073741824 THEN 'MEDIUM_STORAGE' -- 1GB
        WHEN t.BYTES &amp;lt;= 107374182400 THEN 'LARGE_STORAGE' -- 100GB
        ELSE 'VERY_LARGE_STORAGE'
    END as STORAGE_CATEGORY,

    CASE 
        WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) = 0 THEN 'UNUSED'
        WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 10 THEN 'LOW_USAGE'
        WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 100 THEN 'MEDIUM_USAGE'
        WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 1000 THEN 'HIGH_USAGE'
        ELSE 'VERY_HIGH_USAGE'
    END as USAGE_LEVEL,

    -- Age Classifications
    CASE 
        WHEN t.CREATED &amp;gt;= CURRENT_DATE - 1 THEN 'BRAND_NEW'
        WHEN t.CREATED &amp;gt;= CURRENT_DATE - 7 THEN 'RECENT'
        WHEN t.CREATED &amp;gt;= CURRENT_DATE - 30 THEN 'NEW'
        WHEN t.CREATED &amp;gt;= CURRENT_DATE - 90 THEN 'ESTABLISHED'
        ELSE 'OLD'
    END as AGE_CATEGORY,

    -- Maintenance Status
    CASE 
        WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 1 THEN 'RECENTLY_MODIFIED'
        WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 7 THEN 'RECENTLY_UPDATED'
        WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 30 THEN 'UPDATED_THIS_MONTH'
        ELSE 'STABLE'
    END as MAINTENANCE_STATUS,

    -- Performance Indicators
    CASE 
        WHEN t.CLUSTERING_KEY IS NOT NULL THEN 'CLUSTERED'
        ELSE 'NOT_CLUSTERED'
    END as CLUSTERING_STATUS,

    CASE 
        WHEN t.AUTO_CLUSTERING_ON = TRUE THEN 'AUTO_CLUSTERING_ON'
        ELSE 'AUTO_CLUSTERING_OFF'
    END as AUTO_CLUSTERING_STATUS,

    -- Analysis metadata
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE,
    t.COMMENT

FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES t
LEFT JOIN table_query_stats tq ON t.TABLE_CATALOG = tq.REFERENCED_DATABASE 
    AND t.TABLE_SCHEMA = tq.REFERENCED_SCHEMA 
    AND t.TABLE_NAME = tq.TABLE_NAME
WHERE t.DELETED IS NULL
ORDER BY COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) DESC, t.BYTES DESC, t.TABLE_NAME;

-- =====================================================
-- REFRESH PROCEDURES
-- =====================================================

CREATE OR REPLACE PROCEDURE REFRESH_USERS_ANALYTICS()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    CREATE OR REPLACE TABLE USERS_ANALYTICS_SUMMARY AS
    WITH user_query_stats AS (
        SELECT 
            q.USER_NAME,
            COUNT(*) as TOTAL_QUERIES,
            COUNT(CASE WHEN q.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
            COUNT(CASE WHEN q.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
            COUNT(CASE WHEN q.EXECUTION_STATUS = 'RUNNING' THEN 1 END) as RUNNING_QUERIES,
            SUM(q.TOTAL_ELAPSED_TIME) as TOTAL_EXECUTION_TIME,
            AVG(q.TOTAL_ELAPSED_TIME) as AVG_EXECUTION_TIME,
            SUM(q.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CREDITS_USED,
            SUM(q.BYTES_SCANNED) as TOTAL_BYTES_SCANNED,
            SUM(q.ROWS_PRODUCED) as TOTAL_ROWS_PRODUCED,
            COUNT(CASE WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 OR q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 1 END) as SPILLED_QUERIES,
            COUNT(DISTINCT q.WAREHOUSE_NAME) as WAREHOUSES_USED,
            COUNT(DISTINCT q.DATABASE_NAME) as DATABASES_ACCESSED,
            COUNT(DISTINCT q.SCHEMA_NAME) as SCHEMAS_ACCESSED,
            MIN(q.START_TIME) as FIRST_QUERY_TIME,
            MAX(q.START_TIME) as LAST_QUERY_TIME
        FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
        WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
            AND q.USER_NAME IS NOT NULL
        GROUP BY q.USER_NAME
    ),
    user_login_stats AS (
        SELECT 
            l.USER_NAME,
            COUNT(*) as TOTAL_LOGIN_ATTEMPTS,
            COUNT(CASE WHEN l.IS_SUCCESS = 'YES' THEN 1 END) as SUCCESSFUL_LOGINS,
            COUNT(CASE WHEN l.IS_SUCCESS = 'NO' THEN 1 END) as FAILED_LOGINS,
            COUNT(DISTINCT l.CLIENT_IP) as UNIQUE_IPS,
            COUNT(DISTINCT l.REPORTED_CLIENT_TYPE) as CLIENT_TYPES_USED,
            MIN(l.EVENT_TIMESTAMP) as FIRST_LOGIN_ATTEMPT,
            MAX(l.EVENT_TIMESTAMP) as LAST_LOGIN_ATTEMPT
        FROM SNOWFLAKE.ACCOUNT_USAGE.LOGIN_HISTORY l
        WHERE l.EVENT_TIMESTAMP &amp;gt;= CURRENT_DATE - 1
            AND l.USER_NAME IS NOT NULL
        GROUP BY l.USER_NAME
    )
    SELECT 
        u.USER_ID, u.NAME as USER_NAME, u.LOGIN_NAME, u.DISPLAY_NAME, u.FIRST_NAME, u.LAST_NAME, u.EMAIL, u.TYPE as USER_TYPE,
        u.CREATED_ON, u.DELETED_ON, u.DISABLED, u.SNOWFLAKE_LOCK, u.HAS_PASSWORD, u.MUST_CHANGE_PASSWORD, u.HAS_MFA, u.BYPASS_MFA_UNTIL,
        u.HAS_RSA_PUBLIC_KEY, u.PASSWORD_LAST_SET_TIME, u.LAST_SUCCESS_LOGIN, u.EXPIRES_AT, u.LOCKED_UNTIL_TIME,
        u.DEFAULT_WAREHOUSE, u.DEFAULT_NAMESPACE, u.DEFAULT_ROLE, u.DEFAULT_SECONDARY_ROLE, u.OWNER, u.DATABASE_NAME as USER_DATABASE, u.SCHEMA_NAME as USER_SCHEMA,
        COALESCE(qs.TOTAL_QUERIES, 0) as QUERIES_LAST_24H, COALESCE(qs.SUCCESSFUL_QUERIES, 0) as SUCCESSFUL_QUERIES_24H,
        COALESCE(qs.FAILED_QUERIES, 0) as FAILED_QUERIES_24H, COALESCE(qs.RUNNING_QUERIES, 0) as RUNNING_QUERIES_24H,
        COALESCE(qs.TOTAL_EXECUTION_TIME, 0) as TOTAL_EXEC_TIME_24H, COALESCE(qs.AVG_EXECUTION_TIME, 0) as AVG_EXEC_TIME_24H,
        COALESCE(qs.TOTAL_CREDITS_USED, 0) as CREDITS_USED_24H, COALESCE(qs.TOTAL_BYTES_SCANNED, 0) as BYTES_SCANNED_24H,
        COALESCE(qs.TOTAL_ROWS_PRODUCED, 0) as ROWS_PRODUCED_24H, COALESCE(qs.SPILLED_QUERIES, 0) as SPILLED_QUERIES_24H,
        COALESCE(qs.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H, COALESCE(qs.DATABASES_ACCESSED, 0) as DATABASES_ACCESSED_24H,
        COALESCE(qs.SCHEMAS_ACCESSED, 0) as SCHEMAS_ACCESSED_24H, qs.FIRST_QUERY_TIME as FIRST_QUERY_24H, qs.LAST_QUERY_TIME as LAST_QUERY_24H,
        COALESCE(ls.TOTAL_LOGIN_ATTEMPTS, 0) as LOGIN_ATTEMPTS_24H, COALESCE(ls.SUCCESSFUL_LOGINS, 0) as SUCCESSFUL_LOGINS_24H,
        COALESCE(ls.FAILED_LOGINS, 0) as FAILED_LOGINS_24H, COALESCE(ls.UNIQUE_IPS, 0) as UNIQUE_IPS_24H,
        COALESCE(ls.CLIENT_TYPES_USED, 0) as CLIENT_TYPES_24H, ls.FIRST_LOGIN_ATTEMPT as FIRST_LOGIN_24H, ls.LAST_LOGIN_ATTEMPT as LAST_LOGIN_24H,

        CASE 
            WHEN COALESCE(qs.TOTAL_QUERIES, 0) = 0 THEN 'INACTIVE'
            WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 10 THEN 'LOW_ACTIVITY'
            WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 100 THEN 'MEDIUM_ACTIVITY'
            WHEN COALESCE(qs.TOTAL_QUERIES, 0) &amp;lt;= 500 THEN 'HIGH_ACTIVITY'
            ELSE 'VERY_HIGH_ACTIVITY'
        END as ACTIVITY_LEVEL,

        CASE 
            WHEN COALESCE(qs.FAILED_QUERIES, 0) = 0 THEN 'NO_FAILURES'
            WHEN COALESCE(qs.FAILED_QUERIES, 0) &amp;lt;= 5 THEN 'LOW_FAILURES'
            WHEN COALESCE(qs.FAILED_QUERIES, 0) &amp;lt;= 20 THEN 'MEDIUM_FAILURES'
            ELSE 'HIGH_FAILURES'
        END as FAILURE_RATE,

        CASE 
            WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) = 0 THEN 'NO_COST'
            WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) &amp;lt;= 1 THEN 'LOW_COST'
            WHEN COALESCE(qs.TOTAL_CREDITS_USED, 0) &amp;lt;= 10 THEN 'MEDIUM_COST'
            ELSE 'HIGH_COST'
        END as COST_CATEGORY,

        CASE 
            WHEN u.HAS_MFA = TRUE THEN 'MFA_ENABLED'
            ELSE 'MFA_DISABLED'
        END as MFA_STATUS,

        CASE 
            WHEN u.DISABLED = TRUE OR u.SNOWFLAKE_LOCK = TRUE THEN 'LOCKED'
            WHEN u.EXPIRES_AT IS NOT NULL AND u.EXPIRES_AT &amp;lt; CURRENT_TIMESTAMP THEN 'EXPIRED'
            ELSE 'ACTIVE'
        END as ACCOUNT_STATUS,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE

    FROM SNOWFLAKE.ACCOUNT_USAGE.USERS u
    LEFT JOIN user_query_stats qs ON u.NAME = qs.USER_NAME
    LEFT JOIN user_login_stats ls ON u.NAME = ls.USER_NAME
    WHERE u.DELETED_ON IS NULL
    ORDER BY COALESCE(qs.TOTAL_QUERIES, 0) DESC, u.NAME;

    RETURN 'USERS_ANALYTICS_SUMMARY table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

CREATE OR REPLACE PROCEDURE REFRESH_DATABASE_ANALYTICS()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    CREATE OR REPLACE TABLE DATABASE_ANALYTICS_SUMMARY AS
    WITH database_query_stats AS (
        SELECT 
            q.DATABASE_NAME,
            COUNT(*) as TOTAL_QUERIES,
            COUNT(CASE WHEN q.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
            COUNT(CASE WHEN q.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
            COUNT(DISTINCT q.USER_NAME) as UNIQUE_USERS,
            COUNT(DISTINCT q.WAREHOUSE_NAME) as WAREHOUSES_USED,
            COUNT(DISTINCT q.SCHEMA_NAME) as SCHEMAS_ACCESSED,
            SUM(q.TOTAL_ELAPSED_TIME) as TOTAL_EXECUTION_TIME,
            AVG(q.TOTAL_ELAPSED_TIME) as AVG_EXECUTION_TIME,
            SUM(q.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CREDITS_USED,
            SUM(q.BYTES_SCANNED) as TOTAL_BYTES_SCANNED,
            SUM(q.ROWS_PRODUCED) as TOTAL_ROWS_PRODUCED,
            MIN(q.START_TIME) as FIRST_QUERY_TIME,
            MAX(q.START_TIME) as LAST_QUERY_TIME
        FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
        WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
            AND q.DATABASE_NAME IS NOT NULL
        GROUP BY q.DATABASE_NAME
    ),
    database_storage_stats AS (
        SELECT 
            ds.DATABASE_NAME,
            AVG(ds.AVERAGE_DATABASE_BYTES) as AVG_DATABASE_BYTES,
            AVG(ds.AVERAGE_FAILSAFE_BYTES) as AVG_FAILSAFE_BYTES,
            AVG(ds.AVERAGE_HYBRID_TABLE_STORAGE_BYTES) as AVG_HYBRID_STORAGE_BYTES,
            MAX(ds.USAGE_DATE) as LATEST_STORAGE_DATE
        FROM SNOWFLAKE.ACCOUNT_USAGE.DATABASE_STORAGE_USAGE_HISTORY ds
        WHERE ds.USAGE_DATE &amp;gt;= CURRENT_DATE - 7
            AND ds.DELETED IS NULL
        GROUP BY ds.DATABASE_NAME
    ),
    database_table_counts AS (
        SELECT 
            t.TABLE_CATALOG as DATABASE_NAME,
            COUNT(*) as TOTAL_TABLES,
            COUNT(CASE WHEN t.TABLE_TYPE = 'BASE TABLE' THEN 1 END) as BASE_TABLES,
            COUNT(CASE WHEN t.TABLE_TYPE = 'VIEW' THEN 1 END) as VIEWS,
            COUNT(CASE WHEN t.IS_TRANSIENT = 'YES' THEN 1 END) as TRANSIENT_TABLES,
            COUNT(CASE WHEN t.IS_ICEBERG = 'YES' THEN 1 END) as ICEBERG_TABLES,
            COUNT(CASE WHEN t.IS_HYBRID = 'YES' THEN 1 END) as HYBRID_TABLES,
            SUM(t.ROW_COUNT) as TOTAL_ROWS,
            SUM(t.BYTES) as TOTAL_TABLE_BYTES,
            COUNT(DISTINCT t.TABLE_SCHEMA) as UNIQUE_SCHEMAS
        FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES t
        WHERE t.DELETED IS NULL
        GROUP BY t.TABLE_CATALOG
    )

    SELECT 
        dq.DATABASE_NAME,
        COALESCE(dt.TOTAL_TABLES, 0) as TOTAL_TABLES, COALESCE(dt.BASE_TABLES, 0) as BASE_TABLES, COALESCE(dt.VIEWS, 0) as TOTAL_VIEWS,
        COALESCE(dt.TRANSIENT_TABLES, 0) as TRANSIENT_TABLES, COALESCE(dt.ICEBERG_TABLES, 0) as ICEBERG_TABLES, COALESCE(dt.HYBRID_TABLES, 0) as HYBRID_TABLES,
        COALESCE(dt.UNIQUE_SCHEMAS, 0) as TOTAL_SCHEMAS, COALESCE(dt.TOTAL_ROWS, 0) as TOTAL_ROWS, COALESCE(dt.TOTAL_TABLE_BYTES, 0) as TOTAL_TABLE_BYTES,
        COALESCE(ds.AVG_DATABASE_BYTES, 0) as AVG_DATABASE_BYTES, COALESCE(ds.AVG_FAILSAFE_BYTES, 0) as AVG_FAILSAFE_BYTES,
        COALESCE(ds.AVG_HYBRID_STORAGE_BYTES, 0) as AVG_HYBRID_STORAGE_BYTES, ds.LATEST_STORAGE_DATE,
        COALESCE(dq.TOTAL_QUERIES, 0) as QUERIES_LAST_24H, COALESCE(dq.SUCCESSFUL_QUERIES, 0) as SUCCESSFUL_QUERIES_24H,
        COALESCE(dq.FAILED_QUERIES, 0) as FAILED_QUERIES_24H, COALESCE(dq.UNIQUE_USERS, 0) as UNIQUE_USERS_24H,
        COALESCE(dq.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H, COALESCE(dq.SCHEMAS_ACCESSED, 0) as SCHEMAS_ACCESSED_24H,
        COALESCE(dq.TOTAL_EXECUTION_TIME, 0) as TOTAL_EXEC_TIME_24H, COALESCE(dq.AVG_EXECUTION_TIME, 0) as AVG_EXEC_TIME_24H,
        COALESCE(dq.TOTAL_CREDITS_USED, 0) as CREDITS_USED_24H, COALESCE(dq.TOTAL_BYTES_SCANNED, 0) as BYTES_SCANNED_24H,
        COALESCE(dq.TOTAL_ROWS_PRODUCED, 0) as ROWS_PRODUCED_24H, dq.FIRST_QUERY_TIME as FIRST_QUERY_24H, dq.LAST_QUERY_TIME as LAST_QUERY_24H,

        CASE 
            WHEN COALESCE(dt.TOTAL_TABLES, 0) = 0 THEN 'EMPTY'
            WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 10 THEN 'SMALL'
            WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 100 THEN 'MEDIUM'
            WHEN COALESCE(dt.TOTAL_TABLES, 0) &amp;lt;= 1000 THEN 'LARGE'
            ELSE 'VERY_LARGE'
        END as DATABASE_SIZE_CATEGORY,

        CASE 
            WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) = 0 THEN 'NO_STORAGE'
            WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 1073741824 THEN 'SMALL_STORAGE'
            WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 107374182400 THEN 'MEDIUM_STORAGE'
            WHEN COALESCE(ds.AVG_DATABASE_BYTES, 0) &amp;lt;= 1099511627776 THEN 'LARGE_STORAGE'
            ELSE 'VERY_LARGE_STORAGE'
        END as STORAGE_SIZE_CATEGORY,

        CASE 
            WHEN COALESCE(dq.TOTAL_QUERIES, 0) = 0 THEN 'INACTIVE'
            WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 100 THEN 'LOW_USAGE'
            WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 1000 THEN 'MEDIUM_USAGE'
            WHEN COALESCE(dq.TOTAL_QUERIES, 0) &amp;lt;= 10000 THEN 'HIGH_USAGE'
            ELSE 'VERY_HIGH_USAGE'
        END as USAGE_LEVEL,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE

    FROM database_query_stats dq
    FULL OUTER JOIN database_storage_stats ds ON dq.DATABASE_NAME = ds.DATABASE_NAME
    FULL OUTER JOIN database_table_counts dt ON COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME) = dt.DATABASE_NAME
    WHERE COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME, dt.DATABASE_NAME) IS NOT NULL
    ORDER BY COALESCE(dq.TOTAL_QUERIES, 0) DESC, COALESCE(dq.DATABASE_NAME, ds.DATABASE_NAME, dt.DATABASE_NAME);

    RETURN 'DATABASE_ANALYTICS_SUMMARY table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

CREATE OR REPLACE PROCEDURE REFRESH_TABLES_ANALYTICS()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    CREATE OR REPLACE TABLE TABLES_ANALYTICS_SUMMARY AS
    WITH table_query_stats AS (
        SELECT 
            ao.REFERENCED_DATABASE,
            ao.REFERENCED_SCHEMA,
            ao.REFERENCED_OBJECT_NAME as TABLE_NAME,
            COUNT(DISTINCT ao.QUERY_ID) as QUERIES_ACCESSING_TABLE,
            COUNT(DISTINCT qh.USER_NAME) as UNIQUE_USERS_ACCESSING,
            COUNT(DISTINCT qh.WAREHOUSE_NAME) as WAREHOUSES_USED,
            MIN(qh.START_TIME) as FIRST_ACCESS_TIME,
            MAX(qh.START_TIME) as LAST_ACCESS_TIME
        FROM SNOWFLAKE.ACCOUNT_USAGE.ACCESS_HISTORY ao
        JOIN SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY qh ON ao.QUERY_ID = qh.QUERY_ID
        WHERE ao.QUERY_START_TIME &amp;gt;= CURRENT_DATE - 1
            AND ao.REFERENCED_OBJECT_DOMAIN = 'Table'
            AND qh.START_TIME &amp;gt;= CURRENT_DATE - 1
        GROUP BY ao.REFERENCED_DATABASE, ao.REFERENCED_SCHEMA, ao.REFERENCED_OBJECT_NAME
    )

    SELECT 
        t.TABLE_ID, t.TABLE_NAME, t.TABLE_SCHEMA_ID, t.TABLE_SCHEMA, t.TABLE_CATALOG_ID, t.TABLE_CATALOG as DATABASE_NAME,
        t.TABLE_OWNER, t.OWNER_ROLE_TYPE, t.TABLE_TYPE, t.IS_TRANSIENT, t.IS_ICEBERG, t.IS_DYNAMIC, t.IS_HYBRID,
        t.IS_INSERTABLE_INTO, t.IS_TYPED, t.CLUSTERING_KEY, t.AUTO_CLUSTERING_ON, t.ROW_COUNT, t.BYTES, t.RETENTION_TIME,
        CASE 
            WHEN t.BYTES &amp;gt; 0 AND t.ROW_COUNT &amp;gt; 0 THEN 
                ROUND(t.BYTES::FLOAT / t.ROW_COUNT::FLOAT, 2)
            ELSE 0 
        END as AVG_BYTES_PER_ROW,
        t.CREATED, t.LAST_ALTERED, t.LAST_DDL, t.LAST_DDL_BY, t.DELETED,
        COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) as QUERIES_LAST_24H, COALESCE(tq.UNIQUE_USERS_ACCESSING, 0) as UNIQUE_USERS_24H,
        COALESCE(tq.WAREHOUSES_USED, 0) as WAREHOUSES_USED_24H, tq.FIRST_ACCESS_TIME as FIRST_ACCESS_24H, tq.LAST_ACCESS_TIME as LAST_ACCESS_24H,
        (SELECT COUNT(*) FROM SNOWFLAKE.ACCOUNT_USAGE.COLUMNS c WHERE c.TABLE_ID = t.TABLE_ID AND c.DELETED IS NULL) as TOTAL_COLUMNS,

        CASE 
            WHEN t.ROW_COUNT = 0 THEN 'EMPTY'
            WHEN t.ROW_COUNT &amp;lt;= 1000 THEN 'TINY'
            WHEN t.ROW_COUNT &amp;lt;= 100000 THEN 'SMALL'
            WHEN t.ROW_COUNT &amp;lt;= 10000000 THEN 'MEDIUM'
            WHEN t.ROW_COUNT &amp;lt;= 1000000000 THEN 'LARGE'
            ELSE 'VERY_LARGE'
        END as SIZE_CATEGORY,

        CASE 
            WHEN t.BYTES = 0 THEN 'NO_STORAGE'
            WHEN t.BYTES &amp;lt;= 1048576 THEN 'TINY_STORAGE'
            WHEN t.BYTES &amp;lt;= 104857600 THEN 'SMALL_STORAGE'
            WHEN t.BYTES &amp;lt;= 1073741824 THEN 'MEDIUM_STORAGE'
            WHEN t.BYTES &amp;lt;= 107374182400 THEN 'LARGE_STORAGE'
            ELSE 'VERY_LARGE_STORAGE'
        END as STORAGE_CATEGORY,

        CASE 
            WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) = 0 THEN 'UNUSED'
            WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 10 THEN 'LOW_USAGE'
            WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 100 THEN 'MEDIUM_USAGE'
            WHEN COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) &amp;lt;= 1000 THEN 'HIGH_USAGE'
            ELSE 'VERY_HIGH_USAGE'
        END as USAGE_LEVEL,

        CASE 
            WHEN t.CREATED &amp;gt;= CURRENT_DATE - 1 THEN 'BRAND_NEW'
            WHEN t.CREATED &amp;gt;= CURRENT_DATE - 7 THEN 'RECENT'
            WHEN t.CREATED &amp;gt;= CURRENT_DATE - 30 THEN 'NEW'
            WHEN t.CREATED &amp;gt;= CURRENT_DATE - 90 THEN 'ESTABLISHED'
            ELSE 'OLD'
        END as AGE_CATEGORY,

        CASE 
            WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 1 THEN 'RECENTLY_MODIFIED'
            WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 7 THEN 'RECENTLY_UPDATED'
            WHEN t.LAST_ALTERED &amp;gt;= CURRENT_DATE - 30 THEN 'UPDATED_THIS_MONTH'
            ELSE 'STABLE'
        END as MAINTENANCE_STATUS,

        CASE 
            WHEN t.CLUSTERING_KEY IS NOT NULL THEN 'CLUSTERED'
            ELSE 'NOT_CLUSTERED'
        END as CLUSTERING_STATUS,

        CASE 
            WHEN t.AUTO_CLUSTERING_ON = TRUE THEN 'AUTO_CLUSTERING_ON'
            ELSE 'AUTO_CLUSTERING_OFF'
        END as AUTO_CLUSTERING_STATUS,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE,
        t.COMMENT

    FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES t
    LEFT JOIN table_query_stats tq ON t.TABLE_CATALOG = tq.REFERENCED_DATABASE 
        AND t.TABLE_SCHEMA = tq.REFERENCED_SCHEMA 
        AND t.TABLE_NAME = tq.TABLE_NAME
    WHERE t.DELETED IS NULL
    ORDER BY COALESCE(tq.QUERIES_ACCESSING_TABLE, 0) DESC, t.BYTES DESC, t.TABLE_NAME;

    RETURN 'TABLES_ANALYTICS_SUMMARY table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

-- =====================================================
-- MASTER REFRESH PROCEDURE
-- =====================================================

CREATE OR REPLACE PROCEDURE REFRESH_ALL_ANALYTICS()
RETURNS STRING
LANGUAGE SQL
AS
$$
DECLARE
    result_message STRING DEFAULT '';
BEGIN
    CALL REFRESH_USERS_ANALYTICS();
    result_message := result_message || 'Users analytics refreshed. ';

    CALL REFRESH_DATABASE_ANALYTICS();
    result_message := result_message || 'Database analytics refreshed. ';

    CALL REFRESH_TABLES_ANALYTICS();
    result_message := result_message || 'Tables analytics refreshed. ';

    RETURN result_message || 'All analytics tables refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

-- =====================================================
-- EXAMPLE USAGE QUERIES
-- =====================================================

-- USERS ANALYTICS EXAMPLES:

-- 1. Find most active users
-- SELECT USER_NAME, QUERIES_LAST_24H, ACTIVITY_LEVEL, CREDITS_USED_24H, COST_CATEGORY
-- FROM USERS_ANALYTICS_SUMMARY 
-- WHERE ACTIVITY_LEVEL IN ('HIGH_ACTIVITY', 'VERY_HIGH_ACTIVITY')
-- ORDER BY QUERIES_LAST_24H DESC;

-- 2. Find users with security issues
-- SELECT USER_NAME, MFA_STATUS, ACCOUNT_STATUS, FAILED_LOGINS_24H, UNIQUE_IPS_24H
-- FROM USERS_ANALYTICS_SUMMARY 
-- WHERE MFA_STATUS = 'MFA_DISABLED' OR ACCOUNT_STATUS != 'ACTIVE' OR FAILED_LOGINS_24H &amp;gt; 3;

-- 3. Find users with high failure rates
-- SELECT USER_NAME, QUERIES_LAST_24H, FAILED_QUERIES_24H, FAILURE_RATE, SUCCESSFUL_QUERIES_24H
-- FROM USERS_ANALYTICS_SUMMARY 
-- WHERE FAILURE_RATE IN ('MEDIUM_FAILURES', 'HIGH_FAILURES')
-- ORDER BY FAILED_QUERIES_24H DESC;

-- DATABASE ANALYTICS EXAMPLES:

-- 4. Find largest databases by storage
-- SELECT DATABASE_NAME, TOTAL_TABLES, TOTAL_ROWS, TOTAL_TABLE_BYTES, STORAGE_SIZE_CATEGORY
-- FROM DATABASE_ANALYTICS_SUMMARY 
-- ORDER BY TOTAL_TABLE_BYTES DESC;

-- 5. Find most active databases
-- SELECT DATABASE_NAME, QUERIES_LAST_24H, UNIQUE_USERS_24H, USAGE_LEVEL, CREDITS_USED_24H
-- FROM DATABASE_ANALYTICS_SUMMARY 
-- WHERE USAGE_LEVEL IN ('HIGH_USAGE', 'VERY_HIGH_USAGE')
-- ORDER BY QUERIES_LAST_24H DESC;

-- TABLES ANALYTICS EXAMPLES:

-- 6. Find unused large tables
-- SELECT DATABASE_NAME, TABLE_SCHEMA, TABLE_NAME, ROW_COUNT, BYTES, USAGE_LEVEL, SIZE_CATEGORY
-- FROM TABLES_ANALYTICS_SUMMARY 
-- WHERE USAGE_LEVEL = 'UNUSED' AND SIZE_CATEGORY IN ('LARGE', 'VERY_LARGE')
-- ORDER BY BYTES DESC;

-- 7. Find most accessed tables
-- SELECT DATABASE_NAME, TABLE_SCHEMA, TABLE_NAME, QUERIES_LAST_24H, UNIQUE_USERS_24H, USAGE_LEVEL
-- FROM TABLES_ANALYTICS_SUMMARY 
-- WHERE USAGE_LEVEL IN ('HIGH_USAGE', 'VERY_HIGH_USAGE')
-- ORDER BY QUERIES_LAST_24H DESC;

-- 8. Find tables that need clustering
-- SELECT DATABASE_NAME, TABLE_SCHEMA, TABLE_NAME, ROW_COUNT, BYTES, CLUSTERING_STATUS, AUTO_CLUSTERING_STATUS
-- FROM TABLES_ANALYTICS_SUMMARY 
-- WHERE CLUSTERING_STATUS = 'NOT_CLUSTERED' AND SIZE_CATEGORY IN ('LARGE', 'VERY_LARGE')
-- ORDER BY BYTES DESC;

-- REFRESH ALL TABLES:
-- CALL REFRESH_ALL_ANALYTICS();

-- VIEW SUMMARY OF ALL TABLES:
SELECT 'USERS_ANALYTICS_SUMMARY' as TABLE_NAME, COUNT(*) as ROW_COUNT FROM USERS_ANALYTICS_SUMMARY
UNION ALL
SELECT 'DATABASE_ANALYTICS_SUMMARY' as TABLE_NAME, COUNT(*) as ROW_COUNT FROM DATABASE_ANALYTICS_SUMMARY  
UNION ALL
SELECT 'TABLES_ANALYTICS_SUMMARY' as TABLE_NAME, COUNT(*) as ROW_COUNT FROM TABLES_ANALYTICS_SUMMARY;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>USER</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Tue, 05 Aug 2025 18:10:48 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/user-2dbe</link>
      <guid>https://dev.to/armaankhan8270/user-2dbe</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- =============================================
-- OPTIMIZED SNOWFLAKE QUERY PERFORMANCE ANALYZER
-- =============================================

-- Step 1: Create materialized table for refreshable data
CREATE OR REPLACE TABLE QUERY_PERFORMANCE_ANALYSIS (
    USER_NAME VARCHAR(255),
    TOTAL_QUERIES NUMBER,
    WAREHOUSES_USED NUMBER,
    DATABASES_ACCESSED NUMBER,
    TOTAL_CREDITS NUMBER(10,2),
    AVG_EXECUTION_TIME_MS NUMBER(10,2),
    AVG_BYTES_PER_ROW NUMBER(15,2),
    TOTAL_DATA_SCANNED_GB NUMBER(15,2),
    FAILURE_CANCELLATION_RATE_PCT NUMBER(5,2),
    SPILLED_QUERIES NUMBER,
    OVER_PROVISIONED_QUERIES NUMBER,
    PEAK_HOUR_LONG_RUNNING_QUERIES NUMBER,
    SELECT_STAR_QUERIES NUMBER,
    UNPARTITIONED_SCAN_QUERIES NUMBER,
    REPEATED_QUERIES NUMBER,
    COMPLEX_JOIN_QUERIES NUMBER,
    ZERO_RESULT_QUERIES NUMBER,
    HIGH_COMPILE_QUERIES NUMBER,
    UNTAGGED_QUERIES NUMBER,
    UNLIMITED_ORDER_BY_QUERIES NUMBER,
    SLOW_QUERIES NUMBER,
    EXPENSIVE_DISTINCT_QUERIES NUMBER,
    INEFFICIENT_LIKE_QUERIES NUMBER,
    NO_RESULTS_WITH_SCAN_QUERIES NUMBER,
    HIGH_COMPILE_RATIO_QUERIES NUMBER,
    -- Normalized scores (1-100)
    SPILLED_SCORE NUMBER(5,2),
    OVER_PROVISIONED_SCORE NUMBER(5,2),
    PEAK_HOUR_SCORE NUMBER(5,2),
    SELECT_STAR_SCORE NUMBER(5,2),
    UNPARTITIONED_SCORE NUMBER(5,2),
    REPEATED_SCORE NUMBER(5,2),
    COMPLEX_JOIN_SCORE NUMBER(5,2),
    ZERO_RESULT_SCORE NUMBER(5,2),
    HIGH_COMPILE_SCORE NUMBER(5,2),
    SLOW_QUERY_SCORE NUMBER(5,2),
    EXPENSIVE_DISTINCT_SCORE NUMBER(5,2),
    INEFFICIENT_LIKE_SCORE NUMBER(5,2),
    NO_RESULTS_SCAN_SCORE NUMBER(5,2),
    HIGH_COMPILE_RATIO_SCORE NUMBER(5,2),
    OVERALL_OPTIMIZATION_SCORE NUMBER(5,2), -- 1 = optimized, 100 = most unoptimized
    COST_STATUS VARCHAR(50),
    RECOMMENDATIONS ARRAY,
    QUERY_SAMPLES OBJECT,
    ANALYSIS_DATE TIMESTAMP_NTZ DEFAULT CURRENT_TIMESTAMP()
);

-- Step 2: Create procedure to refresh the analysis
CREATE OR REPLACE PROCEDURE REFRESH_QUERY_PERFORMANCE_ANALYSIS(
    START_DATE STRING,
    END_DATE STRING,
    OBJECT_FILTER STRING DEFAULT ''
)
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    -- Clear existing data
    DELETE FROM QUERY_PERFORMANCE_ANALYSIS;

    -- Insert new analysis
    INSERT INTO QUERY_PERFORMANCE_ANALYSIS
    WITH percentile_reference AS (
        SELECT
            COALESCE(warehouse_size, 'UNKNOWN') as warehouse_size,
            PERCENTILE_CONT(0.1) WITHIN GROUP (ORDER BY COALESCE(bytes_scanned, 0)) AS bytes_scanned_p10,
            PERCENTILE_CONT(0.9) WITHIN GROUP (ORDER BY total_elapsed_time) AS execution_time_p90
        FROM snowflake.account_usage.query_history
        WHERE start_time &amp;gt;= TRY_TO_TIMESTAMP_NTZ(START_DATE)
          AND start_time &amp;lt;= TRY_TO_TIMESTAMP_NTZ(END_DATE || ' 23:59:59')
          AND user_name IS NOT NULL
          AND query_type NOT IN ('DESCRIBE', 'SHOW', 'USE')
          AND total_elapsed_time &amp;gt;= 2000  -- Minimum 2 seconds
        GROUP BY COALESCE(warehouse_size, 'UNKNOWN')
    ),
    query_flags AS (
        SELECT
            qh.query_id,
            qh.warehouse_size,
            qh.bytes_scanned,
            qh.total_elapsed_time AS execution_time_ms,
            qh.compilation_time,
            qh.start_time,
            qh.query_text, -- Full query text, no truncation
            qh.query_hash,
            qh.query_tag,
            qh.error_code,
            qh.execution_status,
            qh.partitions_scanned,
            qh.partitions_total,
            qh.bytes_spilled_to_local_storage,
            qh.bytes_spilled_to_remote_storage,
            qh.user_name,
            qh.database_name,
            qh.schema_name,
            qh.warehouse_name,
            COALESCE(qh.credits_used_cloud_services, 0) AS credits_used_cloud_services,
            qh.rows_produced,
            -- Optimized flag calculations
            CASE
                WHEN qh.warehouse_size IN ('MEDIUM', 'LARGE', 'X-LARGE', '2X-LARGE', '3X-LARGE', '4X-LARGE')
                 AND qh.bytes_scanned &amp;lt; pr.bytes_scanned_p10 THEN 1 ELSE 0
            END AS over_provisioned,
            CASE
                WHEN EXTRACT(HOUR FROM qh.start_time) BETWEEN 9 AND 17
                 AND qh.total_elapsed_time &amp;gt; 300000 THEN 1 ELSE 0
            END AS peak_hour_long_running,
            CASE
                WHEN UPPER(qh.query_text) LIKE 'SELECT *%' THEN 1 ELSE 0
            END AS select_star,
            CASE
                WHEN qh.partitions_total &amp;gt; 0 AND qh.partitions_scanned = qh.partitions_total THEN 1 ELSE 0
            END AS unpartitioned_scan,
            CASE
                WHEN qh.query_hash IS NOT NULL THEN 1 ELSE 0
            END AS repeated_query,
            CASE
                WHEN (UPPER(qh.query_text) LIKE '%JOIN%' AND UPPER(qh.query_text) LIKE '%JOIN%')
                   OR UPPER(qh.query_text) LIKE '%WINDOW%' THEN 1 ELSE 0
            END AS complex_query,
            CASE
                WHEN qh.error_code IS NOT NULL OR qh.execution_status IN ('FAILED', 'CANCELLED') THEN 1 ELSE 0
            END AS failed_cancelled,
            CASE
                WHEN qh.bytes_spilled_to_local_storage &amp;gt; 0 OR qh.bytes_spilled_to_remote_storage &amp;gt; 0 THEN 1 ELSE 0
            END AS spilled,
            CASE
                WHEN qh.rows_produced = 0 AND qh.bytes_scanned &amp;gt; 1000000 THEN 1 ELSE 0
            END AS zero_result_query,
            CASE
                WHEN qh.compilation_time &amp;gt; 5000 THEN 1 ELSE 0
            END AS high_compile_time,
            CASE
                WHEN qh.query_tag IS NULL THEN 1 ELSE 0
            END AS untagged_query,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%ORDER BY%' AND UPPER(qh.query_text) NOT LIKE '%LIMIT%' THEN 1 ELSE 0
            END AS unlimited_order_by,
            CASE
                WHEN qh.total_elapsed_time &amp;gt; pr.execution_time_p90 THEN 1 ELSE 0
            END AS slow_query,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%DISTINCT%' AND qh.bytes_scanned &amp;gt; 100000000 THEN 1 ELSE 0
            END AS expensive_distinct,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%LIKE%' AND UPPER(qh.query_text) NOT LIKE '%INDEX%' THEN 1 ELSE 0
            END AS inefficient_like,
            CASE
                WHEN qh.bytes_scanned &amp;gt; 0 AND qh.rows_produced = 0 THEN 1 ELSE 0
            END AS no_results_with_scan,
            CASE
                WHEN qh.total_elapsed_time &amp;gt; 0 AND qh.compilation_time / qh.total_elapsed_time &amp;gt; 0.5 THEN 1 ELSE 0
            END AS high_compile_ratio
        FROM snowflake.account_usage.query_history qh
        LEFT JOIN percentile_reference pr ON COALESCE(qh.warehouse_size, 'UNKNOWN') = pr.warehouse_size
        WHERE
            qh.start_time &amp;gt;= TRY_TO_TIMESTAMP_NTZ(START_DATE)
            AND qh.start_time &amp;lt;= TRY_TO_TIMESTAMP_NTZ(END_DATE || ' 23:59:59')
            AND qh.query_type NOT IN ('DESCRIBE', 'SHOW', 'USE')
            AND qh.user_name IS NOT NULL
            AND qh.total_elapsed_time &amp;gt;= 2000  -- Minimum 2 seconds
    ),
    -- Optimized sample queries collection
    sample_queries AS (
        SELECT
            user_name,
            OBJECT_CONSTRUCT(
                'over_provisioned', ARRAY_AGG(
                    CASE WHEN over_provisioned = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text, -- Full query text
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        ) 
                    END
                ) WITHIN GROUP (ORDER BY start_time DESC),
                'peak_hour_long_running', ARRAY_AGG(
                    CASE WHEN peak_hour_long_running = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY execution_time_ms DESC),
                'select_star', ARRAY_AGG(
                    CASE WHEN select_star = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY bytes_scanned DESC),
                'spilled', ARRAY_AGG(
                    CASE WHEN spilled = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_spilled_to_local_storage', bytes_spilled_to_local_storage,
                            'bytes_spilled_to_remote_storage', bytes_spilled_to_remote_storage,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY (bytes_spilled_to_local_storage + bytes_spilled_to_remote_storage) DESC),
                'slow_query', ARRAY_AGG(
                    CASE WHEN slow_query = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY execution_time_ms DESC)
            ) AS query_samples
        FROM query_flags
        GROUP BY user_name
    ),
    user_metrics AS (
        SELECT
            user_name,
            COUNT(DISTINCT query_id) AS total_queries,
            COUNT(DISTINCT warehouse_name) AS warehouses_used,
            COUNT(DISTINCT database_name) AS databases_accessed,
            ROUND(SUM(credits_used_cloud_services), 2) AS total_credits,
            ROUND(AVG(execution_time_ms), 2) AS avg_execution_time_ms,
            ROUND(AVG(NULLIF(bytes_scanned, 0) / NULLIF(rows_produced, 0)), 2) AS avg_bytes_per_row,
            ROUND(SUM(bytes_scanned) / POWER(1024, 3), 2) AS total_data_scanned_gb,
            ROUND(SUM(failed_cancelled) * 100.0 / NULLIF(COUNT(*), 0), 2) AS failure_cancellation_rate_pct,
            SUM(spilled) AS spilled_queries,
            SUM(over_provisioned) AS over_provisioned_queries,
            SUM(peak_hour_long_running) AS peak_hour_long_running_queries,
            SUM(select_star) AS select_star_queries,
            SUM(unpartitioned_scan) AS unpartitioned_scan_queries,
            COUNT(*) - COUNT(DISTINCT query_hash) AS repeated_queries,
            SUM(complex_query) AS complex_join_queries,
            SUM(zero_result_query) AS zero_result_queries,
            SUM(high_compile_time) AS high_compile_queries,
            SUM(untagged_query) AS untagged_queries,
            SUM(unlimited_order_by) AS unlimited_order_by_queries,
            SUM(slow_query) AS slow_queries,
            SUM(expensive_distinct) AS expensive_distinct_queries,
            SUM(inefficient_like) AS inefficient_like_queries,
            SUM(no_results_with_scan) AS no_results_with_scan_queries,
            SUM(high_compile_ratio) AS high_compile_ratio_queries
        FROM query_flags
        GROUP BY user_name
    ),
    -- Calculate percentiles for normalization
    normalization_stats AS (
        SELECT
            MAX(spilled_queries) AS max_spilled,
            MAX(over_provisioned_queries) AS max_over_provisioned,
            MAX(peak_hour_long_running_queries) AS max_peak_hour,
            MAX(select_star_queries) AS max_select_star,
            MAX(unpartitioned_scan_queries) AS max_unpartitioned,
            MAX(repeated_queries) AS max_repeated,
            MAX(complex_join_queries) AS max_complex,
            MAX(zero_result_queries) AS max_zero_result,
            MAX(high_compile_queries) AS max_high_compile,
            MAX(slow_queries) AS max_slow,
            MAX(expensive_distinct_queries) AS max_expensive_distinct,
            MAX(inefficient_like_queries) AS max_inefficient_like,
            MAX(no_results_with_scan_queries) AS max_no_results_scan,
            MAX(high_compile_ratio_queries) AS max_high_compile_ratio,
            PERCENTILE_CONT(0.9) WITHIN GROUP (ORDER BY total_credits) AS cost_p90
        FROM user_metrics
    )
    SELECT
        um.user_name,
        um.total_queries,
        um.warehouses_used,
        um.databases_accessed,
        um.total_credits,
        um.avg_execution_time_ms,
        um.avg_bytes_per_row,
        um.total_data_scanned_gb,
        um.failure_cancellation_rate_pct,
        um.spilled_queries,
        um.over_provisioned_queries,
        um.peak_hour_long_running_queries,
        um.select_star_queries,
        um.unpartitioned_scan_queries,
        um.repeated_queries,
        um.complex_join_queries,
        um.zero_result_queries,
        um.high_compile_queries,
        um.untagged_queries,
        um.unlimited_order_by_queries,
        um.slow_queries,
        um.expensive_distinct_queries,
        um.inefficient_like_queries,
        um.no_results_with_scan_queries,
        um.high_compile_ratio_queries,
        -- Normalized scores (1-100 scale)
        GREATEST(1, LEAST(100, ROUND((um.spilled_queries * 100.0) / NULLIF(ns.max_spilled, 0), 2))) AS spilled_score,
        GREATEST(1, LEAST(100, ROUND((um.over_provisioned_queries * 100.0) / NULLIF(ns.max_over_provisioned, 0), 2))) AS over_provisioned_score,
        GREATEST(1, LEAST(100, ROUND((um.peak_hour_long_running_queries * 100.0) / NULLIF(ns.max_peak_hour, 0), 2))) AS peak_hour_score,
        GREATEST(1, LEAST(100, ROUND((um.select_star_queries * 100.0) / NULLIF(ns.max_select_star, 0), 2))) AS select_star_score,
        GREATEST(1, LEAST(100, ROUND((um.unpartitioned_scan_queries * 100.0) / NULLIF(ns.max_unpartitioned, 0), 2))) AS unpartitioned_score,
        GREATEST(1, LEAST(100, ROUND((um.repeated_queries * 100.0) / NULLIF(ns.max_repeated, 0), 2))) AS repeated_score,
        GREATEST(1, LEAST(100, ROUND((um.complex_join_queries * 100.0) / NULLIF(ns.max_complex, 0), 2))) AS complex_join_score,
        GREATEST(1, LEAST(100, ROUND((um.zero_result_queries * 100.0) / NULLIF(ns.max_zero_result, 0), 2))) AS zero_result_score,
        GREATEST(1, LEAST(100, ROUND((um.high_compile_queries * 100.0) / NULLIF(ns.max_high_compile, 0), 2))) AS high_compile_score,
        GREATEST(1, LEAST(100, ROUND((um.slow_queries * 100.0) / NULLIF(ns.max_slow, 0), 2))) AS slow_query_score,
        GREATEST(1, LEAST(100, ROUND((um.expensive_distinct_queries * 100.0) / NULLIF(ns.max_expensive_distinct, 0), 2))) AS expensive_distinct_score,
        GREATEST(1, LEAST(100, ROUND((um.inefficient_like_queries * 100.0) / NULLIF(ns.max_inefficient_like, 0), 2))) AS inefficient_like_score,
        GREATEST(1, LEAST(100, ROUND((um.no_results_with_scan_queries * 100.0) / NULLIF(ns.max_no_results_scan, 0), 2))) AS no_results_scan_score,
        GREATEST(1, LEAST(100, ROUND((um.high_compile_ratio_queries * 100.0) / NULLIF(ns.max_high_compile_ratio, 0), 2))) AS high_compile_ratio_score,
        -- Overall optimization score (weighted average)
        GREATEST(1, LEAST(100, ROUND(
            (GREATEST(1, LEAST(100, (um.spilled_queries * 100.0) / NULLIF(ns.max_spilled, 0))) * 0.15 +
             GREATEST(1, LEAST(100, (um.over_provisioned_queries * 100.0) / NULLIF(ns.max_over_provisioned, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.peak_hour_long_running_queries * 100.0) / NULLIF(ns.max_peak_hour, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.select_star_queries * 100.0) / NULLIF(ns.max_select_star, 0))) * 0.10 +
             GREATEST(1, LEAST(100, (um.unpartitioned_scan_queries * 100.0) / NULLIF(ns.max_unpartitioned, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.repeated_queries * 100.0) / NULLIF(ns.max_repeated, 0))) * 0.08 +
             GREATEST(1, LEAST(100, (um.zero_result_queries * 100.0) / NULLIF(ns.max_zero_result, 0))) * 0.15 +
             GREATEST(1, LEAST(100, (um.slow_queries * 100.0) / NULLIF(ns.max_slow, 0))) * 0.16), 2
        ))) AS overall_optimization_score,
        CASE
            WHEN um.total_credits &amp;gt; ns.cost_p90 THEN 'High Cost'
            ELSE 'Normal'
        END AS cost_status,
        ARRAY_CONSTRUCT_COMPACT(
            CASE WHEN um.spilled_queries &amp;gt; 5 THEN 'Optimize memory usage or increase warehouse size.' END,
            CASE WHEN um.over_provisioned_queries &amp;gt; 5 THEN 'Use smaller warehouses for simple queries.' END,
            CASE WHEN um.peak_hour_long_running_queries &amp;gt; 5 THEN 'Schedule long queries off-peak.' END,
            CASE WHEN um.select_star_queries &amp;gt; 5 THEN 'Specify columns instead of SELECT *.' END,
            CASE WHEN um.unpartitioned_scan_queries &amp;gt; 5 THEN 'Implement partitioning or clustering.' END,
            CASE WHEN um.repeated_queries &amp;gt; 10 THEN 'Review frequently executed queries.' END,
            CASE WHEN um.zero_result_queries &amp;gt; 3 THEN 'Avoid queries that return no data.' END,
            CASE WHEN um.slow_queries &amp;gt; 3 THEN 'Optimize slow-running queries.' END,
            CASE WHEN um.expensive_distinct_queries &amp;gt; 3 THEN 'Replace DISTINCT with GROUP BY where possible.' END,
            CASE WHEN um.no_results_with_scan_queries &amp;gt; 3 THEN 'Add WHERE clauses to prevent unnecessary scans.' END
        ) AS recommendations,
        COALESCE(sq.query_samples, OBJECT_CONSTRUCT()) AS query_samples,
        CURRENT_TIMESTAMP() AS analysis_date
    FROM user_metrics um
    CROSS JOIN normalization_stats ns
    LEFT JOIN sample_queries sq ON um.user_name = sq.user_name
    ORDER BY overall_optimization_score DESC;

    RETURN 'Analysis refreshed successfully for ' || START_DATE || ' to ' || END_DATE;
END;
$$;

-- Step 3: Query to refresh and view results
-- Usage example:
-- CALL REFRESH_QUERY_PERFORMANCE_ANALYSIS('2024-01-01', '2024-01-31');

-- Step 4: Debug query to check data availability
-- Run this first to see if there's any data in your date range
CREATE OR REPLACE VIEW DEBUG_QUERY_DATA AS
SELECT 
    COUNT(*) as total_records,
    COUNT(DISTINCT user_name) as unique_users,
    MIN(start_time) as earliest_query,
    MAX(start_time) as latest_query,
    COUNT(CASE WHEN total_elapsed_time &amp;gt;= 2000 THEN 1 END) as queries_over_2sec,
    COUNT(CASE WHEN query_type NOT IN ('DESCRIBE', 'SHOW', 'USE') THEN 1 END) as non_metadata_queries,
    ARRAY_AGG(DISTINCT query_type) as query_types_found,
    ARRAY_AGG(DISTINCT user_name) WITHIN GROUP (ORDER BY user_name) as sample_users
FROM snowflake.account_usage.query_history
WHERE start_time &amp;gt;= CURRENT_DATE - 30  -- Last 30 days
  AND user_name IS NOT NULL;

-- Step 5: Simple test query to verify data
CREATE OR REPLACE PROCEDURE TEST_QUERY_DATA(
    START_DATE STRING,
    END_DATE STRING
)
RETURNS VARIANT
LANGUAGE SQL
AS
$$
BEGIN
    LET result VARIANT;

    SELECT 
        OBJECT_CONSTRUCT(
            'total_queries', COUNT(*),
            'unique_users', COUNT(DISTINCT user_name),
            'date_range_start', MIN(start_time),
            'date_range_end', MAX(start_time),
            'queries_over_2sec', COUNT(CASE WHEN total_elapsed_time &amp;gt;= 2000 THEN 1 END),
            'sample_users', ARRAY_AGG(DISTINCT user_name) WITHIN GROUP (ORDER BY user_name LIMIT 10),
            'query_types', ARRAY_AGG(DISTINCT query_type) WITHIN GROUP (ORDER BY query_type)
        )
    INTO result
    FROM snowflake.account_usage.query_history
    WHERE start_time &amp;gt;= TRY_TO_TIMESTAMP_NTZ(START_DATE)
      AND start_time &amp;lt;= TRY_TO_TIMESTAMP_NTZ(END_DATE || ' 23:59:59')
      AND user_name IS NOT NULL;

    RETURN result;
END;
$$;

-- Step 6: View the results
-- SELECT * FROM QUERY_PERFORMANCE_ANALYSIS ORDER BY OVERALL_OPTIMIZATION_SCORE DESC;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;





&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- =============================================
-- OPTIMIZED SNOWFLAKE QUERY PERFORMANCE ANALYZER
-- =============================================

-- Step 1: Create materialized table for refreshable data
CREATE OR REPLACE TABLE QUERY_PERFORMANCE_ANALYSIS (
    USER_NAME VARCHAR(255),
    TOTAL_QUERIES NUMBER,
    WAREHOUSES_USED NUMBER,
    DATABASES_ACCESSED NUMBER,
    TOTAL_CREDITS NUMBER(10,2),
    AVG_EXECUTION_TIME_MS NUMBER(10,2),
    AVG_BYTES_PER_ROW NUMBER(15,2),
    TOTAL_DATA_SCANNED_GB NUMBER(15,2),
    FAILURE_CANCELLATION_RATE_PCT NUMBER(5,2),
    SPILLED_QUERIES NUMBER,
    OVER_PROVISIONED_QUERIES NUMBER,
    PEAK_HOUR_LONG_RUNNING_QUERIES NUMBER,
    SELECT_STAR_QUERIES NUMBER,
    UNPARTITIONED_SCAN_QUERIES NUMBER,
    REPEATED_QUERIES NUMBER,
    COMPLEX_JOIN_QUERIES NUMBER,
    ZERO_RESULT_QUERIES NUMBER,
    HIGH_COMPILE_QUERIES NUMBER,
    UNTAGGED_QUERIES NUMBER,
    UNLIMITED_ORDER_BY_QUERIES NUMBER,
    SLOW_QUERIES NUMBER,
    EXPENSIVE_DISTINCT_QUERIES NUMBER,
    INEFFICIENT_LIKE_QUERIES NUMBER,
    NO_RESULTS_WITH_SCAN_QUERIES NUMBER,
    HIGH_COMPILE_RATIO_QUERIES NUMBER,
    -- Normalized scores (1-100)
    SPILLED_SCORE NUMBER(5,2),
    OVER_PROVISIONED_SCORE NUMBER(5,2),
    PEAK_HOUR_SCORE NUMBER(5,2),
    SELECT_STAR_SCORE NUMBER(5,2),
    UNPARTITIONED_SCORE NUMBER(5,2),
    REPEATED_SCORE NUMBER(5,2),
    COMPLEX_JOIN_SCORE NUMBER(5,2),
    ZERO_RESULT_SCORE NUMBER(5,2),
    HIGH_COMPILE_SCORE NUMBER(5,2),
    SLOW_QUERY_SCORE NUMBER(5,2),
    EXPENSIVE_DISTINCT_SCORE NUMBER(5,2),
    INEFFICIENT_LIKE_SCORE NUMBER(5,2),
    NO_RESULTS_SCAN_SCORE NUMBER(5,2),
    HIGH_COMPILE_RATIO_SCORE NUMBER(5,2),
    OVERALL_OPTIMIZATION_SCORE NUMBER(5,2), -- 1 = optimized, 100 = most unoptimized
    COST_STATUS VARCHAR(50),
    RECOMMENDATIONS ARRAY,
    QUERY_SAMPLES OBJECT,
    ANALYSIS_DATE TIMESTAMP_NTZ DEFAULT CURRENT_TIMESTAMP()
);

-- Step 2: Create procedure to refresh the analysis
CREATE OR REPLACE PROCEDURE REFRESH_QUERY_PERFORMANCE_ANALYSIS(
    START_DATE STRING,
    END_DATE STRING,
    OBJECT_FILTER STRING DEFAULT ''
)
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    -- Clear existing data
    DELETE FROM QUERY_PERFORMANCE_ANALYSIS;

    -- Insert new analysis
    INSERT INTO QUERY_PERFORMANCE_ANALYSIS
    WITH percentile_reference AS (
        SELECT
            warehouse_size,
            PERCENTILE_CONT(0.1) WITHIN GROUP (ORDER BY bytes_scanned) AS bytes_scanned_p10,
            PERCENTILE_CONT(0.9) WITHIN GROUP (ORDER BY total_elapsed_time) AS execution_time_p90
        FROM snowflake.account_usage.query_history
        WHERE start_time &amp;gt;= TRY_TO_TIMESTAMP_NTZ(START_DATE)
          AND start_time &amp;lt;= TRY_TO_TIMESTAMP_NTZ(END_DATE || ' 23:59:59')
          AND OBJECT_FILTER = '' OR OBJECT_FILTER != ''  -- Dynamic filter placeholder
          AND user_name IS NOT NULL
          AND query_type NOT IN ('DESCRIBE', 'SHOW', 'USE', 'CREATE', 'DROP', 'ALTER', 'GRANT', 'REVOKE')
          AND total_elapsed_time &amp;gt;= 2000  -- Minimum 2 seconds
        GROUP BY warehouse_size
    ),
    query_flags AS (
        SELECT
            qh.query_id,
            qh.warehouse_size,
            qh.bytes_scanned,
            qh.total_elapsed_time AS execution_time_ms,
            qh.compilation_time,
            qh.start_time,
            qh.query_text, -- Full query text, no truncation
            qh.query_hash,
            qh.query_tag,
            qh.error_code,
            qh.execution_status,
            qh.partitions_scanned,
            qh.partitions_total,
            qh.bytes_spilled_to_local_storage,
            qh.bytes_spilled_to_remote_storage,
            qh.user_name,
            qh.database_name,
            qh.schema_name,
            qh.warehouse_name,
            COALESCE(qh.credits_used_cloud_services, 0) AS credits_used_cloud_services,
            qh.rows_produced,
            -- Optimized flag calculations
            CASE
                WHEN qh.warehouse_size IN ('MEDIUM', 'LARGE', 'X-LARGE', '2X-LARGE', '3X-LARGE', '4X-LARGE')
                 AND qh.bytes_scanned &amp;lt; pr.bytes_scanned_p10 THEN 1 ELSE 0
            END AS over_provisioned,
            CASE
                WHEN EXTRACT(HOUR FROM qh.start_time) BETWEEN 9 AND 17
                 AND qh.total_elapsed_time &amp;gt; 300000 THEN 1 ELSE 0
            END AS peak_hour_long_running,
            CASE
                WHEN UPPER(qh.query_text) LIKE 'SELECT *%' THEN 1 ELSE 0
            END AS select_star,
            CASE
                WHEN qh.partitions_total &amp;gt; 0 AND qh.partitions_scanned = qh.partitions_total THEN 1 ELSE 0
            END AS unpartitioned_scan,
            CASE
                WHEN qh.query_hash IS NOT NULL THEN 1 ELSE 0
            END AS repeated_query,
            CASE
                WHEN (UPPER(qh.query_text) LIKE '%JOIN%' AND UPPER(qh.query_text) LIKE '%JOIN%')
                   OR UPPER(qh.query_text) LIKE '%WINDOW%' THEN 1 ELSE 0
            END AS complex_query,
            CASE
                WHEN qh.error_code IS NOT NULL OR qh.execution_status IN ('FAILED', 'CANCELLED') THEN 1 ELSE 0
            END AS failed_cancelled,
            CASE
                WHEN qh.bytes_spilled_to_local_storage &amp;gt; 0 OR qh.bytes_spilled_to_remote_storage &amp;gt; 0 THEN 1 ELSE 0
            END AS spilled,
            CASE
                WHEN qh.rows_produced = 0 AND qh.bytes_scanned &amp;gt; 1000000 THEN 1 ELSE 0
            END AS zero_result_query,
            CASE
                WHEN qh.compilation_time &amp;gt; 5000 THEN 1 ELSE 0
            END AS high_compile_time,
            CASE
                WHEN qh.query_tag IS NULL THEN 1 ELSE 0
            END AS untagged_query,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%ORDER BY%' AND UPPER(qh.query_text) NOT LIKE '%LIMIT%' THEN 1 ELSE 0
            END AS unlimited_order_by,
            CASE
                WHEN qh.total_elapsed_time &amp;gt; pr.execution_time_p90 THEN 1 ELSE 0
            END AS slow_query,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%DISTINCT%' AND qh.bytes_scanned &amp;gt; 100000000 THEN 1 ELSE 0
            END AS expensive_distinct,
            CASE
                WHEN UPPER(qh.query_text) LIKE '%LIKE%' AND UPPER(qh.query_text) NOT LIKE '%INDEX%' THEN 1 ELSE 0
            END AS inefficient_like,
            CASE
                WHEN qh.bytes_scanned &amp;gt; 0 AND qh.rows_produced = 0 THEN 1 ELSE 0
            END AS no_results_with_scan,
            CASE
                WHEN qh.total_elapsed_time &amp;gt; 0 AND qh.compilation_time / qh.total_elapsed_time &amp;gt; 0.5 THEN 1 ELSE 0
            END AS high_compile_ratio
        FROM snowflake.account_usage.query_history qh
        LEFT JOIN percentile_reference pr ON qh.warehouse_size = pr.warehouse_size
        WHERE
            qh.start_time &amp;gt;= TRY_TO_TIMESTAMP_NTZ(START_DATE)
            AND qh.start_time &amp;lt;= TRY_TO_TIMESTAMP_NTZ(END_DATE || ' 23:59:59')
            AND OBJECT_FILTER = '' OR OBJECT_FILTER != ''  -- Dynamic filter placeholder
            AND qh.query_type NOT IN ('DESCRIBE', 'SHOW', 'USE', 'CREATE', 'DROP', 'ALTER', 'GRANT', 'REVOKE')
            AND qh.user_name IS NOT NULL
            AND qh.total_elapsed_time &amp;gt;= 2000  -- Minimum 2 seconds
    ),
    -- Optimized sample queries collection
    sample_queries AS (
        SELECT
            user_name,
            OBJECT_CONSTRUCT(
                'over_provisioned', ARRAY_AGG(
                    CASE WHEN over_provisioned = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text, -- Full query text
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        ) 
                    END
                ) WITHIN GROUP (ORDER BY start_time DESC),
                'peak_hour_long_running', ARRAY_AGG(
                    CASE WHEN peak_hour_long_running = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY execution_time_ms DESC),
                'select_star', ARRAY_AGG(
                    CASE WHEN select_star = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY bytes_scanned DESC),
                'spilled', ARRAY_AGG(
                    CASE WHEN spilled = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_spilled_to_local_storage', bytes_spilled_to_local_storage,
                            'bytes_spilled_to_remote_storage', bytes_spilled_to_remote_storage,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY (bytes_spilled_to_local_storage + bytes_spilled_to_remote_storage) DESC),
                'slow_query', ARRAY_AGG(
                    CASE WHEN slow_query = 1 THEN
                        OBJECT_CONSTRUCT(
                            'query_id', query_id,
                            'query_text', query_text,
                            'execution_time_ms', execution_time_ms,
                            'bytes_scanned', bytes_scanned,
                            'warehouse_size', warehouse_size,
                            'start_time', start_time
                        )
                    END
                ) WITHIN GROUP (ORDER BY execution_time_ms DESC)
            ) AS query_samples
        FROM query_flags
        GROUP BY user_name
    ),
    user_metrics AS (
        SELECT
            user_name,
            COUNT(DISTINCT query_id) AS total_queries,
            COUNT(DISTINCT warehouse_name) AS warehouses_used,
            COUNT(DISTINCT database_name) AS databases_accessed,
            ROUND(SUM(credits_used_cloud_services), 2) AS total_credits,
            ROUND(AVG(execution_time_ms), 2) AS avg_execution_time_ms,
            ROUND(AVG(NULLIF(bytes_scanned, 0) / NULLIF(rows_produced, 0)), 2) AS avg_bytes_per_row,
            ROUND(SUM(bytes_scanned) / POWER(1024, 3), 2) AS total_data_scanned_gb,
            ROUND(SUM(failed_cancelled) * 100.0 / NULLIF(COUNT(*), 0), 2) AS failure_cancellation_rate_pct,
            SUM(spilled) AS spilled_queries,
            SUM(over_provisioned) AS over_provisioned_queries,
            SUM(peak_hour_long_running) AS peak_hour_long_running_queries,
            SUM(select_star) AS select_star_queries,
            SUM(unpartitioned_scan) AS unpartitioned_scan_queries,
            COUNT(*) - COUNT(DISTINCT query_hash) AS repeated_queries,
            SUM(complex_query) AS complex_join_queries,
            SUM(zero_result_query) AS zero_result_queries,
            SUM(high_compile_time) AS high_compile_queries,
            SUM(untagged_query) AS untagged_queries,
            SUM(unlimited_order_by) AS unlimited_order_by_queries,
            SUM(slow_query) AS slow_queries,
            SUM(expensive_distinct) AS expensive_distinct_queries,
            SUM(inefficient_like) AS inefficient_like_queries,
            SUM(no_results_with_scan) AS no_results_with_scan_queries,
            SUM(high_compile_ratio) AS high_compile_ratio_queries
        FROM query_flags
        GROUP BY user_name
    ),
    -- Calculate percentiles for normalization
    normalization_stats AS (
        SELECT
            MAX(spilled_queries) AS max_spilled,
            MAX(over_provisioned_queries) AS max_over_provisioned,
            MAX(peak_hour_long_running_queries) AS max_peak_hour,
            MAX(select_star_queries) AS max_select_star,
            MAX(unpartitioned_scan_queries) AS max_unpartitioned,
            MAX(repeated_queries) AS max_repeated,
            MAX(complex_join_queries) AS max_complex,
            MAX(zero_result_queries) AS max_zero_result,
            MAX(high_compile_queries) AS max_high_compile,
            MAX(slow_queries) AS max_slow,
            MAX(expensive_distinct_queries) AS max_expensive_distinct,
            MAX(inefficient_like_queries) AS max_inefficient_like,
            MAX(no_results_with_scan_queries) AS max_no_results_scan,
            MAX(high_compile_ratio_queries) AS max_high_compile_ratio,
            PERCENTILE_CONT(0.9) WITHIN GROUP (ORDER BY total_credits) AS cost_p90
        FROM user_metrics
    )
    SELECT
        um.user_name,
        um.total_queries,
        um.warehouses_used,
        um.databases_accessed,
        um.total_credits,
        um.avg_execution_time_ms,
        um.avg_bytes_per_row,
        um.total_data_scanned_gb,
        um.failure_cancellation_rate_pct,
        um.spilled_queries,
        um.over_provisioned_queries,
        um.peak_hour_long_running_queries,
        um.select_star_queries,
        um.unpartitioned_scan_queries,
        um.repeated_queries,
        um.complex_join_queries,
        um.zero_result_queries,
        um.high_compile_queries,
        um.untagged_queries,
        um.unlimited_order_by_queries,
        um.slow_queries,
        um.expensive_distinct_queries,
        um.inefficient_like_queries,
        um.no_results_with_scan_queries,
        um.high_compile_ratio_queries,
        -- Normalized scores (1-100 scale)
        GREATEST(1, LEAST(100, ROUND((um.spilled_queries * 100.0) / NULLIF(ns.max_spilled, 0), 2))) AS spilled_score,
        GREATEST(1, LEAST(100, ROUND((um.over_provisioned_queries * 100.0) / NULLIF(ns.max_over_provisioned, 0), 2))) AS over_provisioned_score,
        GREATEST(1, LEAST(100, ROUND((um.peak_hour_long_running_queries * 100.0) / NULLIF(ns.max_peak_hour, 0), 2))) AS peak_hour_score,
        GREATEST(1, LEAST(100, ROUND((um.select_star_queries * 100.0) / NULLIF(ns.max_select_star, 0), 2))) AS select_star_score,
        GREATEST(1, LEAST(100, ROUND((um.unpartitioned_scan_queries * 100.0) / NULLIF(ns.max_unpartitioned, 0), 2))) AS unpartitioned_score,
        GREATEST(1, LEAST(100, ROUND((um.repeated_queries * 100.0) / NULLIF(ns.max_repeated, 0), 2))) AS repeated_score,
        GREATEST(1, LEAST(100, ROUND((um.complex_join_queries * 100.0) / NULLIF(ns.max_complex, 0), 2))) AS complex_join_score,
        GREATEST(1, LEAST(100, ROUND((um.zero_result_queries * 100.0) / NULLIF(ns.max_zero_result, 0), 2))) AS zero_result_score,
        GREATEST(1, LEAST(100, ROUND((um.high_compile_queries * 100.0) / NULLIF(ns.max_high_compile, 0), 2))) AS high_compile_score,
        GREATEST(1, LEAST(100, ROUND((um.slow_queries * 100.0) / NULLIF(ns.max_slow, 0), 2))) AS slow_query_score,
        GREATEST(1, LEAST(100, ROUND((um.expensive_distinct_queries * 100.0) / NULLIF(ns.max_expensive_distinct, 0), 2))) AS expensive_distinct_score,
        GREATEST(1, LEAST(100, ROUND((um.inefficient_like_queries * 100.0) / NULLIF(ns.max_inefficient_like, 0), 2))) AS inefficient_like_score,
        GREATEST(1, LEAST(100, ROUND((um.no_results_with_scan_queries * 100.0) / NULLIF(ns.max_no_results_scan, 0), 2))) AS no_results_scan_score,
        GREATEST(1, LEAST(100, ROUND((um.high_compile_ratio_queries * 100.0) / NULLIF(ns.max_high_compile_ratio, 0), 2))) AS high_compile_ratio_score,
        -- Overall optimization score (weighted average)
        GREATEST(1, LEAST(100, ROUND(
            (GREATEST(1, LEAST(100, (um.spilled_queries * 100.0) / NULLIF(ns.max_spilled, 0))) * 0.15 +
             GREATEST(1, LEAST(100, (um.over_provisioned_queries * 100.0) / NULLIF(ns.max_over_provisioned, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.peak_hour_long_running_queries * 100.0) / NULLIF(ns.max_peak_hour, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.select_star_queries * 100.0) / NULLIF(ns.max_select_star, 0))) * 0.10 +
             GREATEST(1, LEAST(100, (um.unpartitioned_scan_queries * 100.0) / NULLIF(ns.max_unpartitioned, 0))) * 0.12 +
             GREATEST(1, LEAST(100, (um.repeated_queries * 100.0) / NULLIF(ns.max_repeated, 0))) * 0.08 +
             GREATEST(1, LEAST(100, (um.zero_result_queries * 100.0) / NULLIF(ns.max_zero_result, 0))) * 0.15 +
             GREATEST(1, LEAST(100, (um.slow_queries * 100.0) / NULLIF(ns.max_slow, 0))) * 0.16), 2
        ))) AS overall_optimization_score,
        CASE
            WHEN um.total_credits &amp;gt; ns.cost_p90 THEN 'High Cost'
            ELSE 'Normal'
        END AS cost_status,
        ARRAY_CONSTRUCT_COMPACT(
            CASE WHEN um.spilled_queries &amp;gt; 5 THEN 'Optimize memory usage or increase warehouse size.' END,
            CASE WHEN um.over_provisioned_queries &amp;gt; 5 THEN 'Use smaller warehouses for simple queries.' END,
            CASE WHEN um.peak_hour_long_running_queries &amp;gt; 5 THEN 'Schedule long queries off-peak.' END,
            CASE WHEN um.select_star_queries &amp;gt; 5 THEN 'Specify columns instead of SELECT *.' END,
            CASE WHEN um.unpartitioned_scan_queries &amp;gt; 5 THEN 'Implement partitioning or clustering.' END,
            CASE WHEN um.repeated_queries &amp;gt; 10 THEN 'Review frequently executed queries.' END,
            CASE WHEN um.zero_result_queries &amp;gt; 3 THEN 'Avoid queries that return no data.' END,
            CASE WHEN um.slow_queries &amp;gt; 3 THEN 'Optimize slow-running queries.' END,
            CASE WHEN um.expensive_distinct_queries &amp;gt; 3 THEN 'Replace DISTINCT with GROUP BY where possible.' END,
            CASE WHEN um.no_results_with_scan_queries &amp;gt; 3 THEN 'Add WHERE clauses to prevent unnecessary scans.' END
        ) AS recommendations,
        COALESCE(sq.query_samples, OBJECT_CONSTRUCT()) AS query_samples,
        CURRENT_TIMESTAMP() AS analysis_date
    FROM user_metrics um
    CROSS JOIN normalization_stats ns
    LEFT JOIN sample_queries sq ON um.user_name = sq.user_name
    ORDER BY overall_optimization_score DESC;

    RETURN 'Analysis refreshed successfully for ' || START_DATE || ' to ' || END_DATE;
END;
$$;

-- Step 3: Query to refresh and view results
-- Usage example:
-- CALL REFRESH_QUERY_PERFORMANCE_ANALYSIS('2024-01-01', '2024-01-31');

-- Step 4: View the results
-- SELECT * FROM QUERY_PERFORMANCE_ANALYSIS ORDER BY OVERALL_OPTIMIZATION_SCORE DESC;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>cols available</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Tue, 05 Aug 2025 15:33:00 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/cols-available-1oa4</link>
      <guid>https://dev.to/armaankhan8270/cols-available-1oa4</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;{
  "SNOWFLAKE": {
    "ACCOUNT_USAGE": {
      "ACCESS_HISTORY": {
        "description": "Tracks access to objects by queries, including direct and base objects accessed, modifications, and referenced policies.",
        "columns": [
          "QUERY_ID", "QUERY_START_TIME", "USER_NAME", "DIRECT_OBJECTS_ACCESSED", 
          "BASE_OBJECTS_ACCESSED", "OBJECTS_MODIFIED", "OBJECT_MODIFIED_BY_DDL", 
          "POLICIES_REFERENCED", "PARENT_QUERY_ID", "ROOT_QUERY_ID"
        ]
      },
      "AUTOMATIC_CLUSTERING_HISTORY": {
        "description": "Records automatic clustering operations, including resource usage and re-clustered data.",
        "columns": [
          "START_TIME", "END_TIME", "CREDITS_USED", "NUM_BYTES_RECLUSTERED", 
          "NUM_ROWS_RECLUSTERED", "TABLE_ID", "TABLE_NAME", "SCHEMA_ID", 
          "SCHEMA_NAME", "DATABASE_ID", "DATABASE_NAME", "INSTANCE_ID"
        ]
      },
      "COLUMNS": {
        "description": "Metadata about table columns, including data types, constraints, and identity properties.",
        "columns": [
          "COLUMN_ID", "COLUMN_NAME", "TABLE_ID", "TABLE_NAME", "TABLE_SCHEMA_ID", 
          "TABLE_SCHEMA", "TABLE_CATALOG_ID", "TABLE_CATALOG", "ORDINAL_POSITION", 
          "COLUMN_DEFAULT", "IS_NULLABLE", "DATA_TYPE", "CHARACTER_MAXIMUM_LENGTH", 
          "CHARACTER_OCTET_LENGTH", "NUMERIC_PRECISION", "NUMERIC_PRECISION_RADIX", 
          "NUMERIC_SCALE", "DATETIME_PRECISION", "INTERVAL_TYPE", "INTERVAL_PRECISION", 
          "CHARACTER_SET_CATALOG", "CHARACTER_SET_SCHEMA", "CHARACTER_SET_NAME", 
          "COLLATION_CATALOG", "COLLATION_SCHEMA", "COLLATION_NAME", "DOMAIN_CATALOG", 
          "DOMAIN_SCHEMA", "DOMAIN_NAME", "UDT_CATALOG", "UDT_SCHEMA", "UDT_NAME", 
          "SCOPE_CATALOG", "SCOPE_SCHEMA", "SCOPE_NAME", "MAXIMUM_CARDINALITY", 
          "DTD_IDENTIFIER", "IS_SELF_REFERENCING", "IS_IDENTITY", "IDENTITY_GENERATION", 
          "IDENTITY_START", "IDENTITY_INCREMENT", "IDENTITY_MAXIMUM", "IDENTITY_MINIMUM", 
          "IDENTITY_CYCLE", "IDENTITY_ORDERED", "SCHEMA_EVOLUTION_RECORD", "COMMENT", 
          "DELETED"
        ]
      },
      "DATABASE_STORAGE_USAGE_HISTORY": {
        "description": "Tracks storage usage for databases, including average bytes and failsafe bytes.",
        "columns": [
          "USAGE_DATE", "DATABASE_ID", "DATABASE_NAME", "DELETED", 
          "AVERAGE_DATABASE_BYTES", "AVERAGE_FAILSAFE_BYTES", 
          "AVERAGE_HYBRID_TABLE_STORAGE_BYTES"
        ]
      },
      "DATA_TRANSFER_HISTORY": {
        "description": "Records data transfer details between clouds and regions.",
        "columns": [
          "START_TIME", "END_TIME", "SOURCE_CLOUD", "SOURCE_REGION", 
          "TARGET_CLOUD", "TARGET_REGION", "BYTES_TRANSFERRED", "TRANSFER_TYPE"
        ]
      },
      "GRANTS_TO_ROLES": {
        "description": "Details privileges granted to roles, including creation and modification timestamps.",
        "columns": [
          "CREATED_ON", "MODIFIED_ON", "PRIVILEGE", "GRANTED_ON", "NAME", 
          "TABLE_CATALOG", "TABLE_SCHEMA", "GRANTED_TO", "GRANTEE_NAME", 
          "GRANT_OPTION", "GRANTED_BY", "DELETED_ON", "GRANTED_BY_ROLE_TYPE", 
          "OBJECT_INSTANCE"
        ]
      },
      "GRANTS_TO_USERS": {
        "description": "Records role grants to users, including creation and deletion details.",
        "columns": [
          "CREATED_ON", "DELETED_ON", "ROLE", "GRANTED_TO", "GRANTEE_NAME", 
          "GRANTED_BY"
        ]
      },
      "LOGIN_HISTORY": {
        "description": "Tracks login attempts, including authentication details and success status.",
        "columns": [
          "EVENT_ID", "EVENT_TIMESTAMP", "EVENT_TYPE", "USER_NAME", "CLIENT_IP", 
          "REPORTED_CLIENT_TYPE", "REPORTED_CLIENT_VERSION", "FIRST_AUTHENTICATION_FACTOR", 
          "SECOND_AUTHENTICATION_FACTOR", "IS_SUCCESS", "ERROR_CODE", "ERROR_MESSAGE", 
          "RELATED_EVENT_ID", "CONNECTION", "CLIENT_PRIVATE_LINK_ID"
        ]
      },
      "METERING_HISTORY": {
        "description": "Records resource usage metrics for various services.",
        "columns": [
          "SERVICE_TYPE", "START_TIME", "END_TIME", "ENTITY_ID", "NAME", 
          "CREDITS_USED_COMPUTE", "CREDITS_USED_CLOUD_SERVICES", "CREDITS_USED", 
          "BYTES", "ROWS", "FILES", "BUDGET_ID"
        ]
      },
      "OBJECT_DEPENDENCIES": {
        "description": "Tracks dependencies between database objects.",
        "columns": [
          "REFERENCED_DATABASE", "REFERENCED_SCHEMA", "REFERENCED_OBJECT_NAME", 
          "REFERENCED_OBJECT_ID", "REFERENCED_OBJECT_DOMAIN", "REFERENCING_DATABASE", 
          "REFERENCING_SCHEMA", "REFERENCING_OBJECT_NAME", "REFERENCING_OBJECT_ID", 
          "REFERENCING_OBJECT_DOMAIN", "DEPENDENCY_TYPE"
        ]
      },
      "PIPE_USAGE_HISTORY": {
        "description": "Records usage of data pipelines, including credits and data inserted.",
        "columns": [
          "PIPE_ID", "PIPE_NAME", "START_TIME", "END_TIME", "CREDITS_USED", 
          "BYTES_INSERTED", "FILES_INSERTED"
        ]
      },
      "QUERY_ACCELERATION_HISTORY": {
        "description": "Tracks query acceleration usage, including credits and warehouse details.",
        "columns": [
          "START_TIME", "END_TIME", "CREDITS_USED", "WAREHOUSE_ID", "WAREHOUSE_NAME"
        ]
      },
      "QUERY_HISTORY": {
        "description": "Detailed history of executed queries, including performance metrics and resource usage.",
        "columns": [
          "QUERY_ID", "QUERY_TEXT", "DATABASE_ID", "DATABASE_NAME", "SCHEMA_ID", 
          "SCHEMA_NAME", "QUERY_TYPE", "SESSION_ID", "USER_NAME", "ROLE_NAME", 
          "WAREHOUSE_ID", "WAREHOUSE_NAME", "WAREHOUSE_SIZE", "WAREHOUSE_TYPE", 
          "CLUSTER_NUMBER", "QUERY_TAG", "EXECUTION_STATUS", "ERROR_CODE", 
          "ERROR_MESSAGE", "START_TIME", "END_TIME", "TOTAL_ELAPSED_TIME", 
          "BYTES_SCANNED", "PERCENTAGE_SCANNED_FROM_CACHE", "BYTES_WRITTEN", 
          "BYTES_WRITTEN_TO_RESULT", "BYTES_READ_FROM_RESULT", "ROWS_PRODUCED", 
          "ROWS_INSERTED", "ROWS_UPDATED", "ROWS_DELETED", "ROWS_UNLOADED", 
          "BYTES_DELETED", "PARTITIONS_SCANNED", "PARTITIONS_TOTAL", 
          "BYTES_SPILLED_TO_LOCAL_STORAGE", "BYTES_SPILLED_TO_REMOTE_STORAGE", 
          "BYTES_SENT_OVER_THE_NETWORK", "COMPILATION_TIME", "EXECUTION_TIME", 
          "QUEUED_PROVISIONING_TIME", "QUEUED_REPAIR_TIME", "QUEUED_OVERLOAD_TIME", 
          "TRANSACTION_BLOCKED_TIME", "OUTBOUND_DATA_TRANSFER_CLOUD", 
          "OUTBOUND_DATA_TRANSFER_REGION", "OUTBOUND_DATA_TRANSFER_BYTES", 
          "INBOUND_DATA_TRANSFER_CLOUD", "INBOUND_DATA_TRANSFER_REGION", 
          "INBOUND_DATA_TRANSFER_BYTES", "LIST_EXTERNAL_FILES_TIME", 
          "CREDITS_USED_CLOUD_SERVICES", "RELEASE_VERSION", 
          "EXTERNAL_FUNCTION_TOTAL_INVOCATIONS", "EXTERNAL_FUNCTION_TOTAL_SENT_ROWS", 
          "EXTERNAL_FUNCTION_TOTAL_RECEIVED_ROWS", "EXTERNAL_FUNCTION_TOTAL_SENT_BYTES", 
          "EXTERNAL_FUNCTION_TOTAL_RECEIVED_BYTES", "QUERY_LOAD_PERCENT", 
          "IS_CLIENT_GENERATED_STATEMENT", "QUERY_ACCELERATION_BYTES_SCANNED", 
          "QUERY_ACCELERATION_PARTITIONS_SCANNED", "QUERY_ACCELERATION_UPPER_LIMIT_SCALE_FACTOR", 
          "TRANSACTION_ID", "CHILD_QUERIES_WAIT_TIME", "ROLE_TYPE", "QUERY_HASH", 
          "QUERY_HASH_VERSION", "QUERY_PARAMETERIZED_HASH", "QUERY_PARAMETERIZED_HASH_VERSION", 
          "SECONDARY_ROLE_STATS", "ROWS_WRITTEN_TO_RESULT", "QUERY_RETRY_TIME", 
          "QUERY_RETRY_CAUSE", "FAULT_HANDLING_TIME", "USER_TYPE", "USER_DATABASE_NAME", 
          "USER_DATABASE_ID", "USER_SCHEMA_NAME", "USER_SCHEMA_ID"
        ]
      },
      "REFERENTIAL_CONSTRAINTS": {
        "description": "Details referential constraints between tables, including rules and metadata.",
        "columns": [
          "CONSTRAINT_CATALOG_ID", "CONSTRAINT_CATALOG", "CONSTRAINT_SCHEMA_ID", 
          "CONSTRAINT_SCHEMA", "CONSTRAINT_NAME", "UNIQUE_CONSTRAINT_CATALOG_ID", 
          "UNIQUE_CONSTRAINT_CATALOG", "UNIQUE_CONSTRAINT_SCHEMA_ID", 
          "UNIQUE_CONSTRAINT_SCHEMA", "UNIQUE_CONSTRAINT_NAME", "MATCH_OPTION", 
          "UPDATE_RULE", "DELETE_RULE", "COMMENT", "CREATED", "LAST_ALTERED", 
          "DELETED"
        ]
      },
      "REPLICATION_USAGE_HISTORY": {
        "description": "Tracks replication usage, including credits and bytes transferred.",
        "columns": [
          "START_TIME", "END_TIME", "DATABASE_NAME", "DATABASE_ID", 
          "CREDITS_USED", "BYTES_TRANSFERRED"
        ]
      },
      "ROLES": {
        "description": "Information about roles, including creation and ownership details.",
        "columns": [
          "ROLE_ID", "CREATED_ON", "DELETED_ON", "NAME", "COMMENT", "OWNER", 
          "ROLE_TYPE", "ROLE_DATABASE_NAME", "ROLE_INSTANCE_ID", "OWNER_ROLE_TYPE"
        ]
      },
      "SESSIONS": {
        "description": "Tracks user sessions, including authentication and client details.",
        "columns": [
          "SESSION_ID", "CREATED_ON", "USER_NAME", "AUTHENTICATION_METHOD", 
          "LOGIN_EVENT_ID", "CLIENT_APPLICATION_VERSION", "CLIENT_APPLICATION_ID", 
          "CLIENT_ENVIRONMENT", "CLIENT_BUILD_ID", "CLIENT_VERSION", "CLOSED_REASON"
        ]
      },
      "STAGE_STORAGE_USAGE_HISTORY": {
        "description": "Records storage usage for stages.",
        "columns": ["USAGE_DATE", "AVERAGE_STAGE_BYTES"]
      },
      "TABLES": {
        "description": "Metadata about tables, including ownership, type, and storage details.",
        "columns": [
          "TABLE_ID", "TABLE_NAME", "TABLE_SCHEMA_ID", "TABLE_SCHEMA", 
          "TABLE_CATALOG_ID", "TABLE_CATALOG", "TABLE_OWNER", "TABLE_TYPE", 
          "IS_TRANSIENT", "IS_ICEBERG", "IS_DYNAMIC", "IS_HYBRID", "CLUSTERING_KEY", 
          "ROW_COUNT", "BYTES", "RETENTION_TIME", "SELF_REFERENCING_COLUMN_NAME", 
          "REFERENCE_GENERATION", "USER_DEFINED_TYPE_CATALOG", "USER_DEFINED_TYPE_SCHEMA", 
          "USER_DEFINED_TYPE_NAME", "IS_INSERTABLE_INTO", "IS_TYPED", "COMMIT_ACTION", 
          "CREATED", "LAST_ALTERED", "LAST_DDL", "LAST_DDL_BY", "DELETED", 
          "AUTO_CLUSTERING_ON", "COMMENT", "OWNER_ROLE_TYPE", "INSTANCE_ID"
        ]
      },
      "TAG_REFERENCES": {
        "description": "Tracks tags applied to objects or columns, including metadata.",
        "columns": [
          "TAG_DATABASE", "TAG_SCHEMA", "TAG_ID", "TAG_NAME", "TAG_VALUE", 
          "OBJECT_DATABASE", "OBJECT_SCHEMA", "OBJECT_ID", "OBJECT_NAME", 
          "OBJECT_DELETED", "DOMAIN", "COLUMN_ID", "COLUMN_NAME", "APPLY_METHOD"
        ]
      },
      "TASK_HISTORY": {
        "description": "Records task execution details, including status and errors.",
        "columns": [
          "NAME", "QUERY_TEXT", "CONDITION_TEXT", "SCHEMA_NAME", "TASK_SCHEMA_ID", 
          "DATABASE_NAME", "TASK_DATABASE_ID", "SCHEDULED_TIME", "COMPLETED_TIME", 
          "STATE", "RETURN_VALUE", "QUERY_ID", "QUERY_START_TIME", "ERROR_CODE", 
          "ERROR_MESSAGE", "GRAPH_VERSION", "RUN_ID", "ROOT_TASK_ID", "SCHEDULED_FROM", 
          "INSTANCE_ID", "ATTEMPT_NUMBER", "CONFIG", "QUERY_HASH", "QUERY_HASH_VERSION", 
          "QUERY_PARAMETERIZED_HASH", "QUERY_PARAMETERIZED_HASH_VERSION", 
          "GRAPH_RUN_GROUP_ID", "BACKFILL_INFO"
        ]
      },
      "USERS": {
        "description": "Details about users, including authentication and role settings.",
        "columns": [
          "USER_ID", "NAME", "CREATED_ON", "DELETED_ON", "LOGIN_NAME", 
          "DISPLAY_NAME", "FIRST_NAME", "LAST_NAME", "EMAIL", "MUST_CHANGE_PASSWORD", 
          "HAS_PASSWORD", "COMMENT", "DISABLED", "SNOWFLAKE_LOCK", "DEFAULT_WAREHOUSE", 
          "DEFAULT_NAMESPACE", "DEFAULT_ROLE", "EXT_AUTHN_DUO", "EXT_AUTHN_UID", 
          "HAS_MFA", "BYPASS_MFA_UNTIL", "LAST_SUCCESS_LOGIN", "EXPIRES_AT", 
          "LOCKED_UNTIL_TIME", "HAS_RSA_PUBLIC_KEY", "PASSWORD_LAST_SET_TIME", 
          "OWNER", "DEFAULT_SECONDARY_ROLE", "TYPE", "DATABASE_NAME", 
          "DATABASE_ID", "SCHEMA_NAME", "SCHEMA_ID"
        ]
      },
      "VIEWS": {
        "description": "Metadata about views, including definitions and ownership.",
        "columns": [
          "TABLE_ID", "TABLE_NAME", "TABLE_SCHEMA_ID", "TABLE_SCHEMA", 
          "TABLE_CATALOG_ID", "TABLE_CATALOG", "TABLE_OWNER", "VIEW_DEFINITION", 
          "CHECK_OPTION", "IS_UPDATABLE", "INSERTABLE_INTO", "IS_SECURE", 
          "CREATED", "LAST_ALTERED", "LAST_DDL", "LAST_DDL_BY", "DELETED", 
          "COMMENT", "OWNER_ROLE_TYPE", "INSTANCE_ID"
        ]
      },
      "WAREHOUSE_EVENTS_HISTORY": {
        "description": "Tracks warehouse events, such as state changes and reasons.",
        "columns": [
          "TIMESTAMP", "WAREHOUSE_ID", "WAREHOUSE_NAME", "CLUSTER_NUMBER", 
          "EVENT_NAME", "EVENT_REASON", "EVENT_STATE", "USER_NAME", "ROLE_NAME", 
          "QUERY_ID", "SIZE", "CLUSTER_COUNT", "WAREHOUSE_TYPE", "RESOURCE_CONSTRAINT"
        ]
      },
      "WAREHOUSE_LOAD_HISTORY": {
        "description": "Records warehouse load metrics, including running and queued queries.",
        "columns": [
          "START_TIME", "END_TIME", "WAREHOUSE_ID", "WAREHOUSE_NAME", 
          "AVG_RUNNING", "AVG_QUEUED_LOAD", "AVG_QUEUED_PROVISIONING", "AVG_BLOCKED"
        ]
      },
      "WAREHOUSE_METERING_HISTORY": {
        "description": "Tracks warehouse metering, including credits used for compute and cloud services.",
        "columns": [
          "START_TIME", "END_TIME", "WAREHOUSE_ID", "WAREHOUSE_NAME", 
          "CREDITS_USED", "CREDITS_USED_COMPUTE", "CREDITS_USED_CLOUD_SERVICES", 
          "CREDITS_ATTRIBUTED_COMPUTE_QUERIES"
        ]
      }
    },
    "INFORMATION_SCHEMA": {
      "COLUMNS": {
        "description": "Metadata about table columns, similar to ACCOUNT_USAGE.COLUMNS but scoped to accessible schemas.",
        "columns": [
          "TABLE_CATALOG", "TABLE_SCHEMA", "TABLE_NAME", "COLUMN_NAME", 
          "ORDINAL_POSITION", "COLUMN_DEFAULT", "IS_NULLABLE", "DATA_TYPE", 
          "CHARACTER_MAXIMUM_LENGTH", "CHARACTER_OCTET_LENGTH", "NUMERIC_PRECISION", 
          "NUMERIC_PRECISION_RADIX", "NUMERIC_SCALE", "DATETIME_PRECISION", 
          "INTERVAL_TYPE", "INTERVAL_PRECISION", "CHARACTER_SET_CATALOG", 
          "CHARACTER_SET_SCHEMA", "CHARACTER_SET_NAME", "COLLATION_CATALOG", 
          "COLLATION_SCHEMA", "COLLATION_NAME", "DOMAIN_CATALOG", "DOMAIN_SCHEMA", 
          "DOMAIN_NAME", "UDT_CATALOG", "UDT_SCHEMA", "UDT_NAME", "SCOPE_CATALOG", 
          "SCOPE_SCHEMA", "SCOPE_NAME", "MAXIMUM_CARDINALITY", "DTD_IDENTIFIER", 
          "IS_SELF_REFERENCING", "IS_IDENTITY", "IDENTITY_GENERATION", 
          "IDENTITY_START", "IDENTITY_INCREMENT", "IDENTITY_MAXIMUM", 
          "IDENTITY_MINIMUM", "IDENTITY_CYCLE", "IDENTITY_ORDERED", 
          "SCHEMA_EVOLUTION_RECORD", "COMMENT"
        ]
      },
      "OBJECT_PRIVILEGES": {
        "description": "Details privileges granted on objects, including grantor and grantee information.",
        "columns": [
          "GRANTOR", "GRANTEE", "GRANTED_TO", "OBJECT_CATALOG", "OBJECT_SCHEMA", 
          "OBJECT_NAME", "OBJECT_TYPE", "PRIVILEGE_TYPE", "IS_GRANTABLE", "CREATED"
        ]
      },
      "REFERENTIAL_CONSTRAINTS": {
        "description": "Details referential constraints, similar to ACCOUNT_USAGE.REFERENTIAL_CONSTRAINTS but scoped to accessible schemas.",
        "columns": [
          "CONSTRAINT_CATALOG", "CONSTRAINT_SCHEMA", "CONSTRAINT_NAME", 
          "UNIQUE_CONSTRAINT_CATALOG", "UNIQUE_CONSTRAINT_SCHEMA", 
          "UNIQUE_CONSTRAINT_NAME", "MATCH_OPTION", "UPDATE_RULE", "DELETE_RULE", 
          "COMMENT", "CREATED", "LAST_ALTERED"
        ]
      },
      "TABLES": {
        "description": "Metadata about tables, similar to ACCOUNT_USAGE.TABLES but scoped to accessible schemas.",
        "columns": [
          "TABLE_CATALOG", "TABLE_SCHEMA", "TABLE_NAME", "TABLE_OWNER", 
          "TABLE_TYPE", "IS_TRANSIENT", "CLUSTERING_KEY", "ROW_COUNT", "BYTES", 
          "RETENTION_TIME", "SELF_REFERENCING_COLUMN_NAME", "REFERENCE_GENERATION", 
          "USER_DEFINED_TYPE_CATALOG", "USER_DEFINED_TYPE_SCHEMA", 
          "USER_DEFINED_TYPE_NAME", "IS_INSERTABLE_INTO", "IS_TYPED", 
          "COMMIT_ACTION", "CREATED", "LAST_ALTERED", "LAST_DDL", "LAST_DDL_BY", 
          "AUTO_CLUSTERING_ON", "COMMENT", "IS_TEMPORARY", "IS_ICEBERG", 
          "IS_DYNAMIC", "IS_IMMUTABLE", "IS_HYBRID"
        ]
      },
      "VIEWS": {
        "description": "Metadata about views, similar to ACCOUNT_USAGE.VIEWS but scoped to accessible schemas.",
        "columns": [
          "TABLE_CATALOG", "TABLE_SCHEMA", "TABLE_NAME", "TABLE_OWNER", 
          "VIEW_DEFINITION", "CHECK_OPTION", "IS_UPDATABLE", "INSERTABLE_INTO", 
          "IS_SECURE", "CREATED", "LAST_ALTERED", "LAST_DDL", "LAST_DDL_BY", 
          "COMMENT"
        ]
      }
    }
  }
}
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>query history</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Tue, 05 Aug 2025 15:22:12 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/query-histoy-c1h</link>
      <guid>https://dev.to/armaankhan8270/query-histoy-c1h</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- =====================================================
-- TABLE 1: QUERY_HISTORY_SUMMARY 
-- This table provides all query overview data for drilling down
-- =====================================================

CREATE OR REPLACE TABLE QUERY_HISTORY_SUMMARY AS
SELECT 
    -- Query Identification
    q.QUERY_ID,
    q.QUERY_HASH,
    q.QUERY_PARAMETERIZED_HASH,
    LEFT(q.QUERY_TEXT, 100) as QUERY_TEXT_PREVIEW,
    q.QUERY_TYPE,
    q.QUERY_TAG,

    -- Timing Information
    q.START_TIME,
    q.END_TIME,
    q.TOTAL_ELAPSED_TIME,
    q.COMPILATION_TIME,
    q.EXECUTION_TIME,

    -- User and Session Info
    q.USER_NAME,
    q.USER_TYPE,
    q.ROLE_NAME,
    q.ROLE_TYPE,
    q.SESSION_ID,

    -- Warehouse Information
    q.WAREHOUSE_ID,
    q.WAREHOUSE_NAME,
    q.WAREHOUSE_SIZE,
    q.WAREHOUSE_TYPE,
    q.CLUSTER_NUMBER,

    -- Database Context
    q.DATABASE_ID,
    q.DATABASE_NAME,
    q.SCHEMA_ID,
    q.SCHEMA_NAME,
    q.USER_DATABASE_NAME,
    q.USER_SCHEMA_NAME,

    -- Execution Status
    q.EXECUTION_STATUS,
    q.ERROR_CODE,
    LEFT(q.ERROR_MESSAGE, 200) as ERROR_MESSAGE_PREVIEW,

    -- Performance Metrics
    q.BYTES_SCANNED,
    q.PERCENTAGE_SCANNED_FROM_CACHE,
    q.BYTES_WRITTEN,
    q.ROWS_PRODUCED,
    q.ROWS_INSERTED,
    q.ROWS_UPDATED,
    q.ROWS_DELETED,

    -- Resource Usage
    q.CREDITS_USED_CLOUD_SERVICES,
    q.BYTES_SPILLED_TO_LOCAL_STORAGE,
    q.BYTES_SPILLED_TO_REMOTE_STORAGE,
    q.PARTITIONS_SCANNED,
    q.PARTITIONS_TOTAL,

    -- Queue Times
    q.QUEUED_PROVISIONING_TIME,
    q.QUEUED_REPAIR_TIME,
    q.QUEUED_OVERLOAD_TIME,
    q.TRANSACTION_BLOCKED_TIME,

    -- Classification Buckets for easy filtering
    CASE 
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN '1-10 seconds'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 20000 THEN '10-20 seconds' 
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN '20-60 seconds'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 180000 THEN '1-3 minutes'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN '3-5 minutes'
        ELSE '5+ minutes'
    END as DURATION_BUCKET,

    CASE 
        WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.2 THEN '0-20 cents'
        WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.4 THEN '20-40 cents'
        WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.6 THEN '40-60 cents'
        WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.8 THEN '60-80 cents'
        WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 1.0 THEN '80-100 cents'
        ELSE '100+ cents'
    END as CREDIT_BUCKET,

    CASE 
        WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 OR q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'SPILLED'
        ELSE 'NO_SPILL'
    END as SPILL_STATUS,

    CASE 
        WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;gt; 0 THEN 'QUEUED'
        ELSE 'NOT_QUEUED'
    END as QUEUE_STATUS,

    -- Analysis metadata
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE

FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
    AND q.QUERY_ID IS NOT NULL
ORDER BY q.START_TIME DESC;

-- =====================================================
-- TABLE 2: QUERY_DETAILS_COMPLETE
-- This table provides complete details for any specific query
-- =====================================================

CREATE OR REPLACE TABLE QUERY_DETAILS_COMPLETE AS
SELECT 
    -- Core Query Information
    q.QUERY_ID,
    q.QUERY_TEXT,
    q.QUERY_HASH,
    q.QUERY_HASH_VERSION,
    q.QUERY_PARAMETERIZED_HASH,
    q.QUERY_PARAMETERIZED_HASH_VERSION,
    q.QUERY_TYPE,
    q.QUERY_TAG,

    -- Timing Details (all in milliseconds)
    q.START_TIME,
    q.END_TIME,
    q.TOTAL_ELAPSED_TIME,
    q.COMPILATION_TIME,
    q.EXECUTION_TIME,
    q.QUEUED_PROVISIONING_TIME,
    q.QUEUED_REPAIR_TIME,
    q.QUEUED_OVERLOAD_TIME,
    q.TRANSACTION_BLOCKED_TIME,
    q.CHILD_QUERIES_WAIT_TIME,
    q.QUERY_RETRY_TIME,
    q.QUERY_RETRY_CAUSE,
    q.FAULT_HANDLING_TIME,
    q.LIST_EXTERNAL_FILES_TIME,

    -- User and Authentication
    q.USER_NAME,
    q.USER_TYPE,
    q.ROLE_NAME,
    q.ROLE_TYPE,
    q.SECONDARY_ROLE_STATS,
    q.SESSION_ID,

    -- Warehouse and Compute
    q.WAREHOUSE_ID,
    q.WAREHOUSE_NAME,
    q.WAREHOUSE_SIZE,
    q.WAREHOUSE_TYPE,
    q.CLUSTER_NUMBER,
    q.QUERY_LOAD_PERCENT,

    -- Database Context
    q.DATABASE_ID,
    q.DATABASE_NAME,
    q.SCHEMA_ID,
    q.SCHEMA_NAME,
    q.USER_DATABASE_ID,
    q.USER_DATABASE_NAME,
    q.USER_SCHEMA_ID,
    q.USER_SCHEMA_NAME,

    -- Execution Results
    q.EXECUTION_STATUS,
    q.ERROR_CODE,
    q.ERROR_MESSAGE,
    q.IS_CLIENT_GENERATED_STATEMENT,

    -- Data Processing Metrics
    q.BYTES_SCANNED,
    q.PERCENTAGE_SCANNED_FROM_CACHE,
    q.BYTES_WRITTEN,
    q.BYTES_WRITTEN_TO_RESULT,
    q.BYTES_READ_FROM_RESULT,
    q.ROWS_PRODUCED,
    q.ROWS_WRITTEN_TO_RESULT,
    q.ROWS_INSERTED,
    q.ROWS_UPDATED,
    q.ROWS_DELETED,
    q.ROWS_UNLOADED,
    q.BYTES_DELETED,

    -- Partitioning
    q.PARTITIONS_SCANNED,
    q.PARTITIONS_TOTAL,
    CASE 
        WHEN q.PARTITIONS_TOTAL &amp;gt; 0 THEN 
            ROUND((q.PARTITIONS_SCANNED::FLOAT / q.PARTITIONS_TOTAL::FLOAT) * 100, 2)
        ELSE 0 
    END as PARTITION_SCAN_PERCENTAGE,

    -- Memory and Spilling
    q.BYTES_SPILLED_TO_LOCAL_STORAGE,
    q.BYTES_SPILLED_TO_REMOTE_STORAGE,
    q.BYTES_SENT_OVER_THE_NETWORK,

    -- Credits and Cost
    q.CREDITS_USED_CLOUD_SERVICES,

    -- Data Transfer
    q.OUTBOUND_DATA_TRANSFER_CLOUD,
    q.OUTBOUND_DATA_TRANSFER_REGION,
    q.OUTBOUND_DATA_TRANSFER_BYTES,
    q.INBOUND_DATA_TRANSFER_CLOUD,
    q.INBOUND_DATA_TRANSFER_REGION,
    q.INBOUND_DATA_TRANSFER_BYTES,

    -- External Functions
    q.EXTERNAL_FUNCTION_TOTAL_INVOCATIONS,
    q.EXTERNAL_FUNCTION_TOTAL_SENT_ROWS,
    q.EXTERNAL_FUNCTION_TOTAL_RECEIVED_ROWS,
    q.EXTERNAL_FUNCTION_TOTAL_SENT_BYTES,
    q.EXTERNAL_FUNCTION_TOTAL_RECEIVED_BYTES,

    -- Query Acceleration
    q.QUERY_ACCELERATION_BYTES_SCANNED,
    q.QUERY_ACCELERATION_PARTITIONS_SCANNED,
    q.QUERY_ACCELERATION_UPPER_LIMIT_SCALE_FACTOR,

    -- Transaction Information
    q.TRANSACTION_ID,
    q.PARENT_QUERY_ID,
    q.ROOT_QUERY_ID,

    -- System Information
    q.RELEASE_VERSION,

    -- Performance Ratios and Calculated Fields
    CASE 
        WHEN q.TOTAL_ELAPSED_TIME &amp;gt; 0 THEN 
            ROUND((q.COMPILATION_TIME::FLOAT / q.TOTAL_ELAPSED_TIME::FLOAT) * 100, 2)
        ELSE 0 
    END as COMPILATION_TIME_PERCENTAGE,

    CASE 
        WHEN q.TOTAL_ELAPSED_TIME &amp;gt; 0 THEN 
            ROUND((q.EXECUTION_TIME::FLOAT / q.TOTAL_ELAPSED_TIME::FLOAT) * 100, 2)
        ELSE 0 
    END as EXECUTION_TIME_PERCENTAGE,

    CASE 
        WHEN q.BYTES_SCANNED &amp;gt; 0 THEN 
            ROUND(q.ROWS_PRODUCED::FLOAT / (q.BYTES_SCANNED::FLOAT / 1024 / 1024), 2)
        ELSE 0 
    END as ROWS_PER_MB_SCANNED,

    -- Performance Classifications
    CASE 
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 1000 THEN 'VERY_FAST'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN 'FAST'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN 'MODERATE'
        WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN 'SLOW'
        ELSE 'VERY_SLOW'
    END as PERFORMANCE_CATEGORY,

    CASE 
        WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt;= 90 THEN 'HIGH_CACHE_HIT'
        WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt;= 50 THEN 'MEDIUM_CACHE_HIT'
        WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt; 0 THEN 'LOW_CACHE_HIT'
        ELSE 'NO_CACHE_HIT'
    END as CACHE_EFFICIENCY,

    -- Resource Usage Classification
    CASE 
        WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 AND q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'BOTH_SPILL'
        WHEN q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'REMOTE_SPILL'
        WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 THEN 'LOCAL_SPILL'
        ELSE 'NO_SPILL'
    END as SPILL_CLASSIFICATION,

    -- Analysis metadata
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE

FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
    AND q.QUERY_ID IS NOT NULL
ORDER BY q.START_TIME DESC;

-- =====================================================
-- REFRESH PROCEDURES
-- =====================================================

-- Procedure to refresh Query History Summary
CREATE OR REPLACE PROCEDURE REFRESH_QUERY_HISTORY_SUMMARY()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    CREATE OR REPLACE TABLE QUERY_HISTORY_SUMMARY AS
    SELECT 
        q.QUERY_ID,
        q.QUERY_HASH,
        q.QUERY_PARAMETERIZED_HASH,
        LEFT(q.QUERY_TEXT, 100) as QUERY_TEXT_PREVIEW,
        q.QUERY_TYPE,
        q.QUERY_TAG,
        q.START_TIME,
        q.END_TIME,
        q.TOTAL_ELAPSED_TIME,
        q.COMPILATION_TIME,
        q.EXECUTION_TIME,
        q.USER_NAME,
        q.USER_TYPE,
        q.ROLE_NAME,
        q.ROLE_TYPE,
        q.SESSION_ID,
        q.WAREHOUSE_ID,
        q.WAREHOUSE_NAME,
        q.WAREHOUSE_SIZE,
        q.WAREHOUSE_TYPE,
        q.CLUSTER_NUMBER,
        q.DATABASE_ID,
        q.DATABASE_NAME,
        q.SCHEMA_ID,
        q.SCHEMA_NAME,
        q.USER_DATABASE_NAME,
        q.USER_SCHEMA_NAME,
        q.EXECUTION_STATUS,
        q.ERROR_CODE,
        LEFT(q.ERROR_MESSAGE, 200) as ERROR_MESSAGE_PREVIEW,
        q.BYTES_SCANNED,
        q.PERCENTAGE_SCANNED_FROM_CACHE,
        q.BYTES_WRITTEN,
        q.ROWS_PRODUCED,
        q.ROWS_INSERTED,
        q.ROWS_UPDATED,
        q.ROWS_DELETED,
        q.CREDITS_USED_CLOUD_SERVICES,
        q.BYTES_SPILLED_TO_LOCAL_STORAGE,
        q.BYTES_SPILLED_TO_REMOTE_STORAGE,
        q.PARTITIONS_SCANNED,
        q.PARTITIONS_TOTAL,
        q.QUEUED_PROVISIONING_TIME,
        q.QUEUED_REPAIR_TIME,
        q.QUEUED_OVERLOAD_TIME,
        q.TRANSACTION_BLOCKED_TIME,

        CASE 
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN '1-10 seconds'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 20000 THEN '10-20 seconds' 
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN '20-60 seconds'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 180000 THEN '1-3 minutes'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN '3-5 minutes'
            ELSE '5+ minutes'
        END as DURATION_BUCKET,

        CASE 
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.2 THEN '0-20 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.4 THEN '20-40 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.6 THEN '40-60 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.8 THEN '60-80 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 1.0 THEN '80-100 cents'
            ELSE '100+ cents'
        END as CREDIT_BUCKET,

        CASE 
            WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 OR q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'SPILLED'
            ELSE 'NO_SPILL'
        END as SPILL_STATUS,

        CASE 
            WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;gt; 0 THEN 'QUEUED'
            ELSE 'NOT_QUEUED'
        END as QUEUE_STATUS,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE

    FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
    WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
        AND q.QUERY_ID IS NOT NULL
    ORDER BY q.START_TIME DESC;

    RETURN 'QUERY_HISTORY_SUMMARY table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

-- Procedure to refresh Query Details Complete
CREATE OR REPLACE PROCEDURE REFRESH_QUERY_DETAILS_COMPLETE()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    CREATE OR REPLACE TABLE QUERY_DETAILS_COMPLETE AS
    SELECT 
        q.QUERY_ID,
        q.QUERY_TEXT,
        q.QUERY_HASH,
        q.QUERY_HASH_VERSION,
        q.QUERY_PARAMETERIZED_HASH,
        q.QUERY_PARAMETERIZED_HASH_VERSION,
        q.QUERY_TYPE,
        q.QUERY_TAG,
        q.START_TIME,
        q.END_TIME,
        q.TOTAL_ELAPSED_TIME,
        q.COMPILATION_TIME,
        q.EXECUTION_TIME,
        q.QUEUED_PROVISIONING_TIME,
        q.QUEUED_REPAIR_TIME,
        q.QUEUED_OVERLOAD_TIME,
        q.TRANSACTION_BLOCKED_TIME,
        q.CHILD_QUERIES_WAIT_TIME,
        q.QUERY_RETRY_TIME,
        q.QUERY_RETRY_CAUSE,
        q.FAULT_HANDLING_TIME,
        q.LIST_EXTERNAL_FILES_TIME,
        q.USER_NAME,
        q.USER_TYPE,
        q.ROLE_NAME,
        q.ROLE_TYPE,
        q.SECONDARY_ROLE_STATS,
        q.SESSION_ID,
        q.WAREHOUSE_ID,
        q.WAREHOUSE_NAME,
        q.WAREHOUSE_SIZE,
        q.WAREHOUSE_TYPE,
        q.CLUSTER_NUMBER,
        q.QUERY_LOAD_PERCENT,
        q.DATABASE_ID,
        q.DATABASE_NAME,
        q.SCHEMA_ID,
        q.SCHEMA_NAME,
        q.USER_DATABASE_ID,
        q.USER_DATABASE_NAME,
        q.USER_SCHEMA_ID,
        q.USER_SCHEMA_NAME,
        q.EXECUTION_STATUS,
        q.ERROR_CODE,
        q.ERROR_MESSAGE,
        q.IS_CLIENT_GENERATED_STATEMENT,
        q.BYTES_SCANNED,
        q.PERCENTAGE_SCANNED_FROM_CACHE,
        q.BYTES_WRITTEN,
        q.BYTES_WRITTEN_TO_RESULT,
        q.BYTES_READ_FROM_RESULT,
        q.ROWS_PRODUCED,
        q.ROWS_WRITTEN_TO_RESULT,
        q.ROWS_INSERTED,
        q.ROWS_UPDATED,
        q.ROWS_DELETED,
        q.ROWS_UNLOADED,
        q.BYTES_DELETED,
        q.PARTITIONS_SCANNED,
        q.PARTITIONS_TOTAL,
        CASE 
            WHEN q.PARTITIONS_TOTAL &amp;gt; 0 THEN 
                ROUND((q.PARTITIONS_SCANNED::FLOAT / q.PARTITIONS_TOTAL::FLOAT) * 100, 2)
            ELSE 0 
        END as PARTITION_SCAN_PERCENTAGE,
        q.BYTES_SPILLED_TO_LOCAL_STORAGE,
        q.BYTES_SPILLED_TO_REMOTE_STORAGE,
        q.BYTES_SENT_OVER_THE_NETWORK,
        q.CREDITS_USED_CLOUD_SERVICES,
        q.OUTBOUND_DATA_TRANSFER_CLOUD,
        q.OUTBOUND_DATA_TRANSFER_REGION,
        q.OUTBOUND_DATA_TRANSFER_BYTES,
        q.INBOUND_DATA_TRANSFER_CLOUD,
        q.INBOUND_DATA_TRANSFER_REGION,
        q.INBOUND_DATA_TRANSFER_BYTES,
        q.EXTERNAL_FUNCTION_TOTAL_INVOCATIONS,
        q.EXTERNAL_FUNCTION_TOTAL_SENT_ROWS,
        q.EXTERNAL_FUNCTION_TOTAL_RECEIVED_ROWS,
        q.EXTERNAL_FUNCTION_TOTAL_SENT_BYTES,
        q.EXTERNAL_FUNCTION_TOTAL_RECEIVED_BYTES,
        q.QUERY_ACCELERATION_BYTES_SCANNED,
        q.QUERY_ACCELERATION_PARTITIONS_SCANNED,
        q.QUERY_ACCELERATION_UPPER_LIMIT_SCALE_FACTOR,
        q.TRANSACTION_ID,
        q.PARENT_QUERY_ID,
        q.ROOT_QUERY_ID,
        q.RELEASE_VERSION,

        CASE 
            WHEN q.TOTAL_ELAPSED_TIME &amp;gt; 0 THEN 
                ROUND((q.COMPILATION_TIME::FLOAT / q.TOTAL_ELAPSED_TIME::FLOAT) * 100, 2)
            ELSE 0 
        END as COMPILATION_TIME_PERCENTAGE,

        CASE 
            WHEN q.TOTAL_ELAPSED_TIME &amp;gt; 0 THEN 
                ROUND((q.EXECUTION_TIME::FLOAT / q.TOTAL_ELAPSED_TIME::FLOAT) * 100, 2)
            ELSE 0 
        END as EXECUTION_TIME_PERCENTAGE,

        CASE 
            WHEN q.BYTES_SCANNED &amp;gt; 0 THEN 
                ROUND(q.ROWS_PRODUCED::FLOAT / (q.BYTES_SCANNED::FLOAT / 1024 / 1024), 2)
            ELSE 0 
        END as ROWS_PER_MB_SCANNED,

        CASE 
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 1000 THEN 'VERY_FAST'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN 'FAST'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN 'MODERATE'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN 'SLOW'
            ELSE 'VERY_SLOW'
        END as PERFORMANCE_CATEGORY,

        CASE 
            WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt;= 90 THEN 'HIGH_CACHE_HIT'
            WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt;= 50 THEN 'MEDIUM_CACHE_HIT'
            WHEN q.PERCENTAGE_SCANNED_FROM_CACHE &amp;gt; 0 THEN 'LOW_CACHE_HIT'
            ELSE 'NO_CACHE_HIT'
        END as CACHE_EFFICIENCY,

        CASE 
            WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 AND q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'BOTH_SPILL'
            WHEN q.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 'REMOTE_SPILL'
            WHEN q.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 THEN 'LOCAL_SPILL'
            ELSE 'NO_SPILL'
        END as SPILL_CLASSIFICATION,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE

    FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
    WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
        AND q.QUERY_ID IS NOT NULL
    ORDER BY q.START_TIME DESC;

    RETURN 'QUERY_DETAILS_COMPLETE table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

-- =====================================================
-- EXAMPLE USAGE QUERIES
-- =====================================================

-- To refresh both tables:
-- CALL REFRESH_QUERY_HISTORY_SUMMARY();
-- CALL REFRESH_QUERY_DETAILS_COMPLETE();

-- Example drill-down queries for QUERY_HISTORY_SUMMARY:

-- 1. Find all queries by a specific user
-- SELECT * FROM QUERY_HISTORY_SUMMARY WHERE USER_NAME = 'YOUR_USER' ORDER BY START_TIME DESC;

-- 2. Find all failed queries
-- SELECT * FROM QUERY_HISTORY_SUMMARY WHERE EXECUTION_STATUS = 'FAIL' ORDER BY START_TIME DESC;

-- 3. Find slow queries (5+ minutes)
-- SELECT * FROM QUERY_HISTORY_SUMMARY WHERE DURATION_BUCKET = '5+ minutes' ORDER BY TOTAL_ELAPSED_TIME DESC;

-- 4. Find spilled queries
-- SELECT * FROM QUERY_HISTORY_SUMMARY WHERE SPILL_STATUS = 'SPILLED' ORDER BY START_TIME DESC;

-- 5. Find queries by warehouse
-- SELECT * FROM QUERY_HISTORY_SUMMARY WHERE WAREHOUSE_NAME = 'YOUR_WAREHOUSE' ORDER BY START_TIME DESC;

-- Example detail lookup for QUERY_DETAILS_COMPLETE:

-- 1. Get complete details for a specific query
-- SELECT * FROM QUERY_DETAILS_COMPLETE WHERE QUERY_ID = 'YOUR_QUERY_ID';

-- 2. Get performance analysis for slow queries
-- SELECT QUERY_ID, QUERY_TEXT_PREVIEW, TOTAL_ELAPSED_TIME, COMPILATION_TIME_PERCENTAGE, 
--        EXECUTION_TIME_PERCENTAGE, CACHE_EFFICIENCY, SPILL_CLASSIFICATION
-- FROM QUERY_DETAILS_COMPLETE 
-- WHERE PERFORMANCE_CATEGORY IN ('SLOW', 'VERY_SLOW')
-- ORDER BY TOTAL_ELAPSED_TIME DESC;

-- View both tables
SELECT 'QUERY_HISTORY_SUMMARY' as TABLE_NAME, COUNT(*) as ROW_COUNT FROM QUERY_HISTORY_SUMMARY
UNION ALL
SELECT 'QUERY_DETAILS_COMPLETE' as TABLE_NAME, COUNT(*) as ROW_COUNT FROM QUERY_DETAILS_COMPLETE;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
    <item>
      <title>new queries</title>
      <dc:creator>Armaan Khan</dc:creator>
      <pubDate>Tue, 05 Aug 2025 15:11:57 +0000</pubDate>
      <link>https://dev.to/armaankhan8270/new-queries-24bc</link>
      <guid>https://dev.to/armaankhan8270/new-queries-24bc</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;-- Create comprehensive warehouse analytics table
CREATE OR REPLACE TABLE WAREHOUSE_ANALYTICS_DASHBOARD AS
WITH warehouse_info AS (
    -- Get warehouse metadata (note: warehouse details need to be extracted from query history and warehouse events)
    SELECT DISTINCT
        wh.WAREHOUSE_ID,
        wh.WAREHOUSE_NAME,
        wh.SIZE,
        wh.WAREHOUSE_TYPE,
        wh.CLUSTER_COUNT,
        -- Note: SUSPEND_POLICY, MIN_CLUSTER_COUNT, MAX_CLUSTER_COUNT are not in the provided schema
        -- These would typically come from SHOW WAREHOUSES command results
        NULL as SUSPEND_POLICY,
        NULL as MIN_CLUSTER_COUNT, 
        NULL as MAX_CLUSTER_COUNT
    FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_EVENTS_HISTORY wh
    WHERE wh.TIMESTAMP &amp;gt;= CURRENT_DATE - 1
),

query_buckets AS (
    SELECT 
        q.WAREHOUSE_ID,
        q.WAREHOUSE_NAME,
        q.QUERY_ID,
        q.TOTAL_ELAPSED_TIME,
        q.EXECUTION_STATUS,
        q.CREDITS_USED_CLOUD_SERVICES,
        q.BYTES_SPILLED_TO_LOCAL_STORAGE,
        q.BYTES_SPILLED_TO_REMOTE_STORAGE,

        -- Query duration buckets (in milliseconds to seconds)
        CASE 
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN '1-10 seconds'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 20000 THEN '10-20 seconds' 
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN '20-60 seconds'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 180000 THEN '1-3 minutes'
            WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN '3-5 minutes'
            ELSE '5+ minutes'
        END as DURATION_BUCKET,

        -- Query duration buckets for queued queries (in milliseconds)
        CASE 
            WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 120000 THEN '1-2 minutes'
            WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 300000 THEN '2-5 minutes'
            WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 600000 THEN '5-10 minutes'
            WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 1200000 THEN '10-20 minutes'
            ELSE '20+ minutes'
        END as QUEUED_BUCKET,

        -- Credit utilization buckets
        CASE 
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.2 THEN '0-20 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.4 THEN '20-40 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.6 THEN '40-60 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.8 THEN '60-80 cents'
            WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 1.0 THEN '80-100 cents'
            ELSE '100+ cents'
        END as CREDIT_UTILIZATION_BUCKET

    FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
    WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
        AND q.WAREHOUSE_ID IS NOT NULL
),

warehouse_metrics AS (
    SELECT 
        wm.WAREHOUSE_ID,
        wm.WAREHOUSE_NAME,
        SUM(wm.CREDITS_USED) as TOTAL_CREDITS_USED,
        SUM(wm.CREDITS_USED_COMPUTE) as TOTAL_COMPUTE_CREDITS,
        SUM(wm.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CLOUD_SERVICES_CREDITS
    FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_METERING_HISTORY wm
    WHERE wm.START_TIME &amp;gt;= CURRENT_DATE - 1
    GROUP BY wm.WAREHOUSE_ID, wm.WAREHOUSE_NAME
)

SELECT 
    -- Warehouse Information
    COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID, wm.WAREHOUSE_ID) as WAREHOUSE_ID,
    COALESCE(wi.WAREHOUSE_NAME, qb.WAREHOUSE_NAME, wm.WAREHOUSE_NAME) as WAREHOUSE_NAME,
    wi.SIZE as WAREHOUSE_SIZE,
    wi.WAREHOUSE_TYPE,
    wi.CLUSTER_COUNT,
    wi.SUSPEND_POLICY,
    wi.MIN_CLUSTER_COUNT,
    wi.MAX_CLUSTER_COUNT,

    -- Query Duration Buckets
    COUNT(CASE WHEN qb.DURATION_BUCKET = '1-10 seconds' THEN 1 END) as QUERIES_1_10_SEC,
    COUNT(CASE WHEN qb.DURATION_BUCKET = '10-20 seconds' THEN 1 END) as QUERIES_10_20_SEC,
    COUNT(CASE WHEN qb.DURATION_BUCKET = '20-60 seconds' THEN 1 END) as QUERIES_20_60_SEC,
    COUNT(CASE WHEN qb.DURATION_BUCKET = '1-3 minutes' THEN 1 END) as QUERIES_1_3_MIN,
    COUNT(CASE WHEN qb.DURATION_BUCKET = '3-5 minutes' THEN 1 END) as QUERIES_3_5_MIN,
    COUNT(CASE WHEN qb.DURATION_BUCKET = '5+ minutes' THEN 1 END) as QUERIES_5_PLUS_MIN,

    -- Queued Query Buckets
    COUNT(CASE WHEN qb.QUEUED_BUCKET = '1-2 minutes' THEN 1 END) as QUEUED_1_2_MIN,
    COUNT(CASE WHEN qb.QUEUED_BUCKET = '2-5 minutes' THEN 1 END) as QUEUED_2_5_MIN,
    COUNT(CASE WHEN qb.QUEUED_BUCKET = '5-10 minutes' THEN 1 END) as QUEUED_5_10_MIN,
    COUNT(CASE WHEN qb.QUEUED_BUCKET = '10-20 minutes' THEN 1 END) as QUEUED_10_20_MIN,
    COUNT(CASE WHEN qb.QUEUED_BUCKET = '20+ minutes' THEN 1 END) as QUEUED_20_PLUS_MIN,

    -- Spilled Queries
    COUNT(CASE WHEN qb.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 THEN 1 END) as QUERIES_SPILLED_LOCAL,
    COUNT(CASE WHEN qb.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 1 END) as QUERIES_SPILLED_REMOTE,
    SUM(qb.BYTES_SPILLED_TO_LOCAL_STORAGE) as TOTAL_BYTES_SPILLED_LOCAL,
    SUM(qb.BYTES_SPILLED_TO_REMOTE_STORAGE) as TOTAL_BYTES_SPILLED_REMOTE,

    -- Failed Queries
    COUNT(CASE WHEN qb.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
    COUNT(CASE WHEN qb.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
    COUNT(CASE WHEN qb.EXECUTION_STATUS = 'RUNNING' THEN 1 END) as RUNNING_QUERIES,

    -- Credit Utilization Buckets
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '0-20 cents' THEN 1 END) as QUERIES_0_20_CENTS,
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '20-40 cents' THEN 1 END) as QUERIES_20_40_CENTS,
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '40-60 cents' THEN 1 END) as QUERIES_40_60_CENTS,
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '60-80 cents' THEN 1 END) as QUERIES_60_80_CENTS,
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '80-100 cents' THEN 1 END) as QUERIES_80_100_CENTS,
    COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '100+ cents' THEN 1 END) as QUERIES_100_PLUS_CENTS,

    -- Overall Metrics
    COUNT(qb.QUERY_ID) as TOTAL_QUERIES,
    COALESCE(wm.TOTAL_CREDITS_USED, 0) as TOTAL_CREDITS_USED,
    COALESCE(wm.TOTAL_COMPUTE_CREDITS, 0) as TOTAL_COMPUTE_CREDITS,
    COALESCE(wm.TOTAL_CLOUD_SERVICES_CREDITS, 0) as TOTAL_CLOUD_SERVICES_CREDITS,

    -- Analysis timestamp
    CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
    CURRENT_DATE - 1 as ANALYSIS_DATE

FROM warehouse_info wi
FULL OUTER JOIN query_buckets qb 
    ON wi.WAREHOUSE_ID = qb.WAREHOUSE_ID
FULL OUTER JOIN warehouse_metrics wm 
    ON COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID) = wm.WAREHOUSE_ID

GROUP BY 
    COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID, wm.WAREHOUSE_ID),
    COALESCE(wi.WAREHOUSE_NAME, qb.WAREHOUSE_NAME, wm.WAREHOUSE_NAME),
    wi.SIZE,
    wi.WAREHOUSE_TYPE,
    wi.CLUSTER_COUNT,
    wi.SUSPEND_POLICY,
    wi.MIN_CLUSTER_COUNT,
    wi.MAX_CLUSTER_COUNT,
    wm.TOTAL_CREDITS_USED,
    wm.TOTAL_COMPUTE_CREDITS,
    wm.TOTAL_CLOUD_SERVICES_CREDITS

ORDER BY TOTAL_QUERIES DESC;

-- Create a procedure to refresh the table data
CREATE OR REPLACE PROCEDURE REFRESH_WAREHOUSE_ANALYTICS()
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
    -- Recreate the table with fresh data
    CREATE OR REPLACE TABLE WAREHOUSE_ANALYTICS_DASHBOARD AS
    WITH warehouse_info AS (
        SELECT DISTINCT
            wh.WAREHOUSE_ID,
            wh.WAREHOUSE_NAME,
            wh.SIZE,
            wh.WAREHOUSE_TYPE,
            wh.CLUSTER_COUNT,
            NULL as SUSPEND_POLICY,
            NULL as MIN_CLUSTER_COUNT, 
            NULL as MAX_CLUSTER_COUNT
        FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_EVENTS_HISTORY wh
        WHERE wh.TIMESTAMP &amp;gt;= CURRENT_DATE - 1
    ),

    query_buckets AS (
        SELECT 
            q.WAREHOUSE_ID,
            q.WAREHOUSE_NAME,
            q.QUERY_ID,
            q.TOTAL_ELAPSED_TIME,
            q.EXECUTION_STATUS,
            q.CREDITS_USED_CLOUD_SERVICES,
            q.BYTES_SPILLED_TO_LOCAL_STORAGE,
            q.BYTES_SPILLED_TO_REMOTE_STORAGE,

            CASE 
                WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 10000 THEN '1-10 seconds'
                WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 20000 THEN '10-20 seconds' 
                WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 60000 THEN '20-60 seconds'
                WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 180000 THEN '1-3 minutes'
                WHEN q.TOTAL_ELAPSED_TIME &amp;lt;= 300000 THEN '3-5 minutes'
                ELSE '5+ minutes'
            END as DURATION_BUCKET,

            CASE 
                WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 120000 THEN '1-2 minutes'
                WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 300000 THEN '2-5 minutes'
                WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 600000 THEN '5-10 minutes'
                WHEN (q.QUEUED_PROVISIONING_TIME + q.QUEUED_REPAIR_TIME + q.QUEUED_OVERLOAD_TIME) &amp;lt;= 1200000 THEN '10-20 minutes'
                ELSE '20+ minutes'
            END as QUEUED_BUCKET,

            CASE 
                WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.2 THEN '0-20 cents'
                WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.4 THEN '20-40 cents'
                WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.6 THEN '40-60 cents'
                WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 0.8 THEN '60-80 cents'
                WHEN q.CREDITS_USED_CLOUD_SERVICES &amp;lt;= 1.0 THEN '80-100 cents'
                ELSE '100+ cents'
            END as CREDIT_UTILIZATION_BUCKET

        FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY q
        WHERE q.START_TIME &amp;gt;= CURRENT_DATE - 1
            AND q.WAREHOUSE_ID IS NOT NULL
    ),

    warehouse_metrics AS (
        SELECT 
            wm.WAREHOUSE_ID,
            wm.WAREHOUSE_NAME,
            SUM(wm.CREDITS_USED) as TOTAL_CREDITS_USED,
            SUM(wm.CREDITS_USED_COMPUTE) as TOTAL_COMPUTE_CREDITS,
            SUM(wm.CREDITS_USED_CLOUD_SERVICES) as TOTAL_CLOUD_SERVICES_CREDITS
        FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_METERING_HISTORY wm
        WHERE wm.START_TIME &amp;gt;= CURRENT_DATE - 1
        GROUP BY wm.WAREHOUSE_ID, wm.WAREHOUSE_NAME
    )

    SELECT 
        COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID, wm.WAREHOUSE_ID) as WAREHOUSE_ID,
        COALESCE(wi.WAREHOUSE_NAME, qb.WAREHOUSE_NAME, wm.WAREHOUSE_NAME) as WAREHOUSE_NAME,
        wi.SIZE as WAREHOUSE_SIZE,
        wi.WAREHOUSE_TYPE,
        wi.CLUSTER_COUNT,
        wi.SUSPEND_POLICY,
        wi.MIN_CLUSTER_COUNT,
        wi.MAX_CLUSTER_COUNT,

        COUNT(CASE WHEN qb.DURATION_BUCKET = '1-10 seconds' THEN 1 END) as QUERIES_1_10_SEC,
        COUNT(CASE WHEN qb.DURATION_BUCKET = '10-20 seconds' THEN 1 END) as QUERIES_10_20_SEC,
        COUNT(CASE WHEN qb.DURATION_BUCKET = '20-60 seconds' THEN 1 END) as QUERIES_20_60_SEC,
        COUNT(CASE WHEN qb.DURATION_BUCKET = '1-3 minutes' THEN 1 END) as QUERIES_1_3_MIN,
        COUNT(CASE WHEN qb.DURATION_BUCKET = '3-5 minutes' THEN 1 END) as QUERIES_3_5_MIN,
        COUNT(CASE WHEN qb.DURATION_BUCKET = '5+ minutes' THEN 1 END) as QUERIES_5_PLUS_MIN,

        COUNT(CASE WHEN qb.QUEUED_BUCKET = '1-2 minutes' THEN 1 END) as QUEUED_1_2_MIN,
        COUNT(CASE WHEN qb.QUEUED_BUCKET = '2-5 minutes' THEN 1 END) as QUEUED_2_5_MIN,
        COUNT(CASE WHEN qb.QUEUED_BUCKET = '5-10 minutes' THEN 1 END) as QUEUED_5_10_MIN,
        COUNT(CASE WHEN qb.QUEUED_BUCKET = '10-20 minutes' THEN 1 END) as QUEUED_10_20_MIN,
        COUNT(CASE WHEN qb.QUEUED_BUCKET = '20+ minutes' THEN 1 END) as QUEUED_20_PLUS_MIN,

        COUNT(CASE WHEN qb.BYTES_SPILLED_TO_LOCAL_STORAGE &amp;gt; 0 THEN 1 END) as QUERIES_SPILLED_LOCAL,
        COUNT(CASE WHEN qb.BYTES_SPILLED_TO_REMOTE_STORAGE &amp;gt; 0 THEN 1 END) as QUERIES_SPILLED_REMOTE,
        SUM(qb.BYTES_SPILLED_TO_LOCAL_STORAGE) as TOTAL_BYTES_SPILLED_LOCAL,
        SUM(qb.BYTES_SPILLED_TO_REMOTE_STORAGE) as TOTAL_BYTES_SPILLED_REMOTE,

        COUNT(CASE WHEN qb.EXECUTION_STATUS = 'FAIL' THEN 1 END) as FAILED_QUERIES,
        COUNT(CASE WHEN qb.EXECUTION_STATUS = 'SUCCESS' THEN 1 END) as SUCCESSFUL_QUERIES,
        COUNT(CASE WHEN qb.EXECUTION_STATUS = 'RUNNING' THEN 1 END) as RUNNING_QUERIES,

        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '0-20 cents' THEN 1 END) as QUERIES_0_20_CENTS,
        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '20-40 cents' THEN 1 END) as QUERIES_20_40_CENTS,
        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '40-60 cents' THEN 1 END) as QUERIES_40_60_CENTS,
        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '60-80 cents' THEN 1 END) as QUERIES_60_80_CENTS,
        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '80-100 cents' THEN 1 END) as QUERIES_80_100_CENTS,
        COUNT(CASE WHEN qb.CREDIT_UTILIZATION_BUCKET = '100+ cents' THEN 1 END) as QUERIES_100_PLUS_CENTS,

        COUNT(qb.QUERY_ID) as TOTAL_QUERIES,
        COALESCE(wm.TOTAL_CREDITS_USED, 0) as TOTAL_CREDITS_USED,
        COALESCE(wm.TOTAL_COMPUTE_CREDITS, 0) as TOTAL_COMPUTE_CREDITS,
        COALESCE(wm.TOTAL_CLOUD_SERVICES_CREDITS, 0) as TOTAL_CLOUD_SERVICES_CREDITS,

        CURRENT_TIMESTAMP as ANALYSIS_TIMESTAMP,
        CURRENT_DATE - 1 as ANALYSIS_DATE

    FROM warehouse_info wi
    FULL OUTER JOIN query_buckets qb 
        ON wi.WAREHOUSE_ID = qb.WAREHOUSE_ID
    FULL OUTER JOIN warehouse_metrics wm 
        ON COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID) = wm.WAREHOUSE_ID

    GROUP BY 
        COALESCE(wi.WAREHOUSE_ID, qb.WAREHOUSE_ID, wm.WAREHOUSE_ID),
        COALESCE(wi.WAREHOUSE_NAME, qb.WAREHOUSE_NAME, wm.WAREHOUSE_NAME),
        wi.SIZE,
        wi.WAREHOUSE_TYPE,
        wi.CLUSTER_COUNT,
        wi.SUSPEND_POLICY,
        wi.MIN_CLUSTER_COUNT,
        wi.MAX_CLUSTER_COUNT,
        wm.TOTAL_CREDITS_USED,
        wm.TOTAL_COMPUTE_CREDITS,
        wm.TOTAL_CLOUD_SERVICES_CREDITS

    ORDER BY TOTAL_QUERIES DESC;

    RETURN 'WAREHOUSE_ANALYTICS_DASHBOARD table refreshed successfully at ' || CURRENT_TIMESTAMP;
END;
$$;

-- To refresh the data anytime, simply run:
-- CALL REFRESH_WAREHOUSE_ANALYTICS();

-- Query to view the results
SELECT * FROM WAREHOUSE_ANALYTICS_DASHBOARD
ORDER BY TOTAL_QUERIES DESC;
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



</description>
    </item>
  </channel>
</rss>
