<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: piyush desai</title>
    <description>The latest articles on DEV Community by piyush desai (@piyush_desai_bddc94d674ad).</description>
    <link>https://dev.to/piyush_desai_bddc94d674ad</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F2381666%2Ff222a0a0-fd31-4bf2-9c3a-180eb5f567e0.jpg</url>
      <title>DEV Community: piyush desai</title>
      <link>https://dev.to/piyush_desai_bddc94d674ad</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/piyush_desai_bddc94d674ad"/>
    <language>en</language>
    <item>
      <title>Automated Alerts for High CPU Utilization: Real-Time Email Notifications with Instance Details</title>
      <dc:creator>piyush desai</dc:creator>
      <pubDate>Mon, 02 Dec 2024 06:03:01 +0000</pubDate>
      <link>https://dev.to/piyush_desai_bddc94d674ad/automated-alerts-for-high-cpu-utilization-real-time-email-notifications-with-instance-details-21e</link>
      <guid>https://dev.to/piyush_desai_bddc94d674ad/automated-alerts-for-high-cpu-utilization-real-time-email-notifications-with-instance-details-21e</guid>
      <description>&lt;p&gt;&lt;em&gt;&lt;strong&gt;Create Instance:&lt;/strong&gt;&lt;/em&gt;&lt;br&gt;
    Create an Instance.&lt;br&gt;
&lt;em&gt;&lt;strong&gt;Step 1: Create Lambda&lt;/strong&gt;&lt;/em&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt; Go to the AWS Lambda console.&lt;/li&gt;
&lt;li&gt;    Create a new function:&lt;/li&gt;
&lt;li&gt;    Click Create function.&lt;/li&gt;
&lt;li&gt;    Choose Author from scratch.&lt;/li&gt;
&lt;li&gt;    Provide a name (e.g., SendAlarmNotification).&lt;/li&gt;
&lt;li&gt;    Select a runtime (e.g., Python 3.x).&lt;/li&gt;
&lt;li&gt;    Set the permissions (create a new role with basic Lambda permissions).&lt;/li&gt;
&lt;li&gt;Click on Create Function.&lt;/li&gt;
&lt;li&gt;Add code to send a notification: Here’s an example code snippet that you can use in your Lambda function:
&lt;/li&gt;
&lt;/ul&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import json
import boto3
import datetime

# Initialize the Boto3 clients
cloudwatch_client = boto3.client('cloudwatch', region_name='ap-south-1')
sns_client = boto3.client('sns', region_name='ap-south-1')

# Constants
INSTANCE_ID = 'i-06b205998588d9c99'  # Your EC2 instance ID
SNS_TOPIC_ARN = 'arn:aws:sns:ap-south-1:533267114782:AlarmNotificationTopic'  # Your SNS topic ARN

def lambda_handler(event, context):
    # Get CPU utilization
    cpu_utilization = get_cpu_utilization(INSTANCE_ID)

    # Construct the subject and message body
    subject = f"Your Server's CPU is {cpu_utilization}%. Please check"
    message = f"""
    Hi,

    The server CPU usage is high. Please check.

    Instance ID: {INSTANCE_ID}
    CPU Utilization: {cpu_utilization}%
    """

    # Publish the message to SNS
    response = sns_client.publish(
        TopicArn=SNS_TOPIC_ARN,
        Message=message.strip(),
        Subject=subject
    )

    return {
        'statusCode': 200,
        'body': json.dumps('CPU Utilization notification sent to SNS!')
    }

def get_cpu_utilization(instance_id):
    # Get the CPU utilization metric from CloudWatch
    response = cloudwatch_client.get_metric_statistics(
        Namespace='AWS/EC2',
        MetricName='CPUUtilization',
        Dimensions=[{
            'Name': 'InstanceId',
            'Value': instance_id
        }],
        StartTime=datetime.datetime.utcnow() - datetime.timedelta(minutes=10),
        EndTime=datetime.datetime.utcnow(),
        Period=300,
        Statistics=['Average']
    )

    # Get the average CPU utilization and convert to integer
    if len(response['Datapoints']) &amp;gt; 0:
        return int(round(response['Datapoints'][0]['Average']))  # Round and convert to int
    return 0  # Return 0 if no data points are available

&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;p&gt;&lt;em&gt;&lt;strong&gt;Step 2: Create SNS and Subscribe it&lt;/strong&gt;&lt;/em&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt; Go to the Amazon SNS Console:&lt;/li&gt;
&lt;li&gt;    Navigate to the Amazon SNS Console.&lt;/li&gt;
&lt;li&gt;    Create a Topic:&lt;/li&gt;
&lt;li&gt;    Click on Topics in the left sidebar.&lt;/li&gt;
&lt;li&gt;    Click on Create topic.&lt;/li&gt;
&lt;li&gt;    Select Standard as the topic type.&lt;/li&gt;
&lt;li&gt;    Enter a name for the topic (e.g., CpuUtilizationAlerts).&lt;/li&gt;
&lt;li&gt;    Click Create topic.&lt;/li&gt;
&lt;li&gt;    Note the Topic ARN for later use.&lt;/li&gt;
&lt;li&gt;    Subscribe Your Email to the SNS Topic&lt;/li&gt;
&lt;li&gt;    Open the Topic:&lt;/li&gt;
&lt;li&gt;    Click on the topic you just created (e.g., CpuUtilizationAlerts).&lt;/li&gt;
&lt;li&gt;    Create a Subscription:&lt;/li&gt;
&lt;li&gt;    Click on Create subscription.&lt;/li&gt;
&lt;li&gt;    In the Protocol dropdown, select Email.&lt;/li&gt;
&lt;li&gt;    In the Endpoint field, enter the email address where you want to receive notifications.&lt;/li&gt;
&lt;li&gt;    Click Create subscription.&lt;/li&gt;
&lt;li&gt;    Confirm the subscription.&lt;/li&gt;
&lt;li&gt;    Check your email for a subscription confirmation from Amazon SNS and confirm the subscription by clicking the link provided in the email.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;em&gt;&lt;strong&gt;Step 3: Add CloudWatch Alarm:&lt;/strong&gt;&lt;/em&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;Go to the CloudWatch Console:&lt;/li&gt;
&lt;li&gt;Navigate to the Amazon CloudWatch Console.&lt;/li&gt;
&lt;li&gt;Create an Alarm:&lt;/li&gt;
&lt;li&gt;Click on Alarms in the left sidebar.&lt;/li&gt;
&lt;li&gt;Click Create Alarm.&lt;/li&gt;
&lt;li&gt;Choose Select metric.&lt;/li&gt;
&lt;li&gt;Navigate to EC2 &amp;gt; Per-Instance Metrics, and select CPUUtilization for your instance.&lt;/li&gt;
&lt;li&gt;Click Select metric.
&lt;/li&gt;
&lt;li&gt;Set the Alarm Conditions:&lt;/li&gt;
&lt;li&gt;Set the threshold (e.g., Greater than 50% for 1 consecutive period of 5 minutes).
&lt;/li&gt;
&lt;li&gt;Click Next.&lt;/li&gt;
&lt;li&gt;Configure Actions:&lt;/li&gt;
&lt;li&gt;Under Configure actions, select Lambda function.&lt;/li&gt;
&lt;li&gt;Choose your Lambda function (SendAlarmNotification) from the dropdown.&lt;/li&gt;
&lt;li&gt;Review and Create the Alarm:&lt;/li&gt;
&lt;li&gt;Click Next, review your settings, and click Create alarm.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;&lt;em&gt;Step 4: Lambda Permission Settings&lt;/em&gt;&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;In the AWS Lambda Console, navigate to your function’s configuration.&lt;/li&gt;
&lt;li&gt;Under the Permissions section, find Resource-based policy statements.&lt;/li&gt;
&lt;li&gt;Select Add permissions and choose AWS service.&lt;/li&gt;
&lt;li&gt;Select Other from the dropdown list.&lt;/li&gt;
&lt;li&gt;Add a unique statement ID for tracking.&lt;/li&gt;
&lt;li&gt;For Principal, add lambda.alarms.cloudwatch.amazonaws.com.&lt;/li&gt;
&lt;li&gt;Under Source ARN, add the ARN of your CloudWatch alarm.&lt;/li&gt;
&lt;li&gt;In the Action dropdown, select lambda:InvokeFunction.&lt;/li&gt;
&lt;li&gt;Click Save to apply the changes.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;&lt;strong&gt;&lt;em&gt;Step 5: Increase the CPU utilization in the Instance using a Python script&lt;/em&gt;&lt;/strong&gt;&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;In AWS instance, Create a python file : cpu-utilization.py&lt;/li&gt;
&lt;li&gt;Add the below code to cpu-utilization.py
&lt;/li&gt;
&lt;/ul&gt;

&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;import multiprocessing
import time

def cpu_stress():
    # This function performs an infinite loop to stress the CPU
    while True:
        pass  # Infinite loop to simulate CPU load

if __name__ == "__main__":
    # Number of CPU cores to stress
    num_cores = multiprocessing.cpu_count()

    print(f"Starting CPU stress on {num_cores} cores...")

    # Create one process per CPU core
    processes = []
    for i in range(num_cores):
        p = multiprocessing.Process(target=cpu_stress)
        processes.append(p)
        p.start()

    # Let the stress run for a certain amount of time
    time_to_run = 300  # Run for 5 minutes (300 seconds)
    time.sleep(time_to_run)

    # Stop all processes after the specified time
    for p in processes:
        p.terminate()

    print("CPU stress test completed.")

&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;



&lt;ul&gt;
&lt;li&gt;Now run the file using python3 cpu-utilization.py, and you will receive an email notification with the EC2 instance's CPU utilization details.&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;You may further edit the subject of the email using Lambda.&lt;/p&gt;

&lt;p&gt;Copyright ~ Piyush Desai&lt;br&gt;
Connect2me - &lt;a href="https://www.linkedin.com/in/piyush-desai-/" rel="noopener noreferrer"&gt;https://www.linkedin.com/in/piyush-desai-/&lt;/a&gt;&lt;/p&gt;

</description>
      <category>aws</category>
      <category>awschallenge</category>
      <category>cloud</category>
      <category>cloudcomputing</category>
    </item>
    <item>
      <title>Amazon S3 Increases Bucket Limit to 10,000 — Scalability Just Got Easier!</title>
      <dc:creator>piyush desai</dc:creator>
      <pubDate>Fri, 15 Nov 2024 05:09:19 +0000</pubDate>
      <link>https://dev.to/piyush_desai_bddc94d674ad/amazon-s3-increases-bucket-limit-to-10000-scalability-just-got-easier-7j1</link>
      <guid>https://dev.to/piyush_desai_bddc94d674ad/amazon-s3-increases-bucket-limit-to-10000-scalability-just-got-easier-7j1</guid>
      <description>&lt;p&gt;Amazon Web Services (AWS) recently introduced a significant enhancement to Amazon S3: the default bucket quota per AWS account has increased from 100 to 10,000. This substantial change allows organizations to use unique buckets for individual datasets, streamlining management and providing access to advanced storage features. For those managing even more data, AWS now offers the option to scale up to 1 million buckets by requesting a quota increase through Service Quotas.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;What This New Default Bucket Quota Means for S3 Storage Architecture&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;This update is particularly valuable for companies aiming to scale their data storage strategies. Let’s explore the key benefits and best practices that come with this new bucket limit.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Key Benefits of Amazon S3’s New Default Bucket Limit&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;1. Better Data Organization and Management&lt;/strong&gt;&lt;br&gt;
• Enhanced Organization: Create dedicated buckets for various projects, departments, or teams.&lt;br&gt;
• Granular Storage Configurations: Enable further customization for storage access and organization.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;2. Improved Security with Default Encryption and Policies&lt;/strong&gt;&lt;br&gt;
• Enhanced Data Security: Use separate buckets to apply &lt;strong&gt;default encryption&lt;/strong&gt; and customize &lt;strong&gt;security policies&lt;/strong&gt; for individual datasets.&lt;br&gt;
• Compliance with Data Privacy Regulations: Tailored security settings help meet regulatory requirements.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;3. Streamlined Data Replication&lt;/strong&gt;&lt;br&gt;
• Easier S3 Replication: Simplify disaster recovery and data residency compliance with dedicated buckets for specific datasets.&lt;br&gt;
• Reduced Configuration Complexity: Replicate individual datasets without the need for complex configurations.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;4. No Action Needed for Default Quota Increase&lt;/strong&gt;&lt;br&gt;
• Automatic Upgrade: The 10,000-bucket limit is automatically applied to all AWS accounts.&lt;br&gt;
• Immediate Access: AWS users can benefit from this increase without taking any additional steps.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;5. Scalable to Meet Future Needs&lt;/strong&gt;&lt;br&gt;
• Flexibility for Growth: Request an increase through AWS Service Quotas to scale up to 1 million buckets.&lt;br&gt;
•       Support for Massive Datasets: Enhanced scalability makes S3 a powerful option for large-scale data storage.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;6. Cost Efficiency for Big Data Needs&lt;/strong&gt;&lt;br&gt;
• Cost-Free for Up to 2,000 Buckets: Accounts with fewer than 2,000 buckets incur no additional charge.&lt;br&gt;
• Small Monthly Fee Beyond 2,000 Buckets: A minimal fee applies for accounts with more than 2,000 buckets, keeping large-scale storage affordable.&lt;/p&gt;




&lt;p&gt;Copyright ~ Piyush Desai&lt;br&gt;
Connect2me - &lt;a href="https://www.linkedin.com/in/piyush-desai-/" rel="noopener noreferrer"&gt;https://www.linkedin.com/in/piyush-desai-/&lt;/a&gt;&lt;/p&gt;

</description>
      <category>amazones3</category>
      <category>aws</category>
      <category>s3</category>
      <category>cloud</category>
    </item>
  </channel>
</rss>
